file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
issue-35668.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
fn func<'a, T>(a: &'a [T]) -> impl Iterator<Item=&'a T> {
a.iter().map(|a| a*a)
//~^ ERROR binary operation `*` cannot be applied to type `&T`
}
fn main() {
let a = (0..30).collect::<Vec<_>>();
for k in func(&a) {
println!("{}", k);
}
}
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
random_line_split
|
sudoku.rs
|
// http://rosettacode.org/wiki/Sudoku
#![feature(core)]
#![feature(step_by)]
use std::fmt;
use std::str::FromStr;
const BOARD_WIDTH: usize = 9;
const BOARD_HEIGHT: usize = 9;
const GROUP_WIDTH: usize = 3;
const GROUP_HEIGHT: usize = 3;
const MAX_NUMBER: usize = 9;
type BITS = u16;
const MASK_ALL: BITS = 0x1ff;
const INVALID_CELL: u32 =!0;
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
struct Sudoku {
map: [[BITS; BOARD_WIDTH]; BOARD_HEIGHT]
}
impl Sudoku {
fn
|
() -> Sudoku {
Sudoku { map: [[MASK_ALL; BOARD_WIDTH]; BOARD_HEIGHT] }
}
fn get(&self, x: usize, y: usize) -> u32 {
match self.map[y][x].count_ones() {
0 => INVALID_CELL,
1 => self.map[y][x].trailing_zeros() + 1,
_ => 0
}
}
fn set(&mut self, x: usize, y: usize, n: u32) {
self.map[y][x] = 1 << (n - 1);
}
}
impl FromStr for Sudoku {
type Err = ();
fn from_str(s: &str) -> Result<Sudoku, ()> {
let mut sudoku = Sudoku::new();
for (y, line) in s.lines().filter(|l|!l.is_empty()).enumerate() {
let line = line.trim_matches(|c: char| c.is_whitespace());
for (x, c) in line.chars().enumerate() {
if let Some(d) = c.to_digit(10) {
if d!= 0 { sudoku.set(x, y, d); }
} else {
return Err(())
}
}
}
Ok(sudoku)
}
}
impl fmt::Display for Sudoku {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let hbar = "+---+---+---+";
for y in (0.. BOARD_HEIGHT) {
if y % GROUP_HEIGHT == 0 {
try!(writeln!(f, "{}", hbar));
}
for x in (0.. BOARD_WIDTH) {
if x % GROUP_WIDTH == 0 {
try!(write!(f, "|"));
}
match self.get(x, y) {
INVALID_CELL => try!(write!(f, "!")),
0 => try!(write!(f, " ")),
d => try!(write!(f, "{}", d))
}
}
try!(writeln!(f, "|"));
}
try!(writeln!(f, "{}", hbar));
Ok(())
}
}
fn solve_sudoku(mut puzzle: Sudoku) -> Vec<Sudoku> {
let idx_in_grp = [(0, 0), (0, 1), (0, 2),
(1, 0), (1, 1), (1, 2),
(2, 0), (2, 1), (2, 2)];
loop {
let bkup = puzzle;
// If the number at cell (x, y) is uniquely determined, that number must
// not have appeared at the cells in the same row/column/group.
for y in (0.. BOARD_HEIGHT) {
for x in (0.. BOARD_WIDTH) {
if puzzle.map[y][x].count_ones()!= 1 { continue }
let (x0, y0) = ((x / GROUP_WIDTH) * GROUP_WIDTH,
(y / GROUP_HEIGHT) * GROUP_HEIGHT);
let row = (0.. BOARD_WIDTH).map(|x| (x, y));
let col = (0.. BOARD_HEIGHT).map(|y| (x, y));
let grp = idx_in_grp.iter().map(|&(dx, dy)| (x0 + dx, y0 + dy));
let it = row.chain(col).chain(grp)
.filter(|&pos: &(usize, usize)| pos!= (x, y));
let mask =!puzzle.map[y][x] & MASK_ALL;
for (x, y) in it {
puzzle.map[y][x] &= mask;
}
}
}
// If `n` appears only once at the cell in the row/column/group, the
// number of the cell must be `n`.
for n in (0.. MAX_NUMBER) {
let bit = 1 << n;
// Check each rows
for y in (0.. BOARD_HEIGHT) {
let next = {
let mut it = (0.. BOARD_WIDTH)
.filter(|&x| puzzle.map[y][x] & bit!= 0);
let next = it.next();
if next.is_none() || it.next().is_some() { continue }
next
};
puzzle.map[y][next.unwrap()] = bit;
}
// Check each column
for x in (0.. BOARD_WIDTH) {
let next = {
let mut it = (0.. BOARD_HEIGHT)
.filter(|&y| puzzle.map[y][x] & bit!= 0);
let next = it.next();
if next.is_none() || it.next().is_some() { continue }
next
};
puzzle.map[next.unwrap()][x] = bit;
}
// Check each group
for y0 in (0..BOARD_HEIGHT).step_by(GROUP_WIDTH) {
for x0 in (0..BOARD_WIDTH).step_by(GROUP_HEIGHT) {
let next = {
let mut it = idx_in_grp
.iter()
.map(|&(dx, dy)| (x0 + dx, y0 + dy))
.filter(|&(x, y)| puzzle.map[y][x] & bit!= 0);
let next = it.next();
if next.is_none() || it.next().is_some() { continue }
next
};
let (x, y) = next.unwrap();
puzzle.map[y][x] = bit;
}
}
}
// Loop until no cell can be filled.
if puzzle == bkup { break }
}
let it = (0.. BOARD_HEIGHT * BOARD_WIDTH)
.map(|i| (i % BOARD_WIDTH, i / BOARD_WIDTH))
.map(|(x, y)| (x, y, puzzle.map[y][x].count_ones() as BITS))
.collect::<Vec<_>>();
// If some cells have no possible number, there is no answer.
if it.iter().any(|&(_x, _y, cnt)| cnt == 0) { return vec![]; }
// If all cells have exact one possible number, this is a answer.
if it.iter().all(|&(_x, _y, cnt)| cnt == 1) { return vec![puzzle]; }
// Find the first undetermined cell.
let (x, y, _cnt) = *it.iter()
.filter(|& &(_x, _y, cnt)| cnt > 1)
.min_by(|& &(_x, _y, cnt)| cnt)
.unwrap();
let mut answers = vec![];
for n in (0.. MAX_NUMBER) {
let bit = 1 << n;
if puzzle.map[y][x] & bit == 0 { continue }
// Assuming the number at (x, y) is `n`, try to solve the problem again.
// If some answers are found, append them to the `answers`.
let mut p2 = puzzle;
p2.map[y][x] = bit;
answers.extend(solve_sudoku(p2).into_iter());
}
answers
}
const INPUT: &'static str = "
850002400
720000009
004000000
000107002
305000900
040000000
000080070
017000000
000036040
";
#[cfg(not(test))]
fn main() {
let puzzle = INPUT.parse::<Sudoku>().unwrap();
println!("{}", puzzle);
for answer in &solve_sudoku(puzzle) {
println!("{}", answer);
}
}
#[cfg(test)]
const SOLUTION: &'static str = "
859612437
723854169
164379528
986147352
375268914
241593786
432981675
617425893
598736241
";
#[test]
fn solution() {
let puzzle = INPUT.parse::<Sudoku>().unwrap();
let answer = SOLUTION.parse::<Sudoku>().unwrap();
let solution = solve_sudoku(puzzle);
assert_eq!(solution, [answer]);
}
|
new
|
identifier_name
|
sudoku.rs
|
// http://rosettacode.org/wiki/Sudoku
#![feature(core)]
#![feature(step_by)]
use std::fmt;
use std::str::FromStr;
const BOARD_WIDTH: usize = 9;
const BOARD_HEIGHT: usize = 9;
const GROUP_WIDTH: usize = 3;
const GROUP_HEIGHT: usize = 3;
const MAX_NUMBER: usize = 9;
type BITS = u16;
const MASK_ALL: BITS = 0x1ff;
const INVALID_CELL: u32 =!0;
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
struct Sudoku {
map: [[BITS; BOARD_WIDTH]; BOARD_HEIGHT]
}
impl Sudoku {
fn new() -> Sudoku {
Sudoku { map: [[MASK_ALL; BOARD_WIDTH]; BOARD_HEIGHT] }
}
fn get(&self, x: usize, y: usize) -> u32 {
match self.map[y][x].count_ones() {
0 => INVALID_CELL,
1 => self.map[y][x].trailing_zeros() + 1,
_ => 0
}
}
fn set(&mut self, x: usize, y: usize, n: u32) {
self.map[y][x] = 1 << (n - 1);
}
}
impl FromStr for Sudoku {
type Err = ();
fn from_str(s: &str) -> Result<Sudoku, ()> {
let mut sudoku = Sudoku::new();
for (y, line) in s.lines().filter(|l|!l.is_empty()).enumerate() {
let line = line.trim_matches(|c: char| c.is_whitespace());
for (x, c) in line.chars().enumerate() {
if let Some(d) = c.to_digit(10) {
if d!= 0 { sudoku.set(x, y, d); }
} else {
return Err(())
}
}
}
Ok(sudoku)
}
}
impl fmt::Display for Sudoku {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let hbar = "+---+---+---+";
for y in (0.. BOARD_HEIGHT) {
if y % GROUP_HEIGHT == 0 {
try!(writeln!(f, "{}", hbar));
}
for x in (0.. BOARD_WIDTH) {
if x % GROUP_WIDTH == 0 {
try!(write!(f, "|"));
}
match self.get(x, y) {
INVALID_CELL => try!(write!(f, "!")),
0 => try!(write!(f, " ")),
d => try!(write!(f, "{}", d))
}
}
try!(writeln!(f, "|"));
}
try!(writeln!(f, "{}", hbar));
Ok(())
}
}
fn solve_sudoku(mut puzzle: Sudoku) -> Vec<Sudoku> {
let idx_in_grp = [(0, 0), (0, 1), (0, 2),
(1, 0), (1, 1), (1, 2),
(2, 0), (2, 1), (2, 2)];
loop {
let bkup = puzzle;
// If the number at cell (x, y) is uniquely determined, that number must
// not have appeared at the cells in the same row/column/group.
for y in (0.. BOARD_HEIGHT) {
for x in (0.. BOARD_WIDTH) {
if puzzle.map[y][x].count_ones()!= 1 { continue }
let (x0, y0) = ((x / GROUP_WIDTH) * GROUP_WIDTH,
(y / GROUP_HEIGHT) * GROUP_HEIGHT);
let row = (0.. BOARD_WIDTH).map(|x| (x, y));
let col = (0.. BOARD_HEIGHT).map(|y| (x, y));
let grp = idx_in_grp.iter().map(|&(dx, dy)| (x0 + dx, y0 + dy));
let it = row.chain(col).chain(grp)
.filter(|&pos: &(usize, usize)| pos!= (x, y));
let mask =!puzzle.map[y][x] & MASK_ALL;
for (x, y) in it {
puzzle.map[y][x] &= mask;
}
}
}
// If `n` appears only once at the cell in the row/column/group, the
// number of the cell must be `n`.
for n in (0.. MAX_NUMBER) {
let bit = 1 << n;
// Check each rows
for y in (0.. BOARD_HEIGHT) {
let next = {
let mut it = (0.. BOARD_WIDTH)
.filter(|&x| puzzle.map[y][x] & bit!= 0);
let next = it.next();
if next.is_none() || it.next().is_some() { continue }
next
};
puzzle.map[y][next.unwrap()] = bit;
}
// Check each column
for x in (0.. BOARD_WIDTH) {
let next = {
let mut it = (0.. BOARD_HEIGHT)
.filter(|&y| puzzle.map[y][x] & bit!= 0);
let next = it.next();
if next.is_none() || it.next().is_some()
|
next
};
puzzle.map[next.unwrap()][x] = bit;
}
// Check each group
for y0 in (0..BOARD_HEIGHT).step_by(GROUP_WIDTH) {
for x0 in (0..BOARD_WIDTH).step_by(GROUP_HEIGHT) {
let next = {
let mut it = idx_in_grp
.iter()
.map(|&(dx, dy)| (x0 + dx, y0 + dy))
.filter(|&(x, y)| puzzle.map[y][x] & bit!= 0);
let next = it.next();
if next.is_none() || it.next().is_some() { continue }
next
};
let (x, y) = next.unwrap();
puzzle.map[y][x] = bit;
}
}
}
// Loop until no cell can be filled.
if puzzle == bkup { break }
}
let it = (0.. BOARD_HEIGHT * BOARD_WIDTH)
.map(|i| (i % BOARD_WIDTH, i / BOARD_WIDTH))
.map(|(x, y)| (x, y, puzzle.map[y][x].count_ones() as BITS))
.collect::<Vec<_>>();
// If some cells have no possible number, there is no answer.
if it.iter().any(|&(_x, _y, cnt)| cnt == 0) { return vec![]; }
// If all cells have exact one possible number, this is a answer.
if it.iter().all(|&(_x, _y, cnt)| cnt == 1) { return vec![puzzle]; }
// Find the first undetermined cell.
let (x, y, _cnt) = *it.iter()
.filter(|& &(_x, _y, cnt)| cnt > 1)
.min_by(|& &(_x, _y, cnt)| cnt)
.unwrap();
let mut answers = vec![];
for n in (0.. MAX_NUMBER) {
let bit = 1 << n;
if puzzle.map[y][x] & bit == 0 { continue }
// Assuming the number at (x, y) is `n`, try to solve the problem again.
// If some answers are found, append them to the `answers`.
let mut p2 = puzzle;
p2.map[y][x] = bit;
answers.extend(solve_sudoku(p2).into_iter());
}
answers
}
const INPUT: &'static str = "
850002400
720000009
004000000
000107002
305000900
040000000
000080070
017000000
000036040
";
#[cfg(not(test))]
fn main() {
let puzzle = INPUT.parse::<Sudoku>().unwrap();
println!("{}", puzzle);
for answer in &solve_sudoku(puzzle) {
println!("{}", answer);
}
}
#[cfg(test)]
const SOLUTION: &'static str = "
859612437
723854169
164379528
986147352
375268914
241593786
432981675
617425893
598736241
";
#[test]
fn solution() {
let puzzle = INPUT.parse::<Sudoku>().unwrap();
let answer = SOLUTION.parse::<Sudoku>().unwrap();
let solution = solve_sudoku(puzzle);
assert_eq!(solution, [answer]);
}
|
{ continue }
|
conditional_block
|
sudoku.rs
|
// http://rosettacode.org/wiki/Sudoku
#![feature(core)]
#![feature(step_by)]
use std::fmt;
use std::str::FromStr;
const BOARD_WIDTH: usize = 9;
const BOARD_HEIGHT: usize = 9;
const GROUP_WIDTH: usize = 3;
const GROUP_HEIGHT: usize = 3;
const MAX_NUMBER: usize = 9;
type BITS = u16;
const MASK_ALL: BITS = 0x1ff;
const INVALID_CELL: u32 =!0;
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
struct Sudoku {
map: [[BITS; BOARD_WIDTH]; BOARD_HEIGHT]
}
impl Sudoku {
fn new() -> Sudoku {
Sudoku { map: [[MASK_ALL; BOARD_WIDTH]; BOARD_HEIGHT] }
}
fn get(&self, x: usize, y: usize) -> u32 {
match self.map[y][x].count_ones() {
0 => INVALID_CELL,
1 => self.map[y][x].trailing_zeros() + 1,
_ => 0
}
}
fn set(&mut self, x: usize, y: usize, n: u32) {
self.map[y][x] = 1 << (n - 1);
}
}
impl FromStr for Sudoku {
type Err = ();
fn from_str(s: &str) -> Result<Sudoku, ()>
|
}
impl fmt::Display for Sudoku {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let hbar = "+---+---+---+";
for y in (0.. BOARD_HEIGHT) {
if y % GROUP_HEIGHT == 0 {
try!(writeln!(f, "{}", hbar));
}
for x in (0.. BOARD_WIDTH) {
if x % GROUP_WIDTH == 0 {
try!(write!(f, "|"));
}
match self.get(x, y) {
INVALID_CELL => try!(write!(f, "!")),
0 => try!(write!(f, " ")),
d => try!(write!(f, "{}", d))
}
}
try!(writeln!(f, "|"));
}
try!(writeln!(f, "{}", hbar));
Ok(())
}
}
fn solve_sudoku(mut puzzle: Sudoku) -> Vec<Sudoku> {
let idx_in_grp = [(0, 0), (0, 1), (0, 2),
(1, 0), (1, 1), (1, 2),
(2, 0), (2, 1), (2, 2)];
loop {
let bkup = puzzle;
// If the number at cell (x, y) is uniquely determined, that number must
// not have appeared at the cells in the same row/column/group.
for y in (0.. BOARD_HEIGHT) {
for x in (0.. BOARD_WIDTH) {
if puzzle.map[y][x].count_ones()!= 1 { continue }
let (x0, y0) = ((x / GROUP_WIDTH) * GROUP_WIDTH,
(y / GROUP_HEIGHT) * GROUP_HEIGHT);
let row = (0.. BOARD_WIDTH).map(|x| (x, y));
let col = (0.. BOARD_HEIGHT).map(|y| (x, y));
let grp = idx_in_grp.iter().map(|&(dx, dy)| (x0 + dx, y0 + dy));
let it = row.chain(col).chain(grp)
.filter(|&pos: &(usize, usize)| pos!= (x, y));
let mask =!puzzle.map[y][x] & MASK_ALL;
for (x, y) in it {
puzzle.map[y][x] &= mask;
}
}
}
// If `n` appears only once at the cell in the row/column/group, the
// number of the cell must be `n`.
for n in (0.. MAX_NUMBER) {
let bit = 1 << n;
// Check each rows
for y in (0.. BOARD_HEIGHT) {
let next = {
let mut it = (0.. BOARD_WIDTH)
.filter(|&x| puzzle.map[y][x] & bit!= 0);
let next = it.next();
if next.is_none() || it.next().is_some() { continue }
next
};
puzzle.map[y][next.unwrap()] = bit;
}
// Check each column
for x in (0.. BOARD_WIDTH) {
let next = {
let mut it = (0.. BOARD_HEIGHT)
.filter(|&y| puzzle.map[y][x] & bit!= 0);
let next = it.next();
if next.is_none() || it.next().is_some() { continue }
next
};
puzzle.map[next.unwrap()][x] = bit;
}
// Check each group
for y0 in (0..BOARD_HEIGHT).step_by(GROUP_WIDTH) {
for x0 in (0..BOARD_WIDTH).step_by(GROUP_HEIGHT) {
let next = {
let mut it = idx_in_grp
.iter()
.map(|&(dx, dy)| (x0 + dx, y0 + dy))
.filter(|&(x, y)| puzzle.map[y][x] & bit!= 0);
let next = it.next();
if next.is_none() || it.next().is_some() { continue }
next
};
let (x, y) = next.unwrap();
puzzle.map[y][x] = bit;
}
}
}
// Loop until no cell can be filled.
if puzzle == bkup { break }
}
let it = (0.. BOARD_HEIGHT * BOARD_WIDTH)
.map(|i| (i % BOARD_WIDTH, i / BOARD_WIDTH))
.map(|(x, y)| (x, y, puzzle.map[y][x].count_ones() as BITS))
.collect::<Vec<_>>();
// If some cells have no possible number, there is no answer.
if it.iter().any(|&(_x, _y, cnt)| cnt == 0) { return vec![]; }
// If all cells have exact one possible number, this is a answer.
if it.iter().all(|&(_x, _y, cnt)| cnt == 1) { return vec![puzzle]; }
// Find the first undetermined cell.
let (x, y, _cnt) = *it.iter()
.filter(|& &(_x, _y, cnt)| cnt > 1)
.min_by(|& &(_x, _y, cnt)| cnt)
.unwrap();
let mut answers = vec![];
for n in (0.. MAX_NUMBER) {
let bit = 1 << n;
if puzzle.map[y][x] & bit == 0 { continue }
// Assuming the number at (x, y) is `n`, try to solve the problem again.
// If some answers are found, append them to the `answers`.
let mut p2 = puzzle;
p2.map[y][x] = bit;
answers.extend(solve_sudoku(p2).into_iter());
}
answers
}
const INPUT: &'static str = "
850002400
720000009
004000000
000107002
305000900
040000000
000080070
017000000
000036040
";
#[cfg(not(test))]
fn main() {
let puzzle = INPUT.parse::<Sudoku>().unwrap();
println!("{}", puzzle);
for answer in &solve_sudoku(puzzle) {
println!("{}", answer);
}
}
#[cfg(test)]
const SOLUTION: &'static str = "
859612437
723854169
164379528
986147352
375268914
241593786
432981675
617425893
598736241
";
#[test]
fn solution() {
let puzzle = INPUT.parse::<Sudoku>().unwrap();
let answer = SOLUTION.parse::<Sudoku>().unwrap();
let solution = solve_sudoku(puzzle);
assert_eq!(solution, [answer]);
}
|
{
let mut sudoku = Sudoku::new();
for (y, line) in s.lines().filter(|l| !l.is_empty()).enumerate() {
let line = line.trim_matches(|c: char| c.is_whitespace());
for (x, c) in line.chars().enumerate() {
if let Some(d) = c.to_digit(10) {
if d != 0 { sudoku.set(x, y, d); }
} else {
return Err(())
}
}
}
Ok(sudoku)
}
|
identifier_body
|
sudoku.rs
|
// http://rosettacode.org/wiki/Sudoku
#![feature(core)]
#![feature(step_by)]
use std::fmt;
use std::str::FromStr;
const BOARD_WIDTH: usize = 9;
const BOARD_HEIGHT: usize = 9;
const GROUP_WIDTH: usize = 3;
const GROUP_HEIGHT: usize = 3;
const MAX_NUMBER: usize = 9;
type BITS = u16;
const MASK_ALL: BITS = 0x1ff;
const INVALID_CELL: u32 =!0;
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
struct Sudoku {
map: [[BITS; BOARD_WIDTH]; BOARD_HEIGHT]
}
impl Sudoku {
fn new() -> Sudoku {
Sudoku { map: [[MASK_ALL; BOARD_WIDTH]; BOARD_HEIGHT] }
}
fn get(&self, x: usize, y: usize) -> u32 {
match self.map[y][x].count_ones() {
0 => INVALID_CELL,
1 => self.map[y][x].trailing_zeros() + 1,
_ => 0
}
}
fn set(&mut self, x: usize, y: usize, n: u32) {
self.map[y][x] = 1 << (n - 1);
}
}
impl FromStr for Sudoku {
type Err = ();
fn from_str(s: &str) -> Result<Sudoku, ()> {
let mut sudoku = Sudoku::new();
for (y, line) in s.lines().filter(|l|!l.is_empty()).enumerate() {
let line = line.trim_matches(|c: char| c.is_whitespace());
for (x, c) in line.chars().enumerate() {
if let Some(d) = c.to_digit(10) {
if d!= 0 { sudoku.set(x, y, d); }
} else {
return Err(())
}
}
}
Ok(sudoku)
}
}
impl fmt::Display for Sudoku {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let hbar = "+---+---+---+";
for y in (0.. BOARD_HEIGHT) {
if y % GROUP_HEIGHT == 0 {
try!(writeln!(f, "{}", hbar));
}
for x in (0.. BOARD_WIDTH) {
if x % GROUP_WIDTH == 0 {
try!(write!(f, "|"));
}
match self.get(x, y) {
INVALID_CELL => try!(write!(f, "!")),
0 => try!(write!(f, " ")),
d => try!(write!(f, "{}", d))
}
}
try!(writeln!(f, "|"));
}
try!(writeln!(f, "{}", hbar));
Ok(())
}
}
fn solve_sudoku(mut puzzle: Sudoku) -> Vec<Sudoku> {
let idx_in_grp = [(0, 0), (0, 1), (0, 2),
(1, 0), (1, 1), (1, 2),
(2, 0), (2, 1), (2, 2)];
loop {
let bkup = puzzle;
// If the number at cell (x, y) is uniquely determined, that number must
// not have appeared at the cells in the same row/column/group.
for y in (0.. BOARD_HEIGHT) {
for x in (0.. BOARD_WIDTH) {
if puzzle.map[y][x].count_ones()!= 1 { continue }
let (x0, y0) = ((x / GROUP_WIDTH) * GROUP_WIDTH,
(y / GROUP_HEIGHT) * GROUP_HEIGHT);
let row = (0.. BOARD_WIDTH).map(|x| (x, y));
let col = (0.. BOARD_HEIGHT).map(|y| (x, y));
let grp = idx_in_grp.iter().map(|&(dx, dy)| (x0 + dx, y0 + dy));
let it = row.chain(col).chain(grp)
.filter(|&pos: &(usize, usize)| pos!= (x, y));
let mask =!puzzle.map[y][x] & MASK_ALL;
for (x, y) in it {
puzzle.map[y][x] &= mask;
}
}
}
// If `n` appears only once at the cell in the row/column/group, the
// number of the cell must be `n`.
for n in (0.. MAX_NUMBER) {
let bit = 1 << n;
|
for y in (0.. BOARD_HEIGHT) {
let next = {
let mut it = (0.. BOARD_WIDTH)
.filter(|&x| puzzle.map[y][x] & bit!= 0);
let next = it.next();
if next.is_none() || it.next().is_some() { continue }
next
};
puzzle.map[y][next.unwrap()] = bit;
}
// Check each column
for x in (0.. BOARD_WIDTH) {
let next = {
let mut it = (0.. BOARD_HEIGHT)
.filter(|&y| puzzle.map[y][x] & bit!= 0);
let next = it.next();
if next.is_none() || it.next().is_some() { continue }
next
};
puzzle.map[next.unwrap()][x] = bit;
}
// Check each group
for y0 in (0..BOARD_HEIGHT).step_by(GROUP_WIDTH) {
for x0 in (0..BOARD_WIDTH).step_by(GROUP_HEIGHT) {
let next = {
let mut it = idx_in_grp
.iter()
.map(|&(dx, dy)| (x0 + dx, y0 + dy))
.filter(|&(x, y)| puzzle.map[y][x] & bit!= 0);
let next = it.next();
if next.is_none() || it.next().is_some() { continue }
next
};
let (x, y) = next.unwrap();
puzzle.map[y][x] = bit;
}
}
}
// Loop until no cell can be filled.
if puzzle == bkup { break }
}
let it = (0.. BOARD_HEIGHT * BOARD_WIDTH)
.map(|i| (i % BOARD_WIDTH, i / BOARD_WIDTH))
.map(|(x, y)| (x, y, puzzle.map[y][x].count_ones() as BITS))
.collect::<Vec<_>>();
// If some cells have no possible number, there is no answer.
if it.iter().any(|&(_x, _y, cnt)| cnt == 0) { return vec![]; }
// If all cells have exact one possible number, this is a answer.
if it.iter().all(|&(_x, _y, cnt)| cnt == 1) { return vec![puzzle]; }
// Find the first undetermined cell.
let (x, y, _cnt) = *it.iter()
.filter(|& &(_x, _y, cnt)| cnt > 1)
.min_by(|& &(_x, _y, cnt)| cnt)
.unwrap();
let mut answers = vec![];
for n in (0.. MAX_NUMBER) {
let bit = 1 << n;
if puzzle.map[y][x] & bit == 0 { continue }
// Assuming the number at (x, y) is `n`, try to solve the problem again.
// If some answers are found, append them to the `answers`.
let mut p2 = puzzle;
p2.map[y][x] = bit;
answers.extend(solve_sudoku(p2).into_iter());
}
answers
}
const INPUT: &'static str = "
850002400
720000009
004000000
000107002
305000900
040000000
000080070
017000000
000036040
";
#[cfg(not(test))]
fn main() {
let puzzle = INPUT.parse::<Sudoku>().unwrap();
println!("{}", puzzle);
for answer in &solve_sudoku(puzzle) {
println!("{}", answer);
}
}
#[cfg(test)]
const SOLUTION: &'static str = "
859612437
723854169
164379528
986147352
375268914
241593786
432981675
617425893
598736241
";
#[test]
fn solution() {
let puzzle = INPUT.parse::<Sudoku>().unwrap();
let answer = SOLUTION.parse::<Sudoku>().unwrap();
let solution = solve_sudoku(puzzle);
assert_eq!(solution, [answer]);
}
|
// Check each rows
|
random_line_split
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![cfg(test)]
#![feature(plugin, test)]
extern crate app_units;
extern crate cssparser;
extern crate euclid;
#[macro_use] extern crate html5ever_atoms;
#[macro_use] extern crate matches;
extern crate owning_ref;
extern crate parking_lot;
extern crate rayon;
extern crate rustc_serialize;
extern crate selectors;
extern crate servo_atoms;
extern crate servo_config;
extern crate servo_url;
extern crate style;
|
mod attr;
mod cache;
mod keyframes;
mod logical_geometry;
mod media_queries;
mod owning_handle;
mod parsing;
mod properties;
mod rule_tree;
mod str;
mod stylesheets;
mod stylist;
mod value;
mod viewport;
mod writing_modes {
use style::logical_geometry::WritingMode;
use style::properties::{INITIAL_SERVO_VALUES, get_writing_mode};
#[test]
fn initial_writing_mode_is_empty() {
assert_eq!(get_writing_mode(INITIAL_SERVO_VALUES.get_inheritedbox()), WritingMode::empty())
}
}
|
extern crate style_traits;
extern crate test;
mod animated_properties;
|
random_line_split
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![cfg(test)]
#![feature(plugin, test)]
extern crate app_units;
extern crate cssparser;
extern crate euclid;
#[macro_use] extern crate html5ever_atoms;
#[macro_use] extern crate matches;
extern crate owning_ref;
extern crate parking_lot;
extern crate rayon;
extern crate rustc_serialize;
extern crate selectors;
extern crate servo_atoms;
extern crate servo_config;
extern crate servo_url;
extern crate style;
extern crate style_traits;
extern crate test;
mod animated_properties;
mod attr;
mod cache;
mod keyframes;
mod logical_geometry;
mod media_queries;
mod owning_handle;
mod parsing;
mod properties;
mod rule_tree;
mod str;
mod stylesheets;
mod stylist;
mod value;
mod viewport;
mod writing_modes {
use style::logical_geometry::WritingMode;
use style::properties::{INITIAL_SERVO_VALUES, get_writing_mode};
#[test]
fn initial_writing_mode_is_empty()
|
}
|
{
assert_eq!(get_writing_mode(INITIAL_SERVO_VALUES.get_inheritedbox()), WritingMode::empty())
}
|
identifier_body
|
lib.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![cfg(test)]
#![feature(plugin, test)]
extern crate app_units;
extern crate cssparser;
extern crate euclid;
#[macro_use] extern crate html5ever_atoms;
#[macro_use] extern crate matches;
extern crate owning_ref;
extern crate parking_lot;
extern crate rayon;
extern crate rustc_serialize;
extern crate selectors;
extern crate servo_atoms;
extern crate servo_config;
extern crate servo_url;
extern crate style;
extern crate style_traits;
extern crate test;
mod animated_properties;
mod attr;
mod cache;
mod keyframes;
mod logical_geometry;
mod media_queries;
mod owning_handle;
mod parsing;
mod properties;
mod rule_tree;
mod str;
mod stylesheets;
mod stylist;
mod value;
mod viewport;
mod writing_modes {
use style::logical_geometry::WritingMode;
use style::properties::{INITIAL_SERVO_VALUES, get_writing_mode};
#[test]
fn
|
() {
assert_eq!(get_writing_mode(INITIAL_SERVO_VALUES.get_inheritedbox()), WritingMode::empty())
}
}
|
initial_writing_mode_is_empty
|
identifier_name
|
ssh_keys.rs
|
use std::marker::PhantomData;
use hyper::method::Method;
use response;
use request::RequestBuilder;
use request::DoRequest;
impl<'t> RequestBuilder<'t, response::SshKeys> {
pub fn create(self, name: &str, pub_key: &str) -> RequestBuilder<'t, response::SshKey> {
// POST: "https://api.digitalocean.com/v2/account/keys"
// body:
// "public_key" : "lkajflasndvioanvinasd"
// "name" : "my super key"
RequestBuilder {
method: Method::Post,
auth: self.auth,
url: self.url,
resp_t: PhantomData,
body: Some(format!("{{\"name\":{:?},\"public_key\":{:?}}}", name, pub_key)),
}
}
}
impl<'t> RequestBuilder<'t, response::SshKey> {
pub fn update(self, name: &str) -> RequestBuilder<'t, response::SshKey>
|
pub fn destroy(self) -> RequestBuilder<'t, response::HeaderOnly> {
// DELETE: "https://api.digitalocean.com/v2/account/keys/$ID"
// OR
// DELETE: "https://api.digitalocean.com/v2/account/keys/$FINGER"
RequestBuilder {
method: Method::Delete,
auth: self.auth,
url: self.url,
resp_t: PhantomData,
body: None,
}
}
}
impl<'t> DoRequest<response::SshKey> for RequestBuilder<'t, response::SshKey> {}
|
{
// PUT: "https://api.digitalocean.com/v2/account/keys/$ID"
// OR
// PUT: "https://api.digitalocean.com/v2/account/keys/$FINGER"
// body:
// "name" : "new_name"
RequestBuilder {
method: Method::Put,
url: self.url,
auth: self.auth,
resp_t: PhantomData,
body: Some(format!("{{\"name\":{:?}}}", name)),
}
}
|
identifier_body
|
ssh_keys.rs
|
use std::marker::PhantomData;
use hyper::method::Method;
use response;
use request::RequestBuilder;
use request::DoRequest;
impl<'t> RequestBuilder<'t, response::SshKeys> {
pub fn create(self, name: &str, pub_key: &str) -> RequestBuilder<'t, response::SshKey> {
// POST: "https://api.digitalocean.com/v2/account/keys"
// body:
// "public_key" : "lkajflasndvioanvinasd"
// "name" : "my super key"
RequestBuilder {
method: Method::Post,
auth: self.auth,
url: self.url,
resp_t: PhantomData,
body: Some(format!("{{\"name\":{:?},\"public_key\":{:?}}}", name, pub_key)),
}
}
}
impl<'t> RequestBuilder<'t, response::SshKey> {
pub fn update(self, name: &str) -> RequestBuilder<'t, response::SshKey> {
// PUT: "https://api.digitalocean.com/v2/account/keys/$ID"
// OR
// PUT: "https://api.digitalocean.com/v2/account/keys/$FINGER"
// body:
// "name" : "new_name"
RequestBuilder {
method: Method::Put,
url: self.url,
auth: self.auth,
resp_t: PhantomData,
body: Some(format!("{{\"name\":{:?}}}", name)),
}
}
pub fn destroy(self) -> RequestBuilder<'t, response::HeaderOnly> {
// DELETE: "https://api.digitalocean.com/v2/account/keys/$ID"
// OR
// DELETE: "https://api.digitalocean.com/v2/account/keys/$FINGER"
|
body: None,
}
}
}
impl<'t> DoRequest<response::SshKey> for RequestBuilder<'t, response::SshKey> {}
|
RequestBuilder {
method: Method::Delete,
auth: self.auth,
url: self.url,
resp_t: PhantomData,
|
random_line_split
|
ssh_keys.rs
|
use std::marker::PhantomData;
use hyper::method::Method;
use response;
use request::RequestBuilder;
use request::DoRequest;
impl<'t> RequestBuilder<'t, response::SshKeys> {
pub fn create(self, name: &str, pub_key: &str) -> RequestBuilder<'t, response::SshKey> {
// POST: "https://api.digitalocean.com/v2/account/keys"
// body:
// "public_key" : "lkajflasndvioanvinasd"
// "name" : "my super key"
RequestBuilder {
method: Method::Post,
auth: self.auth,
url: self.url,
resp_t: PhantomData,
body: Some(format!("{{\"name\":{:?},\"public_key\":{:?}}}", name, pub_key)),
}
}
}
impl<'t> RequestBuilder<'t, response::SshKey> {
pub fn
|
(self, name: &str) -> RequestBuilder<'t, response::SshKey> {
// PUT: "https://api.digitalocean.com/v2/account/keys/$ID"
// OR
// PUT: "https://api.digitalocean.com/v2/account/keys/$FINGER"
// body:
// "name" : "new_name"
RequestBuilder {
method: Method::Put,
url: self.url,
auth: self.auth,
resp_t: PhantomData,
body: Some(format!("{{\"name\":{:?}}}", name)),
}
}
pub fn destroy(self) -> RequestBuilder<'t, response::HeaderOnly> {
// DELETE: "https://api.digitalocean.com/v2/account/keys/$ID"
// OR
// DELETE: "https://api.digitalocean.com/v2/account/keys/$FINGER"
RequestBuilder {
method: Method::Delete,
auth: self.auth,
url: self.url,
resp_t: PhantomData,
body: None,
}
}
}
impl<'t> DoRequest<response::SshKey> for RequestBuilder<'t, response::SshKey> {}
|
update
|
identifier_name
|
named_anon_conflict.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Error Reporting for Anonymous Region Lifetime Errors
//! where one region is named and the other is anonymous.
use infer::error_reporting::nice_region_error::NiceRegionError;
use ty;
use util::common::ErrorReported;
use errors::Applicability;
impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
/// When given a `ConcreteFailure` for a function with arguments containing a named region and
/// an anonymous region, emit an descriptive diagnostic error.
pub(super) fn try_report_named_anon_conflict(&self) -> Option<ErrorReported> {
let (span, sub, sup) = self.get_regions();
debug!(
"try_report_named_anon_conflict(sub={:?}, sup={:?})",
sub,
sup
);
// Determine whether the sub and sup consist of one named region ('a)
// and one anonymous (elided) region. If so, find the parameter arg
// where the anonymous region appears (there must always be one; we
// only introduced anonymous regions in parameters) as well as a
// version new_ty of its type where the anonymous region is replaced
// with the named one.//scope_def_id
let (named, anon, anon_arg_info, region_info) = if self.is_named_region(sub)
&& self.tcx.is_suitable_region(sup).is_some()
&& self.find_arg_with_region(sup, sub).is_some()
{
(
sub,
sup,
self.find_arg_with_region(sup, sub).unwrap(),
self.tcx.is_suitable_region(sup).unwrap(),
)
} else if self.is_named_region(sup) && self.tcx.is_suitable_region(sub).is_some()
&& self.find_arg_with_region(sub, sup).is_some()
{
(
sup,
sub,
self.find_arg_with_region(sub, sup).unwrap(),
self.tcx.is_suitable_region(sub).unwrap(),
)
} else {
|
return None; // inapplicable
};
debug!("try_report_named_anon_conflict: named = {:?}", named);
debug!(
"try_report_named_anon_conflict: anon_arg_info = {:?}",
anon_arg_info
);
debug!(
"try_report_named_anon_conflict: region_info = {:?}",
region_info
);
let (arg, new_ty, new_ty_span, br, is_first, scope_def_id, is_impl_item) = (
anon_arg_info.arg,
anon_arg_info.arg_ty,
anon_arg_info.arg_ty_span,
anon_arg_info.bound_region,
anon_arg_info.is_first,
region_info.def_id,
region_info.is_impl_item,
);
match br {
ty::BrAnon(_) => {}
_ => {
/* not an anonymous region */
debug!("try_report_named_anon_conflict: not an anonymous region");
return None;
}
}
if is_impl_item {
debug!("try_report_named_anon_conflict: impl item, bail out");
return None;
}
if let Some((_, fndecl)) = self.find_anon_type(anon, &br) {
if self.is_return_type_anon(scope_def_id, br, fndecl).is_some()
|| self.is_self_anon(is_first, scope_def_id)
{
return None;
}
}
let (error_var, span_label_var) = if let Some(simple_ident) = arg.pat.simple_ident() {
(
format!("the type of `{}`", simple_ident),
format!("the type of `{}`", simple_ident),
)
} else {
("parameter type".to_owned(), "type".to_owned())
};
struct_span_err!(
self.tcx.sess,
span,
E0621,
"explicit lifetime required in {}",
error_var
).span_suggestion_with_applicability(
new_ty_span,
&format!("add explicit lifetime `{}` to {}", named, span_label_var),
new_ty.to_string(),
Applicability::Unspecified,
)
.span_label(span, format!("lifetime `{}` required", named))
.emit();
return Some(ErrorReported);
}
// This method returns whether the given Region is Named
pub(super) fn is_named_region(&self, region: ty::Region<'tcx>) -> bool {
match *region {
ty::ReStatic => true,
ty::ReFree(ref free_region) => match free_region.bound_region {
ty::BrNamed(..) => true,
_ => false,
},
ty::ReEarlyBound(ebr) => ebr.has_name(),
_ => false,
}
}
}
|
random_line_split
|
|
named_anon_conflict.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Error Reporting for Anonymous Region Lifetime Errors
//! where one region is named and the other is anonymous.
use infer::error_reporting::nice_region_error::NiceRegionError;
use ty;
use util::common::ErrorReported;
use errors::Applicability;
impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
/// When given a `ConcreteFailure` for a function with arguments containing a named region and
/// an anonymous region, emit an descriptive diagnostic error.
pub(super) fn try_report_named_anon_conflict(&self) -> Option<ErrorReported> {
let (span, sub, sup) = self.get_regions();
debug!(
"try_report_named_anon_conflict(sub={:?}, sup={:?})",
sub,
sup
);
// Determine whether the sub and sup consist of one named region ('a)
// and one anonymous (elided) region. If so, find the parameter arg
// where the anonymous region appears (there must always be one; we
// only introduced anonymous regions in parameters) as well as a
// version new_ty of its type where the anonymous region is replaced
// with the named one.//scope_def_id
let (named, anon, anon_arg_info, region_info) = if self.is_named_region(sub)
&& self.tcx.is_suitable_region(sup).is_some()
&& self.find_arg_with_region(sup, sub).is_some()
{
(
sub,
sup,
self.find_arg_with_region(sup, sub).unwrap(),
self.tcx.is_suitable_region(sup).unwrap(),
)
} else if self.is_named_region(sup) && self.tcx.is_suitable_region(sub).is_some()
&& self.find_arg_with_region(sub, sup).is_some()
{
(
sup,
sub,
self.find_arg_with_region(sub, sup).unwrap(),
self.tcx.is_suitable_region(sub).unwrap(),
)
} else {
return None; // inapplicable
};
debug!("try_report_named_anon_conflict: named = {:?}", named);
debug!(
"try_report_named_anon_conflict: anon_arg_info = {:?}",
anon_arg_info
);
debug!(
"try_report_named_anon_conflict: region_info = {:?}",
region_info
);
let (arg, new_ty, new_ty_span, br, is_first, scope_def_id, is_impl_item) = (
anon_arg_info.arg,
anon_arg_info.arg_ty,
anon_arg_info.arg_ty_span,
anon_arg_info.bound_region,
anon_arg_info.is_first,
region_info.def_id,
region_info.is_impl_item,
);
match br {
ty::BrAnon(_) => {}
_ => {
/* not an anonymous region */
debug!("try_report_named_anon_conflict: not an anonymous region");
return None;
}
}
if is_impl_item {
debug!("try_report_named_anon_conflict: impl item, bail out");
return None;
}
if let Some((_, fndecl)) = self.find_anon_type(anon, &br) {
if self.is_return_type_anon(scope_def_id, br, fndecl).is_some()
|| self.is_self_anon(is_first, scope_def_id)
{
return None;
}
}
let (error_var, span_label_var) = if let Some(simple_ident) = arg.pat.simple_ident() {
(
format!("the type of `{}`", simple_ident),
format!("the type of `{}`", simple_ident),
)
} else {
("parameter type".to_owned(), "type".to_owned())
};
struct_span_err!(
self.tcx.sess,
span,
E0621,
"explicit lifetime required in {}",
error_var
).span_suggestion_with_applicability(
new_ty_span,
&format!("add explicit lifetime `{}` to {}", named, span_label_var),
new_ty.to_string(),
Applicability::Unspecified,
)
.span_label(span, format!("lifetime `{}` required", named))
.emit();
return Some(ErrorReported);
}
// This method returns whether the given Region is Named
pub(super) fn is_named_region(&self, region: ty::Region<'tcx>) -> bool
|
}
|
{
match *region {
ty::ReStatic => true,
ty::ReFree(ref free_region) => match free_region.bound_region {
ty::BrNamed(..) => true,
_ => false,
},
ty::ReEarlyBound(ebr) => ebr.has_name(),
_ => false,
}
}
|
identifier_body
|
named_anon_conflict.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Error Reporting for Anonymous Region Lifetime Errors
//! where one region is named and the other is anonymous.
use infer::error_reporting::nice_region_error::NiceRegionError;
use ty;
use util::common::ErrorReported;
use errors::Applicability;
impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
/// When given a `ConcreteFailure` for a function with arguments containing a named region and
/// an anonymous region, emit an descriptive diagnostic error.
pub(super) fn
|
(&self) -> Option<ErrorReported> {
let (span, sub, sup) = self.get_regions();
debug!(
"try_report_named_anon_conflict(sub={:?}, sup={:?})",
sub,
sup
);
// Determine whether the sub and sup consist of one named region ('a)
// and one anonymous (elided) region. If so, find the parameter arg
// where the anonymous region appears (there must always be one; we
// only introduced anonymous regions in parameters) as well as a
// version new_ty of its type where the anonymous region is replaced
// with the named one.//scope_def_id
let (named, anon, anon_arg_info, region_info) = if self.is_named_region(sub)
&& self.tcx.is_suitable_region(sup).is_some()
&& self.find_arg_with_region(sup, sub).is_some()
{
(
sub,
sup,
self.find_arg_with_region(sup, sub).unwrap(),
self.tcx.is_suitable_region(sup).unwrap(),
)
} else if self.is_named_region(sup) && self.tcx.is_suitable_region(sub).is_some()
&& self.find_arg_with_region(sub, sup).is_some()
{
(
sup,
sub,
self.find_arg_with_region(sub, sup).unwrap(),
self.tcx.is_suitable_region(sub).unwrap(),
)
} else {
return None; // inapplicable
};
debug!("try_report_named_anon_conflict: named = {:?}", named);
debug!(
"try_report_named_anon_conflict: anon_arg_info = {:?}",
anon_arg_info
);
debug!(
"try_report_named_anon_conflict: region_info = {:?}",
region_info
);
let (arg, new_ty, new_ty_span, br, is_first, scope_def_id, is_impl_item) = (
anon_arg_info.arg,
anon_arg_info.arg_ty,
anon_arg_info.arg_ty_span,
anon_arg_info.bound_region,
anon_arg_info.is_first,
region_info.def_id,
region_info.is_impl_item,
);
match br {
ty::BrAnon(_) => {}
_ => {
/* not an anonymous region */
debug!("try_report_named_anon_conflict: not an anonymous region");
return None;
}
}
if is_impl_item {
debug!("try_report_named_anon_conflict: impl item, bail out");
return None;
}
if let Some((_, fndecl)) = self.find_anon_type(anon, &br) {
if self.is_return_type_anon(scope_def_id, br, fndecl).is_some()
|| self.is_self_anon(is_first, scope_def_id)
{
return None;
}
}
let (error_var, span_label_var) = if let Some(simple_ident) = arg.pat.simple_ident() {
(
format!("the type of `{}`", simple_ident),
format!("the type of `{}`", simple_ident),
)
} else {
("parameter type".to_owned(), "type".to_owned())
};
struct_span_err!(
self.tcx.sess,
span,
E0621,
"explicit lifetime required in {}",
error_var
).span_suggestion_with_applicability(
new_ty_span,
&format!("add explicit lifetime `{}` to {}", named, span_label_var),
new_ty.to_string(),
Applicability::Unspecified,
)
.span_label(span, format!("lifetime `{}` required", named))
.emit();
return Some(ErrorReported);
}
// This method returns whether the given Region is Named
pub(super) fn is_named_region(&self, region: ty::Region<'tcx>) -> bool {
match *region {
ty::ReStatic => true,
ty::ReFree(ref free_region) => match free_region.bound_region {
ty::BrNamed(..) => true,
_ => false,
},
ty::ReEarlyBound(ebr) => ebr.has_name(),
_ => false,
}
}
}
|
try_report_named_anon_conflict
|
identifier_name
|
named_anon_conflict.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Error Reporting for Anonymous Region Lifetime Errors
//! where one region is named and the other is anonymous.
use infer::error_reporting::nice_region_error::NiceRegionError;
use ty;
use util::common::ErrorReported;
use errors::Applicability;
impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
/// When given a `ConcreteFailure` for a function with arguments containing a named region and
/// an anonymous region, emit an descriptive diagnostic error.
pub(super) fn try_report_named_anon_conflict(&self) -> Option<ErrorReported> {
let (span, sub, sup) = self.get_regions();
debug!(
"try_report_named_anon_conflict(sub={:?}, sup={:?})",
sub,
sup
);
// Determine whether the sub and sup consist of one named region ('a)
// and one anonymous (elided) region. If so, find the parameter arg
// where the anonymous region appears (there must always be one; we
// only introduced anonymous regions in parameters) as well as a
// version new_ty of its type where the anonymous region is replaced
// with the named one.//scope_def_id
let (named, anon, anon_arg_info, region_info) = if self.is_named_region(sub)
&& self.tcx.is_suitable_region(sup).is_some()
&& self.find_arg_with_region(sup, sub).is_some()
{
(
sub,
sup,
self.find_arg_with_region(sup, sub).unwrap(),
self.tcx.is_suitable_region(sup).unwrap(),
)
} else if self.is_named_region(sup) && self.tcx.is_suitable_region(sub).is_some()
&& self.find_arg_with_region(sub, sup).is_some()
{
(
sup,
sub,
self.find_arg_with_region(sub, sup).unwrap(),
self.tcx.is_suitable_region(sub).unwrap(),
)
} else
|
;
debug!("try_report_named_anon_conflict: named = {:?}", named);
debug!(
"try_report_named_anon_conflict: anon_arg_info = {:?}",
anon_arg_info
);
debug!(
"try_report_named_anon_conflict: region_info = {:?}",
region_info
);
let (arg, new_ty, new_ty_span, br, is_first, scope_def_id, is_impl_item) = (
anon_arg_info.arg,
anon_arg_info.arg_ty,
anon_arg_info.arg_ty_span,
anon_arg_info.bound_region,
anon_arg_info.is_first,
region_info.def_id,
region_info.is_impl_item,
);
match br {
ty::BrAnon(_) => {}
_ => {
/* not an anonymous region */
debug!("try_report_named_anon_conflict: not an anonymous region");
return None;
}
}
if is_impl_item {
debug!("try_report_named_anon_conflict: impl item, bail out");
return None;
}
if let Some((_, fndecl)) = self.find_anon_type(anon, &br) {
if self.is_return_type_anon(scope_def_id, br, fndecl).is_some()
|| self.is_self_anon(is_first, scope_def_id)
{
return None;
}
}
let (error_var, span_label_var) = if let Some(simple_ident) = arg.pat.simple_ident() {
(
format!("the type of `{}`", simple_ident),
format!("the type of `{}`", simple_ident),
)
} else {
("parameter type".to_owned(), "type".to_owned())
};
struct_span_err!(
self.tcx.sess,
span,
E0621,
"explicit lifetime required in {}",
error_var
).span_suggestion_with_applicability(
new_ty_span,
&format!("add explicit lifetime `{}` to {}", named, span_label_var),
new_ty.to_string(),
Applicability::Unspecified,
)
.span_label(span, format!("lifetime `{}` required", named))
.emit();
return Some(ErrorReported);
}
// This method returns whether the given Region is Named
pub(super) fn is_named_region(&self, region: ty::Region<'tcx>) -> bool {
match *region {
ty::ReStatic => true,
ty::ReFree(ref free_region) => match free_region.bound_region {
ty::BrNamed(..) => true,
_ => false,
},
ty::ReEarlyBound(ebr) => ebr.has_name(),
_ => false,
}
}
}
|
{
return None; // inapplicable
}
|
conditional_block
|
ciphersuite.rs
|
struct Ciphersuite {
code: u16,
kex_algo: ~str,
sig_algo: ~str,
cipher: ~str,
cipher_keylen: u8,
mac: ~str
}
impl Ciphersuite {
static fn new(suite: u16, kex_algo: ~str, sig_algo: ~str,
cipher: ~str, cipher_keylen: u8, mac: ~str) -> Ciphersuite {
Ciphersuite {
code: suite,
kex_algo: kex_algo,
sig_algo: sig_algo,
cipher: cipher,
cipher_keylen: cipher_keylen,
mac: mac }
}
static fn from_code(suite: u16) -> Ciphersuite {
match suite {
// probably this should be a macro (or generated as in botan)
0x002f => { Ciphersuite::new(0x002f, ~"RSA", ~"RSA", ~"AES", 16, ~"SHA1") },
0x008A => { Ciphersuite::new(0x008A, ~"PSK", ~"PSK", ~"RC4", 16, ~"SHA1") },
_ => { fail(~"No such ciphersuite") }
}
}
}
impl Ciphersuite: ToStr {
pure fn to_str() -> ~str {
let mut out: ~str = ~"TLS_";
out += if self.kex_algo!= ~"RSA" { self.kex_algo + ~"_" } else { ~"" };
out += self.sig_algo + "_WITH_";
out += match (self.cipher, self.cipher_keylen) {
(~"AES", 16) => ~"AES_128",
(~"AES", 32) => ~"AES_256",
(~"RC4", 16) => ~"RC4_128",
_ => fail ~"Unknown cipher"
} + ~"_";
out += match self.mac {
~"SHA1" => ~"SHA",
~"SHA256" => ~"SHA256",
|
out
}
}
#[cfg(test)]
mod tests {
#[test]
fn test() {
let psk = Ciphersuite::from_code(0x008A);
io::println(fmt!("%?", psk));
}
}
|
_ => fail ~"Unknown mac"
};
|
random_line_split
|
ciphersuite.rs
|
struct
|
{
code: u16,
kex_algo: ~str,
sig_algo: ~str,
cipher: ~str,
cipher_keylen: u8,
mac: ~str
}
impl Ciphersuite {
static fn new(suite: u16, kex_algo: ~str, sig_algo: ~str,
cipher: ~str, cipher_keylen: u8, mac: ~str) -> Ciphersuite {
Ciphersuite {
code: suite,
kex_algo: kex_algo,
sig_algo: sig_algo,
cipher: cipher,
cipher_keylen: cipher_keylen,
mac: mac }
}
static fn from_code(suite: u16) -> Ciphersuite {
match suite {
// probably this should be a macro (or generated as in botan)
0x002f => { Ciphersuite::new(0x002f, ~"RSA", ~"RSA", ~"AES", 16, ~"SHA1") },
0x008A => { Ciphersuite::new(0x008A, ~"PSK", ~"PSK", ~"RC4", 16, ~"SHA1") },
_ => { fail(~"No such ciphersuite") }
}
}
}
impl Ciphersuite: ToStr {
pure fn to_str() -> ~str {
let mut out: ~str = ~"TLS_";
out += if self.kex_algo!= ~"RSA" { self.kex_algo + ~"_" } else { ~"" };
out += self.sig_algo + "_WITH_";
out += match (self.cipher, self.cipher_keylen) {
(~"AES", 16) => ~"AES_128",
(~"AES", 32) => ~"AES_256",
(~"RC4", 16) => ~"RC4_128",
_ => fail ~"Unknown cipher"
} + ~"_";
out += match self.mac {
~"SHA1" => ~"SHA",
~"SHA256" => ~"SHA256",
_ => fail ~"Unknown mac"
};
out
}
}
#[cfg(test)]
mod tests {
#[test]
fn test() {
let psk = Ciphersuite::from_code(0x008A);
io::println(fmt!("%?", psk));
}
}
|
Ciphersuite
|
identifier_name
|
network.rs
|
extern crate byteorder;
use std::io::prelude::*;
use std::io::{
Result,
Error,
ErrorKind,
};
use std::net::{
TcpListener,
TcpStream,
SocketAddr,
IpAddr,
Ipv4Addr,
};
use std::thread::{
Builder,
JoinHandle,
};
use std::sync::mpsc::{
channel,
Sender,
Receiver,
};
use std::time::Duration;
use files::FileWatcher;
use self::byteorder::{
BigEndian,
WriteBytesExt,
};
const PROTOCOL_HEADER : [u8; 5] = [0x44, 0x44, 0x53, 0x4E, 0x44]; // DDSND
const CLIENT_TYPE_WIN32 : u8 = 0x10;
const CLIENT_VERSION : u8 = 0x01;
const RESPONSE_CODE_ERR : u8 = 0x15;
const RESPONSE_CODE_OK : u8 = 0x06;
const RANDOM_SOUND_REQUEST : u8 = 0xB0;
const RANDOM_SOUND_RESPONSE : u8 = 0xB1;
pub struct Server {
thread : JoinHandle<()>,
receiver : Receiver<TcpStream>,
connections : Vec<TcpStream>,
}
struct Message {
data : Vec<u8>,
}
impl Message {
fn get_type( &self ) -> u8 {
self.data[ 0 ]
}
}
impl Server {
pub fn new() -> Server {
let ( stream_sender, stream_receiver ) = channel::<TcpStream>();
let builder = Builder::new().name( String::from( "Server thread" ) );
let listener = TcpListener::bind( "0.0.0.0:9090" ).expect( "Couldn't bind TCP Listener to 0.0.0.0:9090." );
let thread = builder.spawn( move || {
listener_thread( listener, stream_sender );
} ).expect( "Unable to start listener thread." );
info!( "Listening to 0.0.0.0:9090" );
Server {
thread : thread,
receiver : stream_receiver,
connections : Vec::with_capacity( 100 )
}
}
pub fn process_connections( &mut self ) {
let default_addr = SocketAddr::new( IpAddr::V4( Ipv4Addr::new( 0, 0, 0, 0 ) ), 9090 );
for stream in self.receiver.try_iter() {
let peer_addr = stream.peer_addr().unwrap_or( default_addr );
info!( "Granted connection from {}", peer_addr );
self.connections.push( stream );
}
}
pub fn process_data( &mut self, file_watcher : &mut FileWatcher ) {
let default_addr = SocketAddr::new( IpAddr::V4( Ipv4Addr::new( 0, 0, 0, 0 ) ), 9090 );
let mut removable : Vec<usize> = Vec::with_capacity( 100 );
let mut buffer : [u8; 32] = [0; 32];
let mut messages = Vec::with_capacity( self.connections.len() );
for i in 0..self.connections.len() {
let stream = &mut self.connections[ i ];
match read_data( stream, &mut buffer ) {
Ok( size ) => {
messages.push( Message { data : (&buffer[0..size]).to_vec() } );
},
Err( e ) => {
if e.kind()!= ErrorKind::WouldBlock {
let peer_addr = stream.peer_addr().unwrap_or( default_addr );
info!( "Closed connection with {}", peer_addr );
removable.push( i );
}
},
}
}
// Close connections
for i in &removable {
self.connections.remove( *i );
}
let mut sound_response = false;
// Process messages
for message in &messages {
if message.get_type() == RANDOM_SOUND_REQUEST &&!sound_response {
if let Ok( file ) = file_watcher.random_file() {
info!( "Playing random sound {}", &file.display() );
let mut response = Vec::new();
response.push( RANDOM_SOUND_RESPONSE );
response.push( 0x00 );
response.push( 0x00 );
if let Some( x ) = file.to_str() {
debug!( "Message length: {}", x.len() );
if let Err( e ) = response.write_u32::<BigEndian>( x.len() as u32 ) {
error!( "Couldn't create random sound response with {}", &file.display() );
error!( "{}", e );
continue;
}
if let Err( e ) = response.write_all( x.as_bytes() ) {
error!( "Couldn't create random sound response with {}", &file.display() );
error!( "{}", e );
continue;
}
} else {
error!( "Couldn't create random sound response with {}", &file.display() );
continue;
}
sound_response = true;
for i in 0..self.connections.len() {
let stream = &mut self.connections[ i ];
if let Err( e ) = send_data( stream, &response ) {
warn!( "Couldn't write sound response." );
warn!( "{}", e );
}
}
} else {
warn!( "Couldn't find random file to respond with!" );
}
}
}
}
}
fn send_data( stream : &mut TcpStream, buf : &[u8] ) -> Result<()> {
let mut vec = Vec::with_capacity( PROTOCOL_HEADER.len() + buf.len() );
vec.write_all( &PROTOCOL_HEADER )?;
vec.write_all( &buf )?;
stream.write_all( &vec )?;
Ok( () )
}
fn read_data( stream : &mut TcpStream, buf : &mut [u8] ) -> Result<usize> {
let mut header : [u8; 5] = [0; 5];
let count = stream.read( &mut header )?;
if count == 0 {
return Err( Error::new( ErrorKind::Other, "Connection closed." ) );
}
if count!= PROTOCOL_HEADER.len() {
return Err( Error::new( ErrorKind::Other, "Invalid number of bytes received." ) );
}
for i in 0..PROTOCOL_HEADER.len() {
if header[ i ]!= PROTOCOL_HEADER[ i ] {
return Err( Error::new( ErrorKind::Other, "Invalid header received." ) );
}
}
let result = stream.read( buf )?;
Ok( result )
}
fn establish_connection( stream : &mut TcpStream ) -> Result<()> {
stream.set_nodelay( true )?; // Enable no-delay
stream.set_read_timeout( Some( Duration::from_secs( 5 ) ) )?; // Set read time-out for handshake
stream.set_write_timeout( Some( Duration::from_secs( 5 ) ) )?; // Set write time-out for handshake
let mut buf : [u8; 3] = [0; 3];
let count = read_data( stream, &mut buf )?;
if count!= buf.len() {
return Err( Error::new( ErrorKind::Other, "Invalid connection request." ) );
}
if buf[ 0 ]!= CLIENT_TYPE_WIN32 {
// Only allow win32 client types for now
return Err( Error::new( ErrorKind::Other, "Invalid client type." ) );
}
if buf[ 1 ]!= CLIENT_VERSION {
// Only allow version 1 clients for now
return Err( Error::new( ErrorKind::Other, "Invalid client version." ) );
}
if buf[ 2 ]!= 0x00 {
// This is a reserved field that should always be 0 for now
return Err( Error::new( ErrorKind::Other, "Invalid connection request." ) );
}
stream.set_nonblocking( true )?; // Set non-blocking mode
Ok( () )
}
fn listener_thread( listener : TcpListener, sender : Sender<TcpStream> ) {
let default_addr = SocketAddr::new( IpAddr::V4( Ipv4Addr::new( 0, 0, 0, 0 ) ), 9090 );
for stream in listener.incoming() {
match stream {
Ok( mut stream ) => {
let peer_addr = stream.peer_addr().unwrap_or( default_addr );
|
match establish_connection( &mut stream ) {
Ok( _ ) => {
response[ 0 ] = RESPONSE_CODE_OK;
if let Err( e ) = send_data( &mut stream, &response ) {
error!( "{}", e );
error!( "Denied connection from {}", peer_addr );
continue;
}
},
Err( e ) => {
response[ 0 ] = RESPONSE_CODE_ERR;
let _ = send_data( &mut stream, &response );
error!( "{}", e );
error!( "Denied connection from {}", peer_addr );
continue;
},
}
// Send stream to the handling thread
if let Err( e ) = sender.send( stream ) {
error!( "{}", e );
error!( "Denied connection from {}", peer_addr );
continue;
}
},
Err( e ) => error!( "{}", e ),
}
}
}
|
info!( "Connection request from {}", peer_addr );
let mut response : [u8; 3] = [0; 3];
// Establish a proper client connection
|
random_line_split
|
network.rs
|
extern crate byteorder;
use std::io::prelude::*;
use std::io::{
Result,
Error,
ErrorKind,
};
use std::net::{
TcpListener,
TcpStream,
SocketAddr,
IpAddr,
Ipv4Addr,
};
use std::thread::{
Builder,
JoinHandle,
};
use std::sync::mpsc::{
channel,
Sender,
Receiver,
};
use std::time::Duration;
use files::FileWatcher;
use self::byteorder::{
BigEndian,
WriteBytesExt,
};
const PROTOCOL_HEADER : [u8; 5] = [0x44, 0x44, 0x53, 0x4E, 0x44]; // DDSND
const CLIENT_TYPE_WIN32 : u8 = 0x10;
const CLIENT_VERSION : u8 = 0x01;
const RESPONSE_CODE_ERR : u8 = 0x15;
const RESPONSE_CODE_OK : u8 = 0x06;
const RANDOM_SOUND_REQUEST : u8 = 0xB0;
const RANDOM_SOUND_RESPONSE : u8 = 0xB1;
pub struct Server {
thread : JoinHandle<()>,
receiver : Receiver<TcpStream>,
connections : Vec<TcpStream>,
}
struct Message {
data : Vec<u8>,
}
impl Message {
fn
|
( &self ) -> u8 {
self.data[ 0 ]
}
}
impl Server {
pub fn new() -> Server {
let ( stream_sender, stream_receiver ) = channel::<TcpStream>();
let builder = Builder::new().name( String::from( "Server thread" ) );
let listener = TcpListener::bind( "0.0.0.0:9090" ).expect( "Couldn't bind TCP Listener to 0.0.0.0:9090." );
let thread = builder.spawn( move || {
listener_thread( listener, stream_sender );
} ).expect( "Unable to start listener thread." );
info!( "Listening to 0.0.0.0:9090" );
Server {
thread : thread,
receiver : stream_receiver,
connections : Vec::with_capacity( 100 )
}
}
pub fn process_connections( &mut self ) {
let default_addr = SocketAddr::new( IpAddr::V4( Ipv4Addr::new( 0, 0, 0, 0 ) ), 9090 );
for stream in self.receiver.try_iter() {
let peer_addr = stream.peer_addr().unwrap_or( default_addr );
info!( "Granted connection from {}", peer_addr );
self.connections.push( stream );
}
}
pub fn process_data( &mut self, file_watcher : &mut FileWatcher ) {
let default_addr = SocketAddr::new( IpAddr::V4( Ipv4Addr::new( 0, 0, 0, 0 ) ), 9090 );
let mut removable : Vec<usize> = Vec::with_capacity( 100 );
let mut buffer : [u8; 32] = [0; 32];
let mut messages = Vec::with_capacity( self.connections.len() );
for i in 0..self.connections.len() {
let stream = &mut self.connections[ i ];
match read_data( stream, &mut buffer ) {
Ok( size ) => {
messages.push( Message { data : (&buffer[0..size]).to_vec() } );
},
Err( e ) => {
if e.kind()!= ErrorKind::WouldBlock {
let peer_addr = stream.peer_addr().unwrap_or( default_addr );
info!( "Closed connection with {}", peer_addr );
removable.push( i );
}
},
}
}
// Close connections
for i in &removable {
self.connections.remove( *i );
}
let mut sound_response = false;
// Process messages
for message in &messages {
if message.get_type() == RANDOM_SOUND_REQUEST &&!sound_response {
if let Ok( file ) = file_watcher.random_file() {
info!( "Playing random sound {}", &file.display() );
let mut response = Vec::new();
response.push( RANDOM_SOUND_RESPONSE );
response.push( 0x00 );
response.push( 0x00 );
if let Some( x ) = file.to_str() {
debug!( "Message length: {}", x.len() );
if let Err( e ) = response.write_u32::<BigEndian>( x.len() as u32 ) {
error!( "Couldn't create random sound response with {}", &file.display() );
error!( "{}", e );
continue;
}
if let Err( e ) = response.write_all( x.as_bytes() ) {
error!( "Couldn't create random sound response with {}", &file.display() );
error!( "{}", e );
continue;
}
} else {
error!( "Couldn't create random sound response with {}", &file.display() );
continue;
}
sound_response = true;
for i in 0..self.connections.len() {
let stream = &mut self.connections[ i ];
if let Err( e ) = send_data( stream, &response ) {
warn!( "Couldn't write sound response." );
warn!( "{}", e );
}
}
} else {
warn!( "Couldn't find random file to respond with!" );
}
}
}
}
}
fn send_data( stream : &mut TcpStream, buf : &[u8] ) -> Result<()> {
let mut vec = Vec::with_capacity( PROTOCOL_HEADER.len() + buf.len() );
vec.write_all( &PROTOCOL_HEADER )?;
vec.write_all( &buf )?;
stream.write_all( &vec )?;
Ok( () )
}
fn read_data( stream : &mut TcpStream, buf : &mut [u8] ) -> Result<usize> {
let mut header : [u8; 5] = [0; 5];
let count = stream.read( &mut header )?;
if count == 0 {
return Err( Error::new( ErrorKind::Other, "Connection closed." ) );
}
if count!= PROTOCOL_HEADER.len() {
return Err( Error::new( ErrorKind::Other, "Invalid number of bytes received." ) );
}
for i in 0..PROTOCOL_HEADER.len() {
if header[ i ]!= PROTOCOL_HEADER[ i ] {
return Err( Error::new( ErrorKind::Other, "Invalid header received." ) );
}
}
let result = stream.read( buf )?;
Ok( result )
}
fn establish_connection( stream : &mut TcpStream ) -> Result<()> {
stream.set_nodelay( true )?; // Enable no-delay
stream.set_read_timeout( Some( Duration::from_secs( 5 ) ) )?; // Set read time-out for handshake
stream.set_write_timeout( Some( Duration::from_secs( 5 ) ) )?; // Set write time-out for handshake
let mut buf : [u8; 3] = [0; 3];
let count = read_data( stream, &mut buf )?;
if count!= buf.len() {
return Err( Error::new( ErrorKind::Other, "Invalid connection request." ) );
}
if buf[ 0 ]!= CLIENT_TYPE_WIN32 {
// Only allow win32 client types for now
return Err( Error::new( ErrorKind::Other, "Invalid client type." ) );
}
if buf[ 1 ]!= CLIENT_VERSION {
// Only allow version 1 clients for now
return Err( Error::new( ErrorKind::Other, "Invalid client version." ) );
}
if buf[ 2 ]!= 0x00 {
// This is a reserved field that should always be 0 for now
return Err( Error::new( ErrorKind::Other, "Invalid connection request." ) );
}
stream.set_nonblocking( true )?; // Set non-blocking mode
Ok( () )
}
fn listener_thread( listener : TcpListener, sender : Sender<TcpStream> ) {
let default_addr = SocketAddr::new( IpAddr::V4( Ipv4Addr::new( 0, 0, 0, 0 ) ), 9090 );
for stream in listener.incoming() {
match stream {
Ok( mut stream ) => {
let peer_addr = stream.peer_addr().unwrap_or( default_addr );
info!( "Connection request from {}", peer_addr );
let mut response : [u8; 3] = [0; 3];
// Establish a proper client connection
match establish_connection( &mut stream ) {
Ok( _ ) => {
response[ 0 ] = RESPONSE_CODE_OK;
if let Err( e ) = send_data( &mut stream, &response ) {
error!( "{}", e );
error!( "Denied connection from {}", peer_addr );
continue;
}
},
Err( e ) => {
response[ 0 ] = RESPONSE_CODE_ERR;
let _ = send_data( &mut stream, &response );
error!( "{}", e );
error!( "Denied connection from {}", peer_addr );
continue;
},
}
// Send stream to the handling thread
if let Err( e ) = sender.send( stream ) {
error!( "{}", e );
error!( "Denied connection from {}", peer_addr );
continue;
}
},
Err( e ) => error!( "{}", e ),
}
}
}
|
get_type
|
identifier_name
|
network.rs
|
extern crate byteorder;
use std::io::prelude::*;
use std::io::{
Result,
Error,
ErrorKind,
};
use std::net::{
TcpListener,
TcpStream,
SocketAddr,
IpAddr,
Ipv4Addr,
};
use std::thread::{
Builder,
JoinHandle,
};
use std::sync::mpsc::{
channel,
Sender,
Receiver,
};
use std::time::Duration;
use files::FileWatcher;
use self::byteorder::{
BigEndian,
WriteBytesExt,
};
const PROTOCOL_HEADER : [u8; 5] = [0x44, 0x44, 0x53, 0x4E, 0x44]; // DDSND
const CLIENT_TYPE_WIN32 : u8 = 0x10;
const CLIENT_VERSION : u8 = 0x01;
const RESPONSE_CODE_ERR : u8 = 0x15;
const RESPONSE_CODE_OK : u8 = 0x06;
const RANDOM_SOUND_REQUEST : u8 = 0xB0;
const RANDOM_SOUND_RESPONSE : u8 = 0xB1;
pub struct Server {
thread : JoinHandle<()>,
receiver : Receiver<TcpStream>,
connections : Vec<TcpStream>,
}
struct Message {
data : Vec<u8>,
}
impl Message {
fn get_type( &self ) -> u8 {
self.data[ 0 ]
}
}
impl Server {
pub fn new() -> Server {
let ( stream_sender, stream_receiver ) = channel::<TcpStream>();
let builder = Builder::new().name( String::from( "Server thread" ) );
let listener = TcpListener::bind( "0.0.0.0:9090" ).expect( "Couldn't bind TCP Listener to 0.0.0.0:9090." );
let thread = builder.spawn( move || {
listener_thread( listener, stream_sender );
} ).expect( "Unable to start listener thread." );
info!( "Listening to 0.0.0.0:9090" );
Server {
thread : thread,
receiver : stream_receiver,
connections : Vec::with_capacity( 100 )
}
}
pub fn process_connections( &mut self ) {
let default_addr = SocketAddr::new( IpAddr::V4( Ipv4Addr::new( 0, 0, 0, 0 ) ), 9090 );
for stream in self.receiver.try_iter() {
let peer_addr = stream.peer_addr().unwrap_or( default_addr );
info!( "Granted connection from {}", peer_addr );
self.connections.push( stream );
}
}
pub fn process_data( &mut self, file_watcher : &mut FileWatcher ) {
let default_addr = SocketAddr::new( IpAddr::V4( Ipv4Addr::new( 0, 0, 0, 0 ) ), 9090 );
let mut removable : Vec<usize> = Vec::with_capacity( 100 );
let mut buffer : [u8; 32] = [0; 32];
let mut messages = Vec::with_capacity( self.connections.len() );
for i in 0..self.connections.len() {
let stream = &mut self.connections[ i ];
match read_data( stream, &mut buffer ) {
Ok( size ) => {
messages.push( Message { data : (&buffer[0..size]).to_vec() } );
},
Err( e ) => {
if e.kind()!= ErrorKind::WouldBlock {
let peer_addr = stream.peer_addr().unwrap_or( default_addr );
info!( "Closed connection with {}", peer_addr );
removable.push( i );
}
},
}
}
// Close connections
for i in &removable {
self.connections.remove( *i );
}
let mut sound_response = false;
// Process messages
for message in &messages {
if message.get_type() == RANDOM_SOUND_REQUEST &&!sound_response {
if let Ok( file ) = file_watcher.random_file() {
info!( "Playing random sound {}", &file.display() );
let mut response = Vec::new();
response.push( RANDOM_SOUND_RESPONSE );
response.push( 0x00 );
response.push( 0x00 );
if let Some( x ) = file.to_str() {
debug!( "Message length: {}", x.len() );
if let Err( e ) = response.write_u32::<BigEndian>( x.len() as u32 ) {
error!( "Couldn't create random sound response with {}", &file.display() );
error!( "{}", e );
continue;
}
if let Err( e ) = response.write_all( x.as_bytes() ) {
error!( "Couldn't create random sound response with {}", &file.display() );
error!( "{}", e );
continue;
}
} else {
error!( "Couldn't create random sound response with {}", &file.display() );
continue;
}
sound_response = true;
for i in 0..self.connections.len() {
let stream = &mut self.connections[ i ];
if let Err( e ) = send_data( stream, &response ) {
warn!( "Couldn't write sound response." );
warn!( "{}", e );
}
}
} else {
warn!( "Couldn't find random file to respond with!" );
}
}
}
}
}
fn send_data( stream : &mut TcpStream, buf : &[u8] ) -> Result<()> {
let mut vec = Vec::with_capacity( PROTOCOL_HEADER.len() + buf.len() );
vec.write_all( &PROTOCOL_HEADER )?;
vec.write_all( &buf )?;
stream.write_all( &vec )?;
Ok( () )
}
fn read_data( stream : &mut TcpStream, buf : &mut [u8] ) -> Result<usize> {
let mut header : [u8; 5] = [0; 5];
let count = stream.read( &mut header )?;
if count == 0
|
if count!= PROTOCOL_HEADER.len() {
return Err( Error::new( ErrorKind::Other, "Invalid number of bytes received." ) );
}
for i in 0..PROTOCOL_HEADER.len() {
if header[ i ]!= PROTOCOL_HEADER[ i ] {
return Err( Error::new( ErrorKind::Other, "Invalid header received." ) );
}
}
let result = stream.read( buf )?;
Ok( result )
}
fn establish_connection( stream : &mut TcpStream ) -> Result<()> {
stream.set_nodelay( true )?; // Enable no-delay
stream.set_read_timeout( Some( Duration::from_secs( 5 ) ) )?; // Set read time-out for handshake
stream.set_write_timeout( Some( Duration::from_secs( 5 ) ) )?; // Set write time-out for handshake
let mut buf : [u8; 3] = [0; 3];
let count = read_data( stream, &mut buf )?;
if count!= buf.len() {
return Err( Error::new( ErrorKind::Other, "Invalid connection request." ) );
}
if buf[ 0 ]!= CLIENT_TYPE_WIN32 {
// Only allow win32 client types for now
return Err( Error::new( ErrorKind::Other, "Invalid client type." ) );
}
if buf[ 1 ]!= CLIENT_VERSION {
// Only allow version 1 clients for now
return Err( Error::new( ErrorKind::Other, "Invalid client version." ) );
}
if buf[ 2 ]!= 0x00 {
// This is a reserved field that should always be 0 for now
return Err( Error::new( ErrorKind::Other, "Invalid connection request." ) );
}
stream.set_nonblocking( true )?; // Set non-blocking mode
Ok( () )
}
fn listener_thread( listener : TcpListener, sender : Sender<TcpStream> ) {
let default_addr = SocketAddr::new( IpAddr::V4( Ipv4Addr::new( 0, 0, 0, 0 ) ), 9090 );
for stream in listener.incoming() {
match stream {
Ok( mut stream ) => {
let peer_addr = stream.peer_addr().unwrap_or( default_addr );
info!( "Connection request from {}", peer_addr );
let mut response : [u8; 3] = [0; 3];
// Establish a proper client connection
match establish_connection( &mut stream ) {
Ok( _ ) => {
response[ 0 ] = RESPONSE_CODE_OK;
if let Err( e ) = send_data( &mut stream, &response ) {
error!( "{}", e );
error!( "Denied connection from {}", peer_addr );
continue;
}
},
Err( e ) => {
response[ 0 ] = RESPONSE_CODE_ERR;
let _ = send_data( &mut stream, &response );
error!( "{}", e );
error!( "Denied connection from {}", peer_addr );
continue;
},
}
// Send stream to the handling thread
if let Err( e ) = sender.send( stream ) {
error!( "{}", e );
error!( "Denied connection from {}", peer_addr );
continue;
}
},
Err( e ) => error!( "{}", e ),
}
}
}
|
{
return Err( Error::new( ErrorKind::Other, "Connection closed." ) );
}
|
conditional_block
|
document_loader.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Tracking of pending loads in a document.
//! https://html.spec.whatwg.org/multipage/#the-end
use dom::bindings::js::JS;
use dom::document::Document;
use msg::constellation_msg::PipelineId;
use net_traits::AsyncResponseTarget;
use net_traits::{PendingAsyncLoad, ResourceThread, LoadContext};
use std::sync::Arc;
use std::thread;
use url::Url;
#[derive(JSTraceable, PartialEq, Clone, Debug, HeapSizeOf)]
pub enum LoadType {
Image(Url),
Script(Url),
Subframe(Url),
Stylesheet(Url),
PageSource(Url),
Media(Url),
}
impl LoadType {
fn url(&self) -> &Url {
match *self {
LoadType::Image(ref url) |
LoadType::Script(ref url) |
LoadType::Subframe(ref url) |
LoadType::Stylesheet(ref url) |
LoadType::Media(ref url) |
LoadType::PageSource(ref url) => url,
}
}
fn to_load_context(&self) -> LoadContext {
match *self {
LoadType::Image(_) => LoadContext::Image,
LoadType::Script(_) => LoadContext::Script,
LoadType::Subframe(_) | LoadType::PageSource(_) => LoadContext::Browsing,
LoadType::Stylesheet(_) => LoadContext::Style,
LoadType::Media(_) => LoadContext::AudioVideo,
}
}
}
/// Canary value ensuring that manually added blocking loads (ie. ones that weren't
/// created via DocumentLoader::prepare_async_load) are always removed by the time
/// that the owner is destroyed.
#[derive(JSTraceable, HeapSizeOf)]
#[must_root]
pub struct LoadBlocker {
/// The document whose load event is blocked by this object existing.
doc: JS<Document>,
/// The load that is blocking the document's load event.
load: Option<LoadType>,
}
impl LoadBlocker {
/// Mark the document's load event as blocked on this new load.
pub fn new(doc: &Document, load: LoadType) -> LoadBlocker {
doc.add_blocking_load(load.clone());
LoadBlocker {
doc: JS::from_ref(doc),
load: Some(load),
}
}
/// Remove this load from the associated document's list of blocking loads.
pub fn terminate(blocker: &mut Option<LoadBlocker>) {
if let Some(this) = blocker.as_mut() {
this.doc.finish_load(this.load.take().unwrap());
}
*blocker = None;
}
/// Return the url associated with this load.
pub fn url(&self) -> Option<&Url> {
self.load.as_ref().map(LoadType::url)
}
}
impl Drop for LoadBlocker {
fn drop(&mut self) {
if!thread::panicking() {
assert!(self.load.is_none());
}
}
}
#[derive(JSTraceable, HeapSizeOf)]
pub struct DocumentLoader {
/// We use an `Arc<ResourceThread>` here in order to avoid file descriptor exhaustion when there
/// are lots of iframes.
#[ignore_heap_size_of = "channels are hard"]
pub resource_thread: Arc<ResourceThread>,
pipeline: Option<PipelineId>,
blocking_loads: Vec<LoadType>,
events_inhibited: bool,
}
impl DocumentLoader {
pub fn new(existing: &DocumentLoader) -> DocumentLoader {
DocumentLoader::new_with_thread(existing.resource_thread.clone(), None, None)
}
/// We use an `Arc<ResourceThread>` here in order to avoid file descriptor exhaustion when there
/// are lots of iframes.
pub fn new_with_thread(resource_thread: Arc<ResourceThread>,
pipeline: Option<PipelineId>,
initial_load: Option<Url>)
-> DocumentLoader {
let initial_loads = initial_load.into_iter().map(LoadType::PageSource).collect();
DocumentLoader {
resource_thread: resource_thread,
pipeline: pipeline,
blocking_loads: initial_loads,
events_inhibited: false,
}
}
/// Add a load to the list of blocking loads.
pub fn add_blocking_load(&mut self, load: LoadType) {
self.blocking_loads.push(load);
}
/// Create a new pending network request, which can be initiated at some point in
/// the future.
pub fn prepare_async_load(&mut self, load: LoadType, referrer: &Document) -> PendingAsyncLoad {
let context = load.to_load_context();
let url = load.url().clone();
self.add_blocking_load(load);
PendingAsyncLoad::new(context,
(*self.resource_thread).clone(),
url,
self.pipeline,
referrer.get_referrer_policy(),
Some(referrer.url().clone()))
}
/// Create and initiate a new network request.
pub fn load_async(&mut self, load: LoadType, listener: AsyncResponseTarget, referrer: &Document) {
let pending = self.prepare_async_load(load, referrer);
pending.load_async(listener)
}
|
self.blocking_loads.remove(idx.expect(&format!("unknown completed load {:?}", load)));
}
pub fn is_blocked(&self) -> bool {
// TODO: Ensure that we report blocked if parsing is still ongoing.
!self.blocking_loads.is_empty()
}
pub fn inhibit_events(&mut self) {
self.events_inhibited = true;
}
pub fn events_inhibited(&self) -> bool {
self.events_inhibited
}
}
|
/// Mark an in-progress network request complete.
pub fn finish_load(&mut self, load: &LoadType) {
let idx = self.blocking_loads.iter().position(|unfinished| *unfinished == *load);
|
random_line_split
|
document_loader.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Tracking of pending loads in a document.
//! https://html.spec.whatwg.org/multipage/#the-end
use dom::bindings::js::JS;
use dom::document::Document;
use msg::constellation_msg::PipelineId;
use net_traits::AsyncResponseTarget;
use net_traits::{PendingAsyncLoad, ResourceThread, LoadContext};
use std::sync::Arc;
use std::thread;
use url::Url;
#[derive(JSTraceable, PartialEq, Clone, Debug, HeapSizeOf)]
pub enum LoadType {
Image(Url),
Script(Url),
Subframe(Url),
Stylesheet(Url),
PageSource(Url),
Media(Url),
}
impl LoadType {
fn url(&self) -> &Url {
match *self {
LoadType::Image(ref url) |
LoadType::Script(ref url) |
LoadType::Subframe(ref url) |
LoadType::Stylesheet(ref url) |
LoadType::Media(ref url) |
LoadType::PageSource(ref url) => url,
}
}
fn to_load_context(&self) -> LoadContext {
match *self {
LoadType::Image(_) => LoadContext::Image,
LoadType::Script(_) => LoadContext::Script,
LoadType::Subframe(_) | LoadType::PageSource(_) => LoadContext::Browsing,
LoadType::Stylesheet(_) => LoadContext::Style,
LoadType::Media(_) => LoadContext::AudioVideo,
}
}
}
/// Canary value ensuring that manually added blocking loads (ie. ones that weren't
/// created via DocumentLoader::prepare_async_load) are always removed by the time
/// that the owner is destroyed.
#[derive(JSTraceable, HeapSizeOf)]
#[must_root]
pub struct LoadBlocker {
/// The document whose load event is blocked by this object existing.
doc: JS<Document>,
/// The load that is blocking the document's load event.
load: Option<LoadType>,
}
impl LoadBlocker {
/// Mark the document's load event as blocked on this new load.
pub fn new(doc: &Document, load: LoadType) -> LoadBlocker {
doc.add_blocking_load(load.clone());
LoadBlocker {
doc: JS::from_ref(doc),
load: Some(load),
}
}
/// Remove this load from the associated document's list of blocking loads.
pub fn terminate(blocker: &mut Option<LoadBlocker>) {
if let Some(this) = blocker.as_mut() {
this.doc.finish_load(this.load.take().unwrap());
}
*blocker = None;
}
/// Return the url associated with this load.
pub fn url(&self) -> Option<&Url> {
self.load.as_ref().map(LoadType::url)
}
}
impl Drop for LoadBlocker {
fn drop(&mut self) {
if!thread::panicking() {
assert!(self.load.is_none());
}
}
}
#[derive(JSTraceable, HeapSizeOf)]
pub struct DocumentLoader {
/// We use an `Arc<ResourceThread>` here in order to avoid file descriptor exhaustion when there
/// are lots of iframes.
#[ignore_heap_size_of = "channels are hard"]
pub resource_thread: Arc<ResourceThread>,
pipeline: Option<PipelineId>,
blocking_loads: Vec<LoadType>,
events_inhibited: bool,
}
impl DocumentLoader {
pub fn
|
(existing: &DocumentLoader) -> DocumentLoader {
DocumentLoader::new_with_thread(existing.resource_thread.clone(), None, None)
}
/// We use an `Arc<ResourceThread>` here in order to avoid file descriptor exhaustion when there
/// are lots of iframes.
pub fn new_with_thread(resource_thread: Arc<ResourceThread>,
pipeline: Option<PipelineId>,
initial_load: Option<Url>)
-> DocumentLoader {
let initial_loads = initial_load.into_iter().map(LoadType::PageSource).collect();
DocumentLoader {
resource_thread: resource_thread,
pipeline: pipeline,
blocking_loads: initial_loads,
events_inhibited: false,
}
}
/// Add a load to the list of blocking loads.
pub fn add_blocking_load(&mut self, load: LoadType) {
self.blocking_loads.push(load);
}
/// Create a new pending network request, which can be initiated at some point in
/// the future.
pub fn prepare_async_load(&mut self, load: LoadType, referrer: &Document) -> PendingAsyncLoad {
let context = load.to_load_context();
let url = load.url().clone();
self.add_blocking_load(load);
PendingAsyncLoad::new(context,
(*self.resource_thread).clone(),
url,
self.pipeline,
referrer.get_referrer_policy(),
Some(referrer.url().clone()))
}
/// Create and initiate a new network request.
pub fn load_async(&mut self, load: LoadType, listener: AsyncResponseTarget, referrer: &Document) {
let pending = self.prepare_async_load(load, referrer);
pending.load_async(listener)
}
/// Mark an in-progress network request complete.
pub fn finish_load(&mut self, load: &LoadType) {
let idx = self.blocking_loads.iter().position(|unfinished| *unfinished == *load);
self.blocking_loads.remove(idx.expect(&format!("unknown completed load {:?}", load)));
}
pub fn is_blocked(&self) -> bool {
// TODO: Ensure that we report blocked if parsing is still ongoing.
!self.blocking_loads.is_empty()
}
pub fn inhibit_events(&mut self) {
self.events_inhibited = true;
}
pub fn events_inhibited(&self) -> bool {
self.events_inhibited
}
}
|
new
|
identifier_name
|
document_loader.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Tracking of pending loads in a document.
//! https://html.spec.whatwg.org/multipage/#the-end
use dom::bindings::js::JS;
use dom::document::Document;
use msg::constellation_msg::PipelineId;
use net_traits::AsyncResponseTarget;
use net_traits::{PendingAsyncLoad, ResourceThread, LoadContext};
use std::sync::Arc;
use std::thread;
use url::Url;
#[derive(JSTraceable, PartialEq, Clone, Debug, HeapSizeOf)]
pub enum LoadType {
Image(Url),
Script(Url),
Subframe(Url),
Stylesheet(Url),
PageSource(Url),
Media(Url),
}
impl LoadType {
fn url(&self) -> &Url {
match *self {
LoadType::Image(ref url) |
LoadType::Script(ref url) |
LoadType::Subframe(ref url) |
LoadType::Stylesheet(ref url) |
LoadType::Media(ref url) |
LoadType::PageSource(ref url) => url,
}
}
fn to_load_context(&self) -> LoadContext {
match *self {
LoadType::Image(_) => LoadContext::Image,
LoadType::Script(_) => LoadContext::Script,
LoadType::Subframe(_) | LoadType::PageSource(_) => LoadContext::Browsing,
LoadType::Stylesheet(_) => LoadContext::Style,
LoadType::Media(_) => LoadContext::AudioVideo,
}
}
}
/// Canary value ensuring that manually added blocking loads (ie. ones that weren't
/// created via DocumentLoader::prepare_async_load) are always removed by the time
/// that the owner is destroyed.
#[derive(JSTraceable, HeapSizeOf)]
#[must_root]
pub struct LoadBlocker {
/// The document whose load event is blocked by this object existing.
doc: JS<Document>,
/// The load that is blocking the document's load event.
load: Option<LoadType>,
}
impl LoadBlocker {
/// Mark the document's load event as blocked on this new load.
pub fn new(doc: &Document, load: LoadType) -> LoadBlocker {
doc.add_blocking_load(load.clone());
LoadBlocker {
doc: JS::from_ref(doc),
load: Some(load),
}
}
/// Remove this load from the associated document's list of blocking loads.
pub fn terminate(blocker: &mut Option<LoadBlocker>) {
if let Some(this) = blocker.as_mut() {
this.doc.finish_load(this.load.take().unwrap());
}
*blocker = None;
}
/// Return the url associated with this load.
pub fn url(&self) -> Option<&Url> {
self.load.as_ref().map(LoadType::url)
}
}
impl Drop for LoadBlocker {
fn drop(&mut self) {
if!thread::panicking() {
assert!(self.load.is_none());
}
}
}
#[derive(JSTraceable, HeapSizeOf)]
pub struct DocumentLoader {
/// We use an `Arc<ResourceThread>` here in order to avoid file descriptor exhaustion when there
/// are lots of iframes.
#[ignore_heap_size_of = "channels are hard"]
pub resource_thread: Arc<ResourceThread>,
pipeline: Option<PipelineId>,
blocking_loads: Vec<LoadType>,
events_inhibited: bool,
}
impl DocumentLoader {
pub fn new(existing: &DocumentLoader) -> DocumentLoader
|
/// We use an `Arc<ResourceThread>` here in order to avoid file descriptor exhaustion when there
/// are lots of iframes.
pub fn new_with_thread(resource_thread: Arc<ResourceThread>,
pipeline: Option<PipelineId>,
initial_load: Option<Url>)
-> DocumentLoader {
let initial_loads = initial_load.into_iter().map(LoadType::PageSource).collect();
DocumentLoader {
resource_thread: resource_thread,
pipeline: pipeline,
blocking_loads: initial_loads,
events_inhibited: false,
}
}
/// Add a load to the list of blocking loads.
pub fn add_blocking_load(&mut self, load: LoadType) {
self.blocking_loads.push(load);
}
/// Create a new pending network request, which can be initiated at some point in
/// the future.
pub fn prepare_async_load(&mut self, load: LoadType, referrer: &Document) -> PendingAsyncLoad {
let context = load.to_load_context();
let url = load.url().clone();
self.add_blocking_load(load);
PendingAsyncLoad::new(context,
(*self.resource_thread).clone(),
url,
self.pipeline,
referrer.get_referrer_policy(),
Some(referrer.url().clone()))
}
/// Create and initiate a new network request.
pub fn load_async(&mut self, load: LoadType, listener: AsyncResponseTarget, referrer: &Document) {
let pending = self.prepare_async_load(load, referrer);
pending.load_async(listener)
}
/// Mark an in-progress network request complete.
pub fn finish_load(&mut self, load: &LoadType) {
let idx = self.blocking_loads.iter().position(|unfinished| *unfinished == *load);
self.blocking_loads.remove(idx.expect(&format!("unknown completed load {:?}", load)));
}
pub fn is_blocked(&self) -> bool {
// TODO: Ensure that we report blocked if parsing is still ongoing.
!self.blocking_loads.is_empty()
}
pub fn inhibit_events(&mut self) {
self.events_inhibited = true;
}
pub fn events_inhibited(&self) -> bool {
self.events_inhibited
}
}
|
{
DocumentLoader::new_with_thread(existing.resource_thread.clone(), None, None)
}
|
identifier_body
|
recursion_limit.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Recursion limit.
//
|
// this via an attribute on the crate like `#![recursion_limit="22"]`. This pass
// just peeks and looks for that attribute.
use session::Session;
use syntax::ast;
use syntax::attr::AttrMetaMethods;
use std::str::FromStr;
pub fn update_recursion_limit(sess: &Session, krate: &ast::Crate) {
for attr in krate.attrs.iter() {
if!attr.check_name("recursion_limit") {
continue;
}
if let Some(s) = attr.value_str() {
if let Some(n) = FromStr::from_str(s.get()) {
sess.recursion_limit.set(n);
return;
}
}
span_err!(sess, attr.span, E0296, "malformed recursion limit attribute, \
expected #![recursion_limit=\"N\"]");
}
}
|
// There are various parts of the compiler that must impose arbitrary limits
// on how deeply they recurse to prevent stack overflow. Users can override
|
random_line_split
|
recursion_limit.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Recursion limit.
//
// There are various parts of the compiler that must impose arbitrary limits
// on how deeply they recurse to prevent stack overflow. Users can override
// this via an attribute on the crate like `#![recursion_limit="22"]`. This pass
// just peeks and looks for that attribute.
use session::Session;
use syntax::ast;
use syntax::attr::AttrMetaMethods;
use std::str::FromStr;
pub fn
|
(sess: &Session, krate: &ast::Crate) {
for attr in krate.attrs.iter() {
if!attr.check_name("recursion_limit") {
continue;
}
if let Some(s) = attr.value_str() {
if let Some(n) = FromStr::from_str(s.get()) {
sess.recursion_limit.set(n);
return;
}
}
span_err!(sess, attr.span, E0296, "malformed recursion limit attribute, \
expected #![recursion_limit=\"N\"]");
}
}
|
update_recursion_limit
|
identifier_name
|
recursion_limit.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Recursion limit.
//
// There are various parts of the compiler that must impose arbitrary limits
// on how deeply they recurse to prevent stack overflow. Users can override
// this via an attribute on the crate like `#![recursion_limit="22"]`. This pass
// just peeks and looks for that attribute.
use session::Session;
use syntax::ast;
use syntax::attr::AttrMetaMethods;
use std::str::FromStr;
pub fn update_recursion_limit(sess: &Session, krate: &ast::Crate)
|
{
for attr in krate.attrs.iter() {
if !attr.check_name("recursion_limit") {
continue;
}
if let Some(s) = attr.value_str() {
if let Some(n) = FromStr::from_str(s.get()) {
sess.recursion_limit.set(n);
return;
}
}
span_err!(sess, attr.span, E0296, "malformed recursion limit attribute, \
expected #![recursion_limit=\"N\"]");
}
}
|
identifier_body
|
|
recursion_limit.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Recursion limit.
//
// There are various parts of the compiler that must impose arbitrary limits
// on how deeply they recurse to prevent stack overflow. Users can override
// this via an attribute on the crate like `#![recursion_limit="22"]`. This pass
// just peeks and looks for that attribute.
use session::Session;
use syntax::ast;
use syntax::attr::AttrMetaMethods;
use std::str::FromStr;
pub fn update_recursion_limit(sess: &Session, krate: &ast::Crate) {
for attr in krate.attrs.iter() {
if!attr.check_name("recursion_limit") {
continue;
}
if let Some(s) = attr.value_str() {
if let Some(n) = FromStr::from_str(s.get())
|
}
span_err!(sess, attr.span, E0296, "malformed recursion limit attribute, \
expected #![recursion_limit=\"N\"]");
}
}
|
{
sess.recursion_limit.set(n);
return;
}
|
conditional_block
|
bookmarks.rs
|
/*
* Copyright (c) 2016-2018 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
//! Bookmark management in the application.
use mg::{
CustomDialog,
DialogBuilder,
DeleteCompletionItem,
Info,
InputDialog,
};
use webkit2gtk::WebViewExt;
use app::{App, TAG_COMPLETER};
use app::Msg::TagEdit;
impl App {
/// Add the current page to the bookmarks.
pub fn bookmark(&self) {
if let Some(url) = self.widgets.webview.get_uri() {
let title = self.widgets.webview.get_title();
let message = format!("Added bookmark: {}", url);
match self.model.bookmark_manager.add(url.into(), title.map(Into::into)) {
Ok(true) => self.components.mg.emit(Info(message)),
Ok(false) => self.components.mg.emit(Info("The current page is already in the bookmarks".to_string())),
Err(err) => self.error(&err.to_string()),
}
}
}
/// Delete the current page from the bookmarks.
pub fn delete_bookmark(&self) {
if let Some(url) = self.widgets.webview.get_uri() {
match self.model.bookmark_manager.delete(&url) {
Ok(true) => self.components.mg.emit(Info(format!("Deleted bookmark: {}", url))),
Ok(false) => self.info_page_not_in_bookmarks(),
Err(err) => self.error(&err.to_string()),
}
}
}
/// Delete the bookmark selected in completion.
pub fn delete_selected_bookmark(&self) {
let mut command = self.model.command_text.split_whitespace();
match command.next() {
Some("open") | Some("win-open") | Some("private-win-open") =>
if let Some(url) = command.next() {
// Do not show message when deleting a bookmark in completion.
|
self.error(&err.to_string());
}
self.components.mg.emit(DeleteCompletionItem);
},
_ => (),
}
}
pub fn set_tags(&self, tags: Option<String>) {
// Do not edit tags when the user press Escape.
if let Some(tags) = tags {
let tags: Vec<_> = tags.split(',')
.map(|tag| tag.trim().to_lowercase())
.filter(|tag|!tag.is_empty())
.collect();
if let Err(err) = self.model.bookmark_manager.set_tags(&self.model.current_url, tags) {
self.error(&err.to_string());
}
}
}
/// Edit the tags of the current page from the bookmarks.
pub fn edit_bookmark_tags(&self) {
if self.model.bookmark_manager.exists(&self.model.current_url) {
match self.model.bookmark_manager.get_tags(&self.model.current_url) {
Ok(tags) => {
let default_answer = tags.join(", ");
let responder = Box::new(InputDialog::new(&self.model.relm, TagEdit));
let builder = DialogBuilder::new()
.completer(TAG_COMPLETER)
.default_answer(default_answer)
.message("Bookmark tags (separated by comma):".to_string())
.responder(responder);
self.components.mg.emit(CustomDialog(builder));
},
Err(err) => self.error(&err.to_string()),
}
}
else {
self.info_page_not_in_bookmarks();
}
}
/// Show an information message to tell that the current page is not in the bookmarks.
fn info_page_not_in_bookmarks(&self) {
self.components.mg.emit(Info("The current page is not in the bookmarks".to_string()));
}
}
|
if let Err(err) = self.model.bookmark_manager.delete(url) {
|
random_line_split
|
bookmarks.rs
|
/*
* Copyright (c) 2016-2018 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
//! Bookmark management in the application.
use mg::{
CustomDialog,
DialogBuilder,
DeleteCompletionItem,
Info,
InputDialog,
};
use webkit2gtk::WebViewExt;
use app::{App, TAG_COMPLETER};
use app::Msg::TagEdit;
impl App {
/// Add the current page to the bookmarks.
pub fn bookmark(&self) {
if let Some(url) = self.widgets.webview.get_uri() {
let title = self.widgets.webview.get_title();
let message = format!("Added bookmark: {}", url);
match self.model.bookmark_manager.add(url.into(), title.map(Into::into)) {
Ok(true) => self.components.mg.emit(Info(message)),
Ok(false) => self.components.mg.emit(Info("The current page is already in the bookmarks".to_string())),
Err(err) => self.error(&err.to_string()),
}
}
}
/// Delete the current page from the bookmarks.
pub fn delete_bookmark(&self) {
if let Some(url) = self.widgets.webview.get_uri() {
match self.model.bookmark_manager.delete(&url) {
Ok(true) => self.components.mg.emit(Info(format!("Deleted bookmark: {}", url))),
Ok(false) => self.info_page_not_in_bookmarks(),
Err(err) => self.error(&err.to_string()),
}
}
}
/// Delete the bookmark selected in completion.
pub fn
|
(&self) {
let mut command = self.model.command_text.split_whitespace();
match command.next() {
Some("open") | Some("win-open") | Some("private-win-open") =>
if let Some(url) = command.next() {
// Do not show message when deleting a bookmark in completion.
if let Err(err) = self.model.bookmark_manager.delete(url) {
self.error(&err.to_string());
}
self.components.mg.emit(DeleteCompletionItem);
},
_ => (),
}
}
pub fn set_tags(&self, tags: Option<String>) {
// Do not edit tags when the user press Escape.
if let Some(tags) = tags {
let tags: Vec<_> = tags.split(',')
.map(|tag| tag.trim().to_lowercase())
.filter(|tag|!tag.is_empty())
.collect();
if let Err(err) = self.model.bookmark_manager.set_tags(&self.model.current_url, tags) {
self.error(&err.to_string());
}
}
}
/// Edit the tags of the current page from the bookmarks.
pub fn edit_bookmark_tags(&self) {
if self.model.bookmark_manager.exists(&self.model.current_url) {
match self.model.bookmark_manager.get_tags(&self.model.current_url) {
Ok(tags) => {
let default_answer = tags.join(", ");
let responder = Box::new(InputDialog::new(&self.model.relm, TagEdit));
let builder = DialogBuilder::new()
.completer(TAG_COMPLETER)
.default_answer(default_answer)
.message("Bookmark tags (separated by comma):".to_string())
.responder(responder);
self.components.mg.emit(CustomDialog(builder));
},
Err(err) => self.error(&err.to_string()),
}
}
else {
self.info_page_not_in_bookmarks();
}
}
/// Show an information message to tell that the current page is not in the bookmarks.
fn info_page_not_in_bookmarks(&self) {
self.components.mg.emit(Info("The current page is not in the bookmarks".to_string()));
}
}
|
delete_selected_bookmark
|
identifier_name
|
bookmarks.rs
|
/*
* Copyright (c) 2016-2018 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
//! Bookmark management in the application.
use mg::{
CustomDialog,
DialogBuilder,
DeleteCompletionItem,
Info,
InputDialog,
};
use webkit2gtk::WebViewExt;
use app::{App, TAG_COMPLETER};
use app::Msg::TagEdit;
impl App {
/// Add the current page to the bookmarks.
pub fn bookmark(&self) {
if let Some(url) = self.widgets.webview.get_uri() {
let title = self.widgets.webview.get_title();
let message = format!("Added bookmark: {}", url);
match self.model.bookmark_manager.add(url.into(), title.map(Into::into)) {
Ok(true) => self.components.mg.emit(Info(message)),
Ok(false) => self.components.mg.emit(Info("The current page is already in the bookmarks".to_string())),
Err(err) => self.error(&err.to_string()),
}
}
}
/// Delete the current page from the bookmarks.
pub fn delete_bookmark(&self) {
if let Some(url) = self.widgets.webview.get_uri() {
match self.model.bookmark_manager.delete(&url) {
Ok(true) => self.components.mg.emit(Info(format!("Deleted bookmark: {}", url))),
Ok(false) => self.info_page_not_in_bookmarks(),
Err(err) => self.error(&err.to_string()),
}
}
}
/// Delete the bookmark selected in completion.
pub fn delete_selected_bookmark(&self) {
let mut command = self.model.command_text.split_whitespace();
match command.next() {
Some("open") | Some("win-open") | Some("private-win-open") =>
if let Some(url) = command.next()
|
,
_ => (),
}
}
pub fn set_tags(&self, tags: Option<String>) {
// Do not edit tags when the user press Escape.
if let Some(tags) = tags {
let tags: Vec<_> = tags.split(',')
.map(|tag| tag.trim().to_lowercase())
.filter(|tag|!tag.is_empty())
.collect();
if let Err(err) = self.model.bookmark_manager.set_tags(&self.model.current_url, tags) {
self.error(&err.to_string());
}
}
}
/// Edit the tags of the current page from the bookmarks.
pub fn edit_bookmark_tags(&self) {
if self.model.bookmark_manager.exists(&self.model.current_url) {
match self.model.bookmark_manager.get_tags(&self.model.current_url) {
Ok(tags) => {
let default_answer = tags.join(", ");
let responder = Box::new(InputDialog::new(&self.model.relm, TagEdit));
let builder = DialogBuilder::new()
.completer(TAG_COMPLETER)
.default_answer(default_answer)
.message("Bookmark tags (separated by comma):".to_string())
.responder(responder);
self.components.mg.emit(CustomDialog(builder));
},
Err(err) => self.error(&err.to_string()),
}
}
else {
self.info_page_not_in_bookmarks();
}
}
/// Show an information message to tell that the current page is not in the bookmarks.
fn info_page_not_in_bookmarks(&self) {
self.components.mg.emit(Info("The current page is not in the bookmarks".to_string()));
}
}
|
{
// Do not show message when deleting a bookmark in completion.
if let Err(err) = self.model.bookmark_manager.delete(url) {
self.error(&err.to_string());
}
self.components.mg.emit(DeleteCompletionItem);
}
|
conditional_block
|
bookmarks.rs
|
/*
* Copyright (c) 2016-2018 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
//! Bookmark management in the application.
use mg::{
CustomDialog,
DialogBuilder,
DeleteCompletionItem,
Info,
InputDialog,
};
use webkit2gtk::WebViewExt;
use app::{App, TAG_COMPLETER};
use app::Msg::TagEdit;
impl App {
/// Add the current page to the bookmarks.
pub fn bookmark(&self) {
if let Some(url) = self.widgets.webview.get_uri() {
let title = self.widgets.webview.get_title();
let message = format!("Added bookmark: {}", url);
match self.model.bookmark_manager.add(url.into(), title.map(Into::into)) {
Ok(true) => self.components.mg.emit(Info(message)),
Ok(false) => self.components.mg.emit(Info("The current page is already in the bookmarks".to_string())),
Err(err) => self.error(&err.to_string()),
}
}
}
/// Delete the current page from the bookmarks.
pub fn delete_bookmark(&self)
|
/// Delete the bookmark selected in completion.
pub fn delete_selected_bookmark(&self) {
let mut command = self.model.command_text.split_whitespace();
match command.next() {
Some("open") | Some("win-open") | Some("private-win-open") =>
if let Some(url) = command.next() {
// Do not show message when deleting a bookmark in completion.
if let Err(err) = self.model.bookmark_manager.delete(url) {
self.error(&err.to_string());
}
self.components.mg.emit(DeleteCompletionItem);
},
_ => (),
}
}
pub fn set_tags(&self, tags: Option<String>) {
// Do not edit tags when the user press Escape.
if let Some(tags) = tags {
let tags: Vec<_> = tags.split(',')
.map(|tag| tag.trim().to_lowercase())
.filter(|tag|!tag.is_empty())
.collect();
if let Err(err) = self.model.bookmark_manager.set_tags(&self.model.current_url, tags) {
self.error(&err.to_string());
}
}
}
/// Edit the tags of the current page from the bookmarks.
pub fn edit_bookmark_tags(&self) {
if self.model.bookmark_manager.exists(&self.model.current_url) {
match self.model.bookmark_manager.get_tags(&self.model.current_url) {
Ok(tags) => {
let default_answer = tags.join(", ");
let responder = Box::new(InputDialog::new(&self.model.relm, TagEdit));
let builder = DialogBuilder::new()
.completer(TAG_COMPLETER)
.default_answer(default_answer)
.message("Bookmark tags (separated by comma):".to_string())
.responder(responder);
self.components.mg.emit(CustomDialog(builder));
},
Err(err) => self.error(&err.to_string()),
}
}
else {
self.info_page_not_in_bookmarks();
}
}
/// Show an information message to tell that the current page is not in the bookmarks.
fn info_page_not_in_bookmarks(&self) {
self.components.mg.emit(Info("The current page is not in the bookmarks".to_string()));
}
}
|
{
if let Some(url) = self.widgets.webview.get_uri() {
match self.model.bookmark_manager.delete(&url) {
Ok(true) => self.components.mg.emit(Info(format!("Deleted bookmark: {}", url))),
Ok(false) => self.info_page_not_in_bookmarks(),
Err(err) => self.error(&err.to_string()),
}
}
}
|
identifier_body
|
code_lexer.rs
|
pub struct CodeLexer<'a> {
pub source: &'a str,
}
impl<'a> CodeLexer<'a> {
pub fn new(source: &'a str) -> CodeLexer<'a> {
CodeLexer { source: source }
}
pub fn is_keyword(&self, identifier: &str) -> bool {
match identifier {
"if" | "for" | "model" | "while" | "match" | "use" => true,
_ => false,
}
}
pub fn accept_identifier(&self, source: &str) -> String {
source
.chars()
.enumerate()
.take_while(|&(index, c)| match c {
'A'...'Z' | 'a'...'z' | '_' => true,
'0'...'9' if index > 0 => true,
_ => false,
})
.map(|(_, c)| c)
.collect::<String>()
}
pub fn end_of_block(&self, start_char: char, end_char: char) -> Option<usize> {
let mut scope = 0i32;
let mut in_quote: Option<char> = None;
for (index, c) in self.source.chars().enumerate() {
if c == '\'' || c == '"' {
in_quote = match in_quote {
None => Some(c),
Some(q) if q == c => None,
_ => in_quote,
};
}
if in_quote.is_none() {
if c == start_char {
scope += 1;
} else if c == end_char {
scope -= 1;
if scope <= 0 {
return Some(index);
}
}
};
}
None
}
pub fn next_instance_of(&self, search_char: char) -> Option<usize>
|
pub fn end_of_code_block(&self) -> Option<usize> {
self.end_of_block('{', '}')
}
pub fn end_of_code_statement(&self) -> Option<usize> {
self.next_instance_of(';')
}
pub fn block_delimiters(&self) -> (Option<usize>, Option<usize>) {
(self.next_instance_of('{'), self.end_of_block('{', '}'))
}
}
|
{
self.source.chars().position(|c| c == search_char)
}
|
identifier_body
|
code_lexer.rs
|
pub struct CodeLexer<'a> {
pub source: &'a str,
}
impl<'a> CodeLexer<'a> {
pub fn new(source: &'a str) -> CodeLexer<'a> {
CodeLexer { source: source }
}
pub fn is_keyword(&self, identifier: &str) -> bool {
match identifier {
"if" | "for" | "model" | "while" | "match" | "use" => true,
_ => false,
}
}
pub fn accept_identifier(&self, source: &str) -> String {
source
.chars()
.enumerate()
.take_while(|&(index, c)| match c {
'A'...'Z' | 'a'...'z' | '_' => true,
'0'...'9' if index > 0 => true,
_ => false,
})
.map(|(_, c)| c)
.collect::<String>()
}
pub fn end_of_block(&self, start_char: char, end_char: char) -> Option<usize> {
let mut scope = 0i32;
let mut in_quote: Option<char> = None;
for (index, c) in self.source.chars().enumerate() {
if c == '\'' || c == '"' {
in_quote = match in_quote {
None => Some(c),
Some(q) if q == c => None,
_ => in_quote,
};
}
if in_quote.is_none() {
if c == start_char {
scope += 1;
} else if c == end_char {
scope -= 1;
if scope <= 0 {
return Some(index);
}
}
};
}
None
}
pub fn next_instance_of(&self, search_char: char) -> Option<usize> {
self.source.chars().position(|c| c == search_char)
}
pub fn
|
(&self) -> Option<usize> {
self.end_of_block('{', '}')
}
pub fn end_of_code_statement(&self) -> Option<usize> {
self.next_instance_of(';')
}
pub fn block_delimiters(&self) -> (Option<usize>, Option<usize>) {
(self.next_instance_of('{'), self.end_of_block('{', '}'))
}
}
|
end_of_code_block
|
identifier_name
|
code_lexer.rs
|
pub struct CodeLexer<'a> {
pub source: &'a str,
}
impl<'a> CodeLexer<'a> {
pub fn new(source: &'a str) -> CodeLexer<'a> {
CodeLexer { source: source }
}
pub fn is_keyword(&self, identifier: &str) -> bool {
match identifier {
"if" | "for" | "model" | "while" | "match" | "use" => true,
_ => false,
}
}
pub fn accept_identifier(&self, source: &str) -> String {
source
|
'A'...'Z' | 'a'...'z' | '_' => true,
'0'...'9' if index > 0 => true,
_ => false,
})
.map(|(_, c)| c)
.collect::<String>()
}
pub fn end_of_block(&self, start_char: char, end_char: char) -> Option<usize> {
let mut scope = 0i32;
let mut in_quote: Option<char> = None;
for (index, c) in self.source.chars().enumerate() {
if c == '\'' || c == '"' {
in_quote = match in_quote {
None => Some(c),
Some(q) if q == c => None,
_ => in_quote,
};
}
if in_quote.is_none() {
if c == start_char {
scope += 1;
} else if c == end_char {
scope -= 1;
if scope <= 0 {
return Some(index);
}
}
};
}
None
}
pub fn next_instance_of(&self, search_char: char) -> Option<usize> {
self.source.chars().position(|c| c == search_char)
}
pub fn end_of_code_block(&self) -> Option<usize> {
self.end_of_block('{', '}')
}
pub fn end_of_code_statement(&self) -> Option<usize> {
self.next_instance_of(';')
}
pub fn block_delimiters(&self) -> (Option<usize>, Option<usize>) {
(self.next_instance_of('{'), self.end_of_block('{', '}'))
}
}
|
.chars()
.enumerate()
.take_while(|&(index, c)| match c {
|
random_line_split
|
talker_handler.rs
|
use crate::talker::Talker;
pub struct TalkerHandlerBase {
pub category: String,
pub model: String,
pub label: String,
}
impl TalkerHandlerBase {
pub fn new(category: &str, model: &str, label: &str) -> Self {
Self {
category: category.to_string(),
model: model.to_string(),
label: label.to_string(),
}
}
pub fn category<'a>(&'a self) -> &'a String
|
pub fn model<'a>(&'a self) -> &'a String {
&self.model
}
pub fn label<'a>(&'a self) -> &'a String {
&self.label
}
}
pub trait TalkerHandler {
fn base<'a>(&'a self) -> &'a TalkerHandlerBase;
fn category<'a>(&'a self) -> &'a String {
&self.base().category
}
fn model<'a>(&'a self) -> &'a String {
&self.base().model
}
fn label<'a>(&'a self) -> &'a String {
&self.base().label
}
fn make(&self) -> Result<Box<dyn Talker>, failure::Error>;
}
|
{
&self.category
}
|
identifier_body
|
talker_handler.rs
|
use crate::talker::Talker;
pub struct TalkerHandlerBase {
pub category: String,
pub model: String,
pub label: String,
}
impl TalkerHandlerBase {
pub fn new(category: &str, model: &str, label: &str) -> Self {
Self {
category: category.to_string(),
model: model.to_string(),
label: label.to_string(),
}
}
pub fn category<'a>(&'a self) -> &'a String {
&self.category
}
pub fn model<'a>(&'a self) -> &'a String {
&self.model
}
pub fn label<'a>(&'a self) -> &'a String {
&self.label
}
}
pub trait TalkerHandler {
fn base<'a>(&'a self) -> &'a TalkerHandlerBase;
fn category<'a>(&'a self) -> &'a String {
&self.base().category
}
fn model<'a>(&'a self) -> &'a String {
|
&self.base().label
}
fn make(&self) -> Result<Box<dyn Talker>, failure::Error>;
}
|
&self.base().model
}
fn label<'a>(&'a self) -> &'a String {
|
random_line_split
|
talker_handler.rs
|
use crate::talker::Talker;
pub struct TalkerHandlerBase {
pub category: String,
pub model: String,
pub label: String,
}
impl TalkerHandlerBase {
pub fn new(category: &str, model: &str, label: &str) -> Self {
Self {
category: category.to_string(),
model: model.to_string(),
label: label.to_string(),
}
}
pub fn category<'a>(&'a self) -> &'a String {
&self.category
}
pub fn model<'a>(&'a self) -> &'a String {
&self.model
}
pub fn label<'a>(&'a self) -> &'a String {
&self.label
}
}
pub trait TalkerHandler {
fn base<'a>(&'a self) -> &'a TalkerHandlerBase;
fn category<'a>(&'a self) -> &'a String {
&self.base().category
}
fn model<'a>(&'a self) -> &'a String {
&self.base().model
}
fn
|
<'a>(&'a self) -> &'a String {
&self.base().label
}
fn make(&self) -> Result<Box<dyn Talker>, failure::Error>;
}
|
label
|
identifier_name
|
performancepainttiming.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::PerformancePaintTimingBinding;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::globalscope::GlobalScope;
use dom::performanceentry::PerformanceEntry;
use dom_struct::dom_struct;
use script_traits::PaintMetricType;
#[dom_struct]
pub struct PerformancePaintTiming {
entry: PerformanceEntry,
}
|
fn new_inherited(metric_type: PaintMetricType, start_time: f64)
-> PerformancePaintTiming {
let name = match metric_type {
PaintMetricType::FirstPaint => DOMString::from("first-paint"),
PaintMetricType::FirstContentfulPaint => DOMString::from("first-contentful-paint"),
};
PerformancePaintTiming {
entry: PerformanceEntry::new_inherited(name,
DOMString::from("paint"),
start_time,
0.)
}
}
#[allow(unrooted_must_root)]
pub fn new(global: &GlobalScope,
metric_type: PaintMetricType,
start_time: f64) -> DomRoot<PerformancePaintTiming> {
let entry = PerformancePaintTiming::new_inherited(metric_type, start_time);
reflect_dom_object(Box::new(entry), global, PerformancePaintTimingBinding::Wrap)
}
}
|
impl PerformancePaintTiming {
|
random_line_split
|
performancepainttiming.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::PerformancePaintTimingBinding;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::globalscope::GlobalScope;
use dom::performanceentry::PerformanceEntry;
use dom_struct::dom_struct;
use script_traits::PaintMetricType;
#[dom_struct]
pub struct PerformancePaintTiming {
entry: PerformanceEntry,
}
impl PerformancePaintTiming {
fn new_inherited(metric_type: PaintMetricType, start_time: f64)
-> PerformancePaintTiming {
let name = match metric_type {
PaintMetricType::FirstPaint => DOMString::from("first-paint"),
PaintMetricType::FirstContentfulPaint => DOMString::from("first-contentful-paint"),
};
PerformancePaintTiming {
entry: PerformanceEntry::new_inherited(name,
DOMString::from("paint"),
start_time,
0.)
}
}
#[allow(unrooted_must_root)]
pub fn new(global: &GlobalScope,
metric_type: PaintMetricType,
start_time: f64) -> DomRoot<PerformancePaintTiming>
|
}
|
{
let entry = PerformancePaintTiming::new_inherited(metric_type, start_time);
reflect_dom_object(Box::new(entry), global, PerformancePaintTimingBinding::Wrap)
}
|
identifier_body
|
performancepainttiming.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::PerformancePaintTimingBinding;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::globalscope::GlobalScope;
use dom::performanceentry::PerformanceEntry;
use dom_struct::dom_struct;
use script_traits::PaintMetricType;
#[dom_struct]
pub struct PerformancePaintTiming {
entry: PerformanceEntry,
}
impl PerformancePaintTiming {
fn new_inherited(metric_type: PaintMetricType, start_time: f64)
-> PerformancePaintTiming {
let name = match metric_type {
PaintMetricType::FirstPaint => DOMString::from("first-paint"),
PaintMetricType::FirstContentfulPaint => DOMString::from("first-contentful-paint"),
};
PerformancePaintTiming {
entry: PerformanceEntry::new_inherited(name,
DOMString::from("paint"),
start_time,
0.)
}
}
#[allow(unrooted_must_root)]
pub fn
|
(global: &GlobalScope,
metric_type: PaintMetricType,
start_time: f64) -> DomRoot<PerformancePaintTiming> {
let entry = PerformancePaintTiming::new_inherited(metric_type, start_time);
reflect_dom_object(Box::new(entry), global, PerformancePaintTimingBinding::Wrap)
}
}
|
new
|
identifier_name
|
writer.rs
|
use std::fs::OpenOptions;
use std::io::{Error,Write};
use std::path::Path;
use byteorder::{BigEndian, WriteBytesExt};
use SMF;
use ::{Event,AbsoluteEvent,MetaEvent,MetaCommand,SMFFormat};
/// An SMFWriter is used to write an SMF to a file. It can be either
/// constructed empty and have tracks added, or created from an
/// existing rimd::SMF.
///
/// # Writing an existing SMF to a file
/// ```
/// use rimd::{SMF,SMFWriter,SMFBuilder};
/// use std::path::Path;
/// // Create smf
/// let mut builder = SMFBuilder::new();
/// // add some events to builder
/// let smf = builder.result();
/// let writer = SMFWriter::from_smf(smf);
/// let result = writer.write_to_file(Path::new("/path/to/file.smf"));
/// // handle result
pub struct SMFWriter {
format: u16,
ticks: i16,
tracks: Vec<Vec<u8>>,
}
impl SMFWriter {
/// Create a new SMFWriter with the given number of units per
/// beat. The SMFWriter will initially have no tracks.
pub fn new_with_division(ticks: i16) -> SMFWriter {
SMFWriter {
format: 1,
ticks: ticks,
tracks: Vec::new(),
}
}
/// Create a new SMFWriter with the given format and number of
/// units per beat. The SMFWriter will initially have no tracks.
pub fn new_with_division_and_format(format: SMFFormat, ticks: i16) -> SMFWriter {
SMFWriter {
format: format as u16,
ticks: ticks,
tracks: Vec::new(),
}
}
/// Create a writer that has all the tracks from the given SMF already added
pub fn from_smf(smf: SMF) -> SMFWriter {
let mut writer = SMFWriter::new_with_division_and_format
(smf.format, smf.division);
for track in smf.tracks.iter() {
let mut length = 0;
let mut saw_eot = false;
let mut vec = Vec::new();
writer.start_track_header(&mut vec);
for event in track.events.iter() {
length += SMFWriter::write_vtime(event.vtime as u64, &mut vec).unwrap(); // TODO: Handle error
writer.write_event(&mut vec, &(event.event), &mut length, &mut saw_eot);
}
writer.finish_track_write(&mut vec, &mut length, saw_eot);
writer.tracks.push(vec);
}
writer
}
pub fn vtime_to_vec(val: u64) -> Vec<u8> {
let mut storage = Vec::new();
let mut cur = val;
let mut continuation = false;
let cont_mask = 0x80 as u8;
let val_mask = 0x7F as u64;
loop {
let mut to_write = (cur & val_mask) as u8;
cur = cur >> 7;
if continuation
|
storage.push(to_write);
continuation = true;
if cur == 0 { break; }
}
storage.reverse();
storage
}
// Write a variable length value. Return number of bytes written.
pub fn write_vtime(val: u64, writer: &mut dyn Write) -> Result<u32,Error> {
let storage = SMFWriter::vtime_to_vec(val);
writer.write_all(&storage[..])?;
Ok(storage.len() as u32)
}
fn start_track_header(&self, vec: &mut Vec<u8>) {
vec.push(0x4D);
vec.push(0x54);
vec.push(0x72);
vec.push(0x6B);
// reserve space for track len
vec.push(0);
vec.push(0);
vec.push(0);
vec.push(0);
}
fn write_event(&self, vec: &mut Vec<u8>, event: &Event, length: &mut u32, saw_eot: &mut bool) {
match event {
&Event::Midi(ref midi) => {
vec.extend(midi.data.iter());
*length += midi.data.len() as u32;
}
&Event::Meta(ref meta) => {
vec.push(0xff); // indicate we're writing a meta event
vec.push(meta.command as u8);
// +2 on next line for the 0xff and the command byte we just wrote
*length += SMFWriter::write_vtime(meta.length,vec).unwrap() + 2;
vec.extend(meta.data.iter());
*length += meta.data.len() as u32;
if meta.command == MetaCommand::EndOfTrack {
*saw_eot = true;
}
}
}
}
fn finish_track_write(&self, vec: &mut Vec<u8>, length: &mut u32, saw_eot: bool) {
if!saw_eot {
// no end of track marker in passed data, add one
*length += SMFWriter::write_vtime(0,vec).unwrap();
vec.push(0xff); // indicate we're writing a meta event
vec.push(MetaCommand::EndOfTrack as u8);
*length += SMFWriter::write_vtime(0,vec).unwrap() + 2; // write length of meta command: 0
}
// write in the length in the space we reserved
for i in 0..4 {
let lbyte = (*length & 0xFF) as u8;
// 7-i because smf is big endian and we want to put this in bytes 4-7
vec[7-i] = lbyte;
*length = (*length)>>8;
}
}
/// Add any sequence of AbsoluteEvents as a track to this writer
pub fn add_track<'a,I>(&mut self, track: I) where I: Iterator<Item=&'a AbsoluteEvent> {
self.add_track_with_name(track,None)
}
/// Add any sequence of AbsoluteEvents as a track to this writer. A meta event with the given name will
/// be added at the start of the track
pub fn add_track_with_name<'a,I>(&mut self, track: I, name: Option<String>) where I: Iterator<Item=&'a AbsoluteEvent> {
let mut vec = Vec::new();
self.start_track_header(&mut vec);
let mut length = 0;
let mut cur_time: u64 = 0;
let mut saw_eot = false;
match name {
Some(n) => {
let namemeta = Event::Meta(MetaEvent::sequence_or_track_name(n));
length += SMFWriter::write_vtime(0,&mut vec).unwrap();
self.write_event(&mut vec, &namemeta, &mut length, &mut saw_eot);
}
None => {}
}
for ev in track {
let vtime = ev.get_time() - cur_time;
cur_time = vtime;
length += SMFWriter::write_vtime(vtime as u64,&mut vec).unwrap(); // TODO: Handle error
self.write_event(&mut vec, ev.get_event(), &mut length, &mut saw_eot);
}
self.finish_track_write(&mut vec, &mut length, saw_eot);
self.tracks.push(vec);
}
// actual writing stuff below
fn write_header(&self, writer: &mut dyn Write) -> Result<(),Error> {
writer.write_all(&[0x4D,0x54,0x68,0x64])?;
writer.write_u32::<BigEndian>(6)?;
writer.write_u16::<BigEndian>(self.format)?;
writer.write_u16::<BigEndian>(self.tracks.len() as u16)?;
writer.write_i16::<BigEndian>(self.ticks)?;
Ok(())
}
/// Write out all the tracks that have been added to this
/// SMFWriter to the passed writer
pub fn write_all(self, writer: &mut dyn Write) -> Result<(),Error> {
self.write_header(writer)?;
for track in self.tracks.into_iter() {
writer.write_all(&track[..])?;
}
Ok(())
}
/// Write out the result of the tracks that have been added to a
/// file.
/// Warning: This will overwrite an existing file
pub fn write_to_file(self, path: &Path) -> Result<(),Error> {
let mut file = OpenOptions::new().write(true).truncate(true).create(true).open(path)?;
self.write_all(&mut file)
}
}
#[test]
fn vwrite() {
let mut vec1 = Vec::new();
SMFWriter::write_vtime(127,&mut vec1).unwrap();
assert!(vec1[0] == 0x7f);
vec1.clear();
SMFWriter::write_vtime(255,&mut vec1).unwrap();
assert!(vec1[0] == 0x81);
assert!(vec1[1] == 0x7f);
vec1.clear();
SMFWriter::write_vtime(32768,&mut vec1).unwrap();
assert!(vec1[0] == 0x82);
assert!(vec1[1] == 0x80);
assert!(vec1[2] == 0x00);
}
|
{
// we're writing a continuation byte, so set the bit
to_write |= cont_mask;
}
|
conditional_block
|
writer.rs
|
use std::fs::OpenOptions;
use std::io::{Error,Write};
use std::path::Path;
use byteorder::{BigEndian, WriteBytesExt};
use SMF;
use ::{Event,AbsoluteEvent,MetaEvent,MetaCommand,SMFFormat};
/// An SMFWriter is used to write an SMF to a file. It can be either
/// constructed empty and have tracks added, or created from an
/// existing rimd::SMF.
///
/// # Writing an existing SMF to a file
/// ```
/// use rimd::{SMF,SMFWriter,SMFBuilder};
/// use std::path::Path;
/// // Create smf
/// let mut builder = SMFBuilder::new();
/// // add some events to builder
/// let smf = builder.result();
/// let writer = SMFWriter::from_smf(smf);
/// let result = writer.write_to_file(Path::new("/path/to/file.smf"));
/// // handle result
pub struct SMFWriter {
format: u16,
ticks: i16,
tracks: Vec<Vec<u8>>,
}
impl SMFWriter {
/// Create a new SMFWriter with the given number of units per
/// beat. The SMFWriter will initially have no tracks.
pub fn new_with_division(ticks: i16) -> SMFWriter {
SMFWriter {
format: 1,
ticks: ticks,
tracks: Vec::new(),
}
}
/// Create a new SMFWriter with the given format and number of
/// units per beat. The SMFWriter will initially have no tracks.
pub fn new_with_division_and_format(format: SMFFormat, ticks: i16) -> SMFWriter {
SMFWriter {
format: format as u16,
ticks: ticks,
tracks: Vec::new(),
}
|
let mut writer = SMFWriter::new_with_division_and_format
(smf.format, smf.division);
for track in smf.tracks.iter() {
let mut length = 0;
let mut saw_eot = false;
let mut vec = Vec::new();
writer.start_track_header(&mut vec);
for event in track.events.iter() {
length += SMFWriter::write_vtime(event.vtime as u64, &mut vec).unwrap(); // TODO: Handle error
writer.write_event(&mut vec, &(event.event), &mut length, &mut saw_eot);
}
writer.finish_track_write(&mut vec, &mut length, saw_eot);
writer.tracks.push(vec);
}
writer
}
pub fn vtime_to_vec(val: u64) -> Vec<u8> {
let mut storage = Vec::new();
let mut cur = val;
let mut continuation = false;
let cont_mask = 0x80 as u8;
let val_mask = 0x7F as u64;
loop {
let mut to_write = (cur & val_mask) as u8;
cur = cur >> 7;
if continuation {
// we're writing a continuation byte, so set the bit
to_write |= cont_mask;
}
storage.push(to_write);
continuation = true;
if cur == 0 { break; }
}
storage.reverse();
storage
}
// Write a variable length value. Return number of bytes written.
pub fn write_vtime(val: u64, writer: &mut dyn Write) -> Result<u32,Error> {
let storage = SMFWriter::vtime_to_vec(val);
writer.write_all(&storage[..])?;
Ok(storage.len() as u32)
}
fn start_track_header(&self, vec: &mut Vec<u8>) {
vec.push(0x4D);
vec.push(0x54);
vec.push(0x72);
vec.push(0x6B);
// reserve space for track len
vec.push(0);
vec.push(0);
vec.push(0);
vec.push(0);
}
fn write_event(&self, vec: &mut Vec<u8>, event: &Event, length: &mut u32, saw_eot: &mut bool) {
match event {
&Event::Midi(ref midi) => {
vec.extend(midi.data.iter());
*length += midi.data.len() as u32;
}
&Event::Meta(ref meta) => {
vec.push(0xff); // indicate we're writing a meta event
vec.push(meta.command as u8);
// +2 on next line for the 0xff and the command byte we just wrote
*length += SMFWriter::write_vtime(meta.length,vec).unwrap() + 2;
vec.extend(meta.data.iter());
*length += meta.data.len() as u32;
if meta.command == MetaCommand::EndOfTrack {
*saw_eot = true;
}
}
}
}
fn finish_track_write(&self, vec: &mut Vec<u8>, length: &mut u32, saw_eot: bool) {
if!saw_eot {
// no end of track marker in passed data, add one
*length += SMFWriter::write_vtime(0,vec).unwrap();
vec.push(0xff); // indicate we're writing a meta event
vec.push(MetaCommand::EndOfTrack as u8);
*length += SMFWriter::write_vtime(0,vec).unwrap() + 2; // write length of meta command: 0
}
// write in the length in the space we reserved
for i in 0..4 {
let lbyte = (*length & 0xFF) as u8;
// 7-i because smf is big endian and we want to put this in bytes 4-7
vec[7-i] = lbyte;
*length = (*length)>>8;
}
}
/// Add any sequence of AbsoluteEvents as a track to this writer
pub fn add_track<'a,I>(&mut self, track: I) where I: Iterator<Item=&'a AbsoluteEvent> {
self.add_track_with_name(track,None)
}
/// Add any sequence of AbsoluteEvents as a track to this writer. A meta event with the given name will
/// be added at the start of the track
pub fn add_track_with_name<'a,I>(&mut self, track: I, name: Option<String>) where I: Iterator<Item=&'a AbsoluteEvent> {
let mut vec = Vec::new();
self.start_track_header(&mut vec);
let mut length = 0;
let mut cur_time: u64 = 0;
let mut saw_eot = false;
match name {
Some(n) => {
let namemeta = Event::Meta(MetaEvent::sequence_or_track_name(n));
length += SMFWriter::write_vtime(0,&mut vec).unwrap();
self.write_event(&mut vec, &namemeta, &mut length, &mut saw_eot);
}
None => {}
}
for ev in track {
let vtime = ev.get_time() - cur_time;
cur_time = vtime;
length += SMFWriter::write_vtime(vtime as u64,&mut vec).unwrap(); // TODO: Handle error
self.write_event(&mut vec, ev.get_event(), &mut length, &mut saw_eot);
}
self.finish_track_write(&mut vec, &mut length, saw_eot);
self.tracks.push(vec);
}
// actual writing stuff below
fn write_header(&self, writer: &mut dyn Write) -> Result<(),Error> {
writer.write_all(&[0x4D,0x54,0x68,0x64])?;
writer.write_u32::<BigEndian>(6)?;
writer.write_u16::<BigEndian>(self.format)?;
writer.write_u16::<BigEndian>(self.tracks.len() as u16)?;
writer.write_i16::<BigEndian>(self.ticks)?;
Ok(())
}
/// Write out all the tracks that have been added to this
/// SMFWriter to the passed writer
pub fn write_all(self, writer: &mut dyn Write) -> Result<(),Error> {
self.write_header(writer)?;
for track in self.tracks.into_iter() {
writer.write_all(&track[..])?;
}
Ok(())
}
/// Write out the result of the tracks that have been added to a
/// file.
/// Warning: This will overwrite an existing file
pub fn write_to_file(self, path: &Path) -> Result<(),Error> {
let mut file = OpenOptions::new().write(true).truncate(true).create(true).open(path)?;
self.write_all(&mut file)
}
}
#[test]
fn vwrite() {
let mut vec1 = Vec::new();
SMFWriter::write_vtime(127,&mut vec1).unwrap();
assert!(vec1[0] == 0x7f);
vec1.clear();
SMFWriter::write_vtime(255,&mut vec1).unwrap();
assert!(vec1[0] == 0x81);
assert!(vec1[1] == 0x7f);
vec1.clear();
SMFWriter::write_vtime(32768,&mut vec1).unwrap();
assert!(vec1[0] == 0x82);
assert!(vec1[1] == 0x80);
assert!(vec1[2] == 0x00);
}
|
}
/// Create a writer that has all the tracks from the given SMF already added
pub fn from_smf(smf: SMF) -> SMFWriter {
|
random_line_split
|
writer.rs
|
use std::fs::OpenOptions;
use std::io::{Error,Write};
use std::path::Path;
use byteorder::{BigEndian, WriteBytesExt};
use SMF;
use ::{Event,AbsoluteEvent,MetaEvent,MetaCommand,SMFFormat};
/// An SMFWriter is used to write an SMF to a file. It can be either
/// constructed empty and have tracks added, or created from an
/// existing rimd::SMF.
///
/// # Writing an existing SMF to a file
/// ```
/// use rimd::{SMF,SMFWriter,SMFBuilder};
/// use std::path::Path;
/// // Create smf
/// let mut builder = SMFBuilder::new();
/// // add some events to builder
/// let smf = builder.result();
/// let writer = SMFWriter::from_smf(smf);
/// let result = writer.write_to_file(Path::new("/path/to/file.smf"));
/// // handle result
pub struct SMFWriter {
format: u16,
ticks: i16,
tracks: Vec<Vec<u8>>,
}
impl SMFWriter {
/// Create a new SMFWriter with the given number of units per
/// beat. The SMFWriter will initially have no tracks.
pub fn new_with_division(ticks: i16) -> SMFWriter {
SMFWriter {
format: 1,
ticks: ticks,
tracks: Vec::new(),
}
}
/// Create a new SMFWriter with the given format and number of
/// units per beat. The SMFWriter will initially have no tracks.
pub fn new_with_division_and_format(format: SMFFormat, ticks: i16) -> SMFWriter {
SMFWriter {
format: format as u16,
ticks: ticks,
tracks: Vec::new(),
}
}
/// Create a writer that has all the tracks from the given SMF already added
pub fn from_smf(smf: SMF) -> SMFWriter {
let mut writer = SMFWriter::new_with_division_and_format
(smf.format, smf.division);
for track in smf.tracks.iter() {
let mut length = 0;
let mut saw_eot = false;
let mut vec = Vec::new();
writer.start_track_header(&mut vec);
for event in track.events.iter() {
length += SMFWriter::write_vtime(event.vtime as u64, &mut vec).unwrap(); // TODO: Handle error
writer.write_event(&mut vec, &(event.event), &mut length, &mut saw_eot);
}
writer.finish_track_write(&mut vec, &mut length, saw_eot);
writer.tracks.push(vec);
}
writer
}
pub fn vtime_to_vec(val: u64) -> Vec<u8> {
let mut storage = Vec::new();
let mut cur = val;
let mut continuation = false;
let cont_mask = 0x80 as u8;
let val_mask = 0x7F as u64;
loop {
let mut to_write = (cur & val_mask) as u8;
cur = cur >> 7;
if continuation {
// we're writing a continuation byte, so set the bit
to_write |= cont_mask;
}
storage.push(to_write);
continuation = true;
if cur == 0 { break; }
}
storage.reverse();
storage
}
// Write a variable length value. Return number of bytes written.
pub fn
|
(val: u64, writer: &mut dyn Write) -> Result<u32,Error> {
let storage = SMFWriter::vtime_to_vec(val);
writer.write_all(&storage[..])?;
Ok(storage.len() as u32)
}
fn start_track_header(&self, vec: &mut Vec<u8>) {
vec.push(0x4D);
vec.push(0x54);
vec.push(0x72);
vec.push(0x6B);
// reserve space for track len
vec.push(0);
vec.push(0);
vec.push(0);
vec.push(0);
}
fn write_event(&self, vec: &mut Vec<u8>, event: &Event, length: &mut u32, saw_eot: &mut bool) {
match event {
&Event::Midi(ref midi) => {
vec.extend(midi.data.iter());
*length += midi.data.len() as u32;
}
&Event::Meta(ref meta) => {
vec.push(0xff); // indicate we're writing a meta event
vec.push(meta.command as u8);
// +2 on next line for the 0xff and the command byte we just wrote
*length += SMFWriter::write_vtime(meta.length,vec).unwrap() + 2;
vec.extend(meta.data.iter());
*length += meta.data.len() as u32;
if meta.command == MetaCommand::EndOfTrack {
*saw_eot = true;
}
}
}
}
fn finish_track_write(&self, vec: &mut Vec<u8>, length: &mut u32, saw_eot: bool) {
if!saw_eot {
// no end of track marker in passed data, add one
*length += SMFWriter::write_vtime(0,vec).unwrap();
vec.push(0xff); // indicate we're writing a meta event
vec.push(MetaCommand::EndOfTrack as u8);
*length += SMFWriter::write_vtime(0,vec).unwrap() + 2; // write length of meta command: 0
}
// write in the length in the space we reserved
for i in 0..4 {
let lbyte = (*length & 0xFF) as u8;
// 7-i because smf is big endian and we want to put this in bytes 4-7
vec[7-i] = lbyte;
*length = (*length)>>8;
}
}
/// Add any sequence of AbsoluteEvents as a track to this writer
pub fn add_track<'a,I>(&mut self, track: I) where I: Iterator<Item=&'a AbsoluteEvent> {
self.add_track_with_name(track,None)
}
/// Add any sequence of AbsoluteEvents as a track to this writer. A meta event with the given name will
/// be added at the start of the track
pub fn add_track_with_name<'a,I>(&mut self, track: I, name: Option<String>) where I: Iterator<Item=&'a AbsoluteEvent> {
let mut vec = Vec::new();
self.start_track_header(&mut vec);
let mut length = 0;
let mut cur_time: u64 = 0;
let mut saw_eot = false;
match name {
Some(n) => {
let namemeta = Event::Meta(MetaEvent::sequence_or_track_name(n));
length += SMFWriter::write_vtime(0,&mut vec).unwrap();
self.write_event(&mut vec, &namemeta, &mut length, &mut saw_eot);
}
None => {}
}
for ev in track {
let vtime = ev.get_time() - cur_time;
cur_time = vtime;
length += SMFWriter::write_vtime(vtime as u64,&mut vec).unwrap(); // TODO: Handle error
self.write_event(&mut vec, ev.get_event(), &mut length, &mut saw_eot);
}
self.finish_track_write(&mut vec, &mut length, saw_eot);
self.tracks.push(vec);
}
// actual writing stuff below
fn write_header(&self, writer: &mut dyn Write) -> Result<(),Error> {
writer.write_all(&[0x4D,0x54,0x68,0x64])?;
writer.write_u32::<BigEndian>(6)?;
writer.write_u16::<BigEndian>(self.format)?;
writer.write_u16::<BigEndian>(self.tracks.len() as u16)?;
writer.write_i16::<BigEndian>(self.ticks)?;
Ok(())
}
/// Write out all the tracks that have been added to this
/// SMFWriter to the passed writer
pub fn write_all(self, writer: &mut dyn Write) -> Result<(),Error> {
self.write_header(writer)?;
for track in self.tracks.into_iter() {
writer.write_all(&track[..])?;
}
Ok(())
}
/// Write out the result of the tracks that have been added to a
/// file.
/// Warning: This will overwrite an existing file
pub fn write_to_file(self, path: &Path) -> Result<(),Error> {
let mut file = OpenOptions::new().write(true).truncate(true).create(true).open(path)?;
self.write_all(&mut file)
}
}
#[test]
fn vwrite() {
let mut vec1 = Vec::new();
SMFWriter::write_vtime(127,&mut vec1).unwrap();
assert!(vec1[0] == 0x7f);
vec1.clear();
SMFWriter::write_vtime(255,&mut vec1).unwrap();
assert!(vec1[0] == 0x81);
assert!(vec1[1] == 0x7f);
vec1.clear();
SMFWriter::write_vtime(32768,&mut vec1).unwrap();
assert!(vec1[0] == 0x82);
assert!(vec1[1] == 0x80);
assert!(vec1[2] == 0x00);
}
|
write_vtime
|
identifier_name
|
workletglobalscope.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use devtools_traits::ScriptToDevtoolsControlMsg;
use dom::bindings::inheritance::Castable;
use dom::bindings::root::DomRoot;
use dom::globalscope::GlobalScope;
use dom::paintworkletglobalscope::PaintWorkletGlobalScope;
use dom::paintworkletglobalscope::PaintWorkletTask;
use dom::testworkletglobalscope::TestWorkletGlobalScope;
use dom::testworkletglobalscope::TestWorkletTask;
use dom::worklet::WorkletExecutor;
use dom_struct::dom_struct;
use ipc_channel::ipc;
use ipc_channel::ipc::IpcSender;
use js::jsapi::JSContext;
use js::jsval::UndefinedValue;
use js::rust::Runtime;
use msg::constellation_msg::PipelineId;
use net_traits::ResourceThreads;
use net_traits::image_cache::ImageCache;
use profile_traits::mem;
use profile_traits::time;
use script_thread::MainThreadScriptMsg;
use script_traits::{Painter, ScriptMsg};
use script_traits::{ScriptToConstellationChan, TimerSchedulerMsg};
use servo_atoms::Atom;
use servo_channel::Sender;
use servo_url::ImmutableOrigin;
use servo_url::MutableOrigin;
use servo_url::ServoUrl;
use std::sync::Arc;
#[dom_struct]
/// <https://drafts.css-houdini.org/worklets/#workletglobalscope>
pub struct WorkletGlobalScope {
/// The global for this worklet.
globalscope: GlobalScope,
/// The base URL for this worklet.
base_url: ServoUrl,
/// Sender back to the script thread
#[ignore_malloc_size_of = "channels are hard"]
to_script_thread_sender: Sender<MainThreadScriptMsg>,
/// Worklet task executor
executor: WorkletExecutor,
}
impl WorkletGlobalScope {
/// Create a new stack-allocated `WorkletGlobalScope`.
pub fn new_inherited(
pipeline_id: PipelineId,
base_url: ServoUrl,
executor: WorkletExecutor,
init: &WorkletGlobalScopeInit,
) -> Self {
// Any timer events fired on this global are ignored.
let (timer_event_chan, _) = ipc::channel().unwrap();
let script_to_constellation_chan = ScriptToConstellationChan {
sender: init.to_constellation_sender.clone(),
pipeline_id,
};
Self {
globalscope: GlobalScope::new_inherited(
pipeline_id,
init.devtools_chan.clone(),
init.mem_profiler_chan.clone(),
init.time_profiler_chan.clone(),
script_to_constellation_chan,
init.scheduler_chan.clone(),
init.resource_threads.clone(),
timer_event_chan,
MutableOrigin::new(ImmutableOrigin::new_opaque()),
Default::default(),
),
base_url,
to_script_thread_sender: init.to_script_thread_sender.clone(),
executor,
}
}
/// Get the JS context.
pub fn get_cx(&self) -> *mut JSContext {
self.globalscope.get_cx()
}
/// Evaluate a JS script in this global.
pub fn evaluate_js(&self, script: &str) -> bool {
|
.evaluate_js_on_global_with_result(&*script, rval.handle_mut())
}
/// Register a paint worklet to the script thread.
pub fn register_paint_worklet(&self, name: Atom, properties: Vec<Atom>, painter: Box<Painter>) {
self.to_script_thread_sender
.send(MainThreadScriptMsg::RegisterPaintWorklet {
pipeline_id: self.globalscope.pipeline_id(),
name,
properties,
painter,
}).expect("Worklet thread outlived script thread.");
}
/// The base URL of this global.
pub fn base_url(&self) -> ServoUrl {
self.base_url.clone()
}
/// The worklet executor.
pub fn executor(&self) -> WorkletExecutor {
self.executor.clone()
}
/// Perform a worklet task
pub fn perform_a_worklet_task(&self, task: WorkletTask) {
match task {
WorkletTask::Test(task) => match self.downcast::<TestWorkletGlobalScope>() {
Some(global) => global.perform_a_worklet_task(task),
None => warn!("This is not a test worklet."),
},
WorkletTask::Paint(task) => match self.downcast::<PaintWorkletGlobalScope>() {
Some(global) => global.perform_a_worklet_task(task),
None => warn!("This is not a paint worklet."),
},
}
}
}
/// Resources required by workletglobalscopes
#[derive(Clone)]
pub struct WorkletGlobalScopeInit {
/// Channel to the main script thread
pub to_script_thread_sender: Sender<MainThreadScriptMsg>,
/// Channel to a resource thread
pub resource_threads: ResourceThreads,
/// Channel to the memory profiler
pub mem_profiler_chan: mem::ProfilerChan,
/// Channel to the time profiler
pub time_profiler_chan: time::ProfilerChan,
/// Channel to devtools
pub devtools_chan: Option<IpcSender<ScriptToDevtoolsControlMsg>>,
/// Messages to send to constellation
pub to_constellation_sender: IpcSender<(PipelineId, ScriptMsg)>,
/// Message to send to the scheduler
pub scheduler_chan: IpcSender<TimerSchedulerMsg>,
/// The image cache
pub image_cache: Arc<ImageCache>,
}
/// <https://drafts.css-houdini.org/worklets/#worklet-global-scope-type>
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf)]
pub enum WorkletGlobalScopeType {
/// A servo-specific testing worklet
Test,
/// A paint worklet
Paint,
}
impl WorkletGlobalScopeType {
/// Create a new heap-allocated `WorkletGlobalScope`.
pub fn new(
&self,
runtime: &Runtime,
pipeline_id: PipelineId,
base_url: ServoUrl,
executor: WorkletExecutor,
init: &WorkletGlobalScopeInit,
) -> DomRoot<WorkletGlobalScope> {
match *self {
WorkletGlobalScopeType::Test => DomRoot::upcast(TestWorkletGlobalScope::new(
runtime,
pipeline_id,
base_url,
executor,
init,
)),
WorkletGlobalScopeType::Paint => DomRoot::upcast(PaintWorkletGlobalScope::new(
runtime,
pipeline_id,
base_url,
executor,
init,
)),
}
}
}
/// A task which can be performed in the context of a worklet global.
pub enum WorkletTask {
Test(TestWorkletTask),
Paint(PaintWorkletTask),
}
|
debug!("Evaluating Dom.");
rooted!(in (self.globalscope.get_cx()) let mut rval = UndefinedValue());
self.globalscope
|
random_line_split
|
workletglobalscope.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use devtools_traits::ScriptToDevtoolsControlMsg;
use dom::bindings::inheritance::Castable;
use dom::bindings::root::DomRoot;
use dom::globalscope::GlobalScope;
use dom::paintworkletglobalscope::PaintWorkletGlobalScope;
use dom::paintworkletglobalscope::PaintWorkletTask;
use dom::testworkletglobalscope::TestWorkletGlobalScope;
use dom::testworkletglobalscope::TestWorkletTask;
use dom::worklet::WorkletExecutor;
use dom_struct::dom_struct;
use ipc_channel::ipc;
use ipc_channel::ipc::IpcSender;
use js::jsapi::JSContext;
use js::jsval::UndefinedValue;
use js::rust::Runtime;
use msg::constellation_msg::PipelineId;
use net_traits::ResourceThreads;
use net_traits::image_cache::ImageCache;
use profile_traits::mem;
use profile_traits::time;
use script_thread::MainThreadScriptMsg;
use script_traits::{Painter, ScriptMsg};
use script_traits::{ScriptToConstellationChan, TimerSchedulerMsg};
use servo_atoms::Atom;
use servo_channel::Sender;
use servo_url::ImmutableOrigin;
use servo_url::MutableOrigin;
use servo_url::ServoUrl;
use std::sync::Arc;
#[dom_struct]
/// <https://drafts.css-houdini.org/worklets/#workletglobalscope>
pub struct WorkletGlobalScope {
/// The global for this worklet.
globalscope: GlobalScope,
/// The base URL for this worklet.
base_url: ServoUrl,
/// Sender back to the script thread
#[ignore_malloc_size_of = "channels are hard"]
to_script_thread_sender: Sender<MainThreadScriptMsg>,
/// Worklet task executor
executor: WorkletExecutor,
}
impl WorkletGlobalScope {
/// Create a new stack-allocated `WorkletGlobalScope`.
pub fn new_inherited(
pipeline_id: PipelineId,
base_url: ServoUrl,
executor: WorkletExecutor,
init: &WorkletGlobalScopeInit,
) -> Self {
// Any timer events fired on this global are ignored.
let (timer_event_chan, _) = ipc::channel().unwrap();
let script_to_constellation_chan = ScriptToConstellationChan {
sender: init.to_constellation_sender.clone(),
pipeline_id,
};
Self {
globalscope: GlobalScope::new_inherited(
pipeline_id,
init.devtools_chan.clone(),
init.mem_profiler_chan.clone(),
init.time_profiler_chan.clone(),
script_to_constellation_chan,
init.scheduler_chan.clone(),
init.resource_threads.clone(),
timer_event_chan,
MutableOrigin::new(ImmutableOrigin::new_opaque()),
Default::default(),
),
base_url,
to_script_thread_sender: init.to_script_thread_sender.clone(),
executor,
}
}
/// Get the JS context.
pub fn get_cx(&self) -> *mut JSContext {
self.globalscope.get_cx()
}
/// Evaluate a JS script in this global.
pub fn evaluate_js(&self, script: &str) -> bool {
debug!("Evaluating Dom.");
rooted!(in (self.globalscope.get_cx()) let mut rval = UndefinedValue());
self.globalscope
.evaluate_js_on_global_with_result(&*script, rval.handle_mut())
}
/// Register a paint worklet to the script thread.
pub fn register_paint_worklet(&self, name: Atom, properties: Vec<Atom>, painter: Box<Painter>) {
self.to_script_thread_sender
.send(MainThreadScriptMsg::RegisterPaintWorklet {
pipeline_id: self.globalscope.pipeline_id(),
name,
properties,
painter,
}).expect("Worklet thread outlived script thread.");
}
/// The base URL of this global.
pub fn
|
(&self) -> ServoUrl {
self.base_url.clone()
}
/// The worklet executor.
pub fn executor(&self) -> WorkletExecutor {
self.executor.clone()
}
/// Perform a worklet task
pub fn perform_a_worklet_task(&self, task: WorkletTask) {
match task {
WorkletTask::Test(task) => match self.downcast::<TestWorkletGlobalScope>() {
Some(global) => global.perform_a_worklet_task(task),
None => warn!("This is not a test worklet."),
},
WorkletTask::Paint(task) => match self.downcast::<PaintWorkletGlobalScope>() {
Some(global) => global.perform_a_worklet_task(task),
None => warn!("This is not a paint worklet."),
},
}
}
}
/// Resources required by workletglobalscopes
#[derive(Clone)]
pub struct WorkletGlobalScopeInit {
/// Channel to the main script thread
pub to_script_thread_sender: Sender<MainThreadScriptMsg>,
/// Channel to a resource thread
pub resource_threads: ResourceThreads,
/// Channel to the memory profiler
pub mem_profiler_chan: mem::ProfilerChan,
/// Channel to the time profiler
pub time_profiler_chan: time::ProfilerChan,
/// Channel to devtools
pub devtools_chan: Option<IpcSender<ScriptToDevtoolsControlMsg>>,
/// Messages to send to constellation
pub to_constellation_sender: IpcSender<(PipelineId, ScriptMsg)>,
/// Message to send to the scheduler
pub scheduler_chan: IpcSender<TimerSchedulerMsg>,
/// The image cache
pub image_cache: Arc<ImageCache>,
}
/// <https://drafts.css-houdini.org/worklets/#worklet-global-scope-type>
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf)]
pub enum WorkletGlobalScopeType {
/// A servo-specific testing worklet
Test,
/// A paint worklet
Paint,
}
impl WorkletGlobalScopeType {
/// Create a new heap-allocated `WorkletGlobalScope`.
pub fn new(
&self,
runtime: &Runtime,
pipeline_id: PipelineId,
base_url: ServoUrl,
executor: WorkletExecutor,
init: &WorkletGlobalScopeInit,
) -> DomRoot<WorkletGlobalScope> {
match *self {
WorkletGlobalScopeType::Test => DomRoot::upcast(TestWorkletGlobalScope::new(
runtime,
pipeline_id,
base_url,
executor,
init,
)),
WorkletGlobalScopeType::Paint => DomRoot::upcast(PaintWorkletGlobalScope::new(
runtime,
pipeline_id,
base_url,
executor,
init,
)),
}
}
}
/// A task which can be performed in the context of a worklet global.
pub enum WorkletTask {
Test(TestWorkletTask),
Paint(PaintWorkletTask),
}
|
base_url
|
identifier_name
|
lib.rs
|
#![feature(type_macros)]
//! # Goal
//!
//! We want to define components, store them in an efficient manner and
//! make them accessible to systems.
//!
//! We want to define events along with their types and make them available
//! to systems.
//!
//! We want to define systems that operate on lists of components, are
//! triggered by other systems through events.
//!
//! # Implementation
//!
//! For each component type there will be a list that is a tuple of an
//! entity ID and the component values. There will also be a map from
//! entity IDs to component list indexes.
//!
//! A system will consist of state, iterators over components its subscribed
//! to and any number of functions that are triggered by events.
//!
//! # Syntax
//!
//! ```
//! component! { Physics, body: physics.RigidBody, physics_id: physics.ID }
//!
//! // event! { GameStarted } // This one is implicitly defined
//! event! { PhysicsTick, dt: u64 }
//! event! { Bump, e1: EntityID, e2: EntityID }
//!
//! system! { PhysicsSystem,
//!
//! state! { world: physics.World }
//!
//! on! { GameStarted, {
//! state.world = physics.World::new(event.name);
//! state.world.on_collision = |e1, e2| {
//! unwrap_entity = |e| { e.user_data.downcast_ref<EntityID>() }
//! trigger! { Bump, unwrap_entity(e1), unwrap_entity(e2) }
//! };
//! }}
//!
//! on! { PhysicsTick, {
//! state.world.step(event.dt);
//! }}
//!
//! component_added! { Physics, {
//! let id = state.world.add_body(component.body);
//! component.physics_id = id;
//! }}
//!
//! component_removed! { Physics, {
//! state.world.remove_body(component.physics_id);
//! }}
//!
//! }
//!
//! system! { BumpSystem, {
//! on! { Bump, {
//! println!("Entity {:?} bumped into entity {:?}!", e1, e2);
//! }}
//! }}
//! ```
#[macro_use]
extern crate lazy_static;
extern crate shared_mutex;
extern crate uuid;
#[macro_use]
pub mod helpers;
#[macro_use]
pub mod components;
pub mod entities;
#[macro_use]
pub mod events;
#[macro_use]
pub mod systems;
use std::thread;
pub use std::time::{ Duration, Instant };
event!{ tick, step: super::Duration }
pub fn
|
(ticks_per_second: u32) {
let events_thread = ticker(ticks_per_second, true);
let _ = events_thread.join();
}
pub fn ticker(ticks_per_second: u32, sleep: bool) -> thread::JoinHandle<()> {
let step = Duration::from_secs(1) / ticks_per_second;
let mut last_tick = Instant::now();
thread::spawn(move || {
loop {
let current_time = Instant::now();
let next_tick = last_tick + step;
if next_tick > current_time {
if sleep {
thread::sleep(Duration::from_millis(1));
}
} else {
tick::trigger(step);
events::next_tick();
last_tick = Instant::now();
events::run_events();
}
}
})
}
|
run
|
identifier_name
|
lib.rs
|
#![feature(type_macros)]
//! # Goal
//!
//! We want to define components, store them in an efficient manner and
//! make them accessible to systems.
//!
//! We want to define events along with their types and make them available
//! to systems.
//!
//! We want to define systems that operate on lists of components, are
//! triggered by other systems through events.
//!
//! # Implementation
//!
//! For each component type there will be a list that is a tuple of an
//! entity ID and the component values. There will also be a map from
//! entity IDs to component list indexes.
//!
//! A system will consist of state, iterators over components its subscribed
//! to and any number of functions that are triggered by events.
//!
//! # Syntax
//!
//! ```
//! component! { Physics, body: physics.RigidBody, physics_id: physics.ID }
//!
//! // event! { GameStarted } // This one is implicitly defined
//! event! { PhysicsTick, dt: u64 }
//! event! { Bump, e1: EntityID, e2: EntityID }
//!
//! system! { PhysicsSystem,
//!
//! state! { world: physics.World }
//!
//! on! { GameStarted, {
//! state.world = physics.World::new(event.name);
//! state.world.on_collision = |e1, e2| {
//! unwrap_entity = |e| { e.user_data.downcast_ref<EntityID>() }
//! trigger! { Bump, unwrap_entity(e1), unwrap_entity(e2) }
//! };
//! }}
//!
//! on! { PhysicsTick, {
//! state.world.step(event.dt);
//! }}
//!
//! component_added! { Physics, {
//! let id = state.world.add_body(component.body);
//! component.physics_id = id;
//! }}
//!
//! component_removed! { Physics, {
//! state.world.remove_body(component.physics_id);
//! }}
//!
//! }
//!
//! system! { BumpSystem, {
//! on! { Bump, {
//! println!("Entity {:?} bumped into entity {:?}!", e1, e2);
//! }}
//! }}
//! ```
#[macro_use]
extern crate lazy_static;
extern crate shared_mutex;
extern crate uuid;
#[macro_use]
pub mod helpers;
#[macro_use]
pub mod components;
pub mod entities;
#[macro_use]
pub mod events;
#[macro_use]
pub mod systems;
use std::thread;
pub use std::time::{ Duration, Instant };
event!{ tick, step: super::Duration }
pub fn run(ticks_per_second: u32) {
let events_thread = ticker(ticks_per_second, true);
let _ = events_thread.join();
}
pub fn ticker(ticks_per_second: u32, sleep: bool) -> thread::JoinHandle<()> {
let step = Duration::from_secs(1) / ticks_per_second;
let mut last_tick = Instant::now();
thread::spawn(move || {
loop {
let current_time = Instant::now();
let next_tick = last_tick + step;
if next_tick > current_time
|
else {
tick::trigger(step);
events::next_tick();
last_tick = Instant::now();
events::run_events();
}
}
})
}
|
{
if sleep {
thread::sleep(Duration::from_millis(1));
}
}
|
conditional_block
|
lib.rs
|
#![feature(type_macros)]
//! # Goal
//!
//! We want to define components, store them in an efficient manner and
//! make them accessible to systems.
//!
//! We want to define events along with their types and make them available
//! to systems.
//!
//! We want to define systems that operate on lists of components, are
//! triggered by other systems through events.
//!
//! # Implementation
//!
//! For each component type there will be a list that is a tuple of an
//! entity ID and the component values. There will also be a map from
//! entity IDs to component list indexes.
//!
//! A system will consist of state, iterators over components its subscribed
//! to and any number of functions that are triggered by events.
//!
//! # Syntax
//!
//! ```
//! component! { Physics, body: physics.RigidBody, physics_id: physics.ID }
//!
//! // event! { GameStarted } // This one is implicitly defined
//! event! { PhysicsTick, dt: u64 }
//! event! { Bump, e1: EntityID, e2: EntityID }
//!
//! system! { PhysicsSystem,
//!
//! state! { world: physics.World }
//!
//! on! { GameStarted, {
//! state.world = physics.World::new(event.name);
//! state.world.on_collision = |e1, e2| {
//! unwrap_entity = |e| { e.user_data.downcast_ref<EntityID>() }
//! trigger! { Bump, unwrap_entity(e1), unwrap_entity(e2) }
//! };
//! }}
//!
//! on! { PhysicsTick, {
//! state.world.step(event.dt);
//! }}
//!
//! component_added! { Physics, {
//! let id = state.world.add_body(component.body);
//! component.physics_id = id;
//! }}
//!
//! component_removed! { Physics, {
//! state.world.remove_body(component.physics_id);
//! }}
//!
//! }
//!
//! system! { BumpSystem, {
//! on! { Bump, {
//! println!("Entity {:?} bumped into entity {:?}!", e1, e2);
//! }}
//! }}
//! ```
#[macro_use]
extern crate lazy_static;
extern crate shared_mutex;
extern crate uuid;
#[macro_use]
pub mod helpers;
#[macro_use]
pub mod components;
pub mod entities;
#[macro_use]
pub mod events;
#[macro_use]
pub mod systems;
use std::thread;
pub use std::time::{ Duration, Instant };
event!{ tick, step: super::Duration }
pub fn run(ticks_per_second: u32) {
let events_thread = ticker(ticks_per_second, true);
let _ = events_thread.join();
}
pub fn ticker(ticks_per_second: u32, sleep: bool) -> thread::JoinHandle<()>
|
})
}
|
{
let step = Duration::from_secs(1) / ticks_per_second;
let mut last_tick = Instant::now();
thread::spawn(move || {
loop {
let current_time = Instant::now();
let next_tick = last_tick + step;
if next_tick > current_time {
if sleep {
thread::sleep(Duration::from_millis(1));
}
} else {
tick::trigger(step);
events::next_tick();
last_tick = Instant::now();
events::run_events();
}
}
|
identifier_body
|
lib.rs
|
#![feature(type_macros)]
//! # Goal
//!
|
//! to systems.
//!
//! We want to define systems that operate on lists of components, are
//! triggered by other systems through events.
//!
//! # Implementation
//!
//! For each component type there will be a list that is a tuple of an
//! entity ID and the component values. There will also be a map from
//! entity IDs to component list indexes.
//!
//! A system will consist of state, iterators over components its subscribed
//! to and any number of functions that are triggered by events.
//!
//! # Syntax
//!
//! ```
//! component! { Physics, body: physics.RigidBody, physics_id: physics.ID }
//!
//! // event! { GameStarted } // This one is implicitly defined
//! event! { PhysicsTick, dt: u64 }
//! event! { Bump, e1: EntityID, e2: EntityID }
//!
//! system! { PhysicsSystem,
//!
//! state! { world: physics.World }
//!
//! on! { GameStarted, {
//! state.world = physics.World::new(event.name);
//! state.world.on_collision = |e1, e2| {
//! unwrap_entity = |e| { e.user_data.downcast_ref<EntityID>() }
//! trigger! { Bump, unwrap_entity(e1), unwrap_entity(e2) }
//! };
//! }}
//!
//! on! { PhysicsTick, {
//! state.world.step(event.dt);
//! }}
//!
//! component_added! { Physics, {
//! let id = state.world.add_body(component.body);
//! component.physics_id = id;
//! }}
//!
//! component_removed! { Physics, {
//! state.world.remove_body(component.physics_id);
//! }}
//!
//! }
//!
//! system! { BumpSystem, {
//! on! { Bump, {
//! println!("Entity {:?} bumped into entity {:?}!", e1, e2);
//! }}
//! }}
//! ```
#[macro_use]
extern crate lazy_static;
extern crate shared_mutex;
extern crate uuid;
#[macro_use]
pub mod helpers;
#[macro_use]
pub mod components;
pub mod entities;
#[macro_use]
pub mod events;
#[macro_use]
pub mod systems;
use std::thread;
pub use std::time::{ Duration, Instant };
event!{ tick, step: super::Duration }
pub fn run(ticks_per_second: u32) {
let events_thread = ticker(ticks_per_second, true);
let _ = events_thread.join();
}
pub fn ticker(ticks_per_second: u32, sleep: bool) -> thread::JoinHandle<()> {
let step = Duration::from_secs(1) / ticks_per_second;
let mut last_tick = Instant::now();
thread::spawn(move || {
loop {
let current_time = Instant::now();
let next_tick = last_tick + step;
if next_tick > current_time {
if sleep {
thread::sleep(Duration::from_millis(1));
}
} else {
tick::trigger(step);
events::next_tick();
last_tick = Instant::now();
events::run_events();
}
}
})
}
|
//! We want to define components, store them in an efficient manner and
//! make them accessible to systems.
//!
//! We want to define events along with their types and make them available
|
random_line_split
|
encodings.rs
|
// preprocessing
pub const PRG_CONTRADICTORY_OBS: &str = include_str!("encodings/contradictory_obs.lp");
pub const PRG_GUESS_INPUTS: &str = include_str!("encodings/guess_inputs.lp");
// minimal inconsistent cores
pub const PRG_MICS: &str = include_str!("encodings/mics.lp");
// basic sign consistency
pub const PRG_SIGN_CONS: &str = include_str!("encodings/sign_cons.lp");
// additional constraints
pub const PRG_BWD_PROP: &str = include_str!("encodings/bwd_prop.lp");
pub const PRG_FWD_PROP: &str = include_str!("encodings/fwd_prop.lp");
pub const PRG_FOUNDEDNESS: &str = include_str!("encodings/foundedness.lp");
pub const PRG_ELEM_PATH: &str = include_str!("encodings/elem_path.lp");
pub const PRG_ONE_STATE: &str = include_str!("encodings/one_state.lp");
pub const PRG_KEEP_INPUTS: &str = include_str!("encodings/keep_inputs.lp");
pub const PRG_KEEP_OBSERVATIONS: &str = include_str!("encodings/keep_observations.lp");
pub const PRG_ERROR_MEASURE: &str = include_str!("encodings/error_measure.lp");
pub const PRG_MIN_WEIGHTED_ERROR: &str = include_str!("encodings/min_weighted_error.lp");
pub const PRG_PREDICTIONS: &str = include_str!("encodings/predictions.lp");
pub const PRG_PREDICTIONS_DM: &str = include_str!("encodings/predictions_depmat.lp");
// repair operations
pub const PRG_ADD_INFLUENCES: &str = include_str!("encodings/add_influences.lp");
pub const PRG_MIN_ADDED_INFLUENCES: &str = include_str!("encodings/min_added_influences.lp");
pub const PRG_REMOVE_EDGES: &str = include_str!("encodings/remove_edges.lp");
pub const PRG_ADD_EDGES: &str = include_str!("encodings/add_edges.lp");
pub const PRG_FLIP_EDGE_DIRECTIONS: &str = include_str!("encodings/flip_edge_directions.lp");
pub const PRG_MIN_WEIGHTED_REPAIRS: &str = include_str!("encodings/min_weighted_repairs.lp");
pub const PRG_BEST_ONE_EDGE: &str = "
% guess one edge end to add
|
input(E,\"unknown\") :- exp(E).
vertex(\"unknown\").
elabel(\"unknown\", V,1) :- addeddy(V).
:- addeddy(U), addeddy(V),U!=V.";
pub const PRG_BEST_EDGE_START: &str = "
% guess one edge start to add
0{addedge(or(V),X,1); addedge(or(V),X,-1)}1 :- vertex(or(V)), edge_end(X).
% add only one edge!!!
:- addedge(Y1,X,1), addedge(Y2,X,-1).
:- addedge(Y1,X,S), addedge(Y2,X,T), Y1!=Y2.";
pub const PRG_SHOW_ERRORS: &str = "
#show flip_node_sign_Plus_to_0/2.
#show flip_node_sign_Plus_to_Minus/2.
#show flip_node_sign_Minus_to_0/2.
#show flip_node_sign_Minus_to_Plus/2.
#show flip_node_sign_0_to_Plus/2.
#show flip_node_sign_0_to_Minus/2.
#show flip_node_sign_notMinus_to_Minus/2.
#show flip_node_sign_notPlus_to_Plus/2.
";
pub const PRG_SHOW_LABELS: &str = "
#show vlabel(X,or(V),S) : vlabel(X,or(V),S).
";
pub const PRG_SHOW_REPAIRS: &str = "
#show remedge/3.
#show addedge/3.
#show new_influence/3.
";
pub const PRG_SHOW_FLIP: &str = "#show flip/3.";
pub const PRG_SHOW_ADD_EDGE_END: &str = "#show addeddy/1.";
|
0{addeddy(or(V))} :- vertex(or(V)).
% new inputs through repair
|
random_line_split
|
bndstx.rs
|
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn
|
() {
run_test(&Instruction { mnemonic: Mnemonic::BNDSTX, operand1: Some(IndirectScaledIndexed(ESI, ECX, One, Some(OperandSize::Unsized), None)), operand2: Some(Direct(BND3)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 27, 28, 14], OperandSize::Dword)
}
fn bndstx_2() {
run_test(&Instruction { mnemonic: Mnemonic::BNDSTX, operand1: Some(IndirectScaledIndexed(RDI, RBX, One, Some(OperandSize::Unsized), None)), operand2: Some(Direct(BND3)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 27, 28, 31], OperandSize::Qword)
}
|
bndstx_1
|
identifier_name
|
bndstx.rs
|
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn bndstx_1()
|
fn bndstx_2() {
run_test(&Instruction { mnemonic: Mnemonic::BNDSTX, operand1: Some(IndirectScaledIndexed(RDI, RBX, One, Some(OperandSize::Unsized), None)), operand2: Some(Direct(BND3)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 27, 28, 31], OperandSize::Qword)
}
|
{
run_test(&Instruction { mnemonic: Mnemonic::BNDSTX, operand1: Some(IndirectScaledIndexed(ESI, ECX, One, Some(OperandSize::Unsized), None)), operand2: Some(Direct(BND3)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 27, 28, 14], OperandSize::Dword)
}
|
identifier_body
|
bndstx.rs
|
use ::Reg::*;
use ::RegScale::*;
fn bndstx_1() {
run_test(&Instruction { mnemonic: Mnemonic::BNDSTX, operand1: Some(IndirectScaledIndexed(ESI, ECX, One, Some(OperandSize::Unsized), None)), operand2: Some(Direct(BND3)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 27, 28, 14], OperandSize::Dword)
}
fn bndstx_2() {
run_test(&Instruction { mnemonic: Mnemonic::BNDSTX, operand1: Some(IndirectScaledIndexed(RDI, RBX, One, Some(OperandSize::Unsized), None)), operand2: Some(Direct(BND3)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 27, 28, 31], OperandSize::Qword)
}
|
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
|
random_line_split
|
|
not_understood.rs
|
use crate::messages::Message;
use serde_derive::{Deserialize, Serialize};
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct NotUnderstood {
pub path: Vec<String>,
}
impl Message for NotUnderstood {
fn name(&self) -> String {
String::from("NOT_UNDERSTOOD")
}
}
impl PartialEq for NotUnderstood {
fn eq(&self, other: &Self) -> bool {
self.path == other.path
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_name() {
// Arrange
let message = NotUnderstood { path: vec![] };
// Act
|
// Assert
assert_eq!(name, "NOT_UNDERSTOOD");
}
#[test]
fn test_asoutgoing() {
// Arrange
let message = NotUnderstood { path: vec![] };
let message_ref = message.clone();
// Act
let outgoing = message.as_outgoing();
// Assert
assert_eq!(outgoing.result_type, "NOT_UNDERSTOOD");
assert_eq!(outgoing.content, message_ref);
}
}
|
let name = message.name();
|
random_line_split
|
not_understood.rs
|
use crate::messages::Message;
use serde_derive::{Deserialize, Serialize};
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct NotUnderstood {
pub path: Vec<String>,
}
impl Message for NotUnderstood {
fn name(&self) -> String
|
}
impl PartialEq for NotUnderstood {
fn eq(&self, other: &Self) -> bool {
self.path == other.path
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_name() {
// Arrange
let message = NotUnderstood { path: vec![] };
// Act
let name = message.name();
// Assert
assert_eq!(name, "NOT_UNDERSTOOD");
}
#[test]
fn test_asoutgoing() {
// Arrange
let message = NotUnderstood { path: vec![] };
let message_ref = message.clone();
// Act
let outgoing = message.as_outgoing();
// Assert
assert_eq!(outgoing.result_type, "NOT_UNDERSTOOD");
assert_eq!(outgoing.content, message_ref);
}
}
|
{
String::from("NOT_UNDERSTOOD")
}
|
identifier_body
|
not_understood.rs
|
use crate::messages::Message;
use serde_derive::{Deserialize, Serialize};
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct
|
{
pub path: Vec<String>,
}
impl Message for NotUnderstood {
fn name(&self) -> String {
String::from("NOT_UNDERSTOOD")
}
}
impl PartialEq for NotUnderstood {
fn eq(&self, other: &Self) -> bool {
self.path == other.path
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_name() {
// Arrange
let message = NotUnderstood { path: vec![] };
// Act
let name = message.name();
// Assert
assert_eq!(name, "NOT_UNDERSTOOD");
}
#[test]
fn test_asoutgoing() {
// Arrange
let message = NotUnderstood { path: vec![] };
let message_ref = message.clone();
// Act
let outgoing = message.as_outgoing();
// Assert
assert_eq!(outgoing.result_type, "NOT_UNDERSTOOD");
assert_eq!(outgoing.content, message_ref);
}
}
|
NotUnderstood
|
identifier_name
|
config_dump.rs
|
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::cluster::cluster_manager::SharedClusterManager;
use crate::filters::manager::SharedFilterManager;
use crate::endpoint::Endpoint;
use hyper::http::HeaderValue;
use hyper::{Body, Response, StatusCode};
use serde::Serialize;
use std::sync::Arc;
#[derive(Debug, Serialize)]
struct ClusterDump {
name: &'static str,
endpoints: Vec<Endpoint>,
}
#[derive(Debug, Serialize)]
struct ConfigDump {
clusters: Vec<ClusterDump>,
filterchain: FilterChainDump,
}
#[derive(Debug, Serialize)]
struct FilterConfigDump {
name: String,
config: Arc<serde_json::Value>,
}
#[derive(Debug, Serialize)]
struct FilterChainDump {
filters: Vec<FilterConfigDump>,
}
pub(crate) fn handle_request(
cluster_manager: SharedClusterManager,
filter_manager: SharedFilterManager,
) -> Response<Body> {
let mut response = Response::new(Body::empty());
match create_config_dump_json(cluster_manager, filter_manager) {
Ok(body) =>
|
Err(err) => {
*response.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;
*response.body_mut() = Body::from(format!("failed to create config dump: {err}"));
}
}
response
}
fn create_config_dump_json(
cluster_manager: SharedClusterManager,
filter_manager: SharedFilterManager,
) -> Result<String, serde_json::Error> {
let endpoints = {
let cluster_manager = cluster_manager.read();
// Clone the list of endpoints immediately so that we don't hold on
// to the cluster manager's lock while serializing.
cluster_manager
.get_all_endpoints()
.map(|upstream_endpoints| upstream_endpoints.iter().cloned().collect::<Vec<_>>())
.unwrap_or_default()
};
let filters = {
let filter_manager = filter_manager.read();
// Clone the list of filter configs immediately so that we don't hold on
// to the filter manager's lock while serializing.
filter_manager
.get_filter_chain()
.get_configs()
.map(|(name, config)| FilterConfigDump {
name: name.into(),
config,
})
.collect::<Vec<_>>()
};
let dump = ConfigDump {
clusters: vec![ClusterDump {
name: "default-quilkin-cluster",
endpoints,
}],
filterchain: FilterChainDump { filters },
};
serde_json::to_string_pretty(&dump)
}
#[cfg(test)]
mod tests {
use super::handle_request;
use crate::cluster::cluster_manager::ClusterManager;
use crate::endpoint::{Endpoint, Endpoints};
use crate::filters::{manager::FilterManager, CreateFilterArgs, FilterChain};
use std::sync::Arc;
#[tokio::test]
async fn test_handle_request() {
let cluster_manager = ClusterManager::fixed(
Endpoints::new(vec![Endpoint::new(([127, 0, 0, 1], 8080).into())]).unwrap(),
)
.unwrap();
let debug_config = serde_yaml::from_str("id: hello").unwrap();
let debug_factory = crate::filters::debug::factory();
let debug_filter = debug_factory
.create_filter(CreateFilterArgs::fixed(Some(debug_config)))
.unwrap();
let filter_manager = FilterManager::fixed(Arc::new(
FilterChain::new(vec![(debug_factory.name().into(), debug_filter)]).unwrap(),
));
let mut response = handle_request(cluster_manager, filter_manager);
assert_eq!(response.status(), hyper::StatusCode::OK);
assert_eq!(
response.headers().get("Content-Type").unwrap(),
"application/json"
);
let body = hyper::body::to_bytes(response.body_mut()).await.unwrap();
let body = String::from_utf8(body.into_iter().collect()).unwrap();
let expected = serde_json::json!({
"clusters": [{
"name": "default-quilkin-cluster",
"endpoints": [{
"address": {
"host": "127.0.0.1",
"port": 8080,
},
"metadata": {
"quilkin.dev": {
"tokens": []
}
}
}]
}],
"filterchain": {
"filters": [{
"name": "quilkin.filters.debug.v1alpha1.Debug",
"config":{
"id": "hello"
}
}]
}
});
assert_eq!(
expected,
serde_json::from_str::<serde_json::Value>(body.as_str()).unwrap()
);
}
}
|
{
*response.status_mut() = StatusCode::OK;
response
.headers_mut()
.insert("Content-Type", HeaderValue::from_static("application/json"));
*response.body_mut() = Body::from(body);
}
|
conditional_block
|
config_dump.rs
|
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::cluster::cluster_manager::SharedClusterManager;
use crate::filters::manager::SharedFilterManager;
use crate::endpoint::Endpoint;
use hyper::http::HeaderValue;
use hyper::{Body, Response, StatusCode};
use serde::Serialize;
use std::sync::Arc;
#[derive(Debug, Serialize)]
struct ClusterDump {
name: &'static str,
endpoints: Vec<Endpoint>,
}
#[derive(Debug, Serialize)]
struct ConfigDump {
clusters: Vec<ClusterDump>,
filterchain: FilterChainDump,
}
#[derive(Debug, Serialize)]
struct FilterConfigDump {
name: String,
config: Arc<serde_json::Value>,
}
#[derive(Debug, Serialize)]
struct FilterChainDump {
filters: Vec<FilterConfigDump>,
}
pub(crate) fn handle_request(
cluster_manager: SharedClusterManager,
filter_manager: SharedFilterManager,
) -> Response<Body> {
let mut response = Response::new(Body::empty());
match create_config_dump_json(cluster_manager, filter_manager) {
Ok(body) => {
*response.status_mut() = StatusCode::OK;
response
.headers_mut()
.insert("Content-Type", HeaderValue::from_static("application/json"));
*response.body_mut() = Body::from(body);
}
Err(err) => {
*response.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;
*response.body_mut() = Body::from(format!("failed to create config dump: {err}"));
}
}
response
}
fn
|
(
cluster_manager: SharedClusterManager,
filter_manager: SharedFilterManager,
) -> Result<String, serde_json::Error> {
let endpoints = {
let cluster_manager = cluster_manager.read();
// Clone the list of endpoints immediately so that we don't hold on
// to the cluster manager's lock while serializing.
cluster_manager
.get_all_endpoints()
.map(|upstream_endpoints| upstream_endpoints.iter().cloned().collect::<Vec<_>>())
.unwrap_or_default()
};
let filters = {
let filter_manager = filter_manager.read();
// Clone the list of filter configs immediately so that we don't hold on
// to the filter manager's lock while serializing.
filter_manager
.get_filter_chain()
.get_configs()
.map(|(name, config)| FilterConfigDump {
name: name.into(),
config,
})
.collect::<Vec<_>>()
};
let dump = ConfigDump {
clusters: vec![ClusterDump {
name: "default-quilkin-cluster",
endpoints,
}],
filterchain: FilterChainDump { filters },
};
serde_json::to_string_pretty(&dump)
}
#[cfg(test)]
mod tests {
use super::handle_request;
use crate::cluster::cluster_manager::ClusterManager;
use crate::endpoint::{Endpoint, Endpoints};
use crate::filters::{manager::FilterManager, CreateFilterArgs, FilterChain};
use std::sync::Arc;
#[tokio::test]
async fn test_handle_request() {
let cluster_manager = ClusterManager::fixed(
Endpoints::new(vec![Endpoint::new(([127, 0, 0, 1], 8080).into())]).unwrap(),
)
.unwrap();
let debug_config = serde_yaml::from_str("id: hello").unwrap();
let debug_factory = crate::filters::debug::factory();
let debug_filter = debug_factory
.create_filter(CreateFilterArgs::fixed(Some(debug_config)))
.unwrap();
let filter_manager = FilterManager::fixed(Arc::new(
FilterChain::new(vec![(debug_factory.name().into(), debug_filter)]).unwrap(),
));
let mut response = handle_request(cluster_manager, filter_manager);
assert_eq!(response.status(), hyper::StatusCode::OK);
assert_eq!(
response.headers().get("Content-Type").unwrap(),
"application/json"
);
let body = hyper::body::to_bytes(response.body_mut()).await.unwrap();
let body = String::from_utf8(body.into_iter().collect()).unwrap();
let expected = serde_json::json!({
"clusters": [{
"name": "default-quilkin-cluster",
"endpoints": [{
"address": {
"host": "127.0.0.1",
"port": 8080,
},
"metadata": {
"quilkin.dev": {
"tokens": []
}
}
}]
}],
"filterchain": {
"filters": [{
"name": "quilkin.filters.debug.v1alpha1.Debug",
"config":{
"id": "hello"
}
}]
}
});
assert_eq!(
expected,
serde_json::from_str::<serde_json::Value>(body.as_str()).unwrap()
);
}
}
|
create_config_dump_json
|
identifier_name
|
config_dump.rs
|
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::cluster::cluster_manager::SharedClusterManager;
use crate::filters::manager::SharedFilterManager;
use crate::endpoint::Endpoint;
use hyper::http::HeaderValue;
use hyper::{Body, Response, StatusCode};
use serde::Serialize;
use std::sync::Arc;
#[derive(Debug, Serialize)]
struct ClusterDump {
name: &'static str,
endpoints: Vec<Endpoint>,
}
#[derive(Debug, Serialize)]
struct ConfigDump {
clusters: Vec<ClusterDump>,
filterchain: FilterChainDump,
}
#[derive(Debug, Serialize)]
struct FilterConfigDump {
name: String,
config: Arc<serde_json::Value>,
}
#[derive(Debug, Serialize)]
struct FilterChainDump {
filters: Vec<FilterConfigDump>,
}
pub(crate) fn handle_request(
cluster_manager: SharedClusterManager,
filter_manager: SharedFilterManager,
) -> Response<Body>
|
fn create_config_dump_json(
cluster_manager: SharedClusterManager,
filter_manager: SharedFilterManager,
) -> Result<String, serde_json::Error> {
let endpoints = {
let cluster_manager = cluster_manager.read();
// Clone the list of endpoints immediately so that we don't hold on
// to the cluster manager's lock while serializing.
cluster_manager
.get_all_endpoints()
.map(|upstream_endpoints| upstream_endpoints.iter().cloned().collect::<Vec<_>>())
.unwrap_or_default()
};
let filters = {
let filter_manager = filter_manager.read();
// Clone the list of filter configs immediately so that we don't hold on
// to the filter manager's lock while serializing.
filter_manager
.get_filter_chain()
.get_configs()
.map(|(name, config)| FilterConfigDump {
name: name.into(),
config,
})
.collect::<Vec<_>>()
};
let dump = ConfigDump {
clusters: vec![ClusterDump {
name: "default-quilkin-cluster",
endpoints,
}],
filterchain: FilterChainDump { filters },
};
serde_json::to_string_pretty(&dump)
}
#[cfg(test)]
mod tests {
use super::handle_request;
use crate::cluster::cluster_manager::ClusterManager;
use crate::endpoint::{Endpoint, Endpoints};
use crate::filters::{manager::FilterManager, CreateFilterArgs, FilterChain};
use std::sync::Arc;
#[tokio::test]
async fn test_handle_request() {
let cluster_manager = ClusterManager::fixed(
Endpoints::new(vec![Endpoint::new(([127, 0, 0, 1], 8080).into())]).unwrap(),
)
.unwrap();
let debug_config = serde_yaml::from_str("id: hello").unwrap();
let debug_factory = crate::filters::debug::factory();
let debug_filter = debug_factory
.create_filter(CreateFilterArgs::fixed(Some(debug_config)))
.unwrap();
let filter_manager = FilterManager::fixed(Arc::new(
FilterChain::new(vec![(debug_factory.name().into(), debug_filter)]).unwrap(),
));
let mut response = handle_request(cluster_manager, filter_manager);
assert_eq!(response.status(), hyper::StatusCode::OK);
assert_eq!(
response.headers().get("Content-Type").unwrap(),
"application/json"
);
let body = hyper::body::to_bytes(response.body_mut()).await.unwrap();
let body = String::from_utf8(body.into_iter().collect()).unwrap();
let expected = serde_json::json!({
"clusters": [{
"name": "default-quilkin-cluster",
"endpoints": [{
"address": {
"host": "127.0.0.1",
"port": 8080,
},
"metadata": {
"quilkin.dev": {
"tokens": []
}
}
}]
}],
"filterchain": {
"filters": [{
"name": "quilkin.filters.debug.v1alpha1.Debug",
"config":{
"id": "hello"
}
}]
}
});
assert_eq!(
expected,
serde_json::from_str::<serde_json::Value>(body.as_str()).unwrap()
);
}
}
|
{
let mut response = Response::new(Body::empty());
match create_config_dump_json(cluster_manager, filter_manager) {
Ok(body) => {
*response.status_mut() = StatusCode::OK;
response
.headers_mut()
.insert("Content-Type", HeaderValue::from_static("application/json"));
*response.body_mut() = Body::from(body);
}
Err(err) => {
*response.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;
*response.body_mut() = Body::from(format!("failed to create config dump: {err}"));
}
}
response
}
|
identifier_body
|
config_dump.rs
|
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::cluster::cluster_manager::SharedClusterManager;
use crate::filters::manager::SharedFilterManager;
use crate::endpoint::Endpoint;
use hyper::http::HeaderValue;
use hyper::{Body, Response, StatusCode};
use serde::Serialize;
use std::sync::Arc;
#[derive(Debug, Serialize)]
struct ClusterDump {
name: &'static str,
endpoints: Vec<Endpoint>,
}
#[derive(Debug, Serialize)]
struct ConfigDump {
clusters: Vec<ClusterDump>,
filterchain: FilterChainDump,
}
#[derive(Debug, Serialize)]
struct FilterConfigDump {
name: String,
config: Arc<serde_json::Value>,
}
#[derive(Debug, Serialize)]
struct FilterChainDump {
filters: Vec<FilterConfigDump>,
}
pub(crate) fn handle_request(
cluster_manager: SharedClusterManager,
filter_manager: SharedFilterManager,
) -> Response<Body> {
let mut response = Response::new(Body::empty());
match create_config_dump_json(cluster_manager, filter_manager) {
Ok(body) => {
*response.status_mut() = StatusCode::OK;
response
.headers_mut()
.insert("Content-Type", HeaderValue::from_static("application/json"));
*response.body_mut() = Body::from(body);
}
Err(err) => {
*response.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;
*response.body_mut() = Body::from(format!("failed to create config dump: {err}"));
}
}
response
}
fn create_config_dump_json(
cluster_manager: SharedClusterManager,
filter_manager: SharedFilterManager,
) -> Result<String, serde_json::Error> {
let endpoints = {
let cluster_manager = cluster_manager.read();
// Clone the list of endpoints immediately so that we don't hold on
// to the cluster manager's lock while serializing.
cluster_manager
.get_all_endpoints()
.map(|upstream_endpoints| upstream_endpoints.iter().cloned().collect::<Vec<_>>())
.unwrap_or_default()
};
let filters = {
let filter_manager = filter_manager.read();
// Clone the list of filter configs immediately so that we don't hold on
// to the filter manager's lock while serializing.
filter_manager
.get_filter_chain()
.get_configs()
.map(|(name, config)| FilterConfigDump {
name: name.into(),
config,
})
.collect::<Vec<_>>()
|
clusters: vec![ClusterDump {
name: "default-quilkin-cluster",
endpoints,
}],
filterchain: FilterChainDump { filters },
};
serde_json::to_string_pretty(&dump)
}
#[cfg(test)]
mod tests {
use super::handle_request;
use crate::cluster::cluster_manager::ClusterManager;
use crate::endpoint::{Endpoint, Endpoints};
use crate::filters::{manager::FilterManager, CreateFilterArgs, FilterChain};
use std::sync::Arc;
#[tokio::test]
async fn test_handle_request() {
let cluster_manager = ClusterManager::fixed(
Endpoints::new(vec![Endpoint::new(([127, 0, 0, 1], 8080).into())]).unwrap(),
)
.unwrap();
let debug_config = serde_yaml::from_str("id: hello").unwrap();
let debug_factory = crate::filters::debug::factory();
let debug_filter = debug_factory
.create_filter(CreateFilterArgs::fixed(Some(debug_config)))
.unwrap();
let filter_manager = FilterManager::fixed(Arc::new(
FilterChain::new(vec![(debug_factory.name().into(), debug_filter)]).unwrap(),
));
let mut response = handle_request(cluster_manager, filter_manager);
assert_eq!(response.status(), hyper::StatusCode::OK);
assert_eq!(
response.headers().get("Content-Type").unwrap(),
"application/json"
);
let body = hyper::body::to_bytes(response.body_mut()).await.unwrap();
let body = String::from_utf8(body.into_iter().collect()).unwrap();
let expected = serde_json::json!({
"clusters": [{
"name": "default-quilkin-cluster",
"endpoints": [{
"address": {
"host": "127.0.0.1",
"port": 8080,
},
"metadata": {
"quilkin.dev": {
"tokens": []
}
}
}]
}],
"filterchain": {
"filters": [{
"name": "quilkin.filters.debug.v1alpha1.Debug",
"config":{
"id": "hello"
}
}]
}
});
assert_eq!(
expected,
serde_json::from_str::<serde_json::Value>(body.as_str()).unwrap()
);
}
}
|
};
let dump = ConfigDump {
|
random_line_split
|
class-attributes-1.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pp-exact - Make sure we actually print the attributes
struct
|
{
name: ~str,
}
impl Drop for cat {
#[cat_dropper]
fn drop(&mut self) { error2!("{} landed on hir feet", self. name); }
}
#[cat_maker]
fn cat(name: ~str) -> cat { cat{name: name,} }
pub fn main() { let _kitty = cat(~"Spotty"); }
|
cat
|
identifier_name
|
class-attributes-1.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pp-exact - Make sure we actually print the attributes
|
}
impl Drop for cat {
#[cat_dropper]
fn drop(&mut self) { error2!("{} landed on hir feet", self. name); }
}
#[cat_maker]
fn cat(name: ~str) -> cat { cat{name: name,} }
pub fn main() { let _kitty = cat(~"Spotty"); }
|
struct cat {
name: ~str,
|
random_line_split
|
class-attributes-1.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pp-exact - Make sure we actually print the attributes
struct cat {
name: ~str,
}
impl Drop for cat {
#[cat_dropper]
fn drop(&mut self)
|
}
#[cat_maker]
fn cat(name: ~str) -> cat { cat{name: name,} }
pub fn main() { let _kitty = cat(~"Spotty"); }
|
{ error2!("{} landed on hir feet" , self . name); }
|
identifier_body
|
local_transactions.rs
|
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Local Transactions List.
use linked_hash_map::LinkedHashMap;
use transaction::SignedTransaction;
use error::TransactionError;
use util::{U256, H256};
/// Status of local transaction.
/// Can indicate that the transaction is currently part of the queue (`Pending/Future`)
/// or gives a reason why the transaction was removed.
#[derive(Debug, PartialEq, Clone)]
pub enum Status {
/// The transaction is currently in the transaction queue.
Pending,
/// The transaction is in future part of the queue.
Future,
/// Transaction is already mined.
Mined(SignedTransaction),
/// Transaction is dropped because of limit
Dropped(SignedTransaction),
/// Replaced because of higher gas price of another transaction.
Replaced(SignedTransaction, U256, H256),
/// Transaction was never accepted to the queue.
Rejected(SignedTransaction, TransactionError),
/// Transaction is invalid.
Invalid(SignedTransaction),
}
impl Status {
fn is_current(&self) -> bool {
*self == Status::Pending || *self == Status::Future
}
}
/// Keeps track of local transactions that are in the queue or were mined/dropped recently.
#[derive(Debug)]
pub struct LocalTransactionsList {
max_old: usize,
transactions: LinkedHashMap<H256, Status>,
}
impl Default for LocalTransactionsList {
fn default() -> Self {
Self::new(10)
}
}
impl LocalTransactionsList {
pub fn new(max_old: usize) -> Self {
LocalTransactionsList {
max_old: max_old,
transactions: Default::default(),
}
}
pub fn mark_pending(&mut self, hash: H256) {
self.clear_old();
self.transactions.insert(hash, Status::Pending);
}
pub fn mark_future(&mut self, hash: H256) {
self.transactions.insert(hash, Status::Future);
self.clear_old();
}
pub fn mark_rejected(&mut self, tx: SignedTransaction, err: TransactionError) {
self.transactions.insert(tx.hash(), Status::Rejected(tx, err));
self.clear_old();
}
pub fn mark_replaced(&mut self, tx: SignedTransaction, gas_price: U256, hash: H256) {
self.transactions.insert(tx.hash(), Status::Replaced(tx, gas_price, hash));
self.clear_old();
}
pub fn mark_invalid(&mut self, tx: SignedTransaction) {
self.transactions.insert(tx.hash(), Status::Invalid(tx));
self.clear_old();
}
pub fn mark_dropped(&mut self, tx: SignedTransaction) {
self.transactions.insert(tx.hash(), Status::Dropped(tx));
self.clear_old();
}
pub fn mark_mined(&mut self, tx: SignedTransaction) {
self.transactions.insert(tx.hash(), Status::Mined(tx));
self.clear_old();
}
pub fn contains(&self, hash: &H256) -> bool {
self.transactions.contains_key(hash)
}
pub fn all_transactions(&self) -> &LinkedHashMap<H256, Status> {
&self.transactions
}
fn clear_old(&mut self) {
let number_of_old = self.transactions
.values()
.filter(|status|!status.is_current())
.count();
if self.max_old >= number_of_old {
return;
}
let to_remove = self.transactions
.iter()
.filter(|&(_, status)|!status.is_current())
.map(|(hash, _)| *hash)
.take(number_of_old - self.max_old)
.collect::<Vec<_>>();
for hash in to_remove {
self.transactions.remove(&hash);
}
}
}
#[cfg(test)]
mod tests {
use util::U256;
use ethkey::{Random, Generator};
use transaction::{Action, Transaction, SignedTransaction};
use super::{LocalTransactionsList, Status};
#[test]
fn should_add_transaction_as_pending() {
// given
let mut list = LocalTransactionsList::default();
// when
list.mark_pending(10.into());
list.mark_future(20.into());
// then
assert!(list.contains(&10.into()), "Should contain the transaction.");
assert!(list.contains(&20.into()), "Should contain the transaction.");
let statuses = list.all_transactions().values().cloned().collect::<Vec<Status>>();
assert_eq!(statuses, vec![Status::Pending, Status::Future]);
}
#[test]
fn should_clear_old_transactions() {
// given
let mut list = LocalTransactionsList::new(1);
let tx1 = new_tx(10.into());
let tx1_hash = tx1.hash();
let tx2 = new_tx(50.into());
let tx2_hash = tx2.hash();
list.mark_pending(10.into());
list.mark_invalid(tx1);
list.mark_dropped(tx2);
assert!(list.contains(&tx2_hash));
assert!(!list.contains(&tx1_hash));
assert!(list.contains(&10.into()));
// when
list.mark_future(15.into());
// then
assert!(list.contains(&10.into()));
assert!(list.contains(&15.into()));
}
fn new_tx(nonce: U256) -> SignedTransaction {
let keypair = Random.generate().unwrap();
Transaction {
action: Action::Create,
value: U256::from(100),
data: Default::default(),
gas: U256::from(10),
gas_price: U256::from(1245),
nonce: nonce
}.sign(keypair.secret(), None)
|
}
|
}
|
random_line_split
|
local_transactions.rs
|
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Local Transactions List.
use linked_hash_map::LinkedHashMap;
use transaction::SignedTransaction;
use error::TransactionError;
use util::{U256, H256};
/// Status of local transaction.
/// Can indicate that the transaction is currently part of the queue (`Pending/Future`)
/// or gives a reason why the transaction was removed.
#[derive(Debug, PartialEq, Clone)]
pub enum Status {
/// The transaction is currently in the transaction queue.
Pending,
/// The transaction is in future part of the queue.
Future,
/// Transaction is already mined.
Mined(SignedTransaction),
/// Transaction is dropped because of limit
Dropped(SignedTransaction),
/// Replaced because of higher gas price of another transaction.
Replaced(SignedTransaction, U256, H256),
/// Transaction was never accepted to the queue.
Rejected(SignedTransaction, TransactionError),
/// Transaction is invalid.
Invalid(SignedTransaction),
}
impl Status {
fn is_current(&self) -> bool {
*self == Status::Pending || *self == Status::Future
}
}
/// Keeps track of local transactions that are in the queue or were mined/dropped recently.
#[derive(Debug)]
pub struct LocalTransactionsList {
max_old: usize,
transactions: LinkedHashMap<H256, Status>,
}
impl Default for LocalTransactionsList {
fn default() -> Self {
Self::new(10)
}
}
impl LocalTransactionsList {
pub fn new(max_old: usize) -> Self {
LocalTransactionsList {
max_old: max_old,
transactions: Default::default(),
}
}
pub fn mark_pending(&mut self, hash: H256) {
self.clear_old();
self.transactions.insert(hash, Status::Pending);
}
pub fn mark_future(&mut self, hash: H256) {
self.transactions.insert(hash, Status::Future);
self.clear_old();
}
pub fn mark_rejected(&mut self, tx: SignedTransaction, err: TransactionError) {
self.transactions.insert(tx.hash(), Status::Rejected(tx, err));
self.clear_old();
}
pub fn mark_replaced(&mut self, tx: SignedTransaction, gas_price: U256, hash: H256) {
self.transactions.insert(tx.hash(), Status::Replaced(tx, gas_price, hash));
self.clear_old();
}
pub fn mark_invalid(&mut self, tx: SignedTransaction) {
self.transactions.insert(tx.hash(), Status::Invalid(tx));
self.clear_old();
}
pub fn mark_dropped(&mut self, tx: SignedTransaction) {
self.transactions.insert(tx.hash(), Status::Dropped(tx));
self.clear_old();
}
pub fn mark_mined(&mut self, tx: SignedTransaction) {
self.transactions.insert(tx.hash(), Status::Mined(tx));
self.clear_old();
}
pub fn contains(&self, hash: &H256) -> bool {
self.transactions.contains_key(hash)
}
pub fn all_transactions(&self) -> &LinkedHashMap<H256, Status> {
&self.transactions
}
fn clear_old(&mut self) {
let number_of_old = self.transactions
.values()
.filter(|status|!status.is_current())
.count();
if self.max_old >= number_of_old
|
let to_remove = self.transactions
.iter()
.filter(|&(_, status)|!status.is_current())
.map(|(hash, _)| *hash)
.take(number_of_old - self.max_old)
.collect::<Vec<_>>();
for hash in to_remove {
self.transactions.remove(&hash);
}
}
}
#[cfg(test)]
mod tests {
use util::U256;
use ethkey::{Random, Generator};
use transaction::{Action, Transaction, SignedTransaction};
use super::{LocalTransactionsList, Status};
#[test]
fn should_add_transaction_as_pending() {
// given
let mut list = LocalTransactionsList::default();
// when
list.mark_pending(10.into());
list.mark_future(20.into());
// then
assert!(list.contains(&10.into()), "Should contain the transaction.");
assert!(list.contains(&20.into()), "Should contain the transaction.");
let statuses = list.all_transactions().values().cloned().collect::<Vec<Status>>();
assert_eq!(statuses, vec![Status::Pending, Status::Future]);
}
#[test]
fn should_clear_old_transactions() {
// given
let mut list = LocalTransactionsList::new(1);
let tx1 = new_tx(10.into());
let tx1_hash = tx1.hash();
let tx2 = new_tx(50.into());
let tx2_hash = tx2.hash();
list.mark_pending(10.into());
list.mark_invalid(tx1);
list.mark_dropped(tx2);
assert!(list.contains(&tx2_hash));
assert!(!list.contains(&tx1_hash));
assert!(list.contains(&10.into()));
// when
list.mark_future(15.into());
// then
assert!(list.contains(&10.into()));
assert!(list.contains(&15.into()));
}
fn new_tx(nonce: U256) -> SignedTransaction {
let keypair = Random.generate().unwrap();
Transaction {
action: Action::Create,
value: U256::from(100),
data: Default::default(),
gas: U256::from(10),
gas_price: U256::from(1245),
nonce: nonce
}.sign(keypair.secret(), None)
}
}
|
{
return;
}
|
conditional_block
|
local_transactions.rs
|
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Local Transactions List.
use linked_hash_map::LinkedHashMap;
use transaction::SignedTransaction;
use error::TransactionError;
use util::{U256, H256};
/// Status of local transaction.
/// Can indicate that the transaction is currently part of the queue (`Pending/Future`)
/// or gives a reason why the transaction was removed.
#[derive(Debug, PartialEq, Clone)]
pub enum Status {
/// The transaction is currently in the transaction queue.
Pending,
/// The transaction is in future part of the queue.
Future,
/// Transaction is already mined.
Mined(SignedTransaction),
/// Transaction is dropped because of limit
Dropped(SignedTransaction),
/// Replaced because of higher gas price of another transaction.
Replaced(SignedTransaction, U256, H256),
/// Transaction was never accepted to the queue.
Rejected(SignedTransaction, TransactionError),
/// Transaction is invalid.
Invalid(SignedTransaction),
}
impl Status {
fn is_current(&self) -> bool {
*self == Status::Pending || *self == Status::Future
}
}
/// Keeps track of local transactions that are in the queue or were mined/dropped recently.
#[derive(Debug)]
pub struct LocalTransactionsList {
max_old: usize,
transactions: LinkedHashMap<H256, Status>,
}
impl Default for LocalTransactionsList {
fn default() -> Self {
Self::new(10)
}
}
impl LocalTransactionsList {
pub fn new(max_old: usize) -> Self {
LocalTransactionsList {
max_old: max_old,
transactions: Default::default(),
}
}
pub fn mark_pending(&mut self, hash: H256) {
self.clear_old();
self.transactions.insert(hash, Status::Pending);
}
pub fn mark_future(&mut self, hash: H256) {
self.transactions.insert(hash, Status::Future);
self.clear_old();
}
pub fn mark_rejected(&mut self, tx: SignedTransaction, err: TransactionError) {
self.transactions.insert(tx.hash(), Status::Rejected(tx, err));
self.clear_old();
}
pub fn mark_replaced(&mut self, tx: SignedTransaction, gas_price: U256, hash: H256) {
self.transactions.insert(tx.hash(), Status::Replaced(tx, gas_price, hash));
self.clear_old();
}
pub fn mark_invalid(&mut self, tx: SignedTransaction) {
self.transactions.insert(tx.hash(), Status::Invalid(tx));
self.clear_old();
}
pub fn mark_dropped(&mut self, tx: SignedTransaction)
|
pub fn mark_mined(&mut self, tx: SignedTransaction) {
self.transactions.insert(tx.hash(), Status::Mined(tx));
self.clear_old();
}
pub fn contains(&self, hash: &H256) -> bool {
self.transactions.contains_key(hash)
}
pub fn all_transactions(&self) -> &LinkedHashMap<H256, Status> {
&self.transactions
}
fn clear_old(&mut self) {
let number_of_old = self.transactions
.values()
.filter(|status|!status.is_current())
.count();
if self.max_old >= number_of_old {
return;
}
let to_remove = self.transactions
.iter()
.filter(|&(_, status)|!status.is_current())
.map(|(hash, _)| *hash)
.take(number_of_old - self.max_old)
.collect::<Vec<_>>();
for hash in to_remove {
self.transactions.remove(&hash);
}
}
}
#[cfg(test)]
mod tests {
use util::U256;
use ethkey::{Random, Generator};
use transaction::{Action, Transaction, SignedTransaction};
use super::{LocalTransactionsList, Status};
#[test]
fn should_add_transaction_as_pending() {
// given
let mut list = LocalTransactionsList::default();
// when
list.mark_pending(10.into());
list.mark_future(20.into());
// then
assert!(list.contains(&10.into()), "Should contain the transaction.");
assert!(list.contains(&20.into()), "Should contain the transaction.");
let statuses = list.all_transactions().values().cloned().collect::<Vec<Status>>();
assert_eq!(statuses, vec![Status::Pending, Status::Future]);
}
#[test]
fn should_clear_old_transactions() {
// given
let mut list = LocalTransactionsList::new(1);
let tx1 = new_tx(10.into());
let tx1_hash = tx1.hash();
let tx2 = new_tx(50.into());
let tx2_hash = tx2.hash();
list.mark_pending(10.into());
list.mark_invalid(tx1);
list.mark_dropped(tx2);
assert!(list.contains(&tx2_hash));
assert!(!list.contains(&tx1_hash));
assert!(list.contains(&10.into()));
// when
list.mark_future(15.into());
// then
assert!(list.contains(&10.into()));
assert!(list.contains(&15.into()));
}
fn new_tx(nonce: U256) -> SignedTransaction {
let keypair = Random.generate().unwrap();
Transaction {
action: Action::Create,
value: U256::from(100),
data: Default::default(),
gas: U256::from(10),
gas_price: U256::from(1245),
nonce: nonce
}.sign(keypair.secret(), None)
}
}
|
{
self.transactions.insert(tx.hash(), Status::Dropped(tx));
self.clear_old();
}
|
identifier_body
|
local_transactions.rs
|
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Local Transactions List.
use linked_hash_map::LinkedHashMap;
use transaction::SignedTransaction;
use error::TransactionError;
use util::{U256, H256};
/// Status of local transaction.
/// Can indicate that the transaction is currently part of the queue (`Pending/Future`)
/// or gives a reason why the transaction was removed.
#[derive(Debug, PartialEq, Clone)]
pub enum Status {
/// The transaction is currently in the transaction queue.
Pending,
/// The transaction is in future part of the queue.
Future,
/// Transaction is already mined.
Mined(SignedTransaction),
/// Transaction is dropped because of limit
Dropped(SignedTransaction),
/// Replaced because of higher gas price of another transaction.
Replaced(SignedTransaction, U256, H256),
/// Transaction was never accepted to the queue.
Rejected(SignedTransaction, TransactionError),
/// Transaction is invalid.
Invalid(SignedTransaction),
}
impl Status {
fn
|
(&self) -> bool {
*self == Status::Pending || *self == Status::Future
}
}
/// Keeps track of local transactions that are in the queue or were mined/dropped recently.
#[derive(Debug)]
pub struct LocalTransactionsList {
max_old: usize,
transactions: LinkedHashMap<H256, Status>,
}
impl Default for LocalTransactionsList {
fn default() -> Self {
Self::new(10)
}
}
impl LocalTransactionsList {
pub fn new(max_old: usize) -> Self {
LocalTransactionsList {
max_old: max_old,
transactions: Default::default(),
}
}
pub fn mark_pending(&mut self, hash: H256) {
self.clear_old();
self.transactions.insert(hash, Status::Pending);
}
pub fn mark_future(&mut self, hash: H256) {
self.transactions.insert(hash, Status::Future);
self.clear_old();
}
pub fn mark_rejected(&mut self, tx: SignedTransaction, err: TransactionError) {
self.transactions.insert(tx.hash(), Status::Rejected(tx, err));
self.clear_old();
}
pub fn mark_replaced(&mut self, tx: SignedTransaction, gas_price: U256, hash: H256) {
self.transactions.insert(tx.hash(), Status::Replaced(tx, gas_price, hash));
self.clear_old();
}
pub fn mark_invalid(&mut self, tx: SignedTransaction) {
self.transactions.insert(tx.hash(), Status::Invalid(tx));
self.clear_old();
}
pub fn mark_dropped(&mut self, tx: SignedTransaction) {
self.transactions.insert(tx.hash(), Status::Dropped(tx));
self.clear_old();
}
pub fn mark_mined(&mut self, tx: SignedTransaction) {
self.transactions.insert(tx.hash(), Status::Mined(tx));
self.clear_old();
}
pub fn contains(&self, hash: &H256) -> bool {
self.transactions.contains_key(hash)
}
pub fn all_transactions(&self) -> &LinkedHashMap<H256, Status> {
&self.transactions
}
fn clear_old(&mut self) {
let number_of_old = self.transactions
.values()
.filter(|status|!status.is_current())
.count();
if self.max_old >= number_of_old {
return;
}
let to_remove = self.transactions
.iter()
.filter(|&(_, status)|!status.is_current())
.map(|(hash, _)| *hash)
.take(number_of_old - self.max_old)
.collect::<Vec<_>>();
for hash in to_remove {
self.transactions.remove(&hash);
}
}
}
#[cfg(test)]
mod tests {
use util::U256;
use ethkey::{Random, Generator};
use transaction::{Action, Transaction, SignedTransaction};
use super::{LocalTransactionsList, Status};
#[test]
fn should_add_transaction_as_pending() {
// given
let mut list = LocalTransactionsList::default();
// when
list.mark_pending(10.into());
list.mark_future(20.into());
// then
assert!(list.contains(&10.into()), "Should contain the transaction.");
assert!(list.contains(&20.into()), "Should contain the transaction.");
let statuses = list.all_transactions().values().cloned().collect::<Vec<Status>>();
assert_eq!(statuses, vec![Status::Pending, Status::Future]);
}
#[test]
fn should_clear_old_transactions() {
// given
let mut list = LocalTransactionsList::new(1);
let tx1 = new_tx(10.into());
let tx1_hash = tx1.hash();
let tx2 = new_tx(50.into());
let tx2_hash = tx2.hash();
list.mark_pending(10.into());
list.mark_invalid(tx1);
list.mark_dropped(tx2);
assert!(list.contains(&tx2_hash));
assert!(!list.contains(&tx1_hash));
assert!(list.contains(&10.into()));
// when
list.mark_future(15.into());
// then
assert!(list.contains(&10.into()));
assert!(list.contains(&15.into()));
}
fn new_tx(nonce: U256) -> SignedTransaction {
let keypair = Random.generate().unwrap();
Transaction {
action: Action::Create,
value: U256::from(100),
data: Default::default(),
gas: U256::from(10),
gas_price: U256::from(1245),
nonce: nonce
}.sign(keypair.secret(), None)
}
}
|
is_current
|
identifier_name
|
lib.rs
|
// This is a part of rust-encoding.
// Copyright (c) 2013-2015, Kang Seonghoon.
// See README.md and LICENSE.txt for details.
//! # Encoding 0.3.0-dev
//!
//! Character encoding support for Rust. (also known as `rust-encoding`)
//! It is based on [WHATWG Encoding Standard](http://encoding.spec.whatwg.org/),
//! and also provides an advanced interface for error detection and recovery.
//!
//! *This documentation is for the development version (0.3).
//! Please see the [stable documentation][doc] for 0.2.x versions.*
//!
//! ## Usage
//!
//! Put this in your `Cargo.toml`:
//!
//! ```toml
//! [dependencies]
//! encoding = "0.3"
//! ```
//!
//! Then put this in your crate root:
//!
//! ```rust
//! extern crate encoding;
//! ```
//!
//! ### Data Table
//!
//! By default, Encoding comes with ~480 KB of data table ("indices").
//! This allows Encoding to encode and decode legacy encodings efficiently,
//! but this might not be desirable for some applications.
//!
//! Encoding provides the `no-optimized-legacy-encoding` Cargo feature
//! to reduce the size of encoding tables (to ~185 KB)
//! at the expense of encoding performance (typically 5x to 20x slower).
//! The decoding performance remains identical.
//! **This feature is strongly intended for end users.
//! Do not enable this feature from library crates, ever.**
//!
//! For finer-tuned optimization, see `src/index/gen_index.py` for
//! custom table generation. At the most reduced (and slowest) setting,
//! the minimal size of data table is about 160 KB.
//!
//! ## Overview
//!
//! To encode a string:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, EncoderTrap};
//! use encoding::all::ISO_8859_1;
//!
//! assert_eq!(ISO_8859_1.encode("caf\u{e9}", EncoderTrap::Strict),
//! Ok(vec![99,97,102,233]));
//! ~~~~
//!
//! To encode a string with unrepresentable characters:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, EncoderTrap};
//! use encoding::all::ISO_8859_2;
//!
//! assert!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::Strict).is_err());
//! assert_eq!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::Replace),
//! Ok(vec![65,99,109,101,63]));
//! assert_eq!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::Ignore),
//! Ok(vec![65,99,109,101]));
//! assert_eq!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::NcrEscape),
//! Ok(vec![65,99,109,101,38,35,49,54,57,59]));
//! ~~~~
//!
//! To decode a byte sequence:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, DecoderTrap};
//! use encoding::all::ISO_8859_1;
//!
//! assert_eq!(ISO_8859_1.decode(&[99,97,102,233], DecoderTrap::Strict),
//! Ok("caf\u{e9}".to_string()));
//! ~~~~
//!
//! To decode a byte sequence with invalid sequences:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, DecoderTrap};
//! use encoding::all::ISO_8859_6;
//!
//! assert!(ISO_8859_6.decode(&[65,99,109,101,169], DecoderTrap::Strict).is_err());
//! assert_eq!(ISO_8859_6.decode(&[65,99,109,101,169], DecoderTrap::Replace),
//! Ok("Acme\u{fffd}".to_string()));
//! assert_eq!(ISO_8859_6.decode(&[65,99,109,101,169], DecoderTrap::Ignore),
//! Ok("Acme".to_string()));
//! ~~~~
//!
//! To encode or decode the input into the already allocated buffer:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, EncoderTrap, DecoderTrap};
//! use encoding::all::{ISO_8859_2, ISO_8859_6};
//!
//! let mut bytes = Vec::new();
//! let mut chars = String::new();
//!
//! assert!(ISO_8859_2.encode_to("Acme\u{a9}", EncoderTrap::Ignore, &mut bytes).is_ok());
//! assert!(ISO_8859_6.decode_to(&[65,99,109,101,169], DecoderTrap::Replace, &mut chars).is_ok());
//!
//! assert_eq!(bytes, [65,99,109,101]);
//! assert_eq!(chars, "Acme\u{fffd}");
//! ~~~~
//!
//! A practical example of custom encoder traps:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, ByteWriter, EncoderTrap, DecoderTrap};
//! use encoding::types::RawEncoder;
//! use encoding::all::ASCII;
//!
//! // hexadecimal numeric character reference replacement
//! fn hex_ncr_escape(_encoder: &mut RawEncoder, input: &str, output: &mut ByteWriter) -> bool {
//! let escapes: Vec<String> =
//! input.chars().map(|ch| format!("&#x{:x};", ch as isize)).collect();
//! let escapes = escapes.concat();
//! output.write_bytes(escapes.as_bytes());
//! true
//! }
//! static HEX_NCR_ESCAPE: EncoderTrap = EncoderTrap::Call(hex_ncr_escape);
//!
//! let orig = "Hello, 世界!".to_string();
//! let encoded = ASCII.encode(&orig, HEX_NCR_ESCAPE).unwrap();
//! assert_eq!(ASCII.decode(&encoded, DecoderTrap::Strict),
//! Ok("Hello, 世界!".to_string()));
//! ~~~~
//!
//! Getting the encoding from the string label, as specified in WHATWG Encoding standard:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, DecoderTrap};
//! use encoding::label::encoding_from_whatwg_label;
//! use encoding::all::WINDOWS_949;
//!
//! let euckr = encoding_from_whatwg_label("euc-kr").unwrap();
//! assert_eq!(euckr.name(), "windows-949");
//! assert_eq!(euckr.whatwg_name(), Some("euc-kr")); // for the sake of compatibility
//! let broken = &[0xbf, 0xec, 0xbf, 0xcd, 0xff, 0xbe, 0xd3];
//! assert_eq!(euckr.decode(broken, DecoderTrap::Replace),
//! Ok("\u{c6b0}\u{c640}\u{fffd}\u{c559}".to_string()));
//!
//! // corresponding Encoding native API:
//! assert_eq!(WINDOWS_949.decode(broken, DecoderTrap::Replace),
//! Ok("\u{c6b0}\u{c640}\u{fffd}\u{c559}".to_string()));
//! ~~~~
//!
//! ## Types and Stuffs
//!
//! There are three main entry points to Encoding.
//!
//! **`Encoding`** is a single character encoding.
//! It contains `encode` and `decode` methods for converting `String` to `Vec<u8>` and vice versa.
//! For the error handling, they receive **traps** (`EncoderTrap` and `DecoderTrap` respectively)
//! which replace any error with some string (e.g. `U+FFFD`) or sequence (e.g. `?`).
//! You can also use `EncoderTrap::Strict` and `DecoderTrap::Strict` traps to stop on an error.
//!
//! There are two ways to get `Encoding`:
//!
//! * `encoding::all` has static items for every supported encoding.
//! You should use them when the encoding would not change or only handful of them are required.
//! Combined with link-time optimization, any unused encoding would be discarded from the binary.
//! * `encoding::label` has functions to dynamically get an encoding from given string ("label").
//! They will return a static reference to the encoding,
//! which type is also known as `EncodingRef`.
//! It is useful when a list of required encodings is not available in advance,
//! but it will result in the larger binary and missed optimization opportunities.
//!
//! **`RawEncoder`** is an experimental incremental encoder.
//! At each step of `raw_feed`, it receives a slice of string
//! and emits any encoded bytes to a generic `ByteWriter` (normally `Vec<u8>`).
//! It will stop at the first error if any, and would return a `CodecError` struct in that case.
//! The caller is responsible for calling `raw_finish` at the end of encoding process.
//!
//! **`RawDecoder`** is an experimental incremental decoder.
//! At each step of `raw_feed`, it receives a slice of byte sequence
//! and emits any decoded characters to a generic `StringWriter` (normally `String`).
//! Otherwise it is identical to `RawEncoder`s.
//!
//! One should prefer `Encoding::{encode,decode}` as a primary interface.
//! `RawEncoder` and `RawDecoder` is experimental and can change substantially.
//! See the additional documents on `encoding::types` module for more information on them.
//!
//! ## Supported Encodings
//!
//! Encoding covers all encodings specified by WHATWG Encoding Standard and some more:
//!
//! * 7-bit strict ASCII (`ascii`)
//! * UTF-8 (`utf-8`)
//! * UTF-16 in little endian (`utf-16` or `utf-16le`) and big endian (`utf-16be`)
//! * All single byte encoding in WHATWG Encoding Standard:
//! * IBM code page 866
//! * ISO 8859-{2,3,4,5,6,7,8,10,13,14,15,16}
//! * KOI8-R, KOI8-U
//! * MacRoman (`macintosh`), Macintosh Cyrillic encoding (`x-mac-cyrillic`)
//! * Windows code pages 874, 1250, 1251, 1252 (instead of ISO 8859-1), 1253,
//! 1254 (instead of ISO 8859-9), 1255, 1256, 1257, 1258
//! * All multi byte encodings in WHATWG Encoding Standard:
//! * Windows code page 949 (`euc-kr`, since the strict EUC-KR is hardly used)
//! * EUC-JP and Windows code page 932 (`shift_jis`,
//! since it's the most widespread extension to Shift_JIS)
//! * ISO-2022-JP with asymmetric JIS X 0212 support
//! (Note: this is not yet up to date to the current standard)
//! * GBK
//! * GB 18030
//! * Big5-2003 with HKSCS-2008 extensions
//! * Encodings that were originally specified by WHATWG Encoding Standard:
//! * HZ
//! * ISO 8859-1 (distinct from Windows code page 1252)
//!
//! Parenthesized names refer to the encoding's primary name assigned by WHATWG Encoding Standard.
//!
//! Many legacy character encodings lack the proper specification,
//! and even those that have a specification are highly dependent of the actual implementation.
//! Consequently one should be careful when picking a desired character encoding.
//! The only standards reliable in this regard are WHATWG Encoding Standard and
//! [vendor-provided mappings from the Unicode consortium](http://www.unicode.org/Public/MAPPINGS/).
//! Whenever in doubt, look at the source code and specifications for detailed explanations.
#![cfg_attr(test, feature(test))] // lib stability features as per RFC #507
extern crate encoding_types;
extern crate encoding_index_singlebyte as index_singlebyte;
extern crate encoding_index_korean as index_korean;
extern crate encoding_index_japanese as index_japanese;
extern crate encoding_index_simpchinese as index_simpchinese;
extern crate encoding_index_tradchinese as index_tradchinese;
#[cfg(test)] extern crate test;
pub use self::types::{CodecError, ByteWriter, StringWriter,
RawEncoder, RawDecoder, EncodingRef, Encoding,
EncoderTrapFunc, DecoderTrapFunc, DecoderTrap,
EncoderTrap}; // reexport
use std::borrow::Cow;
#[macro_use] mod util;
#[cfg(test)] #[macro_use] mod testutils;
pub mod types;
/// Codec implementations.
pub mod codec {
pub mod error;
pub mod ascii;
pub mod singlebyte;
pub mod utf_8;
pub mod utf_16;
pub mod korean;
pub mod japanese;
pub mod simpchinese;
pub mod tradchinese;
pub mod whatwg;
}
pub mod all;
pub mod label;
/// Determine the encoding by looking for a Byte Order Mark (BOM)
/// and decoded a single string in memory.
/// Return the result and the used encoding.
pub fn decode(input: &[u8], trap: DecoderTrap, fallback_encoding: EncodingRef)
-> (Result<String, Cow<'static, str>>, EncodingRef) {
|
cfg(test)]
mod tests {
use super::*;
#[test]
fn test_decode() {
fn test_one(input: &[u8], expected_result: &str, expected_encoding: &str) {
let (result, used_encoding) = decode(
input, DecoderTrap::Strict, all::ISO_8859_1 as EncodingRef);
let result = result.unwrap();
assert_eq!(used_encoding.name(), expected_encoding);
assert_eq!(&result[..], expected_result);
}
test_one(&[0xEF, 0xBB, 0xBF, 0xC3, 0xA9], "é", "utf-8");
test_one(&[0xC3, 0xA9], "é", "iso-8859-1");
test_one(&[0xFE, 0xFF, 0x00, 0xE9], "é", "utf-16be");
test_one(&[0x00, 0xE9], "\x00é", "iso-8859-1");
test_one(&[0xFF, 0xFE, 0xE9, 0x00], "é", "utf-16le");
test_one(&[0xE9, 0x00], "é\x00", "iso-8859-1");
}
}
|
use all::{UTF_8, UTF_16LE, UTF_16BE};
if input.starts_with(&[0xEF, 0xBB, 0xBF]) {
(UTF_8.decode(&input[3..], trap), UTF_8 as EncodingRef)
} else if input.starts_with(&[0xFE, 0xFF]) {
(UTF_16BE.decode(&input[2..], trap), UTF_16BE as EncodingRef)
} else if input.starts_with(&[0xFF, 0xFE]) {
(UTF_16LE.decode(&input[2..], trap), UTF_16LE as EncodingRef)
} else {
(fallback_encoding.decode(input, trap), fallback_encoding)
}
}
#[
|
identifier_body
|
lib.rs
|
// This is a part of rust-encoding.
// Copyright (c) 2013-2015, Kang Seonghoon.
// See README.md and LICENSE.txt for details.
//! # Encoding 0.3.0-dev
//!
//! Character encoding support for Rust. (also known as `rust-encoding`)
//! It is based on [WHATWG Encoding Standard](http://encoding.spec.whatwg.org/),
//! and also provides an advanced interface for error detection and recovery.
//!
//! *This documentation is for the development version (0.3).
//! Please see the [stable documentation][doc] for 0.2.x versions.*
//!
//! ## Usage
//!
//! Put this in your `Cargo.toml`:
//!
//! ```toml
//! [dependencies]
//! encoding = "0.3"
//! ```
//!
//! Then put this in your crate root:
//!
//! ```rust
//! extern crate encoding;
//! ```
//!
//! ### Data Table
//!
//! By default, Encoding comes with ~480 KB of data table ("indices").
//! This allows Encoding to encode and decode legacy encodings efficiently,
//! but this might not be desirable for some applications.
//!
//! Encoding provides the `no-optimized-legacy-encoding` Cargo feature
//! to reduce the size of encoding tables (to ~185 KB)
//! at the expense of encoding performance (typically 5x to 20x slower).
//! The decoding performance remains identical.
//! **This feature is strongly intended for end users.
//! Do not enable this feature from library crates, ever.**
//!
//! For finer-tuned optimization, see `src/index/gen_index.py` for
//! custom table generation. At the most reduced (and slowest) setting,
//! the minimal size of data table is about 160 KB.
//!
//! ## Overview
//!
//! To encode a string:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, EncoderTrap};
//! use encoding::all::ISO_8859_1;
//!
//! assert_eq!(ISO_8859_1.encode("caf\u{e9}", EncoderTrap::Strict),
//! Ok(vec![99,97,102,233]));
//! ~~~~
//!
//! To encode a string with unrepresentable characters:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, EncoderTrap};
//! use encoding::all::ISO_8859_2;
//!
//! assert!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::Strict).is_err());
//! assert_eq!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::Replace),
//! Ok(vec![65,99,109,101,63]));
//! assert_eq!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::Ignore),
//! Ok(vec![65,99,109,101]));
//! assert_eq!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::NcrEscape),
//! Ok(vec![65,99,109,101,38,35,49,54,57,59]));
//! ~~~~
//!
//! To decode a byte sequence:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, DecoderTrap};
//! use encoding::all::ISO_8859_1;
//!
//! assert_eq!(ISO_8859_1.decode(&[99,97,102,233], DecoderTrap::Strict),
//! Ok("caf\u{e9}".to_string()));
//! ~~~~
//!
//! To decode a byte sequence with invalid sequences:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, DecoderTrap};
//! use encoding::all::ISO_8859_6;
//!
//! assert!(ISO_8859_6.decode(&[65,99,109,101,169], DecoderTrap::Strict).is_err());
//! assert_eq!(ISO_8859_6.decode(&[65,99,109,101,169], DecoderTrap::Replace),
//! Ok("Acme\u{fffd}".to_string()));
//! assert_eq!(ISO_8859_6.decode(&[65,99,109,101,169], DecoderTrap::Ignore),
//! Ok("Acme".to_string()));
//! ~~~~
//!
//! To encode or decode the input into the already allocated buffer:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, EncoderTrap, DecoderTrap};
//! use encoding::all::{ISO_8859_2, ISO_8859_6};
//!
//! let mut bytes = Vec::new();
//! let mut chars = String::new();
//!
//! assert!(ISO_8859_2.encode_to("Acme\u{a9}", EncoderTrap::Ignore, &mut bytes).is_ok());
//! assert!(ISO_8859_6.decode_to(&[65,99,109,101,169], DecoderTrap::Replace, &mut chars).is_ok());
//!
//! assert_eq!(bytes, [65,99,109,101]);
//! assert_eq!(chars, "Acme\u{fffd}");
//! ~~~~
//!
//! A practical example of custom encoder traps:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, ByteWriter, EncoderTrap, DecoderTrap};
//! use encoding::types::RawEncoder;
//! use encoding::all::ASCII;
//!
//! // hexadecimal numeric character reference replacement
//! fn hex_ncr_escape(_encoder: &mut RawEncoder, input: &str, output: &mut ByteWriter) -> bool {
//! let escapes: Vec<String> =
//! input.chars().map(|ch| format!("&#x{:x};", ch as isize)).collect();
//! let escapes = escapes.concat();
//! output.write_bytes(escapes.as_bytes());
//! true
//! }
//! static HEX_NCR_ESCAPE: EncoderTrap = EncoderTrap::Call(hex_ncr_escape);
//!
//! let orig = "Hello, 世界!".to_string();
//! let encoded = ASCII.encode(&orig, HEX_NCR_ESCAPE).unwrap();
//! assert_eq!(ASCII.decode(&encoded, DecoderTrap::Strict),
//! Ok("Hello, 世界!".to_string()));
//! ~~~~
//!
//! Getting the encoding from the string label, as specified in WHATWG Encoding standard:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, DecoderTrap};
//! use encoding::label::encoding_from_whatwg_label;
//! use encoding::all::WINDOWS_949;
//!
//! let euckr = encoding_from_whatwg_label("euc-kr").unwrap();
//! assert_eq!(euckr.name(), "windows-949");
//! assert_eq!(euckr.whatwg_name(), Some("euc-kr")); // for the sake of compatibility
//! let broken = &[0xbf, 0xec, 0xbf, 0xcd, 0xff, 0xbe, 0xd3];
//! assert_eq!(euckr.decode(broken, DecoderTrap::Replace),
//! Ok("\u{c6b0}\u{c640}\u{fffd}\u{c559}".to_string()));
//!
//! // corresponding Encoding native API:
//! assert_eq!(WINDOWS_949.decode(broken, DecoderTrap::Replace),
//! Ok("\u{c6b0}\u{c640}\u{fffd}\u{c559}".to_string()));
//! ~~~~
//!
//! ## Types and Stuffs
//!
//! There are three main entry points to Encoding.
//!
//! **`Encoding`** is a single character encoding.
//! It contains `encode` and `decode` methods for converting `String` to `Vec<u8>` and vice versa.
//! For the error handling, they receive **traps** (`EncoderTrap` and `DecoderTrap` respectively)
//! which replace any error with some string (e.g. `U+FFFD`) or sequence (e.g. `?`).
//! You can also use `EncoderTrap::Strict` and `DecoderTrap::Strict` traps to stop on an error.
//!
//! There are two ways to get `Encoding`:
//!
//! * `encoding::all` has static items for every supported encoding.
//! You should use them when the encoding would not change or only handful of them are required.
//! Combined with link-time optimization, any unused encoding would be discarded from the binary.
//! * `encoding::label` has functions to dynamically get an encoding from given string ("label").
//! They will return a static reference to the encoding,
//! which type is also known as `EncodingRef`.
//! It is useful when a list of required encodings is not available in advance,
//! but it will result in the larger binary and missed optimization opportunities.
//!
//! **`RawEncoder`** is an experimental incremental encoder.
//! At each step of `raw_feed`, it receives a slice of string
//! and emits any encoded bytes to a generic `ByteWriter` (normally `Vec<u8>`).
//! It will stop at the first error if any, and would return a `CodecError` struct in that case.
//! The caller is responsible for calling `raw_finish` at the end of encoding process.
//!
//! **`RawDecoder`** is an experimental incremental decoder.
//! At each step of `raw_feed`, it receives a slice of byte sequence
//! and emits any decoded characters to a generic `StringWriter` (normally `String`).
//! Otherwise it is identical to `RawEncoder`s.
//!
//! One should prefer `Encoding::{encode,decode}` as a primary interface.
//! `RawEncoder` and `RawDecoder` is experimental and can change substantially.
//! See the additional documents on `encoding::types` module for more information on them.
//!
//! ## Supported Encodings
//!
//! Encoding covers all encodings specified by WHATWG Encoding Standard and some more:
//!
//! * 7-bit strict ASCII (`ascii`)
//! * UTF-8 (`utf-8`)
//! * UTF-16 in little endian (`utf-16` or `utf-16le`) and big endian (`utf-16be`)
//! * All single byte encoding in WHATWG Encoding Standard:
//! * IBM code page 866
//! * ISO 8859-{2,3,4,5,6,7,8,10,13,14,15,16}
//! * KOI8-R, KOI8-U
//! * MacRoman (`macintosh`), Macintosh Cyrillic encoding (`x-mac-cyrillic`)
//! * Windows code pages 874, 1250, 1251, 1252 (instead of ISO 8859-1), 1253,
//! 1254 (instead of ISO 8859-9), 1255, 1256, 1257, 1258
//! * All multi byte encodings in WHATWG Encoding Standard:
//! * Windows code page 949 (`euc-kr`, since the strict EUC-KR is hardly used)
//! * EUC-JP and Windows code page 932 (`shift_jis`,
//! since it's the most widespread extension to Shift_JIS)
//! * ISO-2022-JP with asymmetric JIS X 0212 support
//! (Note: this is not yet up to date to the current standard)
//! * GBK
//! * GB 18030
//! * Big5-2003 with HKSCS-2008 extensions
//! * Encodings that were originally specified by WHATWG Encoding Standard:
//! * HZ
//! * ISO 8859-1 (distinct from Windows code page 1252)
//!
//! Parenthesized names refer to the encoding's primary name assigned by WHATWG Encoding Standard.
//!
//! Many legacy character encodings lack the proper specification,
//! and even those that have a specification are highly dependent of the actual implementation.
//! Consequently one should be careful when picking a desired character encoding.
//! The only standards reliable in this regard are WHATWG Encoding Standard and
//! [vendor-provided mappings from the Unicode consortium](http://www.unicode.org/Public/MAPPINGS/).
//! Whenever in doubt, look at the source code and specifications for detailed explanations.
#![cfg_attr(test, feature(test))] // lib stability features as per RFC #507
extern crate encoding_types;
extern crate encoding_index_singlebyte as index_singlebyte;
extern crate encoding_index_korean as index_korean;
extern crate encoding_index_japanese as index_japanese;
extern crate encoding_index_simpchinese as index_simpchinese;
extern crate encoding_index_tradchinese as index_tradchinese;
#[cfg(test)] extern crate test;
pub use self::types::{CodecError, ByteWriter, StringWriter,
RawEncoder, RawDecoder, EncodingRef, Encoding,
EncoderTrapFunc, DecoderTrapFunc, DecoderTrap,
EncoderTrap}; // reexport
use std::borrow::Cow;
#[macro_use] mod util;
#[cfg(test)] #[macro_use] mod testutils;
pub mod types;
/// Codec implementations.
pub mod codec {
pub mod error;
pub mod ascii;
pub mod singlebyte;
pub mod utf_8;
pub mod utf_16;
pub mod korean;
pub mod japanese;
pub mod simpchinese;
pub mod tradchinese;
pub mod whatwg;
}
pub mod all;
pub mod label;
/// Determine the encoding by looking for a Byte Order Mark (BOM)
/// and decoded a single string in memory.
/// Return the result and the used encoding.
pub fn decode(input: &[u8], trap: DecoderTrap, fallback_encoding: EncodingRef)
-> (Result<String, Cow<'static, str>>, EncodingRef) {
use all::{UTF_8, UTF_16LE, UTF_16BE};
if input.starts_with(&[0xEF, 0xBB, 0xBF]) {
(UTF_8.decode(&input[3..], trap), UTF_8 as EncodingRef)
} else if input.starts_with(&[0xFE, 0xFF]) {
(UTF_16BE.decode(&input[2..], trap), UTF_16BE as EncodingRef)
} else if input.starts_with(&[0xFF, 0xFE]) {
(UTF_16LE.decode(&input[2..], trap), UTF_16LE as EncodingRef)
} else {
|
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_decode() {
fn test_one(input: &[u8], expected_result: &str, expected_encoding: &str) {
let (result, used_encoding) = decode(
input, DecoderTrap::Strict, all::ISO_8859_1 as EncodingRef);
let result = result.unwrap();
assert_eq!(used_encoding.name(), expected_encoding);
assert_eq!(&result[..], expected_result);
}
test_one(&[0xEF, 0xBB, 0xBF, 0xC3, 0xA9], "é", "utf-8");
test_one(&[0xC3, 0xA9], "é", "iso-8859-1");
test_one(&[0xFE, 0xFF, 0x00, 0xE9], "é", "utf-16be");
test_one(&[0x00, 0xE9], "\x00é", "iso-8859-1");
test_one(&[0xFF, 0xFE, 0xE9, 0x00], "é", "utf-16le");
test_one(&[0xE9, 0x00], "é\x00", "iso-8859-1");
}
}
|
(fallback_encoding.decode(input, trap), fallback_encoding)
}
}
|
conditional_block
|
lib.rs
|
// This is a part of rust-encoding.
// Copyright (c) 2013-2015, Kang Seonghoon.
// See README.md and LICENSE.txt for details.
//! # Encoding 0.3.0-dev
//!
//! Character encoding support for Rust. (also known as `rust-encoding`)
//! It is based on [WHATWG Encoding Standard](http://encoding.spec.whatwg.org/),
//! and also provides an advanced interface for error detection and recovery.
//!
//! *This documentation is for the development version (0.3).
//! Please see the [stable documentation][doc] for 0.2.x versions.*
//!
//! ## Usage
//!
//! Put this in your `Cargo.toml`:
//!
//! ```toml
//! [dependencies]
//! encoding = "0.3"
//! ```
//!
//! Then put this in your crate root:
//!
//! ```rust
//! extern crate encoding;
//! ```
|
//!
//! ### Data Table
//!
//! By default, Encoding comes with ~480 KB of data table ("indices").
//! This allows Encoding to encode and decode legacy encodings efficiently,
//! but this might not be desirable for some applications.
//!
//! Encoding provides the `no-optimized-legacy-encoding` Cargo feature
//! to reduce the size of encoding tables (to ~185 KB)
//! at the expense of encoding performance (typically 5x to 20x slower).
//! The decoding performance remains identical.
//! **This feature is strongly intended for end users.
//! Do not enable this feature from library crates, ever.**
//!
//! For finer-tuned optimization, see `src/index/gen_index.py` for
//! custom table generation. At the most reduced (and slowest) setting,
//! the minimal size of data table is about 160 KB.
//!
//! ## Overview
//!
//! To encode a string:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, EncoderTrap};
//! use encoding::all::ISO_8859_1;
//!
//! assert_eq!(ISO_8859_1.encode("caf\u{e9}", EncoderTrap::Strict),
//! Ok(vec![99,97,102,233]));
//! ~~~~
//!
//! To encode a string with unrepresentable characters:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, EncoderTrap};
//! use encoding::all::ISO_8859_2;
//!
//! assert!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::Strict).is_err());
//! assert_eq!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::Replace),
//! Ok(vec![65,99,109,101,63]));
//! assert_eq!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::Ignore),
//! Ok(vec![65,99,109,101]));
//! assert_eq!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::NcrEscape),
//! Ok(vec![65,99,109,101,38,35,49,54,57,59]));
//! ~~~~
//!
//! To decode a byte sequence:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, DecoderTrap};
//! use encoding::all::ISO_8859_1;
//!
//! assert_eq!(ISO_8859_1.decode(&[99,97,102,233], DecoderTrap::Strict),
//! Ok("caf\u{e9}".to_string()));
//! ~~~~
//!
//! To decode a byte sequence with invalid sequences:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, DecoderTrap};
//! use encoding::all::ISO_8859_6;
//!
//! assert!(ISO_8859_6.decode(&[65,99,109,101,169], DecoderTrap::Strict).is_err());
//! assert_eq!(ISO_8859_6.decode(&[65,99,109,101,169], DecoderTrap::Replace),
//! Ok("Acme\u{fffd}".to_string()));
//! assert_eq!(ISO_8859_6.decode(&[65,99,109,101,169], DecoderTrap::Ignore),
//! Ok("Acme".to_string()));
//! ~~~~
//!
//! To encode or decode the input into the already allocated buffer:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, EncoderTrap, DecoderTrap};
//! use encoding::all::{ISO_8859_2, ISO_8859_6};
//!
//! let mut bytes = Vec::new();
//! let mut chars = String::new();
//!
//! assert!(ISO_8859_2.encode_to("Acme\u{a9}", EncoderTrap::Ignore, &mut bytes).is_ok());
//! assert!(ISO_8859_6.decode_to(&[65,99,109,101,169], DecoderTrap::Replace, &mut chars).is_ok());
//!
//! assert_eq!(bytes, [65,99,109,101]);
//! assert_eq!(chars, "Acme\u{fffd}");
//! ~~~~
//!
//! A practical example of custom encoder traps:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, ByteWriter, EncoderTrap, DecoderTrap};
//! use encoding::types::RawEncoder;
//! use encoding::all::ASCII;
//!
//! // hexadecimal numeric character reference replacement
//! fn hex_ncr_escape(_encoder: &mut RawEncoder, input: &str, output: &mut ByteWriter) -> bool {
//! let escapes: Vec<String> =
//! input.chars().map(|ch| format!("&#x{:x};", ch as isize)).collect();
//! let escapes = escapes.concat();
//! output.write_bytes(escapes.as_bytes());
//! true
//! }
//! static HEX_NCR_ESCAPE: EncoderTrap = EncoderTrap::Call(hex_ncr_escape);
//!
//! let orig = "Hello, 世界!".to_string();
//! let encoded = ASCII.encode(&orig, HEX_NCR_ESCAPE).unwrap();
//! assert_eq!(ASCII.decode(&encoded, DecoderTrap::Strict),
//! Ok("Hello, 世界!".to_string()));
//! ~~~~
//!
//! Getting the encoding from the string label, as specified in WHATWG Encoding standard:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, DecoderTrap};
//! use encoding::label::encoding_from_whatwg_label;
//! use encoding::all::WINDOWS_949;
//!
//! let euckr = encoding_from_whatwg_label("euc-kr").unwrap();
//! assert_eq!(euckr.name(), "windows-949");
//! assert_eq!(euckr.whatwg_name(), Some("euc-kr")); // for the sake of compatibility
//! let broken = &[0xbf, 0xec, 0xbf, 0xcd, 0xff, 0xbe, 0xd3];
//! assert_eq!(euckr.decode(broken, DecoderTrap::Replace),
//! Ok("\u{c6b0}\u{c640}\u{fffd}\u{c559}".to_string()));
//!
//! // corresponding Encoding native API:
//! assert_eq!(WINDOWS_949.decode(broken, DecoderTrap::Replace),
//! Ok("\u{c6b0}\u{c640}\u{fffd}\u{c559}".to_string()));
//! ~~~~
//!
//! ## Types and Stuffs
//!
//! There are three main entry points to Encoding.
//!
//! **`Encoding`** is a single character encoding.
//! It contains `encode` and `decode` methods for converting `String` to `Vec<u8>` and vice versa.
//! For the error handling, they receive **traps** (`EncoderTrap` and `DecoderTrap` respectively)
//! which replace any error with some string (e.g. `U+FFFD`) or sequence (e.g. `?`).
//! You can also use `EncoderTrap::Strict` and `DecoderTrap::Strict` traps to stop on an error.
//!
//! There are two ways to get `Encoding`:
//!
//! * `encoding::all` has static items for every supported encoding.
//! You should use them when the encoding would not change or only handful of them are required.
//! Combined with link-time optimization, any unused encoding would be discarded from the binary.
//! * `encoding::label` has functions to dynamically get an encoding from given string ("label").
//! They will return a static reference to the encoding,
//! which type is also known as `EncodingRef`.
//! It is useful when a list of required encodings is not available in advance,
//! but it will result in the larger binary and missed optimization opportunities.
//!
//! **`RawEncoder`** is an experimental incremental encoder.
//! At each step of `raw_feed`, it receives a slice of string
//! and emits any encoded bytes to a generic `ByteWriter` (normally `Vec<u8>`).
//! It will stop at the first error if any, and would return a `CodecError` struct in that case.
//! The caller is responsible for calling `raw_finish` at the end of encoding process.
//!
//! **`RawDecoder`** is an experimental incremental decoder.
//! At each step of `raw_feed`, it receives a slice of byte sequence
//! and emits any decoded characters to a generic `StringWriter` (normally `String`).
//! Otherwise it is identical to `RawEncoder`s.
//!
//! One should prefer `Encoding::{encode,decode}` as a primary interface.
//! `RawEncoder` and `RawDecoder` is experimental and can change substantially.
//! See the additional documents on `encoding::types` module for more information on them.
//!
//! ## Supported Encodings
//!
//! Encoding covers all encodings specified by WHATWG Encoding Standard and some more:
//!
//! * 7-bit strict ASCII (`ascii`)
//! * UTF-8 (`utf-8`)
//! * UTF-16 in little endian (`utf-16` or `utf-16le`) and big endian (`utf-16be`)
//! * All single byte encoding in WHATWG Encoding Standard:
//! * IBM code page 866
//! * ISO 8859-{2,3,4,5,6,7,8,10,13,14,15,16}
//! * KOI8-R, KOI8-U
//! * MacRoman (`macintosh`), Macintosh Cyrillic encoding (`x-mac-cyrillic`)
//! * Windows code pages 874, 1250, 1251, 1252 (instead of ISO 8859-1), 1253,
//! 1254 (instead of ISO 8859-9), 1255, 1256, 1257, 1258
//! * All multi byte encodings in WHATWG Encoding Standard:
//! * Windows code page 949 (`euc-kr`, since the strict EUC-KR is hardly used)
//! * EUC-JP and Windows code page 932 (`shift_jis`,
//! since it's the most widespread extension to Shift_JIS)
//! * ISO-2022-JP with asymmetric JIS X 0212 support
//! (Note: this is not yet up to date to the current standard)
//! * GBK
//! * GB 18030
//! * Big5-2003 with HKSCS-2008 extensions
//! * Encodings that were originally specified by WHATWG Encoding Standard:
//! * HZ
//! * ISO 8859-1 (distinct from Windows code page 1252)
//!
//! Parenthesized names refer to the encoding's primary name assigned by WHATWG Encoding Standard.
//!
//! Many legacy character encodings lack the proper specification,
//! and even those that have a specification are highly dependent of the actual implementation.
//! Consequently one should be careful when picking a desired character encoding.
//! The only standards reliable in this regard are WHATWG Encoding Standard and
//! [vendor-provided mappings from the Unicode consortium](http://www.unicode.org/Public/MAPPINGS/).
//! Whenever in doubt, look at the source code and specifications for detailed explanations.
#![cfg_attr(test, feature(test))] // lib stability features as per RFC #507
extern crate encoding_types;
extern crate encoding_index_singlebyte as index_singlebyte;
extern crate encoding_index_korean as index_korean;
extern crate encoding_index_japanese as index_japanese;
extern crate encoding_index_simpchinese as index_simpchinese;
extern crate encoding_index_tradchinese as index_tradchinese;
#[cfg(test)] extern crate test;
pub use self::types::{CodecError, ByteWriter, StringWriter,
RawEncoder, RawDecoder, EncodingRef, Encoding,
EncoderTrapFunc, DecoderTrapFunc, DecoderTrap,
EncoderTrap}; // reexport
use std::borrow::Cow;
#[macro_use] mod util;
#[cfg(test)] #[macro_use] mod testutils;
pub mod types;
/// Codec implementations.
pub mod codec {
pub mod error;
pub mod ascii;
pub mod singlebyte;
pub mod utf_8;
pub mod utf_16;
pub mod korean;
pub mod japanese;
pub mod simpchinese;
pub mod tradchinese;
pub mod whatwg;
}
pub mod all;
pub mod label;
/// Determine the encoding by looking for a Byte Order Mark (BOM)
/// and decoded a single string in memory.
/// Return the result and the used encoding.
pub fn decode(input: &[u8], trap: DecoderTrap, fallback_encoding: EncodingRef)
-> (Result<String, Cow<'static, str>>, EncodingRef) {
use all::{UTF_8, UTF_16LE, UTF_16BE};
if input.starts_with(&[0xEF, 0xBB, 0xBF]) {
(UTF_8.decode(&input[3..], trap), UTF_8 as EncodingRef)
} else if input.starts_with(&[0xFE, 0xFF]) {
(UTF_16BE.decode(&input[2..], trap), UTF_16BE as EncodingRef)
} else if input.starts_with(&[0xFF, 0xFE]) {
(UTF_16LE.decode(&input[2..], trap), UTF_16LE as EncodingRef)
} else {
(fallback_encoding.decode(input, trap), fallback_encoding)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_decode() {
fn test_one(input: &[u8], expected_result: &str, expected_encoding: &str) {
let (result, used_encoding) = decode(
input, DecoderTrap::Strict, all::ISO_8859_1 as EncodingRef);
let result = result.unwrap();
assert_eq!(used_encoding.name(), expected_encoding);
assert_eq!(&result[..], expected_result);
}
test_one(&[0xEF, 0xBB, 0xBF, 0xC3, 0xA9], "é", "utf-8");
test_one(&[0xC3, 0xA9], "é", "iso-8859-1");
test_one(&[0xFE, 0xFF, 0x00, 0xE9], "é", "utf-16be");
test_one(&[0x00, 0xE9], "\x00é", "iso-8859-1");
test_one(&[0xFF, 0xFE, 0xE9, 0x00], "é", "utf-16le");
test_one(&[0xE9, 0x00], "é\x00", "iso-8859-1");
}
}
|
random_line_split
|
|
lib.rs
|
// This is a part of rust-encoding.
// Copyright (c) 2013-2015, Kang Seonghoon.
// See README.md and LICENSE.txt for details.
//! # Encoding 0.3.0-dev
//!
//! Character encoding support for Rust. (also known as `rust-encoding`)
//! It is based on [WHATWG Encoding Standard](http://encoding.spec.whatwg.org/),
//! and also provides an advanced interface for error detection and recovery.
//!
//! *This documentation is for the development version (0.3).
//! Please see the [stable documentation][doc] for 0.2.x versions.*
//!
//! ## Usage
//!
//! Put this in your `Cargo.toml`:
//!
//! ```toml
//! [dependencies]
//! encoding = "0.3"
//! ```
//!
//! Then put this in your crate root:
//!
//! ```rust
//! extern crate encoding;
//! ```
//!
//! ### Data Table
//!
//! By default, Encoding comes with ~480 KB of data table ("indices").
//! This allows Encoding to encode and decode legacy encodings efficiently,
//! but this might not be desirable for some applications.
//!
//! Encoding provides the `no-optimized-legacy-encoding` Cargo feature
//! to reduce the size of encoding tables (to ~185 KB)
//! at the expense of encoding performance (typically 5x to 20x slower).
//! The decoding performance remains identical.
//! **This feature is strongly intended for end users.
//! Do not enable this feature from library crates, ever.**
//!
//! For finer-tuned optimization, see `src/index/gen_index.py` for
//! custom table generation. At the most reduced (and slowest) setting,
//! the minimal size of data table is about 160 KB.
//!
//! ## Overview
//!
//! To encode a string:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, EncoderTrap};
//! use encoding::all::ISO_8859_1;
//!
//! assert_eq!(ISO_8859_1.encode("caf\u{e9}", EncoderTrap::Strict),
//! Ok(vec![99,97,102,233]));
//! ~~~~
//!
//! To encode a string with unrepresentable characters:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, EncoderTrap};
//! use encoding::all::ISO_8859_2;
//!
//! assert!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::Strict).is_err());
//! assert_eq!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::Replace),
//! Ok(vec![65,99,109,101,63]));
//! assert_eq!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::Ignore),
//! Ok(vec![65,99,109,101]));
//! assert_eq!(ISO_8859_2.encode("Acme\u{a9}", EncoderTrap::NcrEscape),
//! Ok(vec![65,99,109,101,38,35,49,54,57,59]));
//! ~~~~
//!
//! To decode a byte sequence:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, DecoderTrap};
//! use encoding::all::ISO_8859_1;
//!
//! assert_eq!(ISO_8859_1.decode(&[99,97,102,233], DecoderTrap::Strict),
//! Ok("caf\u{e9}".to_string()));
//! ~~~~
//!
//! To decode a byte sequence with invalid sequences:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, DecoderTrap};
//! use encoding::all::ISO_8859_6;
//!
//! assert!(ISO_8859_6.decode(&[65,99,109,101,169], DecoderTrap::Strict).is_err());
//! assert_eq!(ISO_8859_6.decode(&[65,99,109,101,169], DecoderTrap::Replace),
//! Ok("Acme\u{fffd}".to_string()));
//! assert_eq!(ISO_8859_6.decode(&[65,99,109,101,169], DecoderTrap::Ignore),
//! Ok("Acme".to_string()));
//! ~~~~
//!
//! To encode or decode the input into the already allocated buffer:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, EncoderTrap, DecoderTrap};
//! use encoding::all::{ISO_8859_2, ISO_8859_6};
//!
//! let mut bytes = Vec::new();
//! let mut chars = String::new();
//!
//! assert!(ISO_8859_2.encode_to("Acme\u{a9}", EncoderTrap::Ignore, &mut bytes).is_ok());
//! assert!(ISO_8859_6.decode_to(&[65,99,109,101,169], DecoderTrap::Replace, &mut chars).is_ok());
//!
//! assert_eq!(bytes, [65,99,109,101]);
//! assert_eq!(chars, "Acme\u{fffd}");
//! ~~~~
//!
//! A practical example of custom encoder traps:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, ByteWriter, EncoderTrap, DecoderTrap};
//! use encoding::types::RawEncoder;
//! use encoding::all::ASCII;
//!
//! // hexadecimal numeric character reference replacement
//! fn hex_ncr_escape(_encoder: &mut RawEncoder, input: &str, output: &mut ByteWriter) -> bool {
//! let escapes: Vec<String> =
//! input.chars().map(|ch| format!("&#x{:x};", ch as isize)).collect();
//! let escapes = escapes.concat();
//! output.write_bytes(escapes.as_bytes());
//! true
//! }
//! static HEX_NCR_ESCAPE: EncoderTrap = EncoderTrap::Call(hex_ncr_escape);
//!
//! let orig = "Hello, 世界!".to_string();
//! let encoded = ASCII.encode(&orig, HEX_NCR_ESCAPE).unwrap();
//! assert_eq!(ASCII.decode(&encoded, DecoderTrap::Strict),
//! Ok("Hello, 世界!".to_string()));
//! ~~~~
//!
//! Getting the encoding from the string label, as specified in WHATWG Encoding standard:
//!
//! ~~~~ {.rust}
//! use encoding::{Encoding, DecoderTrap};
//! use encoding::label::encoding_from_whatwg_label;
//! use encoding::all::WINDOWS_949;
//!
//! let euckr = encoding_from_whatwg_label("euc-kr").unwrap();
//! assert_eq!(euckr.name(), "windows-949");
//! assert_eq!(euckr.whatwg_name(), Some("euc-kr")); // for the sake of compatibility
//! let broken = &[0xbf, 0xec, 0xbf, 0xcd, 0xff, 0xbe, 0xd3];
//! assert_eq!(euckr.decode(broken, DecoderTrap::Replace),
//! Ok("\u{c6b0}\u{c640}\u{fffd}\u{c559}".to_string()));
//!
//! // corresponding Encoding native API:
//! assert_eq!(WINDOWS_949.decode(broken, DecoderTrap::Replace),
//! Ok("\u{c6b0}\u{c640}\u{fffd}\u{c559}".to_string()));
//! ~~~~
//!
//! ## Types and Stuffs
//!
//! There are three main entry points to Encoding.
//!
//! **`Encoding`** is a single character encoding.
//! It contains `encode` and `decode` methods for converting `String` to `Vec<u8>` and vice versa.
//! For the error handling, they receive **traps** (`EncoderTrap` and `DecoderTrap` respectively)
//! which replace any error with some string (e.g. `U+FFFD`) or sequence (e.g. `?`).
//! You can also use `EncoderTrap::Strict` and `DecoderTrap::Strict` traps to stop on an error.
//!
//! There are two ways to get `Encoding`:
//!
//! * `encoding::all` has static items for every supported encoding.
//! You should use them when the encoding would not change or only handful of them are required.
//! Combined with link-time optimization, any unused encoding would be discarded from the binary.
//! * `encoding::label` has functions to dynamically get an encoding from given string ("label").
//! They will return a static reference to the encoding,
//! which type is also known as `EncodingRef`.
//! It is useful when a list of required encodings is not available in advance,
//! but it will result in the larger binary and missed optimization opportunities.
//!
//! **`RawEncoder`** is an experimental incremental encoder.
//! At each step of `raw_feed`, it receives a slice of string
//! and emits any encoded bytes to a generic `ByteWriter` (normally `Vec<u8>`).
//! It will stop at the first error if any, and would return a `CodecError` struct in that case.
//! The caller is responsible for calling `raw_finish` at the end of encoding process.
//!
//! **`RawDecoder`** is an experimental incremental decoder.
//! At each step of `raw_feed`, it receives a slice of byte sequence
//! and emits any decoded characters to a generic `StringWriter` (normally `String`).
//! Otherwise it is identical to `RawEncoder`s.
//!
//! One should prefer `Encoding::{encode,decode}` as a primary interface.
//! `RawEncoder` and `RawDecoder` is experimental and can change substantially.
//! See the additional documents on `encoding::types` module for more information on them.
//!
//! ## Supported Encodings
//!
//! Encoding covers all encodings specified by WHATWG Encoding Standard and some more:
//!
//! * 7-bit strict ASCII (`ascii`)
//! * UTF-8 (`utf-8`)
//! * UTF-16 in little endian (`utf-16` or `utf-16le`) and big endian (`utf-16be`)
//! * All single byte encoding in WHATWG Encoding Standard:
//! * IBM code page 866
//! * ISO 8859-{2,3,4,5,6,7,8,10,13,14,15,16}
//! * KOI8-R, KOI8-U
//! * MacRoman (`macintosh`), Macintosh Cyrillic encoding (`x-mac-cyrillic`)
//! * Windows code pages 874, 1250, 1251, 1252 (instead of ISO 8859-1), 1253,
//! 1254 (instead of ISO 8859-9), 1255, 1256, 1257, 1258
//! * All multi byte encodings in WHATWG Encoding Standard:
//! * Windows code page 949 (`euc-kr`, since the strict EUC-KR is hardly used)
//! * EUC-JP and Windows code page 932 (`shift_jis`,
//! since it's the most widespread extension to Shift_JIS)
//! * ISO-2022-JP with asymmetric JIS X 0212 support
//! (Note: this is not yet up to date to the current standard)
//! * GBK
//! * GB 18030
//! * Big5-2003 with HKSCS-2008 extensions
//! * Encodings that were originally specified by WHATWG Encoding Standard:
//! * HZ
//! * ISO 8859-1 (distinct from Windows code page 1252)
//!
//! Parenthesized names refer to the encoding's primary name assigned by WHATWG Encoding Standard.
//!
//! Many legacy character encodings lack the proper specification,
//! and even those that have a specification are highly dependent of the actual implementation.
//! Consequently one should be careful when picking a desired character encoding.
//! The only standards reliable in this regard are WHATWG Encoding Standard and
//! [vendor-provided mappings from the Unicode consortium](http://www.unicode.org/Public/MAPPINGS/).
//! Whenever in doubt, look at the source code and specifications for detailed explanations.
#![cfg_attr(test, feature(test))] // lib stability features as per RFC #507
extern crate encoding_types;
extern crate encoding_index_singlebyte as index_singlebyte;
extern crate encoding_index_korean as index_korean;
extern crate encoding_index_japanese as index_japanese;
extern crate encoding_index_simpchinese as index_simpchinese;
extern crate encoding_index_tradchinese as index_tradchinese;
#[cfg(test)] extern crate test;
pub use self::types::{CodecError, ByteWriter, StringWriter,
RawEncoder, RawDecoder, EncodingRef, Encoding,
EncoderTrapFunc, DecoderTrapFunc, DecoderTrap,
EncoderTrap}; // reexport
use std::borrow::Cow;
#[macro_use] mod util;
#[cfg(test)] #[macro_use] mod testutils;
pub mod types;
/// Codec implementations.
pub mod codec {
pub mod error;
pub mod ascii;
pub mod singlebyte;
pub mod utf_8;
pub mod utf_16;
pub mod korean;
pub mod japanese;
pub mod simpchinese;
pub mod tradchinese;
pub mod whatwg;
}
pub mod all;
pub mod label;
/// Determine the encoding by looking for a Byte Order Mark (BOM)
/// and decoded a single string in memory.
/// Return the result and the used encoding.
pub fn deco
|
ut: &[u8], trap: DecoderTrap, fallback_encoding: EncodingRef)
-> (Result<String, Cow<'static, str>>, EncodingRef) {
use all::{UTF_8, UTF_16LE, UTF_16BE};
if input.starts_with(&[0xEF, 0xBB, 0xBF]) {
(UTF_8.decode(&input[3..], trap), UTF_8 as EncodingRef)
} else if input.starts_with(&[0xFE, 0xFF]) {
(UTF_16BE.decode(&input[2..], trap), UTF_16BE as EncodingRef)
} else if input.starts_with(&[0xFF, 0xFE]) {
(UTF_16LE.decode(&input[2..], trap), UTF_16LE as EncodingRef)
} else {
(fallback_encoding.decode(input, trap), fallback_encoding)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_decode() {
fn test_one(input: &[u8], expected_result: &str, expected_encoding: &str) {
let (result, used_encoding) = decode(
input, DecoderTrap::Strict, all::ISO_8859_1 as EncodingRef);
let result = result.unwrap();
assert_eq!(used_encoding.name(), expected_encoding);
assert_eq!(&result[..], expected_result);
}
test_one(&[0xEF, 0xBB, 0xBF, 0xC3, 0xA9], "é", "utf-8");
test_one(&[0xC3, 0xA9], "é", "iso-8859-1");
test_one(&[0xFE, 0xFF, 0x00, 0xE9], "é", "utf-16be");
test_one(&[0x00, 0xE9], "\x00é", "iso-8859-1");
test_one(&[0xFF, 0xFE, 0xE9, 0x00], "é", "utf-16le");
test_one(&[0xE9, 0x00], "é\x00", "iso-8859-1");
}
}
|
de(inp
|
identifier_name
|
gdb-pretty-struct-and-enums.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-test also broken on nightly linux distcheck. it's just broken!
// ignore-windows failing on win32 bot
// ignore-freebsd: output doesn't match
// ignore-tidy-linelength
// ignore-lldb
// ignore-android: FIXME(#10381)
// compile-flags:-g
// This test uses some GDB Python API features (e.g. accessing anonymous fields)
// which are only available in newer GDB version. The following directive will
// case the test runner to ignore this test if an older GDB version is used:
// min-gdb-version 7.7
// gdb-command: run
// gdb-command: print regular_struct
// gdb-check:$1 = RegularStruct = {the_first_field = 101, the_second_field = 102.5, the_third_field = false, the_fourth_field = "I'm so pretty, oh so pretty..."}
// gdb-command: print tuple
// gdb-check:$2 = {true, 103, "blub"}
// gdb-command: print tuple_struct
// gdb-check:$3 = TupleStruct = {-104.5, 105}
// gdb-command: print empty_struct
// gdb-check:$4 = EmptyStruct
// gdb-command: print c_style_enum1
// gdb-check:$5 = CStyleEnumVar1
// gdb-command: print c_style_enum2
// gdb-check:$6 = CStyleEnumVar2
// gdb-command: print c_style_enum3
// gdb-check:$7 = CStyleEnumVar3
// gdb-command: print mixed_enum_c_style_var
// gdb-check:$8 = MixedEnumCStyleVar
// gdb-command: print mixed_enum_tuple_var
// gdb-check:$9 = MixedEnumTupleVar = {106, 107, false}
// gdb-command: print mixed_enum_struct_var
// gdb-check:$10 = MixedEnumStructVar = {field1 = 108.5, field2 = 109}
// gdb-command: print some
// gdb-check:$11 = Some = {110}
// gdb-command: print none
// gdb-check:$12 = None
// gdb-command: print some_fat
// gdb-check:$13 = Some = {"abc"}
// gdb-command: print none_fat
// gdb-check:$14 = None
// gdb-command: print nested_variant1
// gdb-check:$15 = NestedVariant1 = {NestedStruct = {regular_struct = RegularStruct = {the_first_field = 111, the_second_field = 112.5, the_third_field = true, the_fourth_field = "NestedStructString1"}, tuple_struct = TupleStruct = {113.5, 114}, empty_struct = EmptyStruct, c_style_enum = CStyleEnumVar2, mixed_enum = MixedEnumTupleVar = {115, 116, false}}}
// gdb-command: print nested_variant2
// gdb-check:$16 = NestedVariant2 = {abc = NestedStruct = {regular_struct = RegularStruct = {the_first_field = 117, the_second_field = 118.5, the_third_field = false, the_fourth_field = "NestedStructString10"}, tuple_struct = TupleStruct = {119.5, 120}, empty_struct = EmptyStruct, c_style_enum = CStyleEnumVar3, mixed_enum = MixedEnumStructVar = {field1 = 121.5, field2 = -122}}}
// gdb-command: print none_check1
// gdb-check:$17 = None
// gdb-command: print none_check2
// gdb-check:$18 = None
#![allow(dead_code, unused_variables)]
use self::CStyleEnum::{CStyleEnumVar1, CStyleEnumVar2, CStyleEnumVar3};
use self::MixedEnum::{MixedEnumCStyleVar, MixedEnumTupleVar, MixedEnumStructVar};
use self::NestedEnum::{NestedVariant1, NestedVariant2};
struct RegularStruct {
the_first_field: isize,
the_second_field: f64,
the_third_field: bool,
the_fourth_field: &'static str,
}
struct TupleStruct(f64, i16);
struct EmptyStruct;
enum CStyleEnum {
CStyleEnumVar1,
CStyleEnumVar2,
CStyleEnumVar3,
}
enum MixedEnum {
MixedEnumCStyleVar,
MixedEnumTupleVar(u32, u16, bool),
MixedEnumStructVar { field1: f64, field2: i32 }
}
struct NestedStruct {
regular_struct: RegularStruct,
tuple_struct: TupleStruct,
empty_struct: EmptyStruct,
|
c_style_enum: CStyleEnum,
mixed_enum: MixedEnum,
}
enum NestedEnum {
NestedVariant1(NestedStruct),
NestedVariant2 { abc: NestedStruct }
}
fn main() {
let regular_struct = RegularStruct {
the_first_field: 101,
the_second_field: 102.5,
the_third_field: false,
the_fourth_field: "I'm so pretty, oh so pretty..."
};
let tuple = ( true, 103u32, "blub" );
let tuple_struct = TupleStruct(-104.5, 105);
let empty_struct = EmptyStruct;
let c_style_enum1 = CStyleEnumVar1;
let c_style_enum2 = CStyleEnumVar2;
let c_style_enum3 = CStyleEnumVar3;
let mixed_enum_c_style_var = MixedEnumCStyleVar;
let mixed_enum_tuple_var = MixedEnumTupleVar(106, 107, false);
let mixed_enum_struct_var = MixedEnumStructVar { field1: 108.5, field2: 109 };
let some = Some(110_usize);
let none: Option<isize> = None;
let some_fat = Some("abc");
let none_fat: Option<&'static str> = None;
let nested_variant1 = NestedVariant1(
NestedStruct {
regular_struct: RegularStruct {
the_first_field: 111,
the_second_field: 112.5,
the_third_field: true,
the_fourth_field: "NestedStructString1",
},
tuple_struct: TupleStruct(113.5, 114),
empty_struct: EmptyStruct,
c_style_enum: CStyleEnumVar2,
mixed_enum: MixedEnumTupleVar(115, 116, false)
}
);
let nested_variant2 = NestedVariant2 {
abc: NestedStruct {
regular_struct: RegularStruct {
the_first_field: 117,
the_second_field: 118.5,
the_third_field: false,
the_fourth_field: "NestedStructString10",
},
tuple_struct: TupleStruct(119.5, 120),
empty_struct: EmptyStruct,
c_style_enum: CStyleEnumVar3,
mixed_enum: MixedEnumStructVar {
field1: 121.5,
field2: -122
}
}
};
let none_check1: Option<(usize, Vec<usize>)> = None;
let none_check2: Option<String> = None;
zzz(); // #break
}
fn zzz() { () }
|
random_line_split
|
|
gdb-pretty-struct-and-enums.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-test also broken on nightly linux distcheck. it's just broken!
// ignore-windows failing on win32 bot
// ignore-freebsd: output doesn't match
// ignore-tidy-linelength
// ignore-lldb
// ignore-android: FIXME(#10381)
// compile-flags:-g
// This test uses some GDB Python API features (e.g. accessing anonymous fields)
// which are only available in newer GDB version. The following directive will
// case the test runner to ignore this test if an older GDB version is used:
// min-gdb-version 7.7
// gdb-command: run
// gdb-command: print regular_struct
// gdb-check:$1 = RegularStruct = {the_first_field = 101, the_second_field = 102.5, the_third_field = false, the_fourth_field = "I'm so pretty, oh so pretty..."}
// gdb-command: print tuple
// gdb-check:$2 = {true, 103, "blub"}
// gdb-command: print tuple_struct
// gdb-check:$3 = TupleStruct = {-104.5, 105}
// gdb-command: print empty_struct
// gdb-check:$4 = EmptyStruct
// gdb-command: print c_style_enum1
// gdb-check:$5 = CStyleEnumVar1
// gdb-command: print c_style_enum2
// gdb-check:$6 = CStyleEnumVar2
// gdb-command: print c_style_enum3
// gdb-check:$7 = CStyleEnumVar3
// gdb-command: print mixed_enum_c_style_var
// gdb-check:$8 = MixedEnumCStyleVar
// gdb-command: print mixed_enum_tuple_var
// gdb-check:$9 = MixedEnumTupleVar = {106, 107, false}
// gdb-command: print mixed_enum_struct_var
// gdb-check:$10 = MixedEnumStructVar = {field1 = 108.5, field2 = 109}
// gdb-command: print some
// gdb-check:$11 = Some = {110}
// gdb-command: print none
// gdb-check:$12 = None
// gdb-command: print some_fat
// gdb-check:$13 = Some = {"abc"}
// gdb-command: print none_fat
// gdb-check:$14 = None
// gdb-command: print nested_variant1
// gdb-check:$15 = NestedVariant1 = {NestedStruct = {regular_struct = RegularStruct = {the_first_field = 111, the_second_field = 112.5, the_third_field = true, the_fourth_field = "NestedStructString1"}, tuple_struct = TupleStruct = {113.5, 114}, empty_struct = EmptyStruct, c_style_enum = CStyleEnumVar2, mixed_enum = MixedEnumTupleVar = {115, 116, false}}}
// gdb-command: print nested_variant2
// gdb-check:$16 = NestedVariant2 = {abc = NestedStruct = {regular_struct = RegularStruct = {the_first_field = 117, the_second_field = 118.5, the_third_field = false, the_fourth_field = "NestedStructString10"}, tuple_struct = TupleStruct = {119.5, 120}, empty_struct = EmptyStruct, c_style_enum = CStyleEnumVar3, mixed_enum = MixedEnumStructVar = {field1 = 121.5, field2 = -122}}}
// gdb-command: print none_check1
// gdb-check:$17 = None
// gdb-command: print none_check2
// gdb-check:$18 = None
#![allow(dead_code, unused_variables)]
use self::CStyleEnum::{CStyleEnumVar1, CStyleEnumVar2, CStyleEnumVar3};
use self::MixedEnum::{MixedEnumCStyleVar, MixedEnumTupleVar, MixedEnumStructVar};
use self::NestedEnum::{NestedVariant1, NestedVariant2};
struct RegularStruct {
the_first_field: isize,
the_second_field: f64,
the_third_field: bool,
the_fourth_field: &'static str,
}
struct TupleStruct(f64, i16);
struct EmptyStruct;
enum CStyleEnum {
CStyleEnumVar1,
CStyleEnumVar2,
CStyleEnumVar3,
}
enum
|
{
MixedEnumCStyleVar,
MixedEnumTupleVar(u32, u16, bool),
MixedEnumStructVar { field1: f64, field2: i32 }
}
struct NestedStruct {
regular_struct: RegularStruct,
tuple_struct: TupleStruct,
empty_struct: EmptyStruct,
c_style_enum: CStyleEnum,
mixed_enum: MixedEnum,
}
enum NestedEnum {
NestedVariant1(NestedStruct),
NestedVariant2 { abc: NestedStruct }
}
fn main() {
let regular_struct = RegularStruct {
the_first_field: 101,
the_second_field: 102.5,
the_third_field: false,
the_fourth_field: "I'm so pretty, oh so pretty..."
};
let tuple = ( true, 103u32, "blub" );
let tuple_struct = TupleStruct(-104.5, 105);
let empty_struct = EmptyStruct;
let c_style_enum1 = CStyleEnumVar1;
let c_style_enum2 = CStyleEnumVar2;
let c_style_enum3 = CStyleEnumVar3;
let mixed_enum_c_style_var = MixedEnumCStyleVar;
let mixed_enum_tuple_var = MixedEnumTupleVar(106, 107, false);
let mixed_enum_struct_var = MixedEnumStructVar { field1: 108.5, field2: 109 };
let some = Some(110_usize);
let none: Option<isize> = None;
let some_fat = Some("abc");
let none_fat: Option<&'static str> = None;
let nested_variant1 = NestedVariant1(
NestedStruct {
regular_struct: RegularStruct {
the_first_field: 111,
the_second_field: 112.5,
the_third_field: true,
the_fourth_field: "NestedStructString1",
},
tuple_struct: TupleStruct(113.5, 114),
empty_struct: EmptyStruct,
c_style_enum: CStyleEnumVar2,
mixed_enum: MixedEnumTupleVar(115, 116, false)
}
);
let nested_variant2 = NestedVariant2 {
abc: NestedStruct {
regular_struct: RegularStruct {
the_first_field: 117,
the_second_field: 118.5,
the_third_field: false,
the_fourth_field: "NestedStructString10",
},
tuple_struct: TupleStruct(119.5, 120),
empty_struct: EmptyStruct,
c_style_enum: CStyleEnumVar3,
mixed_enum: MixedEnumStructVar {
field1: 121.5,
field2: -122
}
}
};
let none_check1: Option<(usize, Vec<usize>)> = None;
let none_check2: Option<String> = None;
zzz(); // #break
}
fn zzz() { () }
|
MixedEnum
|
identifier_name
|
gdb-pretty-struct-and-enums.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-test also broken on nightly linux distcheck. it's just broken!
// ignore-windows failing on win32 bot
// ignore-freebsd: output doesn't match
// ignore-tidy-linelength
// ignore-lldb
// ignore-android: FIXME(#10381)
// compile-flags:-g
// This test uses some GDB Python API features (e.g. accessing anonymous fields)
// which are only available in newer GDB version. The following directive will
// case the test runner to ignore this test if an older GDB version is used:
// min-gdb-version 7.7
// gdb-command: run
// gdb-command: print regular_struct
// gdb-check:$1 = RegularStruct = {the_first_field = 101, the_second_field = 102.5, the_third_field = false, the_fourth_field = "I'm so pretty, oh so pretty..."}
// gdb-command: print tuple
// gdb-check:$2 = {true, 103, "blub"}
// gdb-command: print tuple_struct
// gdb-check:$3 = TupleStruct = {-104.5, 105}
// gdb-command: print empty_struct
// gdb-check:$4 = EmptyStruct
// gdb-command: print c_style_enum1
// gdb-check:$5 = CStyleEnumVar1
// gdb-command: print c_style_enum2
// gdb-check:$6 = CStyleEnumVar2
// gdb-command: print c_style_enum3
// gdb-check:$7 = CStyleEnumVar3
// gdb-command: print mixed_enum_c_style_var
// gdb-check:$8 = MixedEnumCStyleVar
// gdb-command: print mixed_enum_tuple_var
// gdb-check:$9 = MixedEnumTupleVar = {106, 107, false}
// gdb-command: print mixed_enum_struct_var
// gdb-check:$10 = MixedEnumStructVar = {field1 = 108.5, field2 = 109}
// gdb-command: print some
// gdb-check:$11 = Some = {110}
// gdb-command: print none
// gdb-check:$12 = None
// gdb-command: print some_fat
// gdb-check:$13 = Some = {"abc"}
// gdb-command: print none_fat
// gdb-check:$14 = None
// gdb-command: print nested_variant1
// gdb-check:$15 = NestedVariant1 = {NestedStruct = {regular_struct = RegularStruct = {the_first_field = 111, the_second_field = 112.5, the_third_field = true, the_fourth_field = "NestedStructString1"}, tuple_struct = TupleStruct = {113.5, 114}, empty_struct = EmptyStruct, c_style_enum = CStyleEnumVar2, mixed_enum = MixedEnumTupleVar = {115, 116, false}}}
// gdb-command: print nested_variant2
// gdb-check:$16 = NestedVariant2 = {abc = NestedStruct = {regular_struct = RegularStruct = {the_first_field = 117, the_second_field = 118.5, the_third_field = false, the_fourth_field = "NestedStructString10"}, tuple_struct = TupleStruct = {119.5, 120}, empty_struct = EmptyStruct, c_style_enum = CStyleEnumVar3, mixed_enum = MixedEnumStructVar = {field1 = 121.5, field2 = -122}}}
// gdb-command: print none_check1
// gdb-check:$17 = None
// gdb-command: print none_check2
// gdb-check:$18 = None
#![allow(dead_code, unused_variables)]
use self::CStyleEnum::{CStyleEnumVar1, CStyleEnumVar2, CStyleEnumVar3};
use self::MixedEnum::{MixedEnumCStyleVar, MixedEnumTupleVar, MixedEnumStructVar};
use self::NestedEnum::{NestedVariant1, NestedVariant2};
struct RegularStruct {
the_first_field: isize,
the_second_field: f64,
the_third_field: bool,
the_fourth_field: &'static str,
}
struct TupleStruct(f64, i16);
struct EmptyStruct;
enum CStyleEnum {
CStyleEnumVar1,
CStyleEnumVar2,
CStyleEnumVar3,
}
enum MixedEnum {
MixedEnumCStyleVar,
MixedEnumTupleVar(u32, u16, bool),
MixedEnumStructVar { field1: f64, field2: i32 }
}
struct NestedStruct {
regular_struct: RegularStruct,
tuple_struct: TupleStruct,
empty_struct: EmptyStruct,
c_style_enum: CStyleEnum,
mixed_enum: MixedEnum,
}
enum NestedEnum {
NestedVariant1(NestedStruct),
NestedVariant2 { abc: NestedStruct }
}
fn main() {
let regular_struct = RegularStruct {
the_first_field: 101,
the_second_field: 102.5,
the_third_field: false,
the_fourth_field: "I'm so pretty, oh so pretty..."
};
let tuple = ( true, 103u32, "blub" );
let tuple_struct = TupleStruct(-104.5, 105);
let empty_struct = EmptyStruct;
let c_style_enum1 = CStyleEnumVar1;
let c_style_enum2 = CStyleEnumVar2;
let c_style_enum3 = CStyleEnumVar3;
let mixed_enum_c_style_var = MixedEnumCStyleVar;
let mixed_enum_tuple_var = MixedEnumTupleVar(106, 107, false);
let mixed_enum_struct_var = MixedEnumStructVar { field1: 108.5, field2: 109 };
let some = Some(110_usize);
let none: Option<isize> = None;
let some_fat = Some("abc");
let none_fat: Option<&'static str> = None;
let nested_variant1 = NestedVariant1(
NestedStruct {
regular_struct: RegularStruct {
the_first_field: 111,
the_second_field: 112.5,
the_third_field: true,
the_fourth_field: "NestedStructString1",
},
tuple_struct: TupleStruct(113.5, 114),
empty_struct: EmptyStruct,
c_style_enum: CStyleEnumVar2,
mixed_enum: MixedEnumTupleVar(115, 116, false)
}
);
let nested_variant2 = NestedVariant2 {
abc: NestedStruct {
regular_struct: RegularStruct {
the_first_field: 117,
the_second_field: 118.5,
the_third_field: false,
the_fourth_field: "NestedStructString10",
},
tuple_struct: TupleStruct(119.5, 120),
empty_struct: EmptyStruct,
c_style_enum: CStyleEnumVar3,
mixed_enum: MixedEnumStructVar {
field1: 121.5,
field2: -122
}
}
};
let none_check1: Option<(usize, Vec<usize>)> = None;
let none_check2: Option<String> = None;
zzz(); // #break
}
fn zzz()
|
{ () }
|
identifier_body
|
|
extendablemessageevent.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::ExtendableMessageEventBinding;
use dom::bindings::codegen::Bindings::ExtendableMessageEventBinding::ExtendableMessageEventMethods;
use dom::bindings::error::Fallible;
use dom::bindings::inheritance::Castable;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::bindings::trace::RootedTraceableBox;
use dom::event::Event;
use dom::eventtarget::EventTarget;
use dom::extendableevent::ExtendableEvent;
use dom::globalscope::GlobalScope;
use dom::serviceworkerglobalscope::ServiceWorkerGlobalScope;
use dom_struct::dom_struct;
use js::jsapi::{HandleValue, Heap, JSContext};
use js::jsval::JSVal;
use servo_atoms::Atom;
#[dom_struct]
pub struct ExtendableMessageEvent {
event: ExtendableEvent,
data: Heap<JSVal>,
origin: DOMString,
lastEventId: DOMString,
}
impl ExtendableMessageEvent {
pub fn new(global: &GlobalScope, type_: Atom,
bubbles: bool, cancelable: bool,
data: HandleValue, origin: DOMString, lastEventId: DOMString)
-> DomRoot<ExtendableMessageEvent> {
let ev = box ExtendableMessageEvent {
event: ExtendableEvent::new_inherited(),
data: Heap::default(),
origin: origin,
lastEventId: lastEventId,
};
let ev = reflect_dom_object(ev, global, ExtendableMessageEventBinding::Wrap);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles, cancelable);
}
ev.data.set(data.get());
ev
}
pub fn Constructor(worker: &ServiceWorkerGlobalScope,
type_: DOMString,
init: RootedTraceableBox<ExtendableMessageEventBinding::ExtendableMessageEventInit>)
-> Fallible<DomRoot<ExtendableMessageEvent>> {
let global = worker.upcast::<GlobalScope>();
let ev = ExtendableMessageEvent::new(global,
Atom::from(type_),
init.parent.parent.bubbles,
init.parent.parent.cancelable,
init.data.handle(),
init.origin.clone().unwrap(),
init.lastEventId.clone().unwrap());
Ok(ev)
}
}
impl ExtendableMessageEvent {
pub fn dispatch_jsval(target: &EventTarget,
scope: &GlobalScope,
message: HandleValue) {
let Extendablemessageevent = ExtendableMessageEvent::new(
scope, atom!("message"), false, false, message,
DOMString::new(), DOMString::new());
Extendablemessageevent.upcast::<Event>().fire(target);
}
}
impl ExtendableMessageEventMethods for ExtendableMessageEvent {
#[allow(unsafe_code)]
// https://w3c.github.io/ServiceWorker/#extendablemessage-event-data-attribute
unsafe fn Data(&self, _cx: *mut JSContext) -> JSVal
|
// https://w3c.github.io/ServiceWorker/#extendablemessage-event-origin-attribute
fn Origin(&self) -> DOMString {
self.origin.clone()
}
// https://w3c.github.io/ServiceWorker/#extendablemessage-event-lasteventid-attribute
fn LastEventId(&self) -> DOMString {
self.lastEventId.clone()
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}
|
{
self.data.get()
}
|
identifier_body
|
extendablemessageevent.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::ExtendableMessageEventBinding;
use dom::bindings::codegen::Bindings::ExtendableMessageEventBinding::ExtendableMessageEventMethods;
use dom::bindings::error::Fallible;
use dom::bindings::inheritance::Castable;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::bindings::trace::RootedTraceableBox;
use dom::event::Event;
use dom::eventtarget::EventTarget;
use dom::extendableevent::ExtendableEvent;
use dom::globalscope::GlobalScope;
use dom::serviceworkerglobalscope::ServiceWorkerGlobalScope;
use dom_struct::dom_struct;
use js::jsapi::{HandleValue, Heap, JSContext};
use js::jsval::JSVal;
use servo_atoms::Atom;
#[dom_struct]
pub struct ExtendableMessageEvent {
event: ExtendableEvent,
data: Heap<JSVal>,
origin: DOMString,
lastEventId: DOMString,
}
impl ExtendableMessageEvent {
pub fn new(global: &GlobalScope, type_: Atom,
bubbles: bool, cancelable: bool,
data: HandleValue, origin: DOMString, lastEventId: DOMString)
-> DomRoot<ExtendableMessageEvent> {
let ev = box ExtendableMessageEvent {
event: ExtendableEvent::new_inherited(),
data: Heap::default(),
origin: origin,
lastEventId: lastEventId,
};
let ev = reflect_dom_object(ev, global, ExtendableMessageEventBinding::Wrap);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles, cancelable);
}
ev.data.set(data.get());
ev
}
pub fn Constructor(worker: &ServiceWorkerGlobalScope,
type_: DOMString,
init: RootedTraceableBox<ExtendableMessageEventBinding::ExtendableMessageEventInit>)
-> Fallible<DomRoot<ExtendableMessageEvent>> {
let global = worker.upcast::<GlobalScope>();
let ev = ExtendableMessageEvent::new(global,
Atom::from(type_),
init.parent.parent.bubbles,
init.parent.parent.cancelable,
init.data.handle(),
init.origin.clone().unwrap(),
init.lastEventId.clone().unwrap());
Ok(ev)
}
}
|
impl ExtendableMessageEvent {
pub fn dispatch_jsval(target: &EventTarget,
scope: &GlobalScope,
message: HandleValue) {
let Extendablemessageevent = ExtendableMessageEvent::new(
scope, atom!("message"), false, false, message,
DOMString::new(), DOMString::new());
Extendablemessageevent.upcast::<Event>().fire(target);
}
}
impl ExtendableMessageEventMethods for ExtendableMessageEvent {
#[allow(unsafe_code)]
// https://w3c.github.io/ServiceWorker/#extendablemessage-event-data-attribute
unsafe fn Data(&self, _cx: *mut JSContext) -> JSVal {
self.data.get()
}
// https://w3c.github.io/ServiceWorker/#extendablemessage-event-origin-attribute
fn Origin(&self) -> DOMString {
self.origin.clone()
}
// https://w3c.github.io/ServiceWorker/#extendablemessage-event-lasteventid-attribute
fn LastEventId(&self) -> DOMString {
self.lastEventId.clone()
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}
|
random_line_split
|
|
extendablemessageevent.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::ExtendableMessageEventBinding;
use dom::bindings::codegen::Bindings::ExtendableMessageEventBinding::ExtendableMessageEventMethods;
use dom::bindings::error::Fallible;
use dom::bindings::inheritance::Castable;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::bindings::trace::RootedTraceableBox;
use dom::event::Event;
use dom::eventtarget::EventTarget;
use dom::extendableevent::ExtendableEvent;
use dom::globalscope::GlobalScope;
use dom::serviceworkerglobalscope::ServiceWorkerGlobalScope;
use dom_struct::dom_struct;
use js::jsapi::{HandleValue, Heap, JSContext};
use js::jsval::JSVal;
use servo_atoms::Atom;
#[dom_struct]
pub struct ExtendableMessageEvent {
event: ExtendableEvent,
data: Heap<JSVal>,
origin: DOMString,
lastEventId: DOMString,
}
impl ExtendableMessageEvent {
pub fn new(global: &GlobalScope, type_: Atom,
bubbles: bool, cancelable: bool,
data: HandleValue, origin: DOMString, lastEventId: DOMString)
-> DomRoot<ExtendableMessageEvent> {
let ev = box ExtendableMessageEvent {
event: ExtendableEvent::new_inherited(),
data: Heap::default(),
origin: origin,
lastEventId: lastEventId,
};
let ev = reflect_dom_object(ev, global, ExtendableMessageEventBinding::Wrap);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles, cancelable);
}
ev.data.set(data.get());
ev
}
pub fn Constructor(worker: &ServiceWorkerGlobalScope,
type_: DOMString,
init: RootedTraceableBox<ExtendableMessageEventBinding::ExtendableMessageEventInit>)
-> Fallible<DomRoot<ExtendableMessageEvent>> {
let global = worker.upcast::<GlobalScope>();
let ev = ExtendableMessageEvent::new(global,
Atom::from(type_),
init.parent.parent.bubbles,
init.parent.parent.cancelable,
init.data.handle(),
init.origin.clone().unwrap(),
init.lastEventId.clone().unwrap());
Ok(ev)
}
}
impl ExtendableMessageEvent {
pub fn dispatch_jsval(target: &EventTarget,
scope: &GlobalScope,
message: HandleValue) {
let Extendablemessageevent = ExtendableMessageEvent::new(
scope, atom!("message"), false, false, message,
DOMString::new(), DOMString::new());
Extendablemessageevent.upcast::<Event>().fire(target);
}
}
impl ExtendableMessageEventMethods for ExtendableMessageEvent {
#[allow(unsafe_code)]
// https://w3c.github.io/ServiceWorker/#extendablemessage-event-data-attribute
unsafe fn Data(&self, _cx: *mut JSContext) -> JSVal {
self.data.get()
}
// https://w3c.github.io/ServiceWorker/#extendablemessage-event-origin-attribute
fn Origin(&self) -> DOMString {
self.origin.clone()
}
// https://w3c.github.io/ServiceWorker/#extendablemessage-event-lasteventid-attribute
fn
|
(&self) -> DOMString {
self.lastEventId.clone()
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}
|
LastEventId
|
identifier_name
|
constellation_msg.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The high-level interface from script to constellation. Using this abstract interface helps
//! reduce coupling between these two components.
use euclid::scale_factor::ScaleFactor;
use euclid::size::TypedSize2D;
use hyper::header::Headers;
use hyper::method::Method;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender, IpcSharedMemory};
use layers::geometry::DevicePixel;
use serde::{Deserialize, Serialize};
use std::cell::Cell;
use std::fmt;
use url::Url;
use util::geometry::{PagePx, ViewportPx};
use webdriver_msg::{LoadStatus, WebDriverScriptCommand};
use webrender_traits;
#[derive(Deserialize, Serialize)]
pub struct ConstellationChan<T: Deserialize + Serialize>(pub IpcSender<T>);
impl<T: Deserialize + Serialize> ConstellationChan<T> {
pub fn new() -> (IpcReceiver<T>, ConstellationChan<T>) {
let (chan, port) = ipc::channel().unwrap();
(port, ConstellationChan(chan))
}
}
impl<T: Serialize + Deserialize> Clone for ConstellationChan<T> {
fn clone(&self) -> ConstellationChan<T> {
ConstellationChan(self.0.clone())
|
pub type PanicMsg = (Option<PipelineId>, String, String);
#[derive(Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub struct WindowSizeData {
/// The size of the initial layout viewport, before parsing an
/// http://www.w3.org/TR/css-device-adapt/#initial-viewport
pub initial_viewport: TypedSize2D<ViewportPx, f32>,
/// The "viewing area" in page px. See `PagePx` documentation for details.
pub visible_viewport: TypedSize2D<PagePx, f32>,
/// The resolution of the window in dppx, not including any "pinch zoom" factor.
pub device_pixel_ratio: ScaleFactor<ViewportPx, DevicePixel, f32>,
}
#[derive(Deserialize, Eq, PartialEq, Serialize, Copy, Clone, HeapSizeOf)]
pub enum WindowSizeType {
Initial,
Resize,
}
#[derive(PartialEq, Eq, Copy, Clone, Debug, Deserialize, Serialize)]
pub enum KeyState {
Pressed,
Released,
Repeated,
}
//N.B. Based on the glutin key enum
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub enum Key {
Space,
Apostrophe,
Comma,
Minus,
Period,
Slash,
Num0,
Num1,
Num2,
Num3,
Num4,
Num5,
Num6,
Num7,
Num8,
Num9,
Semicolon,
Equal,
A,
B,
C,
D,
E,
F,
G,
H,
I,
J,
K,
L,
M,
N,
O,
P,
Q,
R,
S,
T,
U,
V,
W,
X,
Y,
Z,
LeftBracket,
Backslash,
RightBracket,
GraveAccent,
World1,
World2,
Escape,
Enter,
Tab,
Backspace,
Insert,
Delete,
Right,
Left,
Down,
Up,
PageUp,
PageDown,
Home,
End,
CapsLock,
ScrollLock,
NumLock,
PrintScreen,
Pause,
F1,
F2,
F3,
F4,
F5,
F6,
F7,
F8,
F9,
F10,
F11,
F12,
F13,
F14,
F15,
F16,
F17,
F18,
F19,
F20,
F21,
F22,
F23,
F24,
F25,
Kp0,
Kp1,
Kp2,
Kp3,
Kp4,
Kp5,
Kp6,
Kp7,
Kp8,
Kp9,
KpDecimal,
KpDivide,
KpMultiply,
KpSubtract,
KpAdd,
KpEnter,
KpEqual,
LeftShift,
LeftControl,
LeftAlt,
LeftSuper,
RightShift,
RightControl,
RightAlt,
RightSuper,
Menu,
NavigateBackward,
NavigateForward,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags KeyModifiers: u8 {
const NONE = 0x00,
const SHIFT = 0x01,
const CONTROL = 0x02,
const ALT = 0x04,
const SUPER = 0x08,
}
}
#[derive(Deserialize, Serialize)]
pub enum WebDriverCommandMsg {
LoadUrl(PipelineId, LoadData, IpcSender<LoadStatus>),
Refresh(PipelineId, IpcSender<LoadStatus>),
ScriptCommand(PipelineId, WebDriverScriptCommand),
SendKeys(PipelineId, Vec<(Key, KeyModifiers, KeyState)>),
TakeScreenshot(PipelineId, IpcSender<Option<Image>>),
}
#[derive(Clone, Copy, Deserialize, Eq, PartialEq, Serialize, HeapSizeOf)]
pub enum PixelFormat {
K8, // Luminance channel only
KA8, // Luminance + alpha
RGB8, // RGB, 8 bits per channel
RGBA8, // RGB + alpha, 8 bits per channel
}
#[derive(Clone, Deserialize, Eq, PartialEq, Serialize, HeapSizeOf)]
pub struct ImageMetadata {
pub width: u32,
pub height: u32,
}
#[derive(Clone, Deserialize, Serialize, HeapSizeOf)]
pub struct Image {
pub width: u32,
pub height: u32,
pub format: PixelFormat,
#[ignore_heap_size_of = "Defined in ipc-channel"]
pub bytes: IpcSharedMemory,
#[ignore_heap_size_of = "Defined in webrender_traits"]
pub id: Option<webrender_traits::ImageKey>,
}
/// Similar to net::resource_thread::LoadData
/// can be passed to LoadUrl to load a page with GET/POST
/// parameters or headers
#[derive(Clone, Deserialize, Serialize)]
pub struct LoadData {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub data: Option<Vec<u8>>,
pub referrer_policy: Option<ReferrerPolicy>,
pub referrer_url: Option<Url>,
}
impl LoadData {
pub fn new(url: Url, referrer_policy: Option<ReferrerPolicy>, referrer_url: Option<Url>) -> LoadData {
LoadData {
url: url,
method: Method::Get,
headers: Headers::new(),
data: None,
referrer_policy: referrer_policy,
referrer_url: referrer_url,
}
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub enum NavigationDirection {
Forward,
Back,
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct FrameId(pub u32);
/// Each pipeline ID needs to be unique. However, it also needs to be possible to
/// generate the pipeline ID from an iframe element (this simplifies a lot of other
/// code that makes use of pipeline IDs).
///
/// To achieve this, each pipeline index belongs to a particular namespace. There is
/// a namespace for the constellation thread, and also one for every script thread.
/// This allows pipeline IDs to be generated by any of those threads without conflicting
/// with pipeline IDs created by other script threads or the constellation. The
/// constellation is the only code that is responsible for creating new *namespaces*.
/// This ensures that namespaces are always unique, even when using multi-process mode.
///
/// It may help conceptually to think of the namespace ID as an identifier for the
/// thread that created this pipeline ID - however this is really an implementation
/// detail so shouldn't be relied upon in code logic. It's best to think of the
/// pipeline ID as a simple unique identifier that doesn't convey any more information.
#[derive(Clone, Copy)]
pub struct PipelineNamespace {
id: PipelineNamespaceId,
next_index: PipelineIndex,
}
impl PipelineNamespace {
pub fn install(namespace_id: PipelineNamespaceId) {
PIPELINE_NAMESPACE.with(|tls| {
assert!(tls.get().is_none());
tls.set(Some(PipelineNamespace {
id: namespace_id,
next_index: PipelineIndex(0),
}));
});
}
fn next(&mut self) -> PipelineId {
let pipeline_id = PipelineId {
namespace_id: self.id,
index: self.next_index,
};
let PipelineIndex(current_index) = self.next_index;
self.next_index = PipelineIndex(current_index + 1);
pipeline_id
}
}
thread_local!(pub static PIPELINE_NAMESPACE: Cell<Option<PipelineNamespace>> = Cell::new(None));
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineNamespaceId(pub u32);
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineIndex(pub u32);
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineId {
pub namespace_id: PipelineNamespaceId,
pub index: PipelineIndex
}
impl PipelineId {
pub fn new() -> PipelineId {
PIPELINE_NAMESPACE.with(|tls| {
let mut namespace = tls.get().expect("No namespace set for this thread!");
let new_pipeline_id = namespace.next();
tls.set(Some(namespace));
new_pipeline_id
})
}
// TODO(gw): This should be removed. It's only required because of the code
// that uses it in the devtools lib.rs file (which itself is a TODO). Once
// that is fixed, this should be removed. It also relies on the first
// call to PipelineId::new() returning (0,0), which is checked with an
// assert in handle_init_load().
pub fn fake_root_pipeline_id() -> PipelineId {
PipelineId {
namespace_id: PipelineNamespaceId(0),
index: PipelineIndex(0),
}
}
}
impl fmt::Display for PipelineId {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let PipelineIndex(index) = self.index;
write!(fmt, "({},{})", namespace_id, index)
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct SubpageId(pub u32);
pub trait ConvertPipelineIdToWebRender {
fn to_webrender(&self) -> webrender_traits::PipelineId;
}
pub trait ConvertPipelineIdFromWebRender {
fn from_webrender(&self) -> PipelineId;
}
impl ConvertPipelineIdToWebRender for PipelineId {
fn to_webrender(&self) -> webrender_traits::PipelineId {
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let PipelineIndex(index) = self.index;
webrender_traits::PipelineId(namespace_id, index)
}
}
impl ConvertPipelineIdFromWebRender for webrender_traits::PipelineId {
fn from_webrender(&self) -> PipelineId {
PipelineId {
namespace_id: PipelineNamespaceId(self.0),
index: PipelineIndex(self.1),
}
}
}
/// [Policies](https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-states)
/// for providing a referrer header for a request
#[derive(HeapSizeOf, Clone, Deserialize, Serialize)]
pub enum ReferrerPolicy {
NoReferrer,
NoRefWhenDowngrade,
OriginOnly,
OriginWhenCrossOrigin,
UnsafeUrl,
}
|
}
}
|
random_line_split
|
constellation_msg.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The high-level interface from script to constellation. Using this abstract interface helps
//! reduce coupling between these two components.
use euclid::scale_factor::ScaleFactor;
use euclid::size::TypedSize2D;
use hyper::header::Headers;
use hyper::method::Method;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender, IpcSharedMemory};
use layers::geometry::DevicePixel;
use serde::{Deserialize, Serialize};
use std::cell::Cell;
use std::fmt;
use url::Url;
use util::geometry::{PagePx, ViewportPx};
use webdriver_msg::{LoadStatus, WebDriverScriptCommand};
use webrender_traits;
#[derive(Deserialize, Serialize)]
pub struct ConstellationChan<T: Deserialize + Serialize>(pub IpcSender<T>);
impl<T: Deserialize + Serialize> ConstellationChan<T> {
pub fn new() -> (IpcReceiver<T>, ConstellationChan<T>) {
let (chan, port) = ipc::channel().unwrap();
(port, ConstellationChan(chan))
}
}
impl<T: Serialize + Deserialize> Clone for ConstellationChan<T> {
fn clone(&self) -> ConstellationChan<T> {
ConstellationChan(self.0.clone())
}
}
pub type PanicMsg = (Option<PipelineId>, String, String);
#[derive(Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub struct WindowSizeData {
/// The size of the initial layout viewport, before parsing an
/// http://www.w3.org/TR/css-device-adapt/#initial-viewport
pub initial_viewport: TypedSize2D<ViewportPx, f32>,
/// The "viewing area" in page px. See `PagePx` documentation for details.
pub visible_viewport: TypedSize2D<PagePx, f32>,
/// The resolution of the window in dppx, not including any "pinch zoom" factor.
pub device_pixel_ratio: ScaleFactor<ViewportPx, DevicePixel, f32>,
}
#[derive(Deserialize, Eq, PartialEq, Serialize, Copy, Clone, HeapSizeOf)]
pub enum WindowSizeType {
Initial,
Resize,
}
#[derive(PartialEq, Eq, Copy, Clone, Debug, Deserialize, Serialize)]
pub enum KeyState {
Pressed,
Released,
Repeated,
}
//N.B. Based on the glutin key enum
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub enum Key {
Space,
Apostrophe,
Comma,
Minus,
Period,
Slash,
Num0,
Num1,
Num2,
Num3,
Num4,
Num5,
Num6,
Num7,
Num8,
Num9,
Semicolon,
Equal,
A,
B,
C,
D,
E,
F,
G,
H,
I,
J,
K,
L,
M,
N,
O,
P,
Q,
R,
S,
T,
U,
V,
W,
X,
Y,
Z,
LeftBracket,
Backslash,
RightBracket,
GraveAccent,
World1,
World2,
Escape,
Enter,
Tab,
Backspace,
Insert,
Delete,
Right,
Left,
Down,
Up,
PageUp,
PageDown,
Home,
End,
CapsLock,
ScrollLock,
NumLock,
PrintScreen,
Pause,
F1,
F2,
F3,
F4,
F5,
F6,
F7,
F8,
F9,
F10,
F11,
F12,
F13,
F14,
F15,
F16,
F17,
F18,
F19,
F20,
F21,
F22,
F23,
F24,
F25,
Kp0,
Kp1,
Kp2,
Kp3,
Kp4,
Kp5,
Kp6,
Kp7,
Kp8,
Kp9,
KpDecimal,
KpDivide,
KpMultiply,
KpSubtract,
KpAdd,
KpEnter,
KpEqual,
LeftShift,
LeftControl,
LeftAlt,
LeftSuper,
RightShift,
RightControl,
RightAlt,
RightSuper,
Menu,
NavigateBackward,
NavigateForward,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags KeyModifiers: u8 {
const NONE = 0x00,
const SHIFT = 0x01,
const CONTROL = 0x02,
const ALT = 0x04,
const SUPER = 0x08,
}
}
#[derive(Deserialize, Serialize)]
pub enum WebDriverCommandMsg {
LoadUrl(PipelineId, LoadData, IpcSender<LoadStatus>),
Refresh(PipelineId, IpcSender<LoadStatus>),
ScriptCommand(PipelineId, WebDriverScriptCommand),
SendKeys(PipelineId, Vec<(Key, KeyModifiers, KeyState)>),
TakeScreenshot(PipelineId, IpcSender<Option<Image>>),
}
#[derive(Clone, Copy, Deserialize, Eq, PartialEq, Serialize, HeapSizeOf)]
pub enum PixelFormat {
K8, // Luminance channel only
KA8, // Luminance + alpha
RGB8, // RGB, 8 bits per channel
RGBA8, // RGB + alpha, 8 bits per channel
}
#[derive(Clone, Deserialize, Eq, PartialEq, Serialize, HeapSizeOf)]
pub struct ImageMetadata {
pub width: u32,
pub height: u32,
}
#[derive(Clone, Deserialize, Serialize, HeapSizeOf)]
pub struct Image {
pub width: u32,
pub height: u32,
pub format: PixelFormat,
#[ignore_heap_size_of = "Defined in ipc-channel"]
pub bytes: IpcSharedMemory,
#[ignore_heap_size_of = "Defined in webrender_traits"]
pub id: Option<webrender_traits::ImageKey>,
}
/// Similar to net::resource_thread::LoadData
/// can be passed to LoadUrl to load a page with GET/POST
/// parameters or headers
#[derive(Clone, Deserialize, Serialize)]
pub struct LoadData {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub data: Option<Vec<u8>>,
pub referrer_policy: Option<ReferrerPolicy>,
pub referrer_url: Option<Url>,
}
impl LoadData {
pub fn new(url: Url, referrer_policy: Option<ReferrerPolicy>, referrer_url: Option<Url>) -> LoadData {
LoadData {
url: url,
method: Method::Get,
headers: Headers::new(),
data: None,
referrer_policy: referrer_policy,
referrer_url: referrer_url,
}
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub enum NavigationDirection {
Forward,
Back,
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct FrameId(pub u32);
/// Each pipeline ID needs to be unique. However, it also needs to be possible to
/// generate the pipeline ID from an iframe element (this simplifies a lot of other
/// code that makes use of pipeline IDs).
///
/// To achieve this, each pipeline index belongs to a particular namespace. There is
/// a namespace for the constellation thread, and also one for every script thread.
/// This allows pipeline IDs to be generated by any of those threads without conflicting
/// with pipeline IDs created by other script threads or the constellation. The
/// constellation is the only code that is responsible for creating new *namespaces*.
/// This ensures that namespaces are always unique, even when using multi-process mode.
///
/// It may help conceptually to think of the namespace ID as an identifier for the
/// thread that created this pipeline ID - however this is really an implementation
/// detail so shouldn't be relied upon in code logic. It's best to think of the
/// pipeline ID as a simple unique identifier that doesn't convey any more information.
#[derive(Clone, Copy)]
pub struct PipelineNamespace {
id: PipelineNamespaceId,
next_index: PipelineIndex,
}
impl PipelineNamespace {
pub fn install(namespace_id: PipelineNamespaceId) {
PIPELINE_NAMESPACE.with(|tls| {
assert!(tls.get().is_none());
tls.set(Some(PipelineNamespace {
id: namespace_id,
next_index: PipelineIndex(0),
}));
});
}
fn next(&mut self) -> PipelineId {
let pipeline_id = PipelineId {
namespace_id: self.id,
index: self.next_index,
};
let PipelineIndex(current_index) = self.next_index;
self.next_index = PipelineIndex(current_index + 1);
pipeline_id
}
}
thread_local!(pub static PIPELINE_NAMESPACE: Cell<Option<PipelineNamespace>> = Cell::new(None));
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineNamespaceId(pub u32);
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineIndex(pub u32);
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineId {
pub namespace_id: PipelineNamespaceId,
pub index: PipelineIndex
}
impl PipelineId {
    /// Allocate the next pipeline id from the namespace installed on the
    /// current thread.
    ///
    /// Panics (via `expect`) if `PipelineNamespace::install` has not been
    /// called on this thread first.
    pub fn new() -> PipelineId {
        PIPELINE_NAMESPACE.with(|tls| {
            // The Cell stores the namespace by value: take a copy, advance
            // its counter, then write the updated copy back.
            let mut namespace = tls.get().expect("No namespace set for this thread!");
            let new_pipeline_id = namespace.next();
            tls.set(Some(namespace));
            new_pipeline_id
        })
    }
    // TODO(gw): This should be removed. It's only required because of the code
    // that uses it in the devtools lib.rs file (which itself is a TODO). Once
    // that is fixed, this should be removed. It also relies on the first
    // call to PipelineId::new() returning (0,0), which is checked with an
    // assert in handle_init_load().
    /// Returns the constant id (0, 0) — see the TODO above; do not use in new code.
    pub fn fake_root_pipeline_id() -> PipelineId {
        PipelineId {
            namespace_id: PipelineNamespaceId(0),
            index: PipelineIndex(0),
        }
    }
}
impl fmt::Display for PipelineId {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let PipelineIndex(index) = self.index;
write!(fmt, "({},{})", namespace_id, index)
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct SubpageId(pub u32);
pub trait ConvertPipelineIdToWebRender {
fn to_webrender(&self) -> webrender_traits::PipelineId;
}
pub trait ConvertPipelineIdFromWebRender {
fn from_webrender(&self) -> PipelineId;
}
impl ConvertPipelineIdToWebRender for PipelineId {
fn to_webrender(&self) -> webrender_traits::PipelineId
|
}
impl ConvertPipelineIdFromWebRender for webrender_traits::PipelineId {
fn from_webrender(&self) -> PipelineId {
PipelineId {
namespace_id: PipelineNamespaceId(self.0),
index: PipelineIndex(self.1),
}
}
}
/// [Policies](https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-states)
/// for providing a referrer header for a request
#[derive(HeapSizeOf, Clone, Deserialize, Serialize)]
pub enum ReferrerPolicy {
NoReferrer,
NoRefWhenDowngrade,
OriginOnly,
OriginWhenCrossOrigin,
UnsafeUrl,
}
|
{
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let PipelineIndex(index) = self.index;
webrender_traits::PipelineId(namespace_id, index)
}
|
identifier_body
|
constellation_msg.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The high-level interface from script to constellation. Using this abstract interface helps
//! reduce coupling between these two components.
use euclid::scale_factor::ScaleFactor;
use euclid::size::TypedSize2D;
use hyper::header::Headers;
use hyper::method::Method;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender, IpcSharedMemory};
use layers::geometry::DevicePixel;
use serde::{Deserialize, Serialize};
use std::cell::Cell;
use std::fmt;
use url::Url;
use util::geometry::{PagePx, ViewportPx};
use webdriver_msg::{LoadStatus, WebDriverScriptCommand};
use webrender_traits;
/// A serializable IPC sender used for script ↔ constellation communication.
#[derive(Deserialize, Serialize)]
pub struct ConstellationChan<T: Deserialize + Serialize>(pub IpcSender<T>);
impl<T: Deserialize + Serialize> ConstellationChan<T> {
    /// Create a fresh IPC channel, returning the receiving end and the
    /// wrapped sending end. Panics if the OS channel cannot be created.
    pub fn new() -> (IpcReceiver<T>, ConstellationChan<T>) {
        let (chan, port) = ipc::channel().unwrap();
        (port, ConstellationChan(chan))
    }
}
// Manual Clone: cloning only duplicates the sender handle, so `T` itself
// does not need to be `Clone` (a derive would add that bound).
impl<T: Serialize + Deserialize> Clone for ConstellationChan<T> {
    fn clone(&self) -> ConstellationChan<T> {
        ConstellationChan(self.0.clone())
    }
}
// Panic report: (offending pipeline if known, plus two strings — presumably
// message and backtrace; confirm at the send sites).
pub type PanicMsg = (Option<PipelineId>, String, String);
/// Viewport and resolution information for the compositor window.
#[derive(Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub struct WindowSizeData {
    /// The size of the initial layout viewport, before parsing an
    /// http://www.w3.org/TR/css-device-adapt/#initial-viewport
    pub initial_viewport: TypedSize2D<ViewportPx, f32>,
    /// The "viewing area" in page px. See `PagePx` documentation for details.
    pub visible_viewport: TypedSize2D<PagePx, f32>,
    /// The resolution of the window in dppx, not including any "pinch zoom" factor.
    pub device_pixel_ratio: ScaleFactor<ViewportPx, DevicePixel, f32>,
}
/// Distinguishes the first window-size notification from later resizes.
#[derive(Deserialize, Eq, PartialEq, Serialize, Copy, Clone, HeapSizeOf)]
pub enum WindowSizeType {
    /// First size delivered for the window.
    Initial,
    /// A subsequent resize.
    Resize,
}
/// The transition state of a key event.
#[derive(PartialEq, Eq, Copy, Clone, Debug, Deserialize, Serialize)]
pub enum KeyState {
    Pressed,
    Released,
    Repeated,
}
//N.B. Based on the glutin key enum
/// Identifies a physical key on the keyboard, independent of modifier state.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub enum Key {
    // Printable / main-block keys
    Space,
    Apostrophe,
    Comma,
    Minus,
    Period,
    Slash,
    Num0,
    Num1,
    Num2,
    Num3,
    Num4,
    Num5,
    Num6,
    Num7,
    Num8,
    Num9,
    Semicolon,
    Equal,
    A,
    B,
    C,
    D,
    E,
    F,
    G,
    H,
    I,
    J,
    K,
    L,
    M,
    N,
    O,
    P,
    Q,
    R,
    S,
    T,
    U,
    V,
    W,
    X,
    Y,
    Z,
    LeftBracket,
    Backslash,
    RightBracket,
    GraveAccent,
    World1,
    World2,
    // Editing and navigation keys
    Escape,
    Enter,
    Tab,
    Backspace,
    Insert,
    Delete,
    Right,
    Left,
    Down,
    Up,
    PageUp,
    PageDown,
    Home,
    End,
    // Lock and system keys
    CapsLock,
    ScrollLock,
    NumLock,
    PrintScreen,
    Pause,
    // Function keys
    F1,
    F2,
    F3,
    F4,
    F5,
    F6,
    F7,
    F8,
    F9,
    F10,
    F11,
    F12,
    F13,
    F14,
    F15,
    F16,
    F17,
    F18,
    F19,
    F20,
    F21,
    F22,
    F23,
    F24,
    F25,
    // Keypad
    Kp0,
    Kp1,
    Kp2,
    Kp3,
    Kp4,
    Kp5,
    Kp6,
    Kp7,
    Kp8,
    Kp9,
    KpDecimal,
    KpDivide,
    KpMultiply,
    KpSubtract,
    KpAdd,
    KpEnter,
    KpEqual,
    // Modifier keys
    LeftShift,
    LeftControl,
    LeftAlt,
    LeftSuper,
    RightShift,
    RightControl,
    RightAlt,
    RightSuper,
    Menu,
    // Browser navigation keys
    NavigateBackward,
    NavigateForward,
}
// Bitmask of modifier keys held during a key event.
bitflags! {
    #[derive(Deserialize, Serialize)]
    flags KeyModifiers: u8 {
        const NONE = 0x00,
        const SHIFT = 0x01,
        const CONTROL = 0x02,
        const ALT = 0x04,
        const SUPER = 0x08,
    }
}
/// Commands sent from the WebDriver server to the constellation, each
/// targeting a specific pipeline.
#[derive(Deserialize, Serialize)]
pub enum WebDriverCommandMsg {
    /// Navigate the pipeline to a URL; the sender reports load completion.
    LoadUrl(PipelineId, LoadData, IpcSender<LoadStatus>),
    /// Reload the pipeline; the sender reports load completion.
    Refresh(PipelineId, IpcSender<LoadStatus>),
    /// Run a script-thread command in the pipeline.
    ScriptCommand(PipelineId, WebDriverScriptCommand),
    /// Deliver a sequence of key events to the pipeline.
    SendKeys(PipelineId, Vec<(Key, KeyModifiers, KeyState)>),
    /// Capture the rendered output; `None` if no screenshot is available.
    TakeScreenshot(PipelineId, IpcSender<Option<Image>>),
}
/// Channel layout of raster image pixel data.
#[derive(Clone, Copy, Deserialize, Eq, PartialEq, Serialize, HeapSizeOf)]
pub enum PixelFormat {
    K8,         // Luminance channel only
    KA8,        // Luminance + alpha
    RGB8,       // RGB, 8 bits per channel
    RGBA8,      // RGB + alpha, 8 bits per channel
}
/// Image dimensions only, without pixel data.
#[derive(Clone, Deserialize, Eq, PartialEq, Serialize, HeapSizeOf)]
pub struct ImageMetadata {
    pub width: u32,
    pub height: u32,
}
/// A decoded raster image: dimensions, pixel format, and the raw bytes
/// in shared memory so it can cross process boundaries cheaply.
#[derive(Clone, Deserialize, Serialize, HeapSizeOf)]
pub struct Image {
    pub width: u32,
    pub height: u32,
    pub format: PixelFormat,
    #[ignore_heap_size_of = "Defined in ipc-channel"]
    pub bytes: IpcSharedMemory,
    // WebRender image key, if the image has been registered with WebRender.
    #[ignore_heap_size_of = "Defined in webrender_traits"]
    pub id: Option<webrender_traits::ImageKey>,
}
/// Similar to net::resource_thread::LoadData
/// can be passed to LoadUrl to load a page with GET/POST
/// parameters or headers
#[derive(Clone, Deserialize, Serialize)]
pub struct LoadData {
    pub url: Url,
    pub method: Method,
    pub headers: Headers,
    // Optional request body (e.g. for POST).
    pub data: Option<Vec<u8>>,
    // Referrer information forwarded with the request.
    pub referrer_policy: Option<ReferrerPolicy>,
    pub referrer_url: Option<Url>,
}
impl LoadData {
    /// Build a `LoadData` describing a plain GET navigation to `url`, with
    /// empty headers, no request body, and the given referrer information.
    pub fn new(url: Url, referrer_policy: Option<ReferrerPolicy>, referrer_url: Option<Url>) -> LoadData {
        // Callers mutate headers/data afterwards if the navigation needs them.
        let empty_headers = Headers::new();
        LoadData {
            method: Method::Get,
            headers: empty_headers,
            data: None,
            referrer_policy: referrer_policy,
            referrer_url: referrer_url,
            url: url,
        }
    }
}
/// Direction of a session-history traversal.
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub enum NavigationDirection {
    Forward,
    Back,
}
/// Unique identifier for a browsing-context frame.
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct FrameId(pub u32);
/// Each pipeline ID needs to be unique. However, it also needs to be possible to
/// generate the pipeline ID from an iframe element (this simplifies a lot of other
/// code that makes use of pipeline IDs).
///
/// To achieve this, each pipeline index belongs to a particular namespace. There is
/// a namespace for the constellation thread, and also one for every script thread.
/// This allows pipeline IDs to be generated by any of those threads without conflicting
/// with pipeline IDs created by other script threads or the constellation. The
/// constellation is the only code that is responsible for creating new *namespaces*.
/// This ensures that namespaces are always unique, even when using multi-process mode.
///
/// It may help conceptually to think of the namespace ID as an identifier for the
/// thread that created this pipeline ID - however this is really an implementation
/// detail so shouldn't be relied upon in code logic. It's best to think of the
/// pipeline ID as a simple unique identifier that doesn't convey any more information.
#[derive(Clone, Copy)]
pub struct PipelineNamespace {
    /// The namespace within which this counter allocates pipeline indices.
    id: PipelineNamespaceId,
    /// The next unused index; advanced by `next()`.
    next_index: PipelineIndex,
}
impl PipelineNamespace {
    /// Register a pipeline-id namespace for the current thread.
    ///
    /// Panics if this thread already has a namespace installed.
    pub fn install(namespace_id: PipelineNamespaceId) {
        PIPELINE_NAMESPACE.with(|tls| {
            assert!(tls.get().is_none());
            let fresh = PipelineNamespace {
                id: namespace_id,
                next_index: PipelineIndex(0),
            };
            tls.set(Some(fresh));
        });
    }
    /// Produce the next `PipelineId` in this namespace, advancing the counter.
    fn next(&mut self) -> PipelineId {
        let index = self.next_index;
        self.next_index = PipelineIndex(index.0 + 1);
        PipelineId {
            namespace_id: self.id,
            index: index,
        }
    }
}
// Per-thread slot for the pipeline-id namespace; populated by
// PipelineNamespace::install and consumed by PipelineId::new.
thread_local!(pub static PIPELINE_NAMESPACE: Cell<Option<PipelineNamespace>> = Cell::new(None));
/// Identifies which namespace (roughly: which thread) allocated a pipeline id.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineNamespaceId(pub u32);
/// Monotonically increasing index within a namespace.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineIndex(pub u32);
/// Globally unique pipeline identifier: a (namespace, index) pair.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineId {
    pub namespace_id: PipelineNamespaceId,
    pub index: PipelineIndex
}
impl PipelineId {
pub fn new() -> PipelineId {
PIPELINE_NAMESPACE.with(|tls| {
let mut namespace = tls.get().expect("No namespace set for this thread!");
let new_pipeline_id = namespace.next();
tls.set(Some(namespace));
new_pipeline_id
})
}
// TODO(gw): This should be removed. It's only required because of the code
// that uses it in the devtools lib.rs file (which itself is a TODO). Once
// that is fixed, this should be removed. It also relies on the first
// call to PipelineId::new() returning (0,0), which is checked with an
// assert in handle_init_load().
pub fn
|
() -> PipelineId {
PipelineId {
namespace_id: PipelineNamespaceId(0),
index: PipelineIndex(0),
}
}
}
impl fmt::Display for PipelineId {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let PipelineIndex(index) = self.index;
write!(fmt, "({},{})", namespace_id, index)
}
}
/// Identifier for a subpage (iframe) within its parent pipeline.
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct SubpageId(pub u32);
/// Conversion from Servo's `PipelineId` into WebRender's pipeline id type.
pub trait ConvertPipelineIdToWebRender {
    fn to_webrender(&self) -> webrender_traits::PipelineId;
}
/// Conversion from WebRender's pipeline id type back into Servo's `PipelineId`.
pub trait ConvertPipelineIdFromWebRender {
    fn from_webrender(&self) -> PipelineId;
}
impl ConvertPipelineIdToWebRender for PipelineId {
    // Unwrap both newtypes and hand the raw u32 pair to WebRender.
    fn to_webrender(&self) -> webrender_traits::PipelineId {
        let PipelineNamespaceId(namespace_id) = self.namespace_id;
        let PipelineIndex(index) = self.index;
        webrender_traits::PipelineId(namespace_id, index)
    }
}
impl ConvertPipelineIdFromWebRender for webrender_traits::PipelineId {
    // Re-wrap the raw u32 pair in Servo's newtypes.
    fn from_webrender(&self) -> PipelineId {
        PipelineId {
            namespace_id: PipelineNamespaceId(self.0),
            index: PipelineIndex(self.1),
        }
    }
}
/// [Policies](https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-states)
/// for providing a referrer header for a request
#[derive(HeapSizeOf, Clone, Deserialize, Serialize)]
pub enum ReferrerPolicy {
    /// Never send a referrer.
    NoReferrer,
    /// Send the referrer except when downgrading (see spec state of the same name).
    NoRefWhenDowngrade,
    /// Send only the origin as the referrer.
    OriginOnly,
    /// Send the full URL same-origin, only the origin cross-origin.
    OriginWhenCrossOrigin,
    /// Always send the full URL as the referrer.
    UnsafeUrl,
}
|
fake_root_pipeline_id
|
identifier_name
|
ioapiset.rs
|
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// All files in the project carrying such notice may not be copied, modified, or distributed
// except according to those terms.
use shared::basetsd::{PULONG_PTR, ULONG_PTR};
use shared::minwindef::{BOOL, DWORD, LPDWORD, LPVOID, PULONG, ULONG};
use um::minwinbase::{LPOVERLAPPED, LPOVERLAPPED_ENTRY};
use um::winnt::HANDLE;
extern "system" {
    // Creates an I/O completion port, or associates FileHandle with an
    // existing one. Returns NULL on failure.
    pub fn CreateIoCompletionPort(
        FileHandle: HANDLE,
        ExistingCompletionPort: HANDLE,
        CompletionKey: ULONG_PTR,
        NumberOfConcurrentThreads: DWORD,
    ) -> HANDLE;
    // Dequeues a single completion packet, blocking up to dwMilliseconds.
    pub fn GetQueuedCompletionStatus(
        CompletionPort: HANDLE,
        lpNumberOfBytesTransferred: LPDWORD,
        lpCompletionKey: PULONG_PTR,
        lpOverlapped: *mut LPOVERLAPPED,
        dwMilliseconds: DWORD,
    ) -> BOOL;
    // Dequeues up to ulCount completion packets in one call.
    pub fn GetQueuedCompletionStatusEx(
        CompletionPort: HANDLE,
        lpCompletionPortEntries: LPOVERLAPPED_ENTRY,
        ulCount: ULONG,
        ulNumEntriesRemoved: PULONG,
        dwMilliseconds: DWORD,
        fAlertable: BOOL,
    ) -> BOOL;
    // Posts a caller-constructed completion packet to the port.
    pub fn PostQueuedCompletionStatus(
        CompletionPort: HANDLE,
        dwNumberOfBytesTransferred: DWORD,
        dwCompletionKey: ULONG_PTR,
        lpOverlapped: LPOVERLAPPED,
    ) -> BOOL;
    // Sends a control code directly to a device driver.
    pub fn DeviceIoControl(
        hDevice: HANDLE,
        dwIoControlCode: DWORD,
        lpInBuffer: LPVOID,
        nInBufferSize: DWORD,
        lpOutBuffer: LPVOID,
        nOutBufferSize: DWORD,
        lpBytesReturned: LPDWORD,
        lpOverlapped: LPOVERLAPPED,
    ) -> BOOL;
    // Retrieves the result of an overlapped operation, optionally blocking.
    pub fn GetOverlappedResult(
        hFile: HANDLE,
        lpOverlapped: LPOVERLAPPED,
        lpNumberOfBytesTransferred: LPDWORD,
        bWait: BOOL,
    ) -> BOOL;
pub fn CancelIoEx(
hFile: HANDLE,
|
pub fn GetOverlappedResultEx(
hFile: HANDLE,
lpOverlapped: LPOVERLAPPED,
lpNumberOfBytesTransferred: LPDWORD,
dwMilliseconds: DWORD,
bAlertable: BOOL,
) -> BOOL;
pub fn CancelSynchronousIo(
hThread: HANDLE,
) -> BOOL;
}
|
lpOverlapped: LPOVERLAPPED,
) -> BOOL;
pub fn CancelIo(
hFile: HANDLE,
) -> BOOL;
|
random_line_split
|
select_with_weak.rs
|
use std::pin::Pin;
use std::task::Context;
use std::task::Poll;
use futures::stream::{Fuse, Stream};
pub trait SelectWithWeakExt: Stream {
fn select_with_weak<S>(self, other: S) -> SelectWithWeak<Self, S>
where
S: Stream<Item = Self::Item>,
Self: Sized;
}
impl<T> SelectWithWeakExt for T
where
T: Stream,
{
fn select_with_weak<S>(self, other: S) -> SelectWithWeak<Self, S>
where
S: Stream<Item = Self::Item>,
Self: Sized,
|
}
/// An adapter for merging the output of two streams.
///
/// The merged stream produces items from either of the underlying streams as
/// they become available, and the streams are polled in a round-robin fashion.
/// Errors, however, are not merged: you get at most one error at a time.
///
/// Finishes when strong stream finishes
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct SelectWithWeak<S1, S2> {
strong: Fuse<S1>,
weak: Fuse<S2>,
use_strong: bool,
}
fn new<S1, S2>(stream1: S1, stream2: S2) -> SelectWithWeak<S1, S2>
where
S1: Stream,
S2: Stream<Item = S1::Item>,
{
use futures::StreamExt;
SelectWithWeak {
strong: stream1.fuse(),
weak: stream2.fuse(),
use_strong: false,
}
}
impl<S1, S2> Stream for SelectWithWeak<S1, S2>
where
S1: Stream + Unpin,
S2: Stream<Item = S1::Item> + Unpin,
{
type Item = S1::Item;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let mut this = Pin::into_inner(self);
let mut checked_strong = false;
loop {
if this.use_strong {
match Pin::new(&mut this.strong).poll_next(cx) {
Poll::Ready(Some(item)) => {
this.use_strong = false;
return Poll::Ready(Some(item));
}
Poll::Ready(None) => return Poll::Ready(None),
Poll::Pending => {
if!checked_strong {
this.use_strong = false;
} else {
return Poll::Pending;
}
}
}
checked_strong = true;
} else {
this.use_strong = true;
match Pin::new(&mut this.weak).poll_next(cx) {
Poll::Ready(Some(item)) => return Poll::Ready(Some(item)),
Poll::Ready(None) | Poll::Pending => (),
}
}
}
}
}
|
{
new(self, other)
}
|
identifier_body
|
select_with_weak.rs
|
use std::pin::Pin;
use std::task::Context;
use std::task::Poll;
use futures::stream::{Fuse, Stream};
/// Extension trait adding `select_with_weak`, which merges `self` (the
/// "strong" stream) with `other` (the "weak" stream); the merged stream
/// finishes when the strong stream finishes.
pub trait SelectWithWeakExt: Stream {
    fn select_with_weak<S>(self, other: S) -> SelectWithWeak<Self, S>
    where
        S: Stream<Item = Self::Item>,
        Self: Sized;
}
impl<T> SelectWithWeakExt for T
where
T: Stream,
{
fn
|
<S>(self, other: S) -> SelectWithWeak<Self, S>
where
S: Stream<Item = Self::Item>,
Self: Sized,
{
new(self, other)
}
}
/// An adapter for merging the output of two streams.
///
/// The merged stream produces items from either of the underlying streams as
/// they become available, and the streams are polled in a round-robin fashion.
/// Errors, however, are not merged: you get at most one error at a time.
///
/// Finishes when strong stream finishes
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct SelectWithWeak<S1, S2> {
    /// The stream whose termination ends the merged stream.
    strong: Fuse<S1>,
    /// The secondary stream; its termination does not end the merged stream.
    weak: Fuse<S2>,
    /// Round-robin toggle: which stream `poll_next` polls first.
    use_strong: bool,
}
/// Build the merged stream, fusing both inputs so that polling after
/// completion is safe.
fn new<S1, S2>(stream1: S1, stream2: S2) -> SelectWithWeak<S1, S2>
where
    S1: Stream,
    S2: Stream<Item = S1::Item>,
{
    use futures::StreamExt;
    let strong = stream1.fuse();
    let weak = stream2.fuse();
    SelectWithWeak {
        strong,
        weak,
        use_strong: false,
    }
}
impl<S1, S2> Stream for SelectWithWeak<S1, S2>
where
    S1: Stream + Unpin,
    S2: Stream<Item = S1::Item> + Unpin,
{
    type Item = S1::Item;
    // Round-robin between the two streams. The strong stream's end ends the
    // merged stream; the weak stream's end (or Pending) just skips its turn.
    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let mut this = Pin::into_inner(self);
        // Tracks whether the strong stream has already returned Pending this
        // call; only then may we report Pending to the caller.
        let mut checked_strong = false;
        loop {
            if this.use_strong {
                match Pin::new(&mut this.strong).poll_next(cx) {
                    Poll::Ready(Some(item)) => {
                        // Yield and give the weak stream the next turn.
                        this.use_strong = false;
                        return Poll::Ready(Some(item));
                    }
                    // Strong stream finished: the merged stream finishes too.
                    Poll::Ready(None) => return Poll::Ready(None),
                    Poll::Pending => {
                        if!checked_strong {
                            // First Pending from strong: try the weak side first.
                            this.use_strong = false;
                        } else {
                            // Both sides pending this call: report Pending.
                            return Poll::Pending;
                        }
                    }
                }
                checked_strong = true;
            } else {
                this.use_strong = true;
                match Pin::new(&mut this.weak).poll_next(cx) {
                    Poll::Ready(Some(item)) => return Poll::Ready(Some(item)),
                    // Weak finished or pending: loop back to the strong side.
                    Poll::Ready(None) | Poll::Pending => (),
                }
            }
        }
    }
}
|
select_with_weak
|
identifier_name
|
select_with_weak.rs
|
use std::pin::Pin;
use std::task::Context;
use std::task::Poll;
use futures::stream::{Fuse, Stream};
pub trait SelectWithWeakExt: Stream {
fn select_with_weak<S>(self, other: S) -> SelectWithWeak<Self, S>
where
S: Stream<Item = Self::Item>,
Self: Sized;
}
// Blanket implementation: every Stream gets `select_with_weak`.
impl<T> SelectWithWeakExt for T
where
    T: Stream,
{
    fn select_with_weak<S>(self, other: S) -> SelectWithWeak<Self, S>
    where
        S: Stream<Item = Self::Item>,
        Self: Sized,
    {
        new(self, other)
    }
}
/// An adapter for merging the output of two streams.
///
/// The merged stream produces items from either of the underlying streams as
/// they become available, and the streams are polled in a round-robin fashion.
/// Errors, however, are not merged: you get at most one error at a time.
///
/// Finishes when strong stream finishes
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct SelectWithWeak<S1, S2> {
strong: Fuse<S1>,
weak: Fuse<S2>,
use_strong: bool,
}
fn new<S1, S2>(stream1: S1, stream2: S2) -> SelectWithWeak<S1, S2>
where
S1: Stream,
S2: Stream<Item = S1::Item>,
{
use futures::StreamExt;
SelectWithWeak {
strong: stream1.fuse(),
weak: stream2.fuse(),
use_strong: false,
}
}
impl<S1, S2> Stream for SelectWithWeak<S1, S2>
where
S1: Stream + Unpin,
S2: Stream<Item = S1::Item> + Unpin,
{
type Item = S1::Item;
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let mut this = Pin::into_inner(self);
let mut checked_strong = false;
loop {
if this.use_strong {
match Pin::new(&mut this.strong).poll_next(cx) {
Poll::Ready(Some(item)) => {
this.use_strong = false;
return Poll::Ready(Some(item));
}
Poll::Ready(None) => return Poll::Ready(None),
Poll::Pending => {
if!checked_strong {
this.use_strong = false;
} else {
return Poll::Pending;
}
}
}
checked_strong = true;
} else {
this.use_strong = true;
match Pin::new(&mut this.weak).poll_next(cx) {
|
}
}
}
}
|
Poll::Ready(Some(item)) => return Poll::Ready(Some(item)),
Poll::Ready(None) | Poll::Pending => (),
}
|
random_line_split
|
panic-runtime-abort.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags:-C panic=abort
// no-prefer-dynamic
#![feature(panic_runtime)]
#![crate_type = "rlib"]
#![no_std]
#![panic_runtime]
#[no_mangle]
pub extern fn
|
() {}
#[no_mangle]
pub extern fn __rust_start_panic() {}
#[no_mangle]
pub extern fn rust_eh_personality() {}
|
__rust_maybe_catch_panic
|
identifier_name
|
panic-runtime-abort.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
|
// compile-flags:-C panic=abort
// no-prefer-dynamic
#![feature(panic_runtime)]
#![crate_type = "rlib"]
#![no_std]
#![panic_runtime]
// Empty stubs for the symbols a #![panic_runtime] crate must export;
// with `-C panic=abort` no unwinding machinery is needed.
#[no_mangle]
pub extern fn __rust_maybe_catch_panic() {}
#[no_mangle]
pub extern fn __rust_start_panic() {}
// Personality routine required by the EH ABI; never invoked under abort.
#[no_mangle]
pub extern fn rust_eh_personality() {}
|
// except according to those terms.
|
random_line_split
|
panic-runtime-abort.rs
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags:-C panic=abort
// no-prefer-dynamic
#![feature(panic_runtime)]
#![crate_type = "rlib"]
#![no_std]
#![panic_runtime]
#[no_mangle]
pub extern fn __rust_maybe_catch_panic() {}
#[no_mangle]
pub extern fn __rust_start_panic() {}
#[no_mangle]
pub extern fn rust_eh_personality()
|
{}
|
identifier_body
|
|
issue-27362.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
// aux-build:issue-27362.rs
// ignore-cross-compile
// ignore-test This test fails on beta/stable #32019
extern crate issue_27362;
pub use issue_27362 as quux;
// @matches issue_27362/quux/fn.foo.html '//pre' "pub const fn foo()"
// @matches issue_27362/quux/fn.bar.html '//pre' "pub const unsafe fn bar()"
// @matches issue_27362/quux/struct.Foo.html '//code' "const unsafe fn baz()"
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
random_line_split
|
places_sidebar.rs
|
// Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//! GtkPlacesSidebar — Sidebar that displays frequently-used places in the file system
use ffi;
use FFIWidget;
use cast::GTK_PLACES_SIDEBAR;
use glib::{to_bool, to_gboolean};
struct_Widget!(PlacesSidebar);
impl PlacesSidebar {
    /// Create a new places sidebar widget; `None` if GTK returns a null pointer.
    pub fn new() -> Option<PlacesSidebar> {
        let tmp_pointer = unsafe { ffi::gtk_places_sidebar_new() };
        check_pointer!(tmp_pointer, PlacesSidebar)
    }
    /// Set which open modes the sidebar offers for its locations.
    pub fn set_open_flags(&self, flags: ::PlacesOpenFlags) {
        unsafe { ffi::gtk_places_sidebar_set_open_flags(GTK_PLACES_SIDEBAR(self.unwrap_widget()), flags) }
    }
    /// Get the currently configured open flags.
    pub fn get_open_flags(&self) -> ::PlacesOpenFlags {
        unsafe { ffi::gtk_places_sidebar_get_open_flags(GTK_PLACES_SIDEBAR(self.unwrap_widget())) }
    }
    /// Show or hide the Desktop entry in the sidebar.
    pub fn set_show_desktop(&self, show_desktop: bool) {
        unsafe { ffi::gtk_places_sidebar_set_show_desktop(GTK_PLACES_SIDEBAR(self.unwrap_widget()), to_gboolean(show_desktop)) }
    }
    /// Whether the Desktop entry is shown.
    pub fn get_show_desktop(&self) -> bool {
        unsafe { to_bool(ffi::gtk_places_sidebar_get_show_desktop(GTK_PLACES_SIDEBAR(self.unwrap_widget()))) }
    }
    /// Show or hide the "Connect to Server" entry.
    pub fn set_show_connect_to_server(&self, show_connect_to_server: bool) {
        unsafe { ffi::gtk_places_sidebar_set_show_connect_to_server(GTK_PLACES_SIDEBAR(self.unwrap_widget()),
                                                                    to_gboolean(show_connect_to_server)) }
    }
    /// Whether the "Connect to Server" entry is shown.
    pub fn get_show_connect_to_server(&self) -> bool {
        unsafe { to_bool(ffi::gtk_places_sidebar_get_show_connect_to_server(GTK_PLACES_SIDEBAR(self.unwrap_widget()))) }
    }
    /// Restrict the sidebar to local-only locations (GTK >= 3.12).
    #[cfg(gtk_3_12)]
    pub fn set_local_only(&self, local_only: bool) {
        unsafe { ffi::gtk_places_sidebar_set_local_only(GTK_PLACES_SIDEBAR(self.unwrap_widget()), to_gboolean(local_only)) }
    }
|
#[cfg(gtk_3_12)]
pub fn get_local_only(&self) -> bool {
unsafe { to_bool(ffi::gtk_places_sidebar_get_local_only(GTK_PLACES_SIDEBAR(self.unwrap_widget()))) }
}
#[cfg(gtk_3_14)]
pub fn set_show_enter_location(&self, show_enter_location: bool) {
unsafe { ffi::gtk_places_sidebar_set_show_enter_location(GTK_PLACES_SIDEBAR(self.unwrap_widget()),
to_gboolean(show_enter_location)) }
}
#[cfg(gtk_3_14)]
pub fn get_show_enter_location(&self) -> bool {
unsafe { to_bool(ffi::gtk_places_sidebar_get_show_enter_location(GTK_PLACES_SIDEBAR(self.unwrap_widget()))) }
}
}
impl_drop!(PlacesSidebar);
impl_TraitWidget!(PlacesSidebar);
impl ::ContainerTrait for PlacesSidebar {}
impl ::BinTrait for PlacesSidebar {}
impl ::ScrolledWindowTrait for PlacesSidebar {}
|
random_line_split
|
|
places_sidebar.rs
|
// Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//! GtkPlacesSidebar — Sidebar that displays frequently-used places in the file system
use ffi;
use FFIWidget;
use cast::GTK_PLACES_SIDEBAR;
use glib::{to_bool, to_gboolean};
struct_Widget!(PlacesSidebar);
impl PlacesSidebar {
pub fn new() -> Option<PlacesSidebar> {
let tmp_pointer = unsafe { ffi::gtk_places_sidebar_new() };
check_pointer!(tmp_pointer, PlacesSidebar)
}
pub fn set_open_flags(&self, flags: ::PlacesOpenFlags) {
unsafe { ffi::gtk_places_sidebar_set_open_flags(GTK_PLACES_SIDEBAR(self.unwrap_widget()), flags) }
}
pub fn get_open_flags(&self) -> ::PlacesOpenFlags {
unsafe { ffi::gtk_places_sidebar_get_open_flags(GTK_PLACES_SIDEBAR(self.unwrap_widget())) }
}
pub fn set_show_desktop(&self, show_desktop: bool) {
unsafe { ffi::gtk_places_sidebar_set_show_desktop(GTK_PLACES_SIDEBAR(self.unwrap_widget()), to_gboolean(show_desktop)) }
}
pub fn get_show_desktop(&self) -> bool {
unsafe { to_bool(ffi::gtk_places_sidebar_get_show_desktop(GTK_PLACES_SIDEBAR(self.unwrap_widget()))) }
}
pub fn set_show_connect_to_server(&self, show_connect_to_server: bool) {
unsafe { ffi::gtk_places_sidebar_set_show_connect_to_server(GTK_PLACES_SIDEBAR(self.unwrap_widget()),
to_gboolean(show_connect_to_server)) }
}
pub fn ge
|
self) -> bool {
unsafe { to_bool(ffi::gtk_places_sidebar_get_show_connect_to_server(GTK_PLACES_SIDEBAR(self.unwrap_widget()))) }
}
#[cfg(gtk_3_12)]
pub fn set_local_only(&self, local_only: bool) {
unsafe { ffi::gtk_places_sidebar_set_local_only(GTK_PLACES_SIDEBAR(self.unwrap_widget()), to_gboolean(local_only)) }
}
#[cfg(gtk_3_12)]
pub fn get_local_only(&self) -> bool {
unsafe { to_bool(ffi::gtk_places_sidebar_get_local_only(GTK_PLACES_SIDEBAR(self.unwrap_widget()))) }
}
#[cfg(gtk_3_14)]
pub fn set_show_enter_location(&self, show_enter_location: bool) {
unsafe { ffi::gtk_places_sidebar_set_show_enter_location(GTK_PLACES_SIDEBAR(self.unwrap_widget()),
to_gboolean(show_enter_location)) }
}
#[cfg(gtk_3_14)]
pub fn get_show_enter_location(&self) -> bool {
unsafe { to_bool(ffi::gtk_places_sidebar_get_show_enter_location(GTK_PLACES_SIDEBAR(self.unwrap_widget()))) }
}
}
impl_drop!(PlacesSidebar);
impl_TraitWidget!(PlacesSidebar);
impl ::ContainerTrait for PlacesSidebar {}
impl ::BinTrait for PlacesSidebar {}
impl ::ScrolledWindowTrait for PlacesSidebar {}
|
t_show_connect_to_server(&
|
identifier_name
|
places_sidebar.rs
|
// Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//! GtkPlacesSidebar — Sidebar that displays frequently-used places in the file system
use ffi;
use FFIWidget;
use cast::GTK_PLACES_SIDEBAR;
use glib::{to_bool, to_gboolean};
struct_Widget!(PlacesSidebar);
impl PlacesSidebar {
pub fn new() -> Option<PlacesSidebar> {
let tmp_pointer = unsafe { ffi::gtk_places_sidebar_new() };
check_pointer!(tmp_pointer, PlacesSidebar)
}
pub fn set_open_flags(&self, flags: ::PlacesOpenFlags) {
unsafe { ffi::gtk_places_sidebar_set_open_flags(GTK_PLACES_SIDEBAR(self.unwrap_widget()), flags) }
}
pub fn get_open_flags(&self) -> ::PlacesOpenFlags {
unsafe { ffi::gtk_places_sidebar_get_open_flags(GTK_PLACES_SIDEBAR(self.unwrap_widget())) }
}
pub fn set_show_desktop(&self, show_desktop: bool) {
unsafe { ffi::gtk_places_sidebar_set_show_desktop(GTK_PLACES_SIDEBAR(self.unwrap_widget()), to_gboolean(show_desktop)) }
}
pub fn get_show_desktop(&self) -> bool {
|
pub fn set_show_connect_to_server(&self, show_connect_to_server: bool) {
unsafe { ffi::gtk_places_sidebar_set_show_connect_to_server(GTK_PLACES_SIDEBAR(self.unwrap_widget()),
to_gboolean(show_connect_to_server)) }
}
pub fn get_show_connect_to_server(&self) -> bool {
unsafe { to_bool(ffi::gtk_places_sidebar_get_show_connect_to_server(GTK_PLACES_SIDEBAR(self.unwrap_widget()))) }
}
#[cfg(gtk_3_12)]
pub fn set_local_only(&self, local_only: bool) {
unsafe { ffi::gtk_places_sidebar_set_local_only(GTK_PLACES_SIDEBAR(self.unwrap_widget()), to_gboolean(local_only)) }
}
#[cfg(gtk_3_12)]
pub fn get_local_only(&self) -> bool {
unsafe { to_bool(ffi::gtk_places_sidebar_get_local_only(GTK_PLACES_SIDEBAR(self.unwrap_widget()))) }
}
#[cfg(gtk_3_14)]
pub fn set_show_enter_location(&self, show_enter_location: bool) {
unsafe { ffi::gtk_places_sidebar_set_show_enter_location(GTK_PLACES_SIDEBAR(self.unwrap_widget()),
to_gboolean(show_enter_location)) }
}
#[cfg(gtk_3_14)]
pub fn get_show_enter_location(&self) -> bool {
unsafe { to_bool(ffi::gtk_places_sidebar_get_show_enter_location(GTK_PLACES_SIDEBAR(self.unwrap_widget()))) }
}
}
impl_drop!(PlacesSidebar);
impl_TraitWidget!(PlacesSidebar);
impl ::ContainerTrait for PlacesSidebar {}
impl ::BinTrait for PlacesSidebar {}
impl ::ScrolledWindowTrait for PlacesSidebar {}
|
unsafe { to_bool(ffi::gtk_places_sidebar_get_show_desktop(GTK_PLACES_SIDEBAR(self.unwrap_widget()))) }
}
|
identifier_body
|
url.rs
|
use std::marker::PhantomData;
use std::time::Duration;
use irc::client::prelude::*;
use lazy_static::lazy_static;
use regex::Regex;
use crate::plugin::*;
use crate::utils::Url;
use crate::FrippyClient;
use self::error::*;
use crate::error::ErrorKind as FrippyErrorKind;
use crate::error::FrippyError;
use failure::Fail;
use failure::ResultExt;
use log::debug;
use frippy_derive::PluginName;
lazy_static! {
static ref URL_RE: Regex = Regex::new(r"(^|\s)(https?://\S+)").unwrap();
static ref WORD_RE: Regex = Regex::new(r"(\w+)").unwrap();
}
#[derive(PluginName, Debug)]
pub struct UrlTitles<C> {
max_kib: usize,
phantom: PhantomData<C>,
}
#[derive(Clone, Debug)]
struct Title(String, Option<usize>);
impl From<String> for Title {
fn from(title: String) -> Self {
Title(title, None)
}
}
impl From<Title> for String {
fn from(title: Title) -> Self {
title.0
}
}
impl Title {
fn find_by_delimiters(body: &str, delimiters: [&str; 3]) -> Result<Self, UrlError> {
let title = body
.find(delimiters[0])
.map(|tag| {
body[tag..]
.find(delimiters[1])
.map(|offset| tag + offset + delimiters[1].len())
.map(|start| {
body[start..]
.find(delimiters[2])
.map(|offset| start + offset)
.map(|end| &body[start..end])
})
})
.and_then(|s| s.and_then(|s| s))
.ok_or(ErrorKind::MissingTitle)?;
debug!("Found title {:?} with delimiters {:?}", title, delimiters);
htmlescape::decode_html(title)
.map(|t| t.into())
.map_err(|_| ErrorKind::HtmlDecoding.into())
}
fn find_ogtitle(body: &str) -> Result<Self, UrlError>
|
fn find_title(body: &str) -> Result<Self, UrlError> {
Self::find_by_delimiters(body, ["<title", ">", "</title>"])
}
// TODO Improve logic
fn get_usefulness(self, url: &str) -> Self {
let mut usefulness = 0;
for word in WORD_RE.find_iter(&self.0) {
let w = word.as_str().to_lowercase();
if w.len() > 2 &&!url.to_lowercase().contains(&w) {
usefulness += 1;
}
}
Title(self.0, Some(usefulness))
}
pub fn usefulness(&self) -> usize {
self.1.expect("Usefulness should be calculated already")
}
fn clean_up(self) -> Self {
Title(self.0.trim().replace('\n', "|").replace('\r', "|"), self.1)
}
pub fn find_clean_ogtitle(body: &str, url: &str) -> Result<Self, UrlError> {
let title = Self::find_ogtitle(body)?;
Ok(title.get_usefulness(url).clean_up())
}
pub fn find_clean_title(body: &str, url: &str) -> Result<Self, UrlError> {
let title = Self::find_title(body)?;
Ok(title.get_usefulness(url).clean_up())
}
}
impl<C: FrippyClient> UrlTitles<C> {
/// If a file is larger than `max_kib` KiB the download is stopped
pub fn new(max_kib: usize) -> Self {
UrlTitles {
max_kib,
phantom: PhantomData,
}
}
fn grep_url<'a>(&self, msg: &'a str) -> Option<Url<'a>> {
let captures = URL_RE.captures(msg)?;
debug!("Url captures: {:?}", captures);
Some(captures.get(2)?.as_str().into())
}
fn url(&self, text: &str) -> Result<String, UrlError> {
let url = self
.grep_url(text)
.ok_or(ErrorKind::MissingUrl)?
.max_kib(self.max_kib)
.timeout(Duration::from_secs(5));
let body = url.request().context(ErrorKind::Download)?;
let title = Title::find_clean_title(&body, url.as_str());
let og_title = Title::find_clean_ogtitle(&body, url.as_str());
let title = match (title, og_title) {
(Ok(title), Ok(og_title)) => {
if title.usefulness() > og_title.usefulness() {
title
} else {
og_title
}
}
(Ok(title), _) => title,
(_, Ok(title)) => title,
(Err(e), _) => Err(e)?,
};
if title.usefulness() == 0 {
Err(ErrorKind::UselessTitle)?;
}
Ok(title.into())
}
}
impl<C: FrippyClient> Plugin for UrlTitles<C> {
type Client = C;
fn execute(&self, _: &Self::Client, message: &Message) -> ExecutionStatus {
match message.command {
Command::PRIVMSG(_, ref msg) => {
if URL_RE.is_match(msg) {
ExecutionStatus::RequiresThread
} else {
ExecutionStatus::Done
}
}
_ => ExecutionStatus::Done,
}
}
fn execute_threaded(
&self,
client: &Self::Client,
message: &Message,
) -> Result<(), FrippyError> {
if let Command::PRIVMSG(_, ref content) = message.command {
let title = self.url(content).context(FrippyErrorKind::Url)?;
let response = format!("[URL] {}", title);
client
.send_privmsg(message.response_target().unwrap(), &response)
.context(FrippyErrorKind::Connection)?;
}
Ok(())
}
fn command(&self, client: &Self::Client, command: PluginCommand) -> Result<(), FrippyError> {
client
.send_notice(
&command.source,
"This Plugin does not implement any commands.",
)
.context(FrippyErrorKind::Connection)?;
Ok(())
}
fn evaluate(&self, _: &Self::Client, command: PluginCommand) -> Result<String, String> {
self.url(&command.tokens[0])
.map_err(|e| e.cause().unwrap().to_string())
}
}
pub mod error {
use failure::Fail;
use frippy_derive::Error;
/// A URL plugin error
#[derive(Copy, Clone, Eq, PartialEq, Debug, Fail, Error)]
#[error = "UrlError"]
pub enum ErrorKind {
/// A download error
#[fail(display = "A download error occured")]
Download,
/// Missing URL error
#[fail(display = "No URL was found")]
MissingUrl,
/// Missing title error
#[fail(display = "No title was found")]
MissingTitle,
/// Useless title error
#[fail(display = "The titles found were not useful enough")]
UselessTitle,
/// Html decoding error
#[fail(display = "Failed to decode Html characters")]
HtmlDecoding,
}
}
|
{
Self::find_by_delimiters(body, ["property=\"og:title\"", "content=\"", "\""])
}
|
identifier_body
|
url.rs
|
use std::marker::PhantomData;
use std::time::Duration;
use irc::client::prelude::*;
use lazy_static::lazy_static;
use regex::Regex;
use crate::plugin::*;
use crate::utils::Url;
use crate::FrippyClient;
use self::error::*;
use crate::error::ErrorKind as FrippyErrorKind;
use crate::error::FrippyError;
use failure::Fail;
use failure::ResultExt;
use log::debug;
use frippy_derive::PluginName;
lazy_static! {
static ref URL_RE: Regex = Regex::new(r"(^|\s)(https?://\S+)").unwrap();
static ref WORD_RE: Regex = Regex::new(r"(\w+)").unwrap();
}
#[derive(PluginName, Debug)]
pub struct UrlTitles<C> {
max_kib: usize,
phantom: PhantomData<C>,
}
#[derive(Clone, Debug)]
struct Title(String, Option<usize>);
impl From<String> for Title {
fn from(title: String) -> Self {
Title(title, None)
}
}
impl From<Title> for String {
fn from(title: Title) -> Self {
title.0
}
}
impl Title {
fn find_by_delimiters(body: &str, delimiters: [&str; 3]) -> Result<Self, UrlError> {
let title = body
.find(delimiters[0])
.map(|tag| {
body[tag..]
.find(delimiters[1])
.map(|offset| tag + offset + delimiters[1].len())
.map(|start| {
body[start..]
.find(delimiters[2])
.map(|offset| start + offset)
.map(|end| &body[start..end])
})
})
.and_then(|s| s.and_then(|s| s))
.ok_or(ErrorKind::MissingTitle)?;
debug!("Found title {:?} with delimiters {:?}", title, delimiters);
htmlescape::decode_html(title)
.map(|t| t.into())
.map_err(|_| ErrorKind::HtmlDecoding.into())
}
fn find_ogtitle(body: &str) -> Result<Self, UrlError> {
Self::find_by_delimiters(body, ["property=\"og:title\"", "content=\"", "\""])
}
fn find_title(body: &str) -> Result<Self, UrlError> {
Self::find_by_delimiters(body, ["<title", ">", "</title>"])
}
// TODO Improve logic
fn get_usefulness(self, url: &str) -> Self {
let mut usefulness = 0;
for word in WORD_RE.find_iter(&self.0) {
let w = word.as_str().to_lowercase();
if w.len() > 2 &&!url.to_lowercase().contains(&w) {
usefulness += 1;
}
}
Title(self.0, Some(usefulness))
}
pub fn usefulness(&self) -> usize {
self.1.expect("Usefulness should be calculated already")
}
fn clean_up(self) -> Self {
Title(self.0.trim().replace('\n', "|").replace('\r', "|"), self.1)
}
pub fn find_clean_ogtitle(body: &str, url: &str) -> Result<Self, UrlError> {
let title = Self::find_ogtitle(body)?;
Ok(title.get_usefulness(url).clean_up())
}
pub fn find_clean_title(body: &str, url: &str) -> Result<Self, UrlError> {
let title = Self::find_title(body)?;
Ok(title.get_usefulness(url).clean_up())
}
}
impl<C: FrippyClient> UrlTitles<C> {
/// If a file is larger than `max_kib` KiB the download is stopped
pub fn
|
(max_kib: usize) -> Self {
UrlTitles {
max_kib,
phantom: PhantomData,
}
}
fn grep_url<'a>(&self, msg: &'a str) -> Option<Url<'a>> {
let captures = URL_RE.captures(msg)?;
debug!("Url captures: {:?}", captures);
Some(captures.get(2)?.as_str().into())
}
fn url(&self, text: &str) -> Result<String, UrlError> {
let url = self
.grep_url(text)
.ok_or(ErrorKind::MissingUrl)?
.max_kib(self.max_kib)
.timeout(Duration::from_secs(5));
let body = url.request().context(ErrorKind::Download)?;
let title = Title::find_clean_title(&body, url.as_str());
let og_title = Title::find_clean_ogtitle(&body, url.as_str());
let title = match (title, og_title) {
(Ok(title), Ok(og_title)) => {
if title.usefulness() > og_title.usefulness() {
title
} else {
og_title
}
}
(Ok(title), _) => title,
(_, Ok(title)) => title,
(Err(e), _) => Err(e)?,
};
if title.usefulness() == 0 {
Err(ErrorKind::UselessTitle)?;
}
Ok(title.into())
}
}
impl<C: FrippyClient> Plugin for UrlTitles<C> {
type Client = C;
fn execute(&self, _: &Self::Client, message: &Message) -> ExecutionStatus {
match message.command {
Command::PRIVMSG(_, ref msg) => {
if URL_RE.is_match(msg) {
ExecutionStatus::RequiresThread
} else {
ExecutionStatus::Done
}
}
_ => ExecutionStatus::Done,
}
}
fn execute_threaded(
&self,
client: &Self::Client,
message: &Message,
) -> Result<(), FrippyError> {
if let Command::PRIVMSG(_, ref content) = message.command {
let title = self.url(content).context(FrippyErrorKind::Url)?;
let response = format!("[URL] {}", title);
client
.send_privmsg(message.response_target().unwrap(), &response)
.context(FrippyErrorKind::Connection)?;
}
Ok(())
}
fn command(&self, client: &Self::Client, command: PluginCommand) -> Result<(), FrippyError> {
client
.send_notice(
&command.source,
"This Plugin does not implement any commands.",
)
.context(FrippyErrorKind::Connection)?;
Ok(())
}
fn evaluate(&self, _: &Self::Client, command: PluginCommand) -> Result<String, String> {
self.url(&command.tokens[0])
.map_err(|e| e.cause().unwrap().to_string())
}
}
pub mod error {
use failure::Fail;
use frippy_derive::Error;
/// A URL plugin error
#[derive(Copy, Clone, Eq, PartialEq, Debug, Fail, Error)]
#[error = "UrlError"]
pub enum ErrorKind {
/// A download error
#[fail(display = "A download error occured")]
Download,
/// Missing URL error
#[fail(display = "No URL was found")]
MissingUrl,
/// Missing title error
#[fail(display = "No title was found")]
MissingTitle,
/// Useless title error
#[fail(display = "The titles found were not useful enough")]
UselessTitle,
/// Html decoding error
#[fail(display = "Failed to decode Html characters")]
HtmlDecoding,
}
}
|
new
|
identifier_name
|
url.rs
|
use std::marker::PhantomData;
use std::time::Duration;
use irc::client::prelude::*;
use lazy_static::lazy_static;
use regex::Regex;
use crate::plugin::*;
use crate::utils::Url;
use crate::FrippyClient;
use self::error::*;
use crate::error::ErrorKind as FrippyErrorKind;
use crate::error::FrippyError;
use failure::Fail;
use failure::ResultExt;
use log::debug;
use frippy_derive::PluginName;
lazy_static! {
static ref URL_RE: Regex = Regex::new(r"(^|\s)(https?://\S+)").unwrap();
static ref WORD_RE: Regex = Regex::new(r"(\w+)").unwrap();
}
#[derive(PluginName, Debug)]
pub struct UrlTitles<C> {
max_kib: usize,
phantom: PhantomData<C>,
}
#[derive(Clone, Debug)]
struct Title(String, Option<usize>);
impl From<String> for Title {
fn from(title: String) -> Self {
Title(title, None)
}
}
impl From<Title> for String {
fn from(title: Title) -> Self {
title.0
}
}
impl Title {
fn find_by_delimiters(body: &str, delimiters: [&str; 3]) -> Result<Self, UrlError> {
let title = body
.find(delimiters[0])
.map(|tag| {
body[tag..]
.find(delimiters[1])
.map(|offset| tag + offset + delimiters[1].len())
.map(|start| {
body[start..]
.find(delimiters[2])
.map(|offset| start + offset)
.map(|end| &body[start..end])
})
})
.and_then(|s| s.and_then(|s| s))
.ok_or(ErrorKind::MissingTitle)?;
debug!("Found title {:?} with delimiters {:?}", title, delimiters);
htmlescape::decode_html(title)
.map(|t| t.into())
.map_err(|_| ErrorKind::HtmlDecoding.into())
}
fn find_ogtitle(body: &str) -> Result<Self, UrlError> {
Self::find_by_delimiters(body, ["property=\"og:title\"", "content=\"", "\""])
}
fn find_title(body: &str) -> Result<Self, UrlError> {
Self::find_by_delimiters(body, ["<title", ">", "</title>"])
}
// TODO Improve logic
fn get_usefulness(self, url: &str) -> Self {
let mut usefulness = 0;
for word in WORD_RE.find_iter(&self.0) {
let w = word.as_str().to_lowercase();
if w.len() > 2 &&!url.to_lowercase().contains(&w) {
usefulness += 1;
}
}
Title(self.0, Some(usefulness))
}
pub fn usefulness(&self) -> usize {
self.1.expect("Usefulness should be calculated already")
}
fn clean_up(self) -> Self {
Title(self.0.trim().replace('\n', "|").replace('\r', "|"), self.1)
}
pub fn find_clean_ogtitle(body: &str, url: &str) -> Result<Self, UrlError> {
let title = Self::find_ogtitle(body)?;
Ok(title.get_usefulness(url).clean_up())
}
pub fn find_clean_title(body: &str, url: &str) -> Result<Self, UrlError> {
let title = Self::find_title(body)?;
Ok(title.get_usefulness(url).clean_up())
}
}
impl<C: FrippyClient> UrlTitles<C> {
/// If a file is larger than `max_kib` KiB the download is stopped
pub fn new(max_kib: usize) -> Self {
UrlTitles {
max_kib,
phantom: PhantomData,
}
}
fn grep_url<'a>(&self, msg: &'a str) -> Option<Url<'a>> {
let captures = URL_RE.captures(msg)?;
debug!("Url captures: {:?}", captures);
Some(captures.get(2)?.as_str().into())
}
fn url(&self, text: &str) -> Result<String, UrlError> {
let url = self
.grep_url(text)
.ok_or(ErrorKind::MissingUrl)?
.max_kib(self.max_kib)
.timeout(Duration::from_secs(5));
let body = url.request().context(ErrorKind::Download)?;
let title = Title::find_clean_title(&body, url.as_str());
let og_title = Title::find_clean_ogtitle(&body, url.as_str());
let title = match (title, og_title) {
(Ok(title), Ok(og_title)) => {
if title.usefulness() > og_title.usefulness() {
title
} else {
og_title
}
}
(Ok(title), _) => title,
(_, Ok(title)) => title,
(Err(e), _) => Err(e)?,
};
if title.usefulness() == 0 {
Err(ErrorKind::UselessTitle)?;
}
Ok(title.into())
}
}
impl<C: FrippyClient> Plugin for UrlTitles<C> {
type Client = C;
fn execute(&self, _: &Self::Client, message: &Message) -> ExecutionStatus {
match message.command {
Command::PRIVMSG(_, ref msg) => {
if URL_RE.is_match(msg) {
ExecutionStatus::RequiresThread
} else
|
}
_ => ExecutionStatus::Done,
}
}
fn execute_threaded(
&self,
client: &Self::Client,
message: &Message,
) -> Result<(), FrippyError> {
if let Command::PRIVMSG(_, ref content) = message.command {
let title = self.url(content).context(FrippyErrorKind::Url)?;
let response = format!("[URL] {}", title);
client
.send_privmsg(message.response_target().unwrap(), &response)
.context(FrippyErrorKind::Connection)?;
}
Ok(())
}
fn command(&self, client: &Self::Client, command: PluginCommand) -> Result<(), FrippyError> {
client
.send_notice(
&command.source,
"This Plugin does not implement any commands.",
)
.context(FrippyErrorKind::Connection)?;
Ok(())
}
fn evaluate(&self, _: &Self::Client, command: PluginCommand) -> Result<String, String> {
self.url(&command.tokens[0])
.map_err(|e| e.cause().unwrap().to_string())
}
}
pub mod error {
use failure::Fail;
use frippy_derive::Error;
/// A URL plugin error
#[derive(Copy, Clone, Eq, PartialEq, Debug, Fail, Error)]
#[error = "UrlError"]
pub enum ErrorKind {
/// A download error
#[fail(display = "A download error occured")]
Download,
/// Missing URL error
#[fail(display = "No URL was found")]
MissingUrl,
/// Missing title error
#[fail(display = "No title was found")]
MissingTitle,
/// Useless title error
#[fail(display = "The titles found were not useful enough")]
UselessTitle,
/// Html decoding error
#[fail(display = "Failed to decode Html characters")]
HtmlDecoding,
}
}
|
{
ExecutionStatus::Done
}
|
conditional_block
|
url.rs
|
use std::marker::PhantomData;
use std::time::Duration;
use irc::client::prelude::*;
use lazy_static::lazy_static;
use regex::Regex;
use crate::plugin::*;
use crate::utils::Url;
use crate::FrippyClient;
use self::error::*;
use crate::error::ErrorKind as FrippyErrorKind;
use crate::error::FrippyError;
use failure::Fail;
use failure::ResultExt;
use log::debug;
use frippy_derive::PluginName;
lazy_static! {
static ref URL_RE: Regex = Regex::new(r"(^|\s)(https?://\S+)").unwrap();
static ref WORD_RE: Regex = Regex::new(r"(\w+)").unwrap();
}
#[derive(PluginName, Debug)]
pub struct UrlTitles<C> {
max_kib: usize,
phantom: PhantomData<C>,
}
#[derive(Clone, Debug)]
struct Title(String, Option<usize>);
impl From<String> for Title {
fn from(title: String) -> Self {
Title(title, None)
}
}
impl From<Title> for String {
fn from(title: Title) -> Self {
title.0
}
}
impl Title {
fn find_by_delimiters(body: &str, delimiters: [&str; 3]) -> Result<Self, UrlError> {
let title = body
.find(delimiters[0])
.map(|tag| {
body[tag..]
.find(delimiters[1])
.map(|offset| tag + offset + delimiters[1].len())
.map(|start| {
|
body[start..]
.find(delimiters[2])
.map(|offset| start + offset)
.map(|end| &body[start..end])
})
})
.and_then(|s| s.and_then(|s| s))
.ok_or(ErrorKind::MissingTitle)?;
debug!("Found title {:?} with delimiters {:?}", title, delimiters);
htmlescape::decode_html(title)
.map(|t| t.into())
.map_err(|_| ErrorKind::HtmlDecoding.into())
}
fn find_ogtitle(body: &str) -> Result<Self, UrlError> {
Self::find_by_delimiters(body, ["property=\"og:title\"", "content=\"", "\""])
}
fn find_title(body: &str) -> Result<Self, UrlError> {
Self::find_by_delimiters(body, ["<title", ">", "</title>"])
}
// TODO Improve logic
fn get_usefulness(self, url: &str) -> Self {
let mut usefulness = 0;
for word in WORD_RE.find_iter(&self.0) {
let w = word.as_str().to_lowercase();
if w.len() > 2 &&!url.to_lowercase().contains(&w) {
usefulness += 1;
}
}
Title(self.0, Some(usefulness))
}
pub fn usefulness(&self) -> usize {
self.1.expect("Usefulness should be calculated already")
}
fn clean_up(self) -> Self {
Title(self.0.trim().replace('\n', "|").replace('\r', "|"), self.1)
}
pub fn find_clean_ogtitle(body: &str, url: &str) -> Result<Self, UrlError> {
let title = Self::find_ogtitle(body)?;
Ok(title.get_usefulness(url).clean_up())
}
pub fn find_clean_title(body: &str, url: &str) -> Result<Self, UrlError> {
let title = Self::find_title(body)?;
Ok(title.get_usefulness(url).clean_up())
}
}
impl<C: FrippyClient> UrlTitles<C> {
/// If a file is larger than `max_kib` KiB the download is stopped
pub fn new(max_kib: usize) -> Self {
UrlTitles {
max_kib,
phantom: PhantomData,
}
}
fn grep_url<'a>(&self, msg: &'a str) -> Option<Url<'a>> {
let captures = URL_RE.captures(msg)?;
debug!("Url captures: {:?}", captures);
Some(captures.get(2)?.as_str().into())
}
fn url(&self, text: &str) -> Result<String, UrlError> {
let url = self
.grep_url(text)
.ok_or(ErrorKind::MissingUrl)?
.max_kib(self.max_kib)
.timeout(Duration::from_secs(5));
let body = url.request().context(ErrorKind::Download)?;
let title = Title::find_clean_title(&body, url.as_str());
let og_title = Title::find_clean_ogtitle(&body, url.as_str());
let title = match (title, og_title) {
(Ok(title), Ok(og_title)) => {
if title.usefulness() > og_title.usefulness() {
title
} else {
og_title
}
}
(Ok(title), _) => title,
(_, Ok(title)) => title,
(Err(e), _) => Err(e)?,
};
if title.usefulness() == 0 {
Err(ErrorKind::UselessTitle)?;
}
Ok(title.into())
}
}
impl<C: FrippyClient> Plugin for UrlTitles<C> {
type Client = C;
fn execute(&self, _: &Self::Client, message: &Message) -> ExecutionStatus {
match message.command {
Command::PRIVMSG(_, ref msg) => {
if URL_RE.is_match(msg) {
ExecutionStatus::RequiresThread
} else {
ExecutionStatus::Done
}
}
_ => ExecutionStatus::Done,
}
}
fn execute_threaded(
&self,
client: &Self::Client,
message: &Message,
) -> Result<(), FrippyError> {
if let Command::PRIVMSG(_, ref content) = message.command {
let title = self.url(content).context(FrippyErrorKind::Url)?;
let response = format!("[URL] {}", title);
client
.send_privmsg(message.response_target().unwrap(), &response)
.context(FrippyErrorKind::Connection)?;
}
Ok(())
}
fn command(&self, client: &Self::Client, command: PluginCommand) -> Result<(), FrippyError> {
client
.send_notice(
&command.source,
"This Plugin does not implement any commands.",
)
.context(FrippyErrorKind::Connection)?;
Ok(())
}
fn evaluate(&self, _: &Self::Client, command: PluginCommand) -> Result<String, String> {
self.url(&command.tokens[0])
.map_err(|e| e.cause().unwrap().to_string())
}
}
pub mod error {
use failure::Fail;
use frippy_derive::Error;
/// A URL plugin error
#[derive(Copy, Clone, Eq, PartialEq, Debug, Fail, Error)]
#[error = "UrlError"]
pub enum ErrorKind {
/// A download error
#[fail(display = "A download error occured")]
Download,
/// Missing URL error
#[fail(display = "No URL was found")]
MissingUrl,
/// Missing title error
#[fail(display = "No title was found")]
MissingTitle,
/// Useless title error
#[fail(display = "The titles found were not useful enough")]
UselessTitle,
/// Html decoding error
#[fail(display = "Failed to decode Html characters")]
HtmlDecoding,
}
}
|
random_line_split
|
|
aead.rs
|
or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
use std::error::Error;
use std::fmt::{self, Display, Formatter};
use std::io::{self, ErrorKind, Read, Write};
use as_bytes::AsBytes;
use chacha20::ChaCha20;
use constant_time_eq::constant_time_eq;
use poly1305::Poly1305;
use simd::u32x4;
const CHACHA20_COUNTER_OVERFLOW: u64 = ((1 << 32) - 1) * 64;
/// Encrypts a byte slice and returns the authentication tag.
///
/// # Example
///
/// ```
/// use chacha20_poly1305_aead::encrypt;
///
/// let key = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
/// 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31];
/// let nonce = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
/// let aad = [1, 2, 3, 4];
///
/// let plaintext = b"hello, world";
///
/// // Vec implements the Write trait
/// let mut ciphertext = Vec::with_capacity(plaintext.len());
///
/// let tag = encrypt(&key, &nonce, &aad, plaintext, &mut ciphertext).unwrap();
///
/// assert_eq!(ciphertext, [0xfc, 0x5a, 0x17, 0x82,
/// 0xab, 0xcf, 0xbc, 0x5d, 0x18, 0x29, 0xbf, 0x97]);
/// assert_eq!(tag, [0xdb, 0xb7, 0x0d, 0xda, 0xbd, 0xfa, 0x8c, 0xa5,
/// 0x60, 0xa2, 0x30, 0x3d, 0xe6, 0x07, 0x92, 0x10]);
/// ```
pub fn encrypt<W: Write>(key: &[u8], nonce: &[u8],
aad: &[u8], mut input: &[u8],
output: &mut W) -> io::Result<[u8; 16]> {
encrypt_read(key, nonce, aad, &mut input, output)
}
/// Encrypts bytes from a reader and returns the authentication tag.
///
/// This function is identical to the `encrypt` function, the only
/// difference being that its input comes from a reader instead of a
/// byte slice.
pub fn encrypt_read<R: Read, W: Write>(key: &[u8], nonce: &[u8],
aad: &[u8], input: &mut R,
output: &mut W) -> io::Result<[u8; 16]> {
let mut chacha20 = ChaCha20::new(key, nonce);
let mut poly1305 = Poly1305::new(&chacha20.next().as_bytes()[..32]);
let aad_len = aad.len() as u64;
let mut input_len = 0;
poly1305.padded_blocks(aad);
let mut buf = [u32x4::default(); 4];
loop {
let read = try!(read_all(input, buf.as_mut_bytes()));
if read == 0 { break; }
input_len += read as u64;
if input_len >= CHACHA20_COUNTER_OVERFLOW {
return Err(io::Error::new(ErrorKind::WriteZero,
"counter overflow"));
}
let block = chacha20.next();
buf[0] = buf[0] ^ block[0];
buf[1] = buf[1] ^ block[1];
buf[2] = buf[2] ^ block[2];
buf[3] = buf[3] ^ block[3];
poly1305.padded_blocks(&buf.as_bytes()[..read]);
try!(output.write_all(&buf.as_bytes()[..read]));
}
poly1305.block([aad_len.to_le(), input_len.to_le()].as_bytes());
let mut tag = [0; 16];
tag.clone_from_slice(poly1305.tag().as_bytes());
Ok(tag)
}
/// Verifies the authentication tag and decrypts a byte slice.
///
/// If the tag does not match, this function produces no output and
/// returns `Err(DecryptError::TagMismatch)`.
///
/// # Example
///
/// ```
/// # use chacha20_poly1305_aead::DecryptError;
/// # fn example() -> Result<(), DecryptError> {
/// use chacha20_poly1305_aead::decrypt;
///
/// let key = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
/// 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31];
/// let nonce = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
/// let aad = [1, 2, 3, 4];
///
/// let ciphertext = [0xfc, 0x5a, 0x17, 0x82, 0xab, 0xcf, 0xbc, 0x5d,
/// 0x18, 0x29, 0xbf, 0x97];
/// let tag = [0xdb, 0xb7, 0x0d, 0xda, 0xbd, 0xfa, 0x8c, 0xa5,
/// 0x60, 0xa2, 0x30, 0x3d, 0xe6, 0x07, 0x92, 0x10];
///
/// // Vec implements the Write trait
/// let mut plaintext = Vec::with_capacity(ciphertext.len());
///
/// try!(decrypt(&key, &nonce, &aad, &ciphertext, &tag, &mut plaintext));
///
/// assert_eq!(plaintext, b"hello, world");
/// # Ok(())
/// # }
/// # example().unwrap();
/// ```
pub fn decrypt<W: Write>(key: &[u8], nonce: &[u8],
aad: &[u8], mut input: &[u8], tag: &[u8],
output: &mut W) -> Result<(), DecryptError> {
let mut chacha20 = ChaCha20::new(key, nonce);
let mut poly1305 = Poly1305::new(&chacha20.next().as_bytes()[..32]);
let aad_len = aad.len() as u64;
let input_len = input.len() as u64;
assert!(tag.len() == 16);
if input_len >= CHACHA20_COUNTER_OVERFLOW {
return Err(io::Error::new(ErrorKind::WriteZero,
"counter overflow").into());
}
poly1305.padded_blocks(aad);
poly1305.padded_blocks(input);
poly1305.block([aad_len.to_le(), input_len.to_le()].as_bytes());
if!constant_time_eq(poly1305.tag().as_bytes(), tag) {
return Err(DecryptError::TagMismatch);
}
let mut buf = [u32x4::default(); 4];
loop {
let read = try!(read_all(&mut input, buf.as_mut_bytes()));
if read == 0 { break; }
let block = chacha20.next();
buf[0] = buf[0] ^ block[0];
buf[1] = buf[1] ^ block[1];
buf[2] = buf[2] ^ block[2];
buf[3] = buf[3] ^ block[3];
try!(output.write_all(&buf.as_bytes()[..read]));
}
Ok(())
}
fn read_all<R: Read>(reader: &mut R, mut buf: &mut [u8]) -> io::Result<usize> {
let mut read = 0;
while!buf.is_empty() {
match reader.read(buf) {
Ok(0) => break,
Ok(n) => { read += n; let tmp = buf; buf = &mut tmp[n..]; }
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}
Err(e) => return Err(e),
}
}
Ok(read)
}
/// Error returned from the `decrypt` function.
#[derive(Debug)]
pub enum DecryptError {
/// The calculated Poly1305 tag did not match the given tag.
TagMismatch,
/// There was an error writing the output.
IoError(io::Error),
}
impl Display for DecryptError {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
match *self {
DecryptError::TagMismatch => fmt.write_str(self.description()),
DecryptError::IoError(ref e) => e.fmt(fmt),
}
}
}
impl Error for DecryptError {
fn description(&self) -> &str {
match *self {
DecryptError::TagMismatch => "authentication tag mismatch",
DecryptError::IoError(ref e) => e.description(),
}
}
fn cause(&self) -> Option<&Error> {
match *self {
DecryptError::TagMismatch => None,
DecryptError::IoError(ref e) => Some(e),
}
}
}
impl From<io::Error> for DecryptError {
fn from(error: io::Error) -> Self {
DecryptError::IoError(error)
}
}
impl From<DecryptError> for io::Error {
fn from(error: DecryptError) -> Self {
match error {
DecryptError::IoError(e) => e,
DecryptError::TagMismatch =>
io::Error::new(ErrorKind::InvalidData, error),
}
}
}
pub mod selftest {
use super::*;
static PLAINTEXT: &'static [u8] = b"\
Ladies and Gentlemen of the class of '99: If I could offer you o\
nly one tip for the future, sunscreen would be it.";
static AAD: &'static [u8] = &[0x50, 0x51, 0x52, 0x53,
0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7];
static KEY: &'static [u8] = &[
0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f];
static NONCE: &'static [u8] = &[0x07, 0x00, 0x00, 0x00,
0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47];
static CIPHERTEXT: &'static [u8] = &[
0xd3, 0x1a, 0x8d, 0x34, 0x64, 0x8e, 0x60, 0xdb,
0x7b, 0x86, 0xaf, 0xbc, 0x53, 0xef, 0x7e, 0xc2,
0xa4, 0xad, 0xed, 0x51, 0x29, 0x6e, 0x08, 0xfe,
0xa9, 0xe2, 0xb5, 0xa7, 0x36, 0xee, 0x62, 0xd6,
0x3d, 0xbe, 0xa4, 0x5e, 0x8c, 0xa9, 0x67, 0x12,
0x82, 0xfa, 0xfb, 0x69, 0xda, 0x92, 0x72, 0x8b,
0x1a, 0x71, 0xde, 0x0a, 0x9e, 0x06, 0x0b, 0x29,
0x05, 0xd6, 0xa5, 0xb6, 0x7e, 0xcd, 0x3b, 0x36,
0x92, 0xdd, 0xbd, 0x7f, 0x2d, 0x77, 0x8b, 0x8c,
0x98, 0x03, 0xae, 0xe3, 0x28, 0x09, 0x1b, 0x58,
0xfa, 0xb3, 0x24, 0xe4, 0xfa, 0xd6, 0x75, 0x94,
0x55, 0x85, 0x80, 0x8b, 0x48, 0x31, 0xd7, 0xbc,
0x3f, 0xf4, 0xde, 0xf0, 0x8e, 0x4b, 0x7a, 0x9d,
0xe5, 0x76, 0xd2, 0x65, 0x86, 0xce, 0xc6, 0x4b,
0x61, 0x16];
static TAG: &'static [u8] = &[
0x1a, 0xe1, 0x0b, 0x59, 0x4f, 0x09, 0xe2, 0x6a,
0x7e, 0x90, 0x2e, 0xcb, 0xd0, 0x60, 0x06, 0x91];
#[cold]
pub fn selftest() {
selftest_encrypt();
selftest_decrypt();
selftest_decrypt_mismatch();
}
#[cold]
pub fn selftest_encrypt() {
let mut output = Vec::with_capacity(PLAINTEXT.len());
let tag = encrypt(KEY, NONCE, AAD, PLAINTEXT, &mut output)
.expect("selftest failure");
|
assert_eq!(&output[..], CIPHERTEXT);
assert_eq!(tag, TAG);
}
#[cold]
pub fn selftest_decrypt() {
let mut output = Vec::with_capacity(CIPHERTEXT.len());
decrypt(KEY, NONCE, AAD, CIPHERTEXT, TAG, &mut output)
.expect("selftest failure");
assert_eq!(&output[..], PLAINTEXT);
}
#[cold]
pub fn selftest_decrypt_mismatch() {
let mut output = Vec::with_capacity(0);
let result = decrypt(KEY, NONCE, AAD, CIPHERTEXT, &[0; 16],
&mut output);
if let Err(DecryptError::TagMismatch) = result {
assert!(output.is_empty());
} else {
panic!("selftest failure");
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn selftest_encrypt() {
selftest::selftest_encrypt();
}
#[test]
fn selftest_decrypt() {
selftest::selftest_decrypt();
}
#[test]
fn selftest_decrypt_mismatch() {
selftest::selftest_decrypt_mismatch();
}
#[test]
fn test_encrypt() {
let mut output = Vec::with_capacity(PLAINTEXT.len());
let tag = encrypt(KEY, NONCE, AAD, PLAINTEXT.as_bytes(),
&mut output).expect("test failed");
assert_eq!(&output[..], CIPHERTEXT);
assert_eq!(tag, TAG);
}
#[test]
fn test_decrypt() {
let mut output = Vec::with_capacity(CIPHERTEXT.len());
decrypt(KEY, NONCE, AAD, CIPHERTEXT, TAG,
&mut output).expect("test failed");
assert_eq!(&output[..], PLAINTEXT.as_bytes());
}
static KEY: &'static [u8] = &[
0x1c, 0x92, 0x40, 0xa5, 0xeb, 0x55, 0xd3, 0x8a,
0xf3, 0x33, 0x88, 0x86, 0x04, 0xf6, 0xb5, 0xf0,
0x47, 0x39, 0x17, 0xc1, 0x40, 0x2b, 0x80, 0x09,
0x9d, 0xca, 0x5c, 0xbc, 0x20, 0x70, 0x75, 0xc0];
static CIPHERTEXT: &'static [u8] = &[
0x64, 0xa0, 0x86, 0x15, 0x75, 0x86, 0x1a, 0xf4,
0x60, 0xf0, 0x62, 0xc7, 0x9b, 0xe6, 0x43, 0xbd,
0x5e, 0x80, 0x5c, 0xfd, 0x34, 0x5c, 0xf3, 0x89,
0xf1, 0x08, 0x67, 0x0a, 0xc7, 0x6c, 0x8c, 0xb2,
0x4c, 0x6c, 0xfc, 0x18, 0x75, 0x5d, 0x43, 0xee,
0xa0, 0x9e, 0xe9, 0x4e, 0x38, 0x2d, 0x26, 0xb0,
0xbd, 0xb7, 0xb7, 0x3c, 0x32, 0x1b, 0x01, 0x00,
0xd4, 0xf0, 0x3b, 0x7f, 0x35, 0x58, 0x94, 0xcf,
0x33, 0x2f, 0x83, 0x0e, 0x71, 0x0b, 0x97, 0xce,
0x98, 0xc8, 0xa8, 0x4a, 0xbd, 0x0b, 0x94, 0x81,
0x14, 0xad, 0x17, 0x6e, 0x00, 0x8d, 0x33, 0xbd,
0x60, 0xf9, 0x82, 0xb1, 0xff, 0x37, 0xc8, 0x55,
0x97, 0x97, 0xa0, 0x6e, 0xf4, 0xf0, 0xef, 0x61,
0xc1, 0x86, 0x32, 0x4e, 0x2b, 0x35, 0x06, 0x38,
0x36, 0x06, 0x90, 0x7b, 0x6a, 0x7c, 0x02, 0xb0,
0xf9, 0xf6, 0x15, 0x7b, 0x53, 0xc8, 0x67, 0xe4,
0xb9, 0x16, 0x6c, 0x76, 0x7b, 0x80, 0x4d, 0x46,
0xa5, 0x9b, 0x52, 0x16, 0xcd, 0xe7, 0xa4, 0xe9,
0x90, 0x40, 0xc5, 0xa4, 0x04, 0x33, 0x22, 0x5e,
0xe2, 0x82, 0xa1, 0xb0, 0xa0, 0x6c, 0x52, 0x3e,
0xaf, 0x45, 0x34, 0xd7, 0xf8, 0x3f, 0xa1, 0x15,
0x5b, 0x00, 0x47, 0x71, 0x8c, 0xbc, 0x54, 0x6a,
0x0d, 0x07, 0x2b, 0x04, 0xb3, 0x56, 0x4e, 0xea,
0x1b, 0x42, 0x22, 0x73, 0xf5, 0x48, 0x27, 0x1a,
0x0b, 0xb2, 0x31, 0x60, 0x53, 0xfa, 0x76, 0x99,
0x19, 0x55, 0xeb, 0xd6, 0x31, 0x59, 0x43, 0x4e,
0xce, 0xbb, 0x4e, 0x46, 0x6d, 0xae, 0x5a, 0x10,
0x73, 0xa6, 0x72, 0x76, 0x27, 0x09, 0x7a, 0x10,
0x49, 0xe6, 0x17, 0xd9, 0x1d, 0x36, 0x10, 0x94,
0xfa, 0x68, 0xf0, 0xff, 0x77, 0x98, 0x71, 0x30,
0x30, 0x5b, 0xea, 0xba, 0x2e, 0xda, 0x04, 0xdf,
0x99, 0x7b, 0x71, 0x4d, 0x6c, 0x6f, 0x2c, 0x29,
0xa6, 0xad, 0x5c, 0xb4, 0x02, 0x2b, 0x02, 0x70,
0x9b];
static NONCE: &'static [u8] = &[0x00, 0x00, 0x00, 0x00,
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08];
static AAD: &'static [u8] = &[0xf3, 0x33, 0x88, 0x86,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4e, 0x91];
static TAG: &'static [u8] = &[
0xee, 0xad, 0x9d, 0x67, 0x89, 0x0c, 0xbb, 0x22,
0x39, 0x23, 0x36, 0xfe, 0xa1, 0x85, 0x1f, 0x38];
static PLAINTEXT: &'static str = "\
Internet-Drafts are draft documents valid for a maximum of six m\
onths and may be updated, replaced, or obsoleted by other docume\
nts at any time. It is inappropriate to use Internet-Drafts as r\
eference material or to cite them other than as /\u{201c}work in prog\
ress./\u{201d}";
}
#[cfg(all(feature = "bench", test))]
mod bench {
use test::{Bencher, black_box};
use super::*;
#[cfg_attr(feature = "clippy", allow(result_unwrap_used))]
fn bench_encrypt(b: &mut Bencher, aad: &[u8], data: &[u8]) {
let key = [!0; 32];
let nonce = [!0; 12];
let mut buf = Vec::with_capacity(data.len());
b.bytes = data.len() as u64;
b.iter(|| {
buf.clear();
encrypt(black_box(&key), black_box(&nonce),
black_box(aad), black_box(data),
black_box(&mut buf)).unwrap()
})
}
#[cfg_attr(feature = "clippy", allow(result_unwrap_used))]
fn bench_decrypt(b: &mut Bencher, aad: &[u8], data: &[u8]) {
let key = [!0; 32];
let nonce = [!0; 12];
let mut ciphertext = Vec::with_capacity(data.len());
let tag = encrypt(&key, &nonce, aad, data, &mut ciphertext).unwrap();
let input = &ciphertext[..];
let mut buf = Vec::with_capacity(data.len());
b.bytes = data.len() as u64;
b.iter(|| {
buf.clear();
decrypt(black_box(&key), black_box(&nonce),
black_box(aad), black_box(input), black_box(&tag),
black_box(&mut buf)).unwrap()
})
}
#[bench]
fn bench_encrypt_16(b: &mut Bencher) {
bench_encrypt(b, &[!0; 16], &[!0; 16])
}
#[bench]
fn bench_encrypt_4k(b: &mut Bencher) {
bench_encrypt(b, &[!0; 16], &[!0; 4096])
}
#[bench]
fn bench_encrypt_64k(b: &mut Bencher) {
bench_encrypt(b, &[!0; 16], &[!0; 65536])
}
#[bench]
fn bench_decrypt_16(b: &mut Bencher) {
bench_decrypt(b, &[!0; 16], &[!0; 16])
}
|
random_line_split
|
|
aead.rs
|
the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
use std::error::Error;
use std::fmt::{self, Display, Formatter};
use std::io::{self, ErrorKind, Read, Write};
use as_bytes::AsBytes;
use chacha20::ChaCha20;
use constant_time_eq::constant_time_eq;
use poly1305::Poly1305;
use simd::u32x4;
const CHACHA20_COUNTER_OVERFLOW: u64 = ((1 << 32) - 1) * 64;
/// Encrypts a byte slice and returns the authentication tag.
///
/// # Example
///
/// ```
/// use chacha20_poly1305_aead::encrypt;
///
/// let key = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
/// 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31];
/// let nonce = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
/// let aad = [1, 2, 3, 4];
///
/// let plaintext = b"hello, world";
///
/// // Vec implements the Write trait
/// let mut ciphertext = Vec::with_capacity(plaintext.len());
///
/// let tag = encrypt(&key, &nonce, &aad, plaintext, &mut ciphertext).unwrap();
///
/// assert_eq!(ciphertext, [0xfc, 0x5a, 0x17, 0x82,
/// 0xab, 0xcf, 0xbc, 0x5d, 0x18, 0x29, 0xbf, 0x97]);
/// assert_eq!(tag, [0xdb, 0xb7, 0x0d, 0xda, 0xbd, 0xfa, 0x8c, 0xa5,
/// 0x60, 0xa2, 0x30, 0x3d, 0xe6, 0x07, 0x92, 0x10]);
/// ```
pub fn encrypt<W: Write>(key: &[u8], nonce: &[u8],
aad: &[u8], mut input: &[u8],
output: &mut W) -> io::Result<[u8; 16]> {
encrypt_read(key, nonce, aad, &mut input, output)
}
/// Encrypts bytes from a reader and returns the authentication tag.
///
/// This function is identical to the `encrypt` function, the only
/// difference being that its input comes from a reader instead of a
/// byte slice.
pub fn encrypt_read<R: Read, W: Write>(key: &[u8], nonce: &[u8],
aad: &[u8], input: &mut R,
output: &mut W) -> io::Result<[u8; 16]> {
let mut chacha20 = ChaCha20::new(key, nonce);
let mut poly1305 = Poly1305::new(&chacha20.next().as_bytes()[..32]);
let aad_len = aad.len() as u64;
let mut input_len = 0;
poly1305.padded_blocks(aad);
let mut buf = [u32x4::default(); 4];
loop {
let read = try!(read_all(input, buf.as_mut_bytes()));
if read == 0 { break; }
input_len += read as u64;
if input_len >= CHACHA20_COUNTER_OVERFLOW {
return Err(io::Error::new(ErrorKind::WriteZero,
"counter overflow"));
}
let block = chacha20.next();
buf[0] = buf[0] ^ block[0];
buf[1] = buf[1] ^ block[1];
buf[2] = buf[2] ^ block[2];
buf[3] = buf[3] ^ block[3];
poly1305.padded_blocks(&buf.as_bytes()[..read]);
try!(output.write_all(&buf.as_bytes()[..read]));
}
poly1305.block([aad_len.to_le(), input_len.to_le()].as_bytes());
let mut tag = [0; 16];
tag.clone_from_slice(poly1305.tag().as_bytes());
Ok(tag)
}
/// Verifies the authentication tag and decrypts a byte slice.
///
/// If the tag does not match, this function produces no output and
/// returns `Err(DecryptError::TagMismatch)`.
///
/// # Example
///
/// ```
/// # use chacha20_poly1305_aead::DecryptError;
/// # fn example() -> Result<(), DecryptError> {
/// use chacha20_poly1305_aead::decrypt;
///
/// let key = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
/// 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31];
/// let nonce = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
/// let aad = [1, 2, 3, 4];
///
/// let ciphertext = [0xfc, 0x5a, 0x17, 0x82, 0xab, 0xcf, 0xbc, 0x5d,
/// 0x18, 0x29, 0xbf, 0x97];
/// let tag = [0xdb, 0xb7, 0x0d, 0xda, 0xbd, 0xfa, 0x8c, 0xa5,
/// 0x60, 0xa2, 0x30, 0x3d, 0xe6, 0x07, 0x92, 0x10];
///
/// // Vec implements the Write trait
/// let mut plaintext = Vec::with_capacity(ciphertext.len());
///
/// try!(decrypt(&key, &nonce, &aad, &ciphertext, &tag, &mut plaintext));
///
/// assert_eq!(plaintext, b"hello, world");
/// # Ok(())
/// # }
/// # example().unwrap();
/// ```
pub fn decrypt<W: Write>(key: &[u8], nonce: &[u8],
aad: &[u8], mut input: &[u8], tag: &[u8],
output: &mut W) -> Result<(), DecryptError> {
let mut chacha20 = ChaCha20::new(key, nonce);
let mut poly1305 = Poly1305::new(&chacha20.next().as_bytes()[..32]);
let aad_len = aad.len() as u64;
let input_len = input.len() as u64;
assert!(tag.len() == 16);
if input_len >= CHACHA20_COUNTER_OVERFLOW {
return Err(io::Error::new(ErrorKind::WriteZero,
"counter overflow").into());
}
poly1305.padded_blocks(aad);
poly1305.padded_blocks(input);
poly1305.block([aad_len.to_le(), input_len.to_le()].as_bytes());
if!constant_time_eq(poly1305.tag().as_bytes(), tag) {
return Err(DecryptError::TagMismatch);
}
let mut buf = [u32x4::default(); 4];
loop {
let read = try!(read_all(&mut input, buf.as_mut_bytes()));
if read == 0 { break; }
let block = chacha20.next();
buf[0] = buf[0] ^ block[0];
buf[1] = buf[1] ^ block[1];
buf[2] = buf[2] ^ block[2];
buf[3] = buf[3] ^ block[3];
try!(output.write_all(&buf.as_bytes()[..read]));
}
Ok(())
}
fn read_all<R: Read>(reader: &mut R, mut buf: &mut [u8]) -> io::Result<usize> {
let mut read = 0;
while!buf.is_empty() {
match reader.read(buf) {
Ok(0) => break,
Ok(n) => { read += n; let tmp = buf; buf = &mut tmp[n..]; }
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}
Err(e) => return Err(e),
}
}
Ok(read)
}
/// Error returned from the `decrypt` function.
#[derive(Debug)]
pub enum DecryptError {
/// The calculated Poly1305 tag did not match the given tag.
TagMismatch,
/// There was an error writing the output.
IoError(io::Error),
}
impl Display for DecryptError {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
match *self {
DecryptError::TagMismatch => fmt.write_str(self.description()),
DecryptError::IoError(ref e) => e.fmt(fmt),
}
}
}
impl Error for DecryptError {
fn description(&self) -> &str {
match *self {
DecryptError::TagMismatch => "authentication tag mismatch",
DecryptError::IoError(ref e) => e.description(),
}
}
fn cause(&self) -> Option<&Error> {
match *self {
DecryptError::TagMismatch => None,
DecryptError::IoError(ref e) => Some(e),
}
}
}
impl From<io::Error> for DecryptError {
fn from(error: io::Error) -> Self {
DecryptError::IoError(error)
}
}
impl From<DecryptError> for io::Error {
fn from(error: DecryptError) -> Self {
match error {
DecryptError::IoError(e) => e,
DecryptError::TagMismatch =>
io::Error::new(ErrorKind::InvalidData, error),
}
}
}
pub mod selftest {
use super::*;
static PLAINTEXT: &'static [u8] = b"\
Ladies and Gentlemen of the class of '99: If I could offer you o\
nly one tip for the future, sunscreen would be it.";
static AAD: &'static [u8] = &[0x50, 0x51, 0x52, 0x53,
0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7];
static KEY: &'static [u8] = &[
0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f];
static NONCE: &'static [u8] = &[0x07, 0x00, 0x00, 0x00,
0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47];
static CIPHERTEXT: &'static [u8] = &[
0xd3, 0x1a, 0x8d, 0x34, 0x64, 0x8e, 0x60, 0xdb,
0x7b, 0x86, 0xaf, 0xbc, 0x53, 0xef, 0x7e, 0xc2,
0xa4, 0xad, 0xed, 0x51, 0x29, 0x6e, 0x08, 0xfe,
0xa9, 0xe2, 0xb5, 0xa7, 0x36, 0xee, 0x62, 0xd6,
0x3d, 0xbe, 0xa4, 0x5e, 0x8c, 0xa9, 0x67, 0x12,
0x82, 0xfa, 0xfb, 0x69, 0xda, 0x92, 0x72, 0x8b,
0x1a, 0x71, 0xde, 0x0a, 0x9e, 0x06, 0x0b, 0x29,
0x05, 0xd6, 0xa5, 0xb6, 0x7e, 0xcd, 0x3b, 0x36,
0x92, 0xdd, 0xbd, 0x7f, 0x2d, 0x77, 0x8b, 0x8c,
0x98, 0x03, 0xae, 0xe3, 0x28, 0x09, 0x1b, 0x58,
0xfa, 0xb3, 0x24, 0xe4, 0xfa, 0xd6, 0x75, 0x94,
0x55, 0x85, 0x80, 0x8b, 0x48, 0x31, 0xd7, 0xbc,
0x3f, 0xf4, 0xde, 0xf0, 0x8e, 0x4b, 0x7a, 0x9d,
0xe5, 0x76, 0xd2, 0x65, 0x86, 0xce, 0xc6, 0x4b,
0x61, 0x16];
static TAG: &'static [u8] = &[
0x1a, 0xe1, 0x0b, 0x59, 0x4f, 0x09, 0xe2, 0x6a,
0x7e, 0x90, 0x2e, 0xcb, 0xd0, 0x60, 0x06, 0x91];
#[cold]
pub fn selftest() {
selftest_encrypt();
selftest_decrypt();
selftest_decrypt_mismatch();
}
#[cold]
pub fn selftest_encrypt() {
let mut output = Vec::with_capacity(PLAINTEXT.len());
let tag = encrypt(KEY, NONCE, AAD, PLAINTEXT, &mut output)
.expect("selftest failure");
assert_eq!(&output[..], CIPHERTEXT);
assert_eq!(tag, TAG);
}
#[cold]
pub fn selftest_decrypt() {
let mut output = Vec::with_capacity(CIPHERTEXT.len());
decrypt(KEY, NONCE, AAD, CIPHERTEXT, TAG, &mut output)
.expect("selftest failure");
assert_eq!(&output[..], PLAINTEXT);
}
#[cold]
pub fn selftest_decrypt_mismatch() {
let mut output = Vec::with_capacity(0);
let result = decrypt(KEY, NONCE, AAD, CIPHERTEXT, &[0; 16],
&mut output);
if let Err(DecryptError::TagMismatch) = result {
assert!(output.is_empty());
} else {
panic!("selftest failure");
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn selftest_encrypt() {
selftest::selftest_encrypt();
}
#[test]
fn selftest_decrypt() {
selftest::selftest_decrypt();
}
#[test]
fn selftest_decrypt_mismatch() {
selftest::selftest_decrypt_mismatch();
}
#[test]
fn test_encrypt() {
let mut output = Vec::with_capacity(PLAINTEXT.len());
let tag = encrypt(KEY, NONCE, AAD, PLAINTEXT.as_bytes(),
&mut output).expect("test failed");
assert_eq!(&output[..], CIPHERTEXT);
assert_eq!(tag, TAG);
}
#[test]
fn test_decrypt() {
let mut output = Vec::with_capacity(CIPHERTEXT.len());
decrypt(KEY, NONCE, AAD, CIPHERTEXT, TAG,
&mut output).expect("test failed");
assert_eq!(&output[..], PLAINTEXT.as_bytes());
}
static KEY: &'static [u8] = &[
0x1c, 0x92, 0x40, 0xa5, 0xeb, 0x55, 0xd3, 0x8a,
0xf3, 0x33, 0x88, 0x86, 0x04, 0xf6, 0xb5, 0xf0,
0x47, 0x39, 0x17, 0xc1, 0x40, 0x2b, 0x80, 0x09,
0x9d, 0xca, 0x5c, 0xbc, 0x20, 0x70, 0x75, 0xc0];
static CIPHERTEXT: &'static [u8] = &[
0x64, 0xa0, 0x86, 0x15, 0x75, 0x86, 0x1a, 0xf4,
0x60, 0xf0, 0x62, 0xc7, 0x9b, 0xe6, 0x43, 0xbd,
0x5e, 0x80, 0x5c, 0xfd, 0x34, 0x5c, 0xf3, 0x89,
0xf1, 0x08, 0x67, 0x0a, 0xc7, 0x6c, 0x8c, 0xb2,
0x4c, 0x6c, 0xfc, 0x18, 0x75, 0x5d, 0x43, 0xee,
0xa0, 0x9e, 0xe9, 0x4e, 0x38, 0x2d, 0x26, 0xb0,
0xbd, 0xb7, 0xb7, 0x3c, 0x32, 0x1b, 0x01, 0x00,
0xd4, 0xf0, 0x3b, 0x7f, 0x35, 0x58, 0x94, 0xcf,
0x33, 0x2f, 0x83, 0x0e, 0x71, 0x0b, 0x97, 0xce,
0x98, 0xc8, 0xa8, 0x4a, 0xbd, 0x0b, 0x94, 0x81,
0x14, 0xad, 0x17, 0x6e, 0x00, 0x8d, 0x33, 0xbd,
0x60, 0xf9, 0x82, 0xb1, 0xff, 0x37, 0xc8, 0x55,
0x97, 0x97, 0xa0, 0x6e, 0xf4, 0xf0, 0xef, 0x61,
0xc1, 0x86, 0x32, 0x4e, 0x2b, 0x35, 0x06, 0x38,
0x36, 0x06, 0x90, 0x7b, 0x6a, 0x7c, 0x02, 0xb0,
0xf9, 0xf6, 0x15, 0x7b, 0x53, 0xc8, 0x67, 0xe4,
0xb9, 0x16, 0x6c, 0x76, 0x7b, 0x80, 0x4d, 0x46,
0xa5, 0x9b, 0x52, 0x16, 0xcd, 0xe7, 0xa4, 0xe9,
0x90, 0x40, 0xc5, 0xa4, 0x04, 0x33, 0x22, 0x5e,
0xe2, 0x82, 0xa1, 0xb0, 0xa0, 0x6c, 0x52, 0x3e,
0xaf, 0x45, 0x34, 0xd7, 0xf8, 0x3f, 0xa1, 0x15,
0x5b, 0x00, 0x47, 0x71, 0x8c, 0xbc, 0x54, 0x6a,
0x0d, 0x07, 0x2b, 0x04, 0xb3, 0x56, 0x4e, 0xea,
0x1b, 0x42, 0x22, 0x73, 0xf5, 0x48, 0x27, 0x1a,
0x0b, 0xb2, 0x31, 0x60, 0x53, 0xfa, 0x76, 0x99,
0x19, 0x55, 0xeb, 0xd6, 0x31, 0x59, 0x43, 0x4e,
0xce, 0xbb, 0x4e, 0x46, 0x6d, 0xae, 0x5a, 0x10,
0x73, 0xa6, 0x72, 0x76, 0x27, 0x09, 0x7a, 0x10,
0x49, 0xe6, 0x17, 0xd9, 0x1d, 0x36, 0x10, 0x94,
0xfa, 0x68, 0xf0, 0xff, 0x77, 0x98, 0x71, 0x30,
0x30, 0x5b, 0xea, 0xba, 0x2e, 0xda, 0x04, 0xdf,
0x99, 0x7b, 0x71, 0x4d, 0x6c, 0x6f, 0x2c, 0x29,
0xa6, 0xad, 0x5c, 0xb4, 0x02, 0x2b, 0x02, 0x70,
0x9b];
static NONCE: &'static [u8] = &[0x00, 0x00, 0x00, 0x00,
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08];
static AAD: &'static [u8] = &[0xf3, 0x33, 0x88, 0x86,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4e, 0x91];
static TAG: &'static [u8] = &[
0xee, 0xad, 0x9d, 0x67, 0x89, 0x0c, 0xbb, 0x22,
0x39, 0x23, 0x36, 0xfe, 0xa1, 0x85, 0x1f, 0x38];
static PLAINTEXT: &'static str = "\
Internet-Drafts are draft documents valid for a maximum of six m\
onths and may be updated, replaced, or obsoleted by other docume\
nts at any time. It is inappropriate to use Internet-Drafts as r\
eference material or to cite them other than as /\u{201c}work in prog\
ress./\u{201d}";
}
#[cfg(all(feature = "bench", test))]
mod bench {
use test::{Bencher, black_box};
use super::*;
#[cfg_attr(feature = "clippy", allow(result_unwrap_used))]
fn
|
(b: &mut Bencher, aad: &[u8], data: &[u8]) {
let key = [!0; 32];
let nonce = [!0; 12];
let mut buf = Vec::with_capacity(data.len());
b.bytes = data.len() as u64;
b.iter(|| {
buf.clear();
encrypt(black_box(&key), black_box(&nonce),
black_box(aad), black_box(data),
black_box(&mut buf)).unwrap()
})
}
#[cfg_attr(feature = "clippy", allow(result_unwrap_used))]
fn bench_decrypt(b: &mut Bencher, aad: &[u8], data: &[u8]) {
let key = [!0; 32];
let nonce = [!0; 12];
let mut ciphertext = Vec::with_capacity(data.len());
let tag = encrypt(&key, &nonce, aad, data, &mut ciphertext).unwrap();
let input = &ciphertext[..];
let mut buf = Vec::with_capacity(data.len());
b.bytes = data.len() as u64;
b.iter(|| {
buf.clear();
decrypt(black_box(&key), black_box(&nonce),
black_box(aad), black_box(input), black_box(&tag),
black_box(&mut buf)).unwrap()
})
}
#[bench]
fn bench_encrypt_16(b: &mut Bencher) {
bench_encrypt(b, &[!0; 16], &[!0; 16])
}
#[bench]
fn bench_encrypt_4k(b: &mut Bencher) {
bench_encrypt(b, &[!0; 16], &[!0; 4096])
}
#[bench]
fn bench_encrypt_64k(b: &mut Bencher) {
bench_encrypt(b, &[!0; 16], &[!0; 65536])
}
#[bench]
fn bench_decrypt_16(b: &mut Bencher) {
bench_decrypt(b, &[!0; 16], &[!0; 16])
}
|
bench_encrypt
|
identifier_name
|
aead.rs
|
1305::Poly1305;
use simd::u32x4;
const CHACHA20_COUNTER_OVERFLOW: u64 = ((1 << 32) - 1) * 64;
/// Encrypts a byte slice and returns the authentication tag.
///
/// # Example
///
/// ```
/// use chacha20_poly1305_aead::encrypt;
///
/// let key = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
/// 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31];
/// let nonce = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
/// let aad = [1, 2, 3, 4];
///
/// let plaintext = b"hello, world";
///
/// // Vec implements the Write trait
/// let mut ciphertext = Vec::with_capacity(plaintext.len());
///
/// let tag = encrypt(&key, &nonce, &aad, plaintext, &mut ciphertext).unwrap();
///
/// assert_eq!(ciphertext, [0xfc, 0x5a, 0x17, 0x82,
/// 0xab, 0xcf, 0xbc, 0x5d, 0x18, 0x29, 0xbf, 0x97]);
/// assert_eq!(tag, [0xdb, 0xb7, 0x0d, 0xda, 0xbd, 0xfa, 0x8c, 0xa5,
/// 0x60, 0xa2, 0x30, 0x3d, 0xe6, 0x07, 0x92, 0x10]);
/// ```
pub fn encrypt<W: Write>(key: &[u8], nonce: &[u8],
aad: &[u8], mut input: &[u8],
output: &mut W) -> io::Result<[u8; 16]> {
encrypt_read(key, nonce, aad, &mut input, output)
}
/// Encrypts bytes from a reader and returns the authentication tag.
///
/// This function is identical to the `encrypt` function, the only
/// difference being that its input comes from a reader instead of a
/// byte slice.
pub fn encrypt_read<R: Read, W: Write>(key: &[u8], nonce: &[u8],
aad: &[u8], input: &mut R,
output: &mut W) -> io::Result<[u8; 16]> {
let mut chacha20 = ChaCha20::new(key, nonce);
let mut poly1305 = Poly1305::new(&chacha20.next().as_bytes()[..32]);
let aad_len = aad.len() as u64;
let mut input_len = 0;
poly1305.padded_blocks(aad);
let mut buf = [u32x4::default(); 4];
loop {
let read = try!(read_all(input, buf.as_mut_bytes()));
if read == 0 { break; }
input_len += read as u64;
if input_len >= CHACHA20_COUNTER_OVERFLOW {
return Err(io::Error::new(ErrorKind::WriteZero,
"counter overflow"));
}
let block = chacha20.next();
buf[0] = buf[0] ^ block[0];
buf[1] = buf[1] ^ block[1];
buf[2] = buf[2] ^ block[2];
buf[3] = buf[3] ^ block[3];
poly1305.padded_blocks(&buf.as_bytes()[..read]);
try!(output.write_all(&buf.as_bytes()[..read]));
}
poly1305.block([aad_len.to_le(), input_len.to_le()].as_bytes());
let mut tag = [0; 16];
tag.clone_from_slice(poly1305.tag().as_bytes());
Ok(tag)
}
/// Verifies the authentication tag and decrypts a byte slice.
///
/// If the tag does not match, this function produces no output and
/// returns `Err(DecryptError::TagMismatch)`.
///
/// # Example
///
/// ```
/// # use chacha20_poly1305_aead::DecryptError;
/// # fn example() -> Result<(), DecryptError> {
/// use chacha20_poly1305_aead::decrypt;
///
/// let key = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
/// 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31];
/// let nonce = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
/// let aad = [1, 2, 3, 4];
///
/// let ciphertext = [0xfc, 0x5a, 0x17, 0x82, 0xab, 0xcf, 0xbc, 0x5d,
/// 0x18, 0x29, 0xbf, 0x97];
/// let tag = [0xdb, 0xb7, 0x0d, 0xda, 0xbd, 0xfa, 0x8c, 0xa5,
/// 0x60, 0xa2, 0x30, 0x3d, 0xe6, 0x07, 0x92, 0x10];
///
/// // Vec implements the Write trait
/// let mut plaintext = Vec::with_capacity(ciphertext.len());
///
/// try!(decrypt(&key, &nonce, &aad, &ciphertext, &tag, &mut plaintext));
///
/// assert_eq!(plaintext, b"hello, world");
/// # Ok(())
/// # }
/// # example().unwrap();
/// ```
pub fn decrypt<W: Write>(key: &[u8], nonce: &[u8],
aad: &[u8], mut input: &[u8], tag: &[u8],
output: &mut W) -> Result<(), DecryptError> {
let mut chacha20 = ChaCha20::new(key, nonce);
let mut poly1305 = Poly1305::new(&chacha20.next().as_bytes()[..32]);
let aad_len = aad.len() as u64;
let input_len = input.len() as u64;
assert!(tag.len() == 16);
if input_len >= CHACHA20_COUNTER_OVERFLOW {
return Err(io::Error::new(ErrorKind::WriteZero,
"counter overflow").into());
}
poly1305.padded_blocks(aad);
poly1305.padded_blocks(input);
poly1305.block([aad_len.to_le(), input_len.to_le()].as_bytes());
if!constant_time_eq(poly1305.tag().as_bytes(), tag) {
return Err(DecryptError::TagMismatch);
}
let mut buf = [u32x4::default(); 4];
loop {
let read = try!(read_all(&mut input, buf.as_mut_bytes()));
if read == 0 { break; }
let block = chacha20.next();
buf[0] = buf[0] ^ block[0];
buf[1] = buf[1] ^ block[1];
buf[2] = buf[2] ^ block[2];
buf[3] = buf[3] ^ block[3];
try!(output.write_all(&buf.as_bytes()[..read]));
}
Ok(())
}
fn read_all<R: Read>(reader: &mut R, mut buf: &mut [u8]) -> io::Result<usize> {
let mut read = 0;
while!buf.is_empty() {
match reader.read(buf) {
Ok(0) => break,
Ok(n) => { read += n; let tmp = buf; buf = &mut tmp[n..]; }
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}
Err(e) => return Err(e),
}
}
Ok(read)
}
/// Error returned from the `decrypt` function.
#[derive(Debug)]
pub enum DecryptError {
/// The calculated Poly1305 tag did not match the given tag.
TagMismatch,
/// There was an error writing the output.
IoError(io::Error),
}
impl Display for DecryptError {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
match *self {
DecryptError::TagMismatch => fmt.write_str(self.description()),
DecryptError::IoError(ref e) => e.fmt(fmt),
}
}
}
impl Error for DecryptError {
fn description(&self) -> &str {
match *self {
DecryptError::TagMismatch => "authentication tag mismatch",
DecryptError::IoError(ref e) => e.description(),
}
}
fn cause(&self) -> Option<&Error> {
match *self {
DecryptError::TagMismatch => None,
DecryptError::IoError(ref e) => Some(e),
}
}
}
impl From<io::Error> for DecryptError {
fn from(error: io::Error) -> Self {
DecryptError::IoError(error)
}
}
impl From<DecryptError> for io::Error {
fn from(error: DecryptError) -> Self {
match error {
DecryptError::IoError(e) => e,
DecryptError::TagMismatch =>
io::Error::new(ErrorKind::InvalidData, error),
}
}
}
pub mod selftest {
use super::*;
static PLAINTEXT: &'static [u8] = b"\
Ladies and Gentlemen of the class of '99: If I could offer you o\
nly one tip for the future, sunscreen would be it.";
static AAD: &'static [u8] = &[0x50, 0x51, 0x52, 0x53,
0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7];
static KEY: &'static [u8] = &[
0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f];
static NONCE: &'static [u8] = &[0x07, 0x00, 0x00, 0x00,
0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47];
static CIPHERTEXT: &'static [u8] = &[
0xd3, 0x1a, 0x8d, 0x34, 0x64, 0x8e, 0x60, 0xdb,
0x7b, 0x86, 0xaf, 0xbc, 0x53, 0xef, 0x7e, 0xc2,
0xa4, 0xad, 0xed, 0x51, 0x29, 0x6e, 0x08, 0xfe,
0xa9, 0xe2, 0xb5, 0xa7, 0x36, 0xee, 0x62, 0xd6,
0x3d, 0xbe, 0xa4, 0x5e, 0x8c, 0xa9, 0x67, 0x12,
0x82, 0xfa, 0xfb, 0x69, 0xda, 0x92, 0x72, 0x8b,
0x1a, 0x71, 0xde, 0x0a, 0x9e, 0x06, 0x0b, 0x29,
0x05, 0xd6, 0xa5, 0xb6, 0x7e, 0xcd, 0x3b, 0x36,
0x92, 0xdd, 0xbd, 0x7f, 0x2d, 0x77, 0x8b, 0x8c,
0x98, 0x03, 0xae, 0xe3, 0x28, 0x09, 0x1b, 0x58,
0xfa, 0xb3, 0x24, 0xe4, 0xfa, 0xd6, 0x75, 0x94,
0x55, 0x85, 0x80, 0x8b, 0x48, 0x31, 0xd7, 0xbc,
0x3f, 0xf4, 0xde, 0xf0, 0x8e, 0x4b, 0x7a, 0x9d,
0xe5, 0x76, 0xd2, 0x65, 0x86, 0xce, 0xc6, 0x4b,
0x61, 0x16];
static TAG: &'static [u8] = &[
0x1a, 0xe1, 0x0b, 0x59, 0x4f, 0x09, 0xe2, 0x6a,
0x7e, 0x90, 0x2e, 0xcb, 0xd0, 0x60, 0x06, 0x91];
#[cold]
pub fn selftest() {
selftest_encrypt();
selftest_decrypt();
selftest_decrypt_mismatch();
}
#[cold]
pub fn selftest_encrypt() {
let mut output = Vec::with_capacity(PLAINTEXT.len());
let tag = encrypt(KEY, NONCE, AAD, PLAINTEXT, &mut output)
.expect("selftest failure");
assert_eq!(&output[..], CIPHERTEXT);
assert_eq!(tag, TAG);
}
#[cold]
pub fn selftest_decrypt() {
let mut output = Vec::with_capacity(CIPHERTEXT.len());
decrypt(KEY, NONCE, AAD, CIPHERTEXT, TAG, &mut output)
.expect("selftest failure");
assert_eq!(&output[..], PLAINTEXT);
}
#[cold]
pub fn selftest_decrypt_mismatch() {
let mut output = Vec::with_capacity(0);
let result = decrypt(KEY, NONCE, AAD, CIPHERTEXT, &[0; 16],
&mut output);
if let Err(DecryptError::TagMismatch) = result {
assert!(output.is_empty());
} else {
panic!("selftest failure");
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn selftest_encrypt() {
selftest::selftest_encrypt();
}
#[test]
fn selftest_decrypt() {
selftest::selftest_decrypt();
}
#[test]
fn selftest_decrypt_mismatch() {
selftest::selftest_decrypt_mismatch();
}
#[test]
fn test_encrypt() {
let mut output = Vec::with_capacity(PLAINTEXT.len());
let tag = encrypt(KEY, NONCE, AAD, PLAINTEXT.as_bytes(),
&mut output).expect("test failed");
assert_eq!(&output[..], CIPHERTEXT);
assert_eq!(tag, TAG);
}
#[test]
fn test_decrypt() {
let mut output = Vec::with_capacity(CIPHERTEXT.len());
decrypt(KEY, NONCE, AAD, CIPHERTEXT, TAG,
&mut output).expect("test failed");
assert_eq!(&output[..], PLAINTEXT.as_bytes());
}
static KEY: &'static [u8] = &[
0x1c, 0x92, 0x40, 0xa5, 0xeb, 0x55, 0xd3, 0x8a,
0xf3, 0x33, 0x88, 0x86, 0x04, 0xf6, 0xb5, 0xf0,
0x47, 0x39, 0x17, 0xc1, 0x40, 0x2b, 0x80, 0x09,
0x9d, 0xca, 0x5c, 0xbc, 0x20, 0x70, 0x75, 0xc0];
static CIPHERTEXT: &'static [u8] = &[
0x64, 0xa0, 0x86, 0x15, 0x75, 0x86, 0x1a, 0xf4,
0x60, 0xf0, 0x62, 0xc7, 0x9b, 0xe6, 0x43, 0xbd,
0x5e, 0x80, 0x5c, 0xfd, 0x34, 0x5c, 0xf3, 0x89,
0xf1, 0x08, 0x67, 0x0a, 0xc7, 0x6c, 0x8c, 0xb2,
0x4c, 0x6c, 0xfc, 0x18, 0x75, 0x5d, 0x43, 0xee,
0xa0, 0x9e, 0xe9, 0x4e, 0x38, 0x2d, 0x26, 0xb0,
0xbd, 0xb7, 0xb7, 0x3c, 0x32, 0x1b, 0x01, 0x00,
0xd4, 0xf0, 0x3b, 0x7f, 0x35, 0x58, 0x94, 0xcf,
0x33, 0x2f, 0x83, 0x0e, 0x71, 0x0b, 0x97, 0xce,
0x98, 0xc8, 0xa8, 0x4a, 0xbd, 0x0b, 0x94, 0x81,
0x14, 0xad, 0x17, 0x6e, 0x00, 0x8d, 0x33, 0xbd,
0x60, 0xf9, 0x82, 0xb1, 0xff, 0x37, 0xc8, 0x55,
0x97, 0x97, 0xa0, 0x6e, 0xf4, 0xf0, 0xef, 0x61,
0xc1, 0x86, 0x32, 0x4e, 0x2b, 0x35, 0x06, 0x38,
0x36, 0x06, 0x90, 0x7b, 0x6a, 0x7c, 0x02, 0xb0,
0xf9, 0xf6, 0x15, 0x7b, 0x53, 0xc8, 0x67, 0xe4,
0xb9, 0x16, 0x6c, 0x76, 0x7b, 0x80, 0x4d, 0x46,
0xa5, 0x9b, 0x52, 0x16, 0xcd, 0xe7, 0xa4, 0xe9,
0x90, 0x40, 0xc5, 0xa4, 0x04, 0x33, 0x22, 0x5e,
0xe2, 0x82, 0xa1, 0xb0, 0xa0, 0x6c, 0x52, 0x3e,
0xaf, 0x45, 0x34, 0xd7, 0xf8, 0x3f, 0xa1, 0x15,
0x5b, 0x00, 0x47, 0x71, 0x8c, 0xbc, 0x54, 0x6a,
0x0d, 0x07, 0x2b, 0x04, 0xb3, 0x56, 0x4e, 0xea,
0x1b, 0x42, 0x22, 0x73, 0xf5, 0x48, 0x27, 0x1a,
0x0b, 0xb2, 0x31, 0x60, 0x53, 0xfa, 0x76, 0x99,
0x19, 0x55, 0xeb, 0xd6, 0x31, 0x59, 0x43, 0x4e,
0xce, 0xbb, 0x4e, 0x46, 0x6d, 0xae, 0x5a, 0x10,
0x73, 0xa6, 0x72, 0x76, 0x27, 0x09, 0x7a, 0x10,
0x49, 0xe6, 0x17, 0xd9, 0x1d, 0x36, 0x10, 0x94,
0xfa, 0x68, 0xf0, 0xff, 0x77, 0x98, 0x71, 0x30,
0x30, 0x5b, 0xea, 0xba, 0x2e, 0xda, 0x04, 0xdf,
0x99, 0x7b, 0x71, 0x4d, 0x6c, 0x6f, 0x2c, 0x29,
0xa6, 0xad, 0x5c, 0xb4, 0x02, 0x2b, 0x02, 0x70,
0x9b];
static NONCE: &'static [u8] = &[0x00, 0x00, 0x00, 0x00,
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08];
static AAD: &'static [u8] = &[0xf3, 0x33, 0x88, 0x86,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x4e, 0x91];
static TAG: &'static [u8] = &[
0xee, 0xad, 0x9d, 0x67, 0x89, 0x0c, 0xbb, 0x22,
0x39, 0x23, 0x36, 0xfe, 0xa1, 0x85, 0x1f, 0x38];
static PLAINTEXT: &'static str = "\
Internet-Drafts are draft documents valid for a maximum of six m\
onths and may be updated, replaced, or obsoleted by other docume\
nts at any time. It is inappropriate to use Internet-Drafts as r\
eference material or to cite them other than as /\u{201c}work in prog\
ress./\u{201d}";
}
#[cfg(all(feature = "bench", test))]
mod bench {
use test::{Bencher, black_box};
use super::*;
#[cfg_attr(feature = "clippy", allow(result_unwrap_used))]
fn bench_encrypt(b: &mut Bencher, aad: &[u8], data: &[u8]) {
let key = [!0; 32];
let nonce = [!0; 12];
let mut buf = Vec::with_capacity(data.len());
b.bytes = data.len() as u64;
b.iter(|| {
buf.clear();
encrypt(black_box(&key), black_box(&nonce),
black_box(aad), black_box(data),
black_box(&mut buf)).unwrap()
})
}
#[cfg_attr(feature = "clippy", allow(result_unwrap_used))]
fn bench_decrypt(b: &mut Bencher, aad: &[u8], data: &[u8]) {
let key = [!0; 32];
let nonce = [!0; 12];
let mut ciphertext = Vec::with_capacity(data.len());
let tag = encrypt(&key, &nonce, aad, data, &mut ciphertext).unwrap();
let input = &ciphertext[..];
let mut buf = Vec::with_capacity(data.len());
b.bytes = data.len() as u64;
b.iter(|| {
buf.clear();
decrypt(black_box(&key), black_box(&nonce),
black_box(aad), black_box(input), black_box(&tag),
black_box(&mut buf)).unwrap()
})
}
#[bench]
fn bench_encrypt_16(b: &mut Bencher) {
bench_encrypt(b, &[!0; 16], &[!0; 16])
}
#[bench]
fn bench_encrypt_4k(b: &mut Bencher) {
bench_encrypt(b, &[!0; 16], &[!0; 4096])
}
#[bench]
fn bench_encrypt_64k(b: &mut Bencher) {
bench_encrypt(b, &[!0; 16], &[!0; 65536])
}
#[bench]
fn bench_decrypt_16(b: &mut Bencher) {
bench_decrypt(b, &[!0; 16], &[!0; 16])
}
#[bench]
fn bench_decrypt_4k(b: &mut Bencher) {
bench_decrypt(b, &[!0; 16], &[!0; 4096])
}
#[bench]
fn bench_decrypt_64k(b: &mut Bencher)
|
{
bench_decrypt(b, &[!0; 16], &[!0; 65536])
}
|
identifier_body
|
|
characterdata.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! DOM bindings for `CharacterData`.
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::CharacterDataBinding::CharacterDataMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeBinding::NodeMethods;
use dom::bindings::codegen::Bindings::ProcessingInstructionBinding::ProcessingInstructionMethods;
use dom::bindings::codegen::InheritTypes::{CharacterDataTypeId, NodeTypeId};
use dom::bindings::codegen::UnionTypes::NodeOrString;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{LayoutJS, Root};
use dom::bindings::str::DOMString;
use dom::comment::Comment;
use dom::document::Document;
use dom::element::Element;
use dom::node::{ChildrenMutation, Node, NodeDamage};
use dom::processinginstruction::ProcessingInstruction;
use dom::text::Text;
use dom::virtualmethods::vtable_for;
use dom_struct::dom_struct;
use servo_config::opts;
use std::cell::Ref;
// https://dom.spec.whatwg.org/#characterdata
#[dom_struct]
pub struct CharacterData {
node: Node,
data: DOMRefCell<DOMString>,
}
impl CharacterData {
pub fn new_inherited(data: DOMString, document: &Document) -> CharacterData {
CharacterData {
node: Node::new_inherited(document),
data: DOMRefCell::new(data),
}
}
pub fn clone_with_data(&self, data: DOMString, document: &Document) -> Root<Node> {
match self.upcast::<Node>().type_id() {
NodeTypeId::CharacterData(CharacterDataTypeId::Comment) => {
Root::upcast(Comment::new(data, &document))
}
NodeTypeId::CharacterData(CharacterDataTypeId::ProcessingInstruction) => {
let pi = self.downcast::<ProcessingInstruction>().unwrap();
Root::upcast(ProcessingInstruction::new(pi.Target(), data, &document))
},
NodeTypeId::CharacterData(CharacterDataTypeId::Text) => {
Root::upcast(Text::new(data, &document))
},
_ => unreachable!(),
}
}
#[inline]
pub fn data(&self) -> Ref<DOMString> {
|
#[inline]
pub fn append_data(&self, data: &str) {
self.data.borrow_mut().push_str(data);
self.content_changed();
}
fn content_changed(&self) {
let node = self.upcast::<Node>();
node.dirty(NodeDamage::OtherNodeDamage);
}
}
impl CharacterDataMethods for CharacterData {
// https://dom.spec.whatwg.org/#dom-characterdata-data
fn Data(&self) -> DOMString {
self.data.borrow().clone()
}
// https://dom.spec.whatwg.org/#dom-characterdata-data
fn SetData(&self, data: DOMString) {
let old_length = self.Length();
let new_length = data.encode_utf16().count() as u32;
*self.data.borrow_mut() = data;
self.content_changed();
let node = self.upcast::<Node>();
node.ranges().replace_code_units(node, 0, old_length, new_length);
// If this is a Text node, we might need to re-parse (say, if our parent
// is a <style> element.) We don't need to if this is a Comment or
// ProcessingInstruction.
if self.is::<Text>() {
if let Some(parent_node) = node.GetParentNode() {
let mutation = ChildrenMutation::ChangeText;
vtable_for(&parent_node).children_changed(&mutation);
}
}
}
// https://dom.spec.whatwg.org/#dom-characterdata-length
fn Length(&self) -> u32 {
self.data.borrow().encode_utf16().count() as u32
}
// https://dom.spec.whatwg.org/#dom-characterdata-substringdata
fn SubstringData(&self, offset: u32, count: u32) -> Fallible<DOMString> {
let data = self.data.borrow();
// Step 1.
let mut substring = String::new();
let remaining;
match split_at_utf16_code_unit_offset(&data, offset) {
Ok((_, astral, s)) => {
// As if we had split the UTF-16 surrogate pair in half
// and then transcoded that to UTF-8 lossily,
// since our DOMString is currently strict UTF-8.
if astral.is_some() {
substring = substring + "\u{FFFD}";
}
remaining = s;
}
// Step 2.
Err(()) => return Err(Error::IndexSize),
}
match split_at_utf16_code_unit_offset(remaining, count) {
// Steps 3.
Err(()) => substring = substring + remaining,
// Steps 4.
Ok((s, astral, _)) => {
substring = substring + s;
// As if we had split the UTF-16 surrogate pair in half
// and then transcoded that to UTF-8 lossily,
// since our DOMString is currently strict UTF-8.
if astral.is_some() {
substring = substring + "\u{FFFD}";
}
}
};
Ok(DOMString::from(substring))
}
// https://dom.spec.whatwg.org/#dom-characterdata-appenddatadata
fn AppendData(&self, data: DOMString) {
// FIXME(ajeffrey): Efficient append on DOMStrings?
self.append_data(&*data);
}
// https://dom.spec.whatwg.org/#dom-characterdata-insertdataoffset-data
fn InsertData(&self, offset: u32, arg: DOMString) -> ErrorResult {
self.ReplaceData(offset, 0, arg)
}
// https://dom.spec.whatwg.org/#dom-characterdata-deletedataoffset-count
fn DeleteData(&self, offset: u32, count: u32) -> ErrorResult {
self.ReplaceData(offset, count, DOMString::new())
}
// https://dom.spec.whatwg.org/#dom-characterdata-replacedata
fn ReplaceData(&self, offset: u32, count: u32, arg: DOMString) -> ErrorResult {
let mut new_data;
{
let data = self.data.borrow();
let prefix;
let replacement_before;
let remaining;
match split_at_utf16_code_unit_offset(&data, offset) {
Ok((p, astral, r)) => {
prefix = p;
// As if we had split the UTF-16 surrogate pair in half
// and then transcoded that to UTF-8 lossily,
// since our DOMString is currently strict UTF-8.
replacement_before = if astral.is_some() { "\u{FFFD}" } else { "" };
remaining = r;
}
// Step 2.
Err(()) => return Err(Error::IndexSize),
};
let replacement_after;
let suffix;
match split_at_utf16_code_unit_offset(remaining, count) {
// Steps 3.
Err(()) => {
replacement_after = "";
suffix = "";
}
Ok((_, astral, s)) => {
// As if we had split the UTF-16 surrogate pair in half
// and then transcoded that to UTF-8 lossily,
// since our DOMString is currently strict UTF-8.
replacement_after = if astral.is_some() { "\u{FFFD}" } else { "" };
suffix = s;
}
};
// Step 4: Mutation observers.
// Step 5 to 7.
new_data = String::with_capacity(
prefix.len() +
replacement_before.len() +
arg.len() +
replacement_after.len() +
suffix.len());
new_data.push_str(prefix);
new_data.push_str(replacement_before);
new_data.push_str(&arg);
new_data.push_str(replacement_after);
new_data.push_str(suffix);
}
*self.data.borrow_mut() = DOMString::from(new_data);
self.content_changed();
// Steps 8-11.
let node = self.upcast::<Node>();
node.ranges().replace_code_units(
node, offset, count, arg.encode_utf16().count() as u32);
Ok(())
}
// https://dom.spec.whatwg.org/#dom-childnode-before
fn Before(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().before(nodes)
}
// https://dom.spec.whatwg.org/#dom-childnode-after
fn After(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().after(nodes)
}
// https://dom.spec.whatwg.org/#dom-childnode-replacewith
fn ReplaceWith(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().replace_with(nodes)
}
// https://dom.spec.whatwg.org/#dom-childnode-remove
fn Remove(&self) {
let node = self.upcast::<Node>();
node.remove_self();
}
// https://dom.spec.whatwg.org/#dom-nondocumenttypechildnode-previouselementsibling
fn GetPreviousElementSibling(&self) -> Option<Root<Element>> {
self.upcast::<Node>().preceding_siblings().filter_map(Root::downcast).next()
}
// https://dom.spec.whatwg.org/#dom-nondocumenttypechildnode-nextelementsibling
fn GetNextElementSibling(&self) -> Option<Root<Element>> {
self.upcast::<Node>().following_siblings().filter_map(Root::downcast).next()
}
}
#[allow(unsafe_code)]
pub trait LayoutCharacterDataHelpers {
unsafe fn data_for_layout(&self) -> &str;
}
#[allow(unsafe_code)]
impl LayoutCharacterDataHelpers for LayoutJS<CharacterData> {
#[inline]
unsafe fn data_for_layout(&self) -> &str {
&(*self.unsafe_get()).data.borrow_for_layout()
}
}
/// Split the given string at the given position measured in UTF-16 code units from the start.
///
/// * `Err(())` indicates that `offset` if after the end of the string
/// * `Ok((before, None, after))` indicates that `offset` is between Unicode code points.
/// The two string slices are such that:
/// `before == s.to_utf16()[..offset].to_utf8()` and
/// `after == s.to_utf16()[offset..].to_utf8()`
/// * `Ok((before, Some(ch), after))` indicates that `offset` is "in the middle"
/// of a single Unicode code point that would be represented in UTF-16 by a surrogate pair
/// of two 16-bit code units.
/// `ch` is that code point.
/// The two string slices are such that:
/// `before == s.to_utf16()[..offset - 1].to_utf8()` and
/// `after == s.to_utf16()[offset + 1..].to_utf8()`
///
/// # Panics
///
/// Note that the third variant is only ever returned when the `-Z replace-surrogates`
/// command-line option is specified.
/// When it *would* be returned but the option is *not* specified, this function panics.
fn split_at_utf16_code_unit_offset(s: &str, offset: u32) -> Result<(&str, Option<char>, &str), ()> {
let mut code_units = 0;
for (i, c) in s.char_indices() {
if code_units == offset {
let (a, b) = s.split_at(i);
return Ok((a, None, b));
}
code_units += 1;
if c > '\u{FFFF}' {
if code_units == offset {
if opts::get().replace_surrogates {
debug_assert!(c.len_utf8() == 4);
return Ok((&s[..i], Some(c), &s[i + c.len_utf8()..]))
}
panic!("\n\n\
Would split a surrogate pair in CharacterData API.\n\
If you see this in real content, please comment with the URL\n\
on https://github.com/servo/servo/issues/6873\n\
\n");
}
code_units += 1;
}
}
if code_units == offset {
Ok((s, None, ""))
} else {
Err(())
}
}
|
self.data.borrow()
}
|
random_line_split
|
characterdata.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! DOM bindings for `CharacterData`.
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::CharacterDataBinding::CharacterDataMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeBinding::NodeMethods;
use dom::bindings::codegen::Bindings::ProcessingInstructionBinding::ProcessingInstructionMethods;
use dom::bindings::codegen::InheritTypes::{CharacterDataTypeId, NodeTypeId};
use dom::bindings::codegen::UnionTypes::NodeOrString;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{LayoutJS, Root};
use dom::bindings::str::DOMString;
use dom::comment::Comment;
use dom::document::Document;
use dom::element::Element;
use dom::node::{ChildrenMutation, Node, NodeDamage};
use dom::processinginstruction::ProcessingInstruction;
use dom::text::Text;
use dom::virtualmethods::vtable_for;
use dom_struct::dom_struct;
use servo_config::opts;
use std::cell::Ref;
// https://dom.spec.whatwg.org/#characterdata
#[dom_struct]
pub struct CharacterData {
node: Node,
data: DOMRefCell<DOMString>,
}
impl CharacterData {
pub fn new_inherited(data: DOMString, document: &Document) -> CharacterData {
CharacterData {
node: Node::new_inherited(document),
data: DOMRefCell::new(data),
}
}
pub fn clone_with_data(&self, data: DOMString, document: &Document) -> Root<Node> {
match self.upcast::<Node>().type_id() {
NodeTypeId::CharacterData(CharacterDataTypeId::Comment) => {
Root::upcast(Comment::new(data, &document))
}
NodeTypeId::CharacterData(CharacterDataTypeId::ProcessingInstruction) => {
let pi = self.downcast::<ProcessingInstruction>().unwrap();
Root::upcast(ProcessingInstruction::new(pi.Target(), data, &document))
},
NodeTypeId::CharacterData(CharacterDataTypeId::Text) => {
Root::upcast(Text::new(data, &document))
},
_ => unreachable!(),
}
}
#[inline]
pub fn data(&self) -> Ref<DOMString> {
self.data.borrow()
}
#[inline]
pub fn append_data(&self, data: &str) {
self.data.borrow_mut().push_str(data);
self.content_changed();
}
fn content_changed(&self) {
let node = self.upcast::<Node>();
node.dirty(NodeDamage::OtherNodeDamage);
}
}
impl CharacterDataMethods for CharacterData {
// https://dom.spec.whatwg.org/#dom-characterdata-data
fn Data(&self) -> DOMString {
self.data.borrow().clone()
}
// https://dom.spec.whatwg.org/#dom-characterdata-data
fn SetData(&self, data: DOMString) {
let old_length = self.Length();
let new_length = data.encode_utf16().count() as u32;
*self.data.borrow_mut() = data;
self.content_changed();
let node = self.upcast::<Node>();
node.ranges().replace_code_units(node, 0, old_length, new_length);
// If this is a Text node, we might need to re-parse (say, if our parent
// is a <style> element.) We don't need to if this is a Comment or
// ProcessingInstruction.
if self.is::<Text>() {
if let Some(parent_node) = node.GetParentNode() {
let mutation = ChildrenMutation::ChangeText;
vtable_for(&parent_node).children_changed(&mutation);
}
}
}
// https://dom.spec.whatwg.org/#dom-characterdata-length
fn Length(&self) -> u32 {
self.data.borrow().encode_utf16().count() as u32
}
// https://dom.spec.whatwg.org/#dom-characterdata-substringdata
fn SubstringData(&self, offset: u32, count: u32) -> Fallible<DOMString> {
let data = self.data.borrow();
// Step 1.
let mut substring = String::new();
let remaining;
match split_at_utf16_code_unit_offset(&data, offset) {
Ok((_, astral, s)) => {
// As if we had split the UTF-16 surrogate pair in half
// and then transcoded that to UTF-8 lossily,
// since our DOMString is currently strict UTF-8.
if astral.is_some() {
substring = substring + "\u{FFFD}";
}
remaining = s;
}
// Step 2.
Err(()) => return Err(Error::IndexSize),
}
match split_at_utf16_code_unit_offset(remaining, count) {
// Steps 3.
Err(()) => substring = substring + remaining,
// Steps 4.
Ok((s, astral, _)) => {
substring = substring + s;
// As if we had split the UTF-16 surrogate pair in half
// and then transcoded that to UTF-8 lossily,
// since our DOMString is currently strict UTF-8.
if astral.is_some() {
substring = substring + "\u{FFFD}";
}
}
};
Ok(DOMString::from(substring))
}
// https://dom.spec.whatwg.org/#dom-characterdata-appenddatadata
fn AppendData(&self, data: DOMString) {
// FIXME(ajeffrey): Efficient append on DOMStrings?
self.append_data(&*data);
}
// https://dom.spec.whatwg.org/#dom-characterdata-insertdataoffset-data
fn InsertData(&self, offset: u32, arg: DOMString) -> ErrorResult {
self.ReplaceData(offset, 0, arg)
}
// https://dom.spec.whatwg.org/#dom-characterdata-deletedataoffset-count
fn DeleteData(&self, offset: u32, count: u32) -> ErrorResult {
self.ReplaceData(offset, count, DOMString::new())
}
// https://dom.spec.whatwg.org/#dom-characterdata-replacedata
fn ReplaceData(&self, offset: u32, count: u32, arg: DOMString) -> ErrorResult {
let mut new_data;
{
let data = self.data.borrow();
let prefix;
let replacement_before;
let remaining;
match split_at_utf16_code_unit_offset(&data, offset) {
Ok((p, astral, r)) => {
prefix = p;
// As if we had split the UTF-16 surrogate pair in half
// and then transcoded that to UTF-8 lossily,
// since our DOMString is currently strict UTF-8.
replacement_before = if astral.is_some() { "\u{FFFD}" } else { "" };
remaining = r;
}
// Step 2.
Err(()) => return Err(Error::IndexSize),
};
let replacement_after;
let suffix;
match split_at_utf16_code_unit_offset(remaining, count) {
// Steps 3.
Err(()) => {
replacement_after = "";
suffix = "";
}
Ok((_, astral, s)) => {
// As if we had split the UTF-16 surrogate pair in half
// and then transcoded that to UTF-8 lossily,
// since our DOMString is currently strict UTF-8.
replacement_after = if astral.is_some() { "\u{FFFD}" } else { "" };
suffix = s;
}
};
// Step 4: Mutation observers.
// Step 5 to 7.
new_data = String::with_capacity(
prefix.len() +
replacement_before.len() +
arg.len() +
replacement_after.len() +
suffix.len());
new_data.push_str(prefix);
new_data.push_str(replacement_before);
new_data.push_str(&arg);
new_data.push_str(replacement_after);
new_data.push_str(suffix);
}
*self.data.borrow_mut() = DOMString::from(new_data);
self.content_changed();
// Steps 8-11.
let node = self.upcast::<Node>();
node.ranges().replace_code_units(
node, offset, count, arg.encode_utf16().count() as u32);
Ok(())
}
// https://dom.spec.whatwg.org/#dom-childnode-before
fn Before(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().before(nodes)
}
// https://dom.spec.whatwg.org/#dom-childnode-after
fn After(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().after(nodes)
}
// https://dom.spec.whatwg.org/#dom-childnode-replacewith
fn ReplaceWith(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().replace_with(nodes)
}
// https://dom.spec.whatwg.org/#dom-childnode-remove
fn Remove(&self) {
let node = self.upcast::<Node>();
node.remove_self();
}
// https://dom.spec.whatwg.org/#dom-nondocumenttypechildnode-previouselementsibling
fn GetPreviousElementSibling(&self) -> Option<Root<Element>> {
self.upcast::<Node>().preceding_siblings().filter_map(Root::downcast).next()
}
// https://dom.spec.whatwg.org/#dom-nondocumenttypechildnode-nextelementsibling
fn GetNextElementSibling(&self) -> Option<Root<Element>> {
self.upcast::<Node>().following_siblings().filter_map(Root::downcast).next()
}
}
#[allow(unsafe_code)]
pub trait LayoutCharacterDataHelpers {
unsafe fn data_for_layout(&self) -> &str;
}
#[allow(unsafe_code)]
impl LayoutCharacterDataHelpers for LayoutJS<CharacterData> {
#[inline]
unsafe fn
|
(&self) -> &str {
&(*self.unsafe_get()).data.borrow_for_layout()
}
}
/// Split the given string at the given position measured in UTF-16 code units from the start.
///
/// * `Err(())` indicates that `offset` if after the end of the string
/// * `Ok((before, None, after))` indicates that `offset` is between Unicode code points.
/// The two string slices are such that:
/// `before == s.to_utf16()[..offset].to_utf8()` and
/// `after == s.to_utf16()[offset..].to_utf8()`
/// * `Ok((before, Some(ch), after))` indicates that `offset` is "in the middle"
/// of a single Unicode code point that would be represented in UTF-16 by a surrogate pair
/// of two 16-bit code units.
/// `ch` is that code point.
/// The two string slices are such that:
/// `before == s.to_utf16()[..offset - 1].to_utf8()` and
/// `after == s.to_utf16()[offset + 1..].to_utf8()`
///
/// # Panics
///
/// Note that the third variant is only ever returned when the `-Z replace-surrogates`
/// command-line option is specified.
/// When it *would* be returned but the option is *not* specified, this function panics.
fn split_at_utf16_code_unit_offset(s: &str, offset: u32) -> Result<(&str, Option<char>, &str), ()> {
let mut code_units = 0;
for (i, c) in s.char_indices() {
if code_units == offset {
let (a, b) = s.split_at(i);
return Ok((a, None, b));
}
code_units += 1;
if c > '\u{FFFF}' {
if code_units == offset {
if opts::get().replace_surrogates {
debug_assert!(c.len_utf8() == 4);
return Ok((&s[..i], Some(c), &s[i + c.len_utf8()..]))
}
panic!("\n\n\
Would split a surrogate pair in CharacterData API.\n\
If you see this in real content, please comment with the URL\n\
on https://github.com/servo/servo/issues/6873\n\
\n");
}
code_units += 1;
}
}
if code_units == offset {
Ok((s, None, ""))
} else {
Err(())
}
}
|
data_for_layout
|
identifier_name
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.