file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
input_state.rs | use std::vec::Vec;
use operation::{ Operation, Direction, };
pub struct InputState {
pub left: bool,
pub right: bool,
pub up: bool,
pub down: bool,
pub enter: bool,
pub cancel: bool,
}
impl InputState {
pub fn new() -> InputState {
InputState {
left: false, right: false, up: false, down: false,
enter: false, cancel: false,
}
}
pub fn press(&mut self, op: Operation) -> &mut Self {
self.update(op, true)
}
pub fn release(&mut self, op: Operation) -> &mut Self {
self.update(op, false)
}
fn update(&mut self, op: Operation, value: bool) -> &mut Self {
match op {
Operation::Move(dir) => match dir {
Direction::Left => self.left = value,
Direction::Right => self.right = value,
Direction::Up => self.up = value,
Direction::Down => self.down = value,
},
Operation::Enter => self.enter = value,
Operation::Cancel => self.cancel = value,
_ => {}
}
self
}
pub fn vec(&self) -> Vec<Operation> {
let mut states = Vec::new();
if self.left { states.push(Operation::Move(Direction::Left)); }
if self.right { states.push(Operation::Move(Direction::Right)); }
if self.up { states.push(Operation::Move(Direction::Up)); }
if self.down { states.push(Operation::Move(Direction::Down)); }
if self.enter |
if self.cancel { states.push(Operation::Cancel); }
states
}
}
| { states.push(Operation::Enter); } | conditional_block |
input_state.rs | use std::vec::Vec;
use operation::{ Operation, Direction, };
pub struct InputState {
pub left: bool,
pub right: bool,
pub up: bool,
pub down: bool,
pub enter: bool,
pub cancel: bool,
}
impl InputState {
pub fn new() -> InputState {
InputState {
left: false, right: false, up: false, down: false,
enter: false, cancel: false,
}
}
pub fn press(&mut self, op: Operation) -> &mut Self {
self.update(op, true)
}
pub fn | (&mut self, op: Operation) -> &mut Self {
self.update(op, false)
}
fn update(&mut self, op: Operation, value: bool) -> &mut Self {
match op {
Operation::Move(dir) => match dir {
Direction::Left => self.left = value,
Direction::Right => self.right = value,
Direction::Up => self.up = value,
Direction::Down => self.down = value,
},
Operation::Enter => self.enter = value,
Operation::Cancel => self.cancel = value,
_ => {}
}
self
}
pub fn vec(&self) -> Vec<Operation> {
let mut states = Vec::new();
if self.left { states.push(Operation::Move(Direction::Left)); }
if self.right { states.push(Operation::Move(Direction::Right)); }
if self.up { states.push(Operation::Move(Direction::Up)); }
if self.down { states.push(Operation::Move(Direction::Down)); }
if self.enter { states.push(Operation::Enter); }
if self.cancel { states.push(Operation::Cancel); }
states
}
}
| release | identifier_name |
input_state.rs | use std::vec::Vec; | use operation::{ Operation, Direction, };
pub struct InputState {
pub left: bool,
pub right: bool,
pub up: bool,
pub down: bool,
pub enter: bool,
pub cancel: bool,
}
impl InputState {
pub fn new() -> InputState {
InputState {
left: false, right: false, up: false, down: false,
enter: false, cancel: false,
}
}
pub fn press(&mut self, op: Operation) -> &mut Self {
self.update(op, true)
}
pub fn release(&mut self, op: Operation) -> &mut Self {
self.update(op, false)
}
fn update(&mut self, op: Operation, value: bool) -> &mut Self {
match op {
Operation::Move(dir) => match dir {
Direction::Left => self.left = value,
Direction::Right => self.right = value,
Direction::Up => self.up = value,
Direction::Down => self.down = value,
},
Operation::Enter => self.enter = value,
Operation::Cancel => self.cancel = value,
_ => {}
}
self
}
pub fn vec(&self) -> Vec<Operation> {
let mut states = Vec::new();
if self.left { states.push(Operation::Move(Direction::Left)); }
if self.right { states.push(Operation::Move(Direction::Right)); }
if self.up { states.push(Operation::Move(Direction::Up)); }
if self.down { states.push(Operation::Move(Direction::Down)); }
if self.enter { states.push(Operation::Enter); }
if self.cancel { states.push(Operation::Cancel); }
states
}
} | random_line_split |
|
spatial_reference.rs | extern crate gdal; |
fn run() -> Result<(), gdal::errors::Error> {
let spatial_ref1 = SpatialRef::from_proj4(
"+proj=laea +lat_0=52 +lon_0=10 +x_0=4321000 +y_0=3210000 +ellps=GRS80 +units=m +no_defs",
)?;
println!(
"Spatial ref from proj4 to wkt:\n{:?}\n",
spatial_ref1.to_wkt()?
);
let spatial_ref2 = SpatialRef::from_wkt("GEOGCS[\"WGS 84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY[\"EPSG\",7030]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY[\"EPSG\",6326]],PRIMEM[\"Greenwich\",0,AUTHORITY[\"EPSG\",8901]],UNIT[\"DMSH\",0.0174532925199433,AUTHORITY[\"EPSG\",9108]],AXIS[\"Lat\",NORTH],AXIS[\"Long\",EAST],AUTHORITY[\"EPSG\",4326]]")?;
println!(
"Spatial ref from wkt to proj4:\n{:?}\n",
spatial_ref2.to_proj4()?
);
let spatial_ref3 = SpatialRef::from_definition("urn:ogc:def:crs:EPSG:6.3:26986")?;
println!(
"Spatial ref from ogc naming to wkt:\n{:?}\n",
spatial_ref3.to_wkt()?
);
let spatial_ref4 = SpatialRef::from_epsg(4326)?;
println!(
"Spatial ref from epsg code to wkt:\n{:?}\n",
spatial_ref4.to_wkt()?
);
println!(
"Spatial ref from epsg code to pretty wkt:\n{:?}\n",
spatial_ref4.to_pretty_wkt()?
);
println!(
"Comparison between identical SRS : {:?}\n",
spatial_ref2 == spatial_ref4
);
let htransform = CoordTransform::new(&spatial_ref2, &spatial_ref1)?;
let mut xs = [23.43, 23.50];
let mut ys = [37.58, 37.70];
println!("Before transformation :\n{:?} {:?}", xs, ys);
htransform.transform_coords(&mut xs, &mut ys, &mut [0.0, 0.0])?;
println!("After transformation :\n{:?} {:?}\n", xs, ys);
let geom = Geometry::from_wkt(
"POLYGON((23.43 37.58, 23.43 40.0, 25.29 40.0, 25.29 37.58, 23.43 37.58))",
)?;
println!("Polygon before transformation:\n{:?}\n", geom.wkt()?);
geom.transform(&htransform)?;
println!("Polygon after transformation:\n{:?}\n", geom.wkt()?);
let spatial_ref5 = SpatialRef::from_epsg(4326)?;
println!("To wkt: {:?}", spatial_ref5.to_wkt());
spatial_ref5.morph_to_esri()?;
println!("To esri wkt: {:?}", spatial_ref5.to_wkt());
println!("To xml: {:?}", spatial_ref5.to_xml());
Ok(())
}
fn main() {
run().unwrap();
} |
use gdal::spatial_ref::{CoordTransform, SpatialRef};
use gdal::vector::Geometry; | random_line_split |
spatial_reference.rs | extern crate gdal;
use gdal::spatial_ref::{CoordTransform, SpatialRef};
use gdal::vector::Geometry;
fn | () -> Result<(), gdal::errors::Error> {
let spatial_ref1 = SpatialRef::from_proj4(
"+proj=laea +lat_0=52 +lon_0=10 +x_0=4321000 +y_0=3210000 +ellps=GRS80 +units=m +no_defs",
)?;
println!(
"Spatial ref from proj4 to wkt:\n{:?}\n",
spatial_ref1.to_wkt()?
);
let spatial_ref2 = SpatialRef::from_wkt("GEOGCS[\"WGS 84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY[\"EPSG\",7030]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY[\"EPSG\",6326]],PRIMEM[\"Greenwich\",0,AUTHORITY[\"EPSG\",8901]],UNIT[\"DMSH\",0.0174532925199433,AUTHORITY[\"EPSG\",9108]],AXIS[\"Lat\",NORTH],AXIS[\"Long\",EAST],AUTHORITY[\"EPSG\",4326]]")?;
println!(
"Spatial ref from wkt to proj4:\n{:?}\n",
spatial_ref2.to_proj4()?
);
let spatial_ref3 = SpatialRef::from_definition("urn:ogc:def:crs:EPSG:6.3:26986")?;
println!(
"Spatial ref from ogc naming to wkt:\n{:?}\n",
spatial_ref3.to_wkt()?
);
let spatial_ref4 = SpatialRef::from_epsg(4326)?;
println!(
"Spatial ref from epsg code to wkt:\n{:?}\n",
spatial_ref4.to_wkt()?
);
println!(
"Spatial ref from epsg code to pretty wkt:\n{:?}\n",
spatial_ref4.to_pretty_wkt()?
);
println!(
"Comparison between identical SRS : {:?}\n",
spatial_ref2 == spatial_ref4
);
let htransform = CoordTransform::new(&spatial_ref2, &spatial_ref1)?;
let mut xs = [23.43, 23.50];
let mut ys = [37.58, 37.70];
println!("Before transformation :\n{:?} {:?}", xs, ys);
htransform.transform_coords(&mut xs, &mut ys, &mut [0.0, 0.0])?;
println!("After transformation :\n{:?} {:?}\n", xs, ys);
let geom = Geometry::from_wkt(
"POLYGON((23.43 37.58, 23.43 40.0, 25.29 40.0, 25.29 37.58, 23.43 37.58))",
)?;
println!("Polygon before transformation:\n{:?}\n", geom.wkt()?);
geom.transform(&htransform)?;
println!("Polygon after transformation:\n{:?}\n", geom.wkt()?);
let spatial_ref5 = SpatialRef::from_epsg(4326)?;
println!("To wkt: {:?}", spatial_ref5.to_wkt());
spatial_ref5.morph_to_esri()?;
println!("To esri wkt: {:?}", spatial_ref5.to_wkt());
println!("To xml: {:?}", spatial_ref5.to_xml());
Ok(())
}
fn main() {
run().unwrap();
}
| run | identifier_name |
spatial_reference.rs | extern crate gdal;
use gdal::spatial_ref::{CoordTransform, SpatialRef};
use gdal::vector::Geometry;
fn run() -> Result<(), gdal::errors::Error> | "Spatial ref from epsg code to wkt:\n{:?}\n",
spatial_ref4.to_wkt()?
);
println!(
"Spatial ref from epsg code to pretty wkt:\n{:?}\n",
spatial_ref4.to_pretty_wkt()?
);
println!(
"Comparison between identical SRS : {:?}\n",
spatial_ref2 == spatial_ref4
);
let htransform = CoordTransform::new(&spatial_ref2, &spatial_ref1)?;
let mut xs = [23.43, 23.50];
let mut ys = [37.58, 37.70];
println!("Before transformation :\n{:?} {:?}", xs, ys);
htransform.transform_coords(&mut xs, &mut ys, &mut [0.0, 0.0])?;
println!("After transformation :\n{:?} {:?}\n", xs, ys);
let geom = Geometry::from_wkt(
"POLYGON((23.43 37.58, 23.43 40.0, 25.29 40.0, 25.29 37.58, 23.43 37.58))",
)?;
println!("Polygon before transformation:\n{:?}\n", geom.wkt()?);
geom.transform(&htransform)?;
println!("Polygon after transformation:\n{:?}\n", geom.wkt()?);
let spatial_ref5 = SpatialRef::from_epsg(4326)?;
println!("To wkt: {:?}", spatial_ref5.to_wkt());
spatial_ref5.morph_to_esri()?;
println!("To esri wkt: {:?}", spatial_ref5.to_wkt());
println!("To xml: {:?}", spatial_ref5.to_xml());
Ok(())
}
fn main() {
run().unwrap();
}
| {
let spatial_ref1 = SpatialRef::from_proj4(
"+proj=laea +lat_0=52 +lon_0=10 +x_0=4321000 +y_0=3210000 +ellps=GRS80 +units=m +no_defs",
)?;
println!(
"Spatial ref from proj4 to wkt:\n{:?}\n",
spatial_ref1.to_wkt()?
);
let spatial_ref2 = SpatialRef::from_wkt("GEOGCS[\"WGS 84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY[\"EPSG\",7030]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY[\"EPSG\",6326]],PRIMEM[\"Greenwich\",0,AUTHORITY[\"EPSG\",8901]],UNIT[\"DMSH\",0.0174532925199433,AUTHORITY[\"EPSG\",9108]],AXIS[\"Lat\",NORTH],AXIS[\"Long\",EAST],AUTHORITY[\"EPSG\",4326]]")?;
println!(
"Spatial ref from wkt to proj4:\n{:?}\n",
spatial_ref2.to_proj4()?
);
let spatial_ref3 = SpatialRef::from_definition("urn:ogc:def:crs:EPSG:6.3:26986")?;
println!(
"Spatial ref from ogc naming to wkt:\n{:?}\n",
spatial_ref3.to_wkt()?
);
let spatial_ref4 = SpatialRef::from_epsg(4326)?;
println!( | identifier_body |
vga.rs | use core::prelude::*;
use io;
const BACKSPACE: u8 = 0x08;
const TAB: u8 = 0x09;
const NEWLINE: u8 = 0x0A;
const CR: u8 = 0x0D;
const WHITESPACE: u8 = 0x20;
const VGA_ADDRESS: int = 0xB8000;
const VGA_HEIGHT: u16 = 25;
const VGA_WIDTH: u16 = 80;
enum Color {
Black = 0,
Blue = 1,
Green = 2,
Cyan = 3,
Red = 4,
Pink = 5,
Brown = 6,
LightGray = 7,
DarkGray = 8,
LightBlue = 9,
LightGreen = 10,
LightCyan = 11,
LightRed = 12,
LightPink = 13,
Yellow = 14,
White = 15,
}
static mut VGA: VGA = VGA { height: VGA_HEIGHT, width: VGA_WIDTH, x: 0, y: 0 };
struct VGA {
height: u16,
width: u16,
x: u16,
y: u16,
}
impl VGA {
fn new() -> VGA {
VGA {height: VGA_HEIGHT, width: VGA_WIDTH, x: 0, y: 0}
}
fn back(&mut self) {
if self.x > 0 {
self.x -= 1;
let offset = self.offset();
self.put(offset, Black as u16, White as u16, WHITESPACE);
}
}
fn forward(&mut self) {
self.x += 1;
if self.x >= self.width {
self.newline();
}
}
fn cr(&mut self) {
self.x = 0;
}
fn newline(&mut self) {
self.x = 0;
self.y += 1;
}
fn offset(&self) -> u16 {
self.y * self.width + self.x
}
fn reset(&mut self) {
self.x = 0;
self.y = 0;
self.mov();
}
// handle a tab by increasing the cursor's X, but only to a point
// where it is divisible by 8
fn tab(&mut self) {
self.x = (self.x + 8) &!(8 - 1);
}
fn mov(&mut self) {
let offset = self.offset() as u8;
io::port::write(0x3D4, 14); // tell the VGA board we are setting the high cursor byte
io::port::write(0x3D5, offset >> 8); // send the high cursor byte
io::port::write(0x3D4, 15); // tell the VGA board we are setting the low cursor byte
io::port::write(0x3D5, offset);
}
fn clear (&mut self) {
let mut x: u16 = 0;
loop {
if x > 80 * 200 {
break;
}
self.put(x, Black as u16, White as u16, WHITESPACE);
x += 1;
}
}
fn put(&mut self, offset: u16, background: u16, foreground: u16, character: u8) {
let pixel: u16 = (background << 12) | (foreground << 8) | character as u16;
unsafe {
*((VGA_ADDRESS + offset as int * 2) as *mut u16) = pixel;
}
}
fn putc(&mut self, character: u8) {
if character == BACKSPACE {
self.back();
}
else if character == TAB {
self.tab();
}
else if character == NEWLINE {
self.newline();
}
else if character == CR {
self.cr();
}
else if character >= WHITESPACE {
let offset = self.offset();
self.put(offset, Black as u16, White as u16, character);
self.forward();
}
self.mov();
}
fn puti(&mut self, integer: uint) {
if integer == 0 {
self.puts("0");
}
else {
let mut integer = integer;
let mut reversed = 0;
while integer > 0 {
reversed *= 10;
reversed += integer % 10;
integer /= 10;
}
while reversed > 0 {
let character = (reversed % 10) as u8 + '0' as u8;
self.putc(character);
reversed /= 10;
}
}
}
fn puth(&mut self, integer: uint) {
self.puts("0x");
let mut nibbles = 1;
while (integer >> nibbles * 4) > 0 {
nibbles += 1
}
for i in range(0, nibbles) {
let nibble = ((integer >> (nibbles - i - 1) * 4) & 0xF) as u8;
let character = if nibble < 10 { '0' as u8 + nibble } else { 'a' as u8 + nibble - 10 };
self.putc(character);
}
}
fn puts(&mut self, string: &str) {
for character in string.bytes() {
self.putc(character);
}
}
}
pub fn clear() {
unsafe {
VGA.clear();
}
}
pub fn puth(integer: uint) {
unsafe {
VGA.puth(integer);
}
}
pub fn puti(integer: uint) {
unsafe {
VGA.puti(integer);
}
}
| VGA.putc(character);
}
}
pub fn puts(string: &str) {
unsafe {
VGA.puts(string);
}
} | pub fn putc(character: u8) {
unsafe { | random_line_split |
vga.rs | use core::prelude::*;
use io;
const BACKSPACE: u8 = 0x08;
const TAB: u8 = 0x09;
const NEWLINE: u8 = 0x0A;
const CR: u8 = 0x0D;
const WHITESPACE: u8 = 0x20;
const VGA_ADDRESS: int = 0xB8000;
const VGA_HEIGHT: u16 = 25;
const VGA_WIDTH: u16 = 80;
enum Color {
Black = 0,
Blue = 1,
Green = 2,
Cyan = 3,
Red = 4,
Pink = 5,
Brown = 6,
LightGray = 7,
DarkGray = 8,
LightBlue = 9,
LightGreen = 10,
LightCyan = 11,
LightRed = 12,
LightPink = 13,
Yellow = 14,
White = 15,
}
static mut VGA: VGA = VGA { height: VGA_HEIGHT, width: VGA_WIDTH, x: 0, y: 0 };
struct VGA {
height: u16,
width: u16,
x: u16,
y: u16,
}
impl VGA {
fn new() -> VGA {
VGA {height: VGA_HEIGHT, width: VGA_WIDTH, x: 0, y: 0}
}
fn back(&mut self) {
if self.x > 0 {
self.x -= 1;
let offset = self.offset();
self.put(offset, Black as u16, White as u16, WHITESPACE);
}
}
fn forward(&mut self) {
self.x += 1;
if self.x >= self.width {
self.newline();
}
}
fn cr(&mut self) {
self.x = 0;
}
fn newline(&mut self) {
self.x = 0;
self.y += 1;
}
fn offset(&self) -> u16 {
self.y * self.width + self.x
}
fn reset(&mut self) {
self.x = 0;
self.y = 0;
self.mov();
}
// handle a tab by increasing the cursor's X, but only to a point
// where it is divisible by 8
fn tab(&mut self) {
self.x = (self.x + 8) &!(8 - 1);
}
fn mov(&mut self) {
let offset = self.offset() as u8;
io::port::write(0x3D4, 14); // tell the VGA board we are setting the high cursor byte
io::port::write(0x3D5, offset >> 8); // send the high cursor byte
io::port::write(0x3D4, 15); // tell the VGA board we are setting the low cursor byte
io::port::write(0x3D5, offset);
}
fn clear (&mut self) {
let mut x: u16 = 0;
loop {
if x > 80 * 200 {
break;
}
self.put(x, Black as u16, White as u16, WHITESPACE);
x += 1;
}
}
fn put(&mut self, offset: u16, background: u16, foreground: u16, character: u8) {
let pixel: u16 = (background << 12) | (foreground << 8) | character as u16;
unsafe {
*((VGA_ADDRESS + offset as int * 2) as *mut u16) = pixel;
}
}
fn putc(&mut self, character: u8) | self.put(offset, Black as u16, White as u16, character);
self.forward();
}
self.mov();
}
fn puti(&mut self, integer: uint) {
if integer == 0 {
self.puts("0");
}
else {
let mut integer = integer;
let mut reversed = 0;
while integer > 0 {
reversed *= 10;
reversed += integer % 10;
integer /= 10;
}
while reversed > 0 {
let character = (reversed % 10) as u8 + '0' as u8;
self.putc(character);
reversed /= 10;
}
}
}
fn puth(&mut self, integer: uint) {
self.puts("0x");
let mut nibbles = 1;
while (integer >> nibbles * 4) > 0 {
nibbles += 1
}
for i in range(0, nibbles) {
let nibble = ((integer >> (nibbles - i - 1) * 4) & 0xF) as u8;
let character = if nibble < 10 { '0' as u8 + nibble } else { 'a' as u8 + nibble - 10 };
self.putc(character);
}
}
fn puts(&mut self, string: &str) {
for character in string.bytes() {
self.putc(character);
}
}
}
pub fn clear() {
unsafe {
VGA.clear();
}
}
pub fn puth(integer: uint) {
unsafe {
VGA.puth(integer);
}
}
pub fn puti(integer: uint) {
unsafe {
VGA.puti(integer);
}
}
pub fn putc(character: u8) {
unsafe {
VGA.putc(character);
}
}
pub fn puts(string: &str) {
unsafe {
VGA.puts(string);
}
}
| {
if character == BACKSPACE {
self.back();
}
else if character == TAB {
self.tab();
}
else if character == NEWLINE {
self.newline();
}
else if character == CR {
self.cr();
}
else if character >= WHITESPACE {
let offset = self.offset(); | identifier_body |
vga.rs | use core::prelude::*;
use io;
const BACKSPACE: u8 = 0x08;
const TAB: u8 = 0x09;
const NEWLINE: u8 = 0x0A;
const CR: u8 = 0x0D;
const WHITESPACE: u8 = 0x20;
const VGA_ADDRESS: int = 0xB8000;
const VGA_HEIGHT: u16 = 25;
const VGA_WIDTH: u16 = 80;
enum Color {
Black = 0,
Blue = 1,
Green = 2,
Cyan = 3,
Red = 4,
Pink = 5,
Brown = 6,
LightGray = 7,
DarkGray = 8,
LightBlue = 9,
LightGreen = 10,
LightCyan = 11,
LightRed = 12,
LightPink = 13,
Yellow = 14,
White = 15,
}
static mut VGA: VGA = VGA { height: VGA_HEIGHT, width: VGA_WIDTH, x: 0, y: 0 };
struct VGA {
height: u16,
width: u16,
x: u16,
y: u16,
}
impl VGA {
fn new() -> VGA {
VGA {height: VGA_HEIGHT, width: VGA_WIDTH, x: 0, y: 0}
}
fn back(&mut self) {
if self.x > 0 {
self.x -= 1;
let offset = self.offset();
self.put(offset, Black as u16, White as u16, WHITESPACE);
}
}
fn forward(&mut self) {
self.x += 1;
if self.x >= self.width {
self.newline();
}
}
fn cr(&mut self) {
self.x = 0;
}
fn newline(&mut self) {
self.x = 0;
self.y += 1;
}
fn offset(&self) -> u16 {
self.y * self.width + self.x
}
fn reset(&mut self) {
self.x = 0;
self.y = 0;
self.mov();
}
// handle a tab by increasing the cursor's X, but only to a point
// where it is divisible by 8
fn tab(&mut self) {
self.x = (self.x + 8) &!(8 - 1);
}
fn mov(&mut self) {
let offset = self.offset() as u8;
io::port::write(0x3D4, 14); // tell the VGA board we are setting the high cursor byte
io::port::write(0x3D5, offset >> 8); // send the high cursor byte
io::port::write(0x3D4, 15); // tell the VGA board we are setting the low cursor byte
io::port::write(0x3D5, offset);
}
fn clear (&mut self) {
let mut x: u16 = 0;
loop {
if x > 80 * 200 {
break;
}
self.put(x, Black as u16, White as u16, WHITESPACE);
x += 1;
}
}
fn put(&mut self, offset: u16, background: u16, foreground: u16, character: u8) {
let pixel: u16 = (background << 12) | (foreground << 8) | character as u16;
unsafe {
*((VGA_ADDRESS + offset as int * 2) as *mut u16) = pixel;
}
}
fn putc(&mut self, character: u8) {
if character == BACKSPACE {
self.back();
}
else if character == TAB {
self.tab();
}
else if character == NEWLINE {
self.newline();
}
else if character == CR {
self.cr();
}
else if character >= WHITESPACE {
let offset = self.offset();
self.put(offset, Black as u16, White as u16, character);
self.forward();
}
self.mov();
}
fn puti(&mut self, integer: uint) {
if integer == 0 {
self.puts("0");
}
else {
let mut integer = integer;
let mut reversed = 0;
while integer > 0 {
reversed *= 10;
reversed += integer % 10;
integer /= 10;
}
while reversed > 0 {
let character = (reversed % 10) as u8 + '0' as u8;
self.putc(character);
reversed /= 10;
}
}
}
fn | (&mut self, integer: uint) {
self.puts("0x");
let mut nibbles = 1;
while (integer >> nibbles * 4) > 0 {
nibbles += 1
}
for i in range(0, nibbles) {
let nibble = ((integer >> (nibbles - i - 1) * 4) & 0xF) as u8;
let character = if nibble < 10 { '0' as u8 + nibble } else { 'a' as u8 + nibble - 10 };
self.putc(character);
}
}
fn puts(&mut self, string: &str) {
for character in string.bytes() {
self.putc(character);
}
}
}
pub fn clear() {
unsafe {
VGA.clear();
}
}
pub fn puth(integer: uint) {
unsafe {
VGA.puth(integer);
}
}
pub fn puti(integer: uint) {
unsafe {
VGA.puti(integer);
}
}
pub fn putc(character: u8) {
unsafe {
VGA.putc(character);
}
}
pub fn puts(string: &str) {
unsafe {
VGA.puts(string);
}
}
| puth | identifier_name |
vga.rs | use core::prelude::*;
use io;
const BACKSPACE: u8 = 0x08;
const TAB: u8 = 0x09;
const NEWLINE: u8 = 0x0A;
const CR: u8 = 0x0D;
const WHITESPACE: u8 = 0x20;
const VGA_ADDRESS: int = 0xB8000;
const VGA_HEIGHT: u16 = 25;
const VGA_WIDTH: u16 = 80;
enum Color {
Black = 0,
Blue = 1,
Green = 2,
Cyan = 3,
Red = 4,
Pink = 5,
Brown = 6,
LightGray = 7,
DarkGray = 8,
LightBlue = 9,
LightGreen = 10,
LightCyan = 11,
LightRed = 12,
LightPink = 13,
Yellow = 14,
White = 15,
}
static mut VGA: VGA = VGA { height: VGA_HEIGHT, width: VGA_WIDTH, x: 0, y: 0 };
struct VGA {
height: u16,
width: u16,
x: u16,
y: u16,
}
impl VGA {
fn new() -> VGA {
VGA {height: VGA_HEIGHT, width: VGA_WIDTH, x: 0, y: 0}
}
fn back(&mut self) {
if self.x > 0 {
self.x -= 1;
let offset = self.offset();
self.put(offset, Black as u16, White as u16, WHITESPACE);
}
}
fn forward(&mut self) {
self.x += 1;
if self.x >= self.width {
self.newline();
}
}
fn cr(&mut self) {
self.x = 0;
}
fn newline(&mut self) {
self.x = 0;
self.y += 1;
}
fn offset(&self) -> u16 {
self.y * self.width + self.x
}
fn reset(&mut self) {
self.x = 0;
self.y = 0;
self.mov();
}
// handle a tab by increasing the cursor's X, but only to a point
// where it is divisible by 8
fn tab(&mut self) {
self.x = (self.x + 8) &!(8 - 1);
}
fn mov(&mut self) {
let offset = self.offset() as u8;
io::port::write(0x3D4, 14); // tell the VGA board we are setting the high cursor byte
io::port::write(0x3D5, offset >> 8); // send the high cursor byte
io::port::write(0x3D4, 15); // tell the VGA board we are setting the low cursor byte
io::port::write(0x3D5, offset);
}
fn clear (&mut self) {
let mut x: u16 = 0;
loop {
if x > 80 * 200 {
break;
}
self.put(x, Black as u16, White as u16, WHITESPACE);
x += 1;
}
}
fn put(&mut self, offset: u16, background: u16, foreground: u16, character: u8) {
let pixel: u16 = (background << 12) | (foreground << 8) | character as u16;
unsafe {
*((VGA_ADDRESS + offset as int * 2) as *mut u16) = pixel;
}
}
fn putc(&mut self, character: u8) {
if character == BACKSPACE {
self.back();
}
else if character == TAB {
self.tab();
}
else if character == NEWLINE {
self.newline();
}
else if character == CR |
else if character >= WHITESPACE {
let offset = self.offset();
self.put(offset, Black as u16, White as u16, character);
self.forward();
}
self.mov();
}
fn puti(&mut self, integer: uint) {
if integer == 0 {
self.puts("0");
}
else {
let mut integer = integer;
let mut reversed = 0;
while integer > 0 {
reversed *= 10;
reversed += integer % 10;
integer /= 10;
}
while reversed > 0 {
let character = (reversed % 10) as u8 + '0' as u8;
self.putc(character);
reversed /= 10;
}
}
}
fn puth(&mut self, integer: uint) {
self.puts("0x");
let mut nibbles = 1;
while (integer >> nibbles * 4) > 0 {
nibbles += 1
}
for i in range(0, nibbles) {
let nibble = ((integer >> (nibbles - i - 1) * 4) & 0xF) as u8;
let character = if nibble < 10 { '0' as u8 + nibble } else { 'a' as u8 + nibble - 10 };
self.putc(character);
}
}
fn puts(&mut self, string: &str) {
for character in string.bytes() {
self.putc(character);
}
}
}
pub fn clear() {
unsafe {
VGA.clear();
}
}
pub fn puth(integer: uint) {
unsafe {
VGA.puth(integer);
}
}
pub fn puti(integer: uint) {
unsafe {
VGA.puti(integer);
}
}
pub fn putc(character: u8) {
unsafe {
VGA.putc(character);
}
}
pub fn puts(string: &str) {
unsafe {
VGA.puts(string);
}
}
| {
self.cr();
} | conditional_block |
gid_filter.rs | use filter;
use filter::Filter;
use walkdir::DirEntry;
use std::os::unix::fs::MetadataExt;
use std::process;
pub struct GidFilter {
gid: u32,
comp_op: filter::CompOp,
}
impl GidFilter {
pub fn new(comp_op: filter::CompOp, gid: u32) -> GidFilter {
GidFilter{comp_op: comp_op, gid: gid}
}
}
impl Filter for GidFilter {
fn test(&self, dir_entry: &DirEntry) -> bool {
match self.comp_op {
filter::CompOp::Equal => self.gid == dir_entry.metadata().unwrap().gid(),
filter::CompOp::Unequal => self.gid!= dir_entry.metadata().unwrap().gid(),
_ => | ,
}
}
}
| {
eprintln!("Operator {:?} not covered for attribute gid!", self.comp_op);
process::exit(1);
} | conditional_block |
gid_filter.rs | use filter;
use filter::Filter;
use walkdir::DirEntry;
use std::os::unix::fs::MetadataExt;
use std::process;
pub struct GidFilter {
gid: u32,
comp_op: filter::CompOp,
}
impl GidFilter {
pub fn new(comp_op: filter::CompOp, gid: u32) -> GidFilter {
GidFilter{comp_op: comp_op, gid: gid}
}
}
impl Filter for GidFilter {
fn test(&self, dir_entry: &DirEntry) -> bool |
}
| {
match self.comp_op {
filter::CompOp::Equal => self.gid == dir_entry.metadata().unwrap().gid(),
filter::CompOp::Unequal => self.gid != dir_entry.metadata().unwrap().gid(),
_ => {
eprintln!("Operator {:?} not covered for attribute gid!", self.comp_op);
process::exit(1);
},
}
} | identifier_body |
gid_filter.rs | use filter;
use filter::Filter;
use walkdir::DirEntry;
use std::os::unix::fs::MetadataExt;
use std::process;
pub struct | {
gid: u32,
comp_op: filter::CompOp,
}
impl GidFilter {
pub fn new(comp_op: filter::CompOp, gid: u32) -> GidFilter {
GidFilter{comp_op: comp_op, gid: gid}
}
}
impl Filter for GidFilter {
fn test(&self, dir_entry: &DirEntry) -> bool {
match self.comp_op {
filter::CompOp::Equal => self.gid == dir_entry.metadata().unwrap().gid(),
filter::CompOp::Unequal => self.gid!= dir_entry.metadata().unwrap().gid(),
_ => {
eprintln!("Operator {:?} not covered for attribute gid!", self.comp_op);
process::exit(1);
},
}
}
}
| GidFilter | identifier_name |
gid_filter.rs | use filter;
use filter::Filter;
use walkdir::DirEntry;
use std::os::unix::fs::MetadataExt;
use std::process;
pub struct GidFilter {
gid: u32,
comp_op: filter::CompOp,
}
impl GidFilter {
pub fn new(comp_op: filter::CompOp, gid: u32) -> GidFilter {
GidFilter{comp_op: comp_op, gid: gid}
}
}
impl Filter for GidFilter { | eprintln!("Operator {:?} not covered for attribute gid!", self.comp_op);
process::exit(1);
},
}
}
} | fn test(&self, dir_entry: &DirEntry) -> bool {
match self.comp_op {
filter::CompOp::Equal => self.gid == dir_entry.metadata().unwrap().gid(),
filter::CompOp::Unequal => self.gid != dir_entry.metadata().unwrap().gid(),
_ => { | random_line_split |
config.rs | #![allow(dead_code)]
use log::LevelFilter;
use once_cell::sync::Lazy;
use std::str::FromStr;
static CONF: Lazy<Config> = Lazy::new(|| {
let log_level = std::env::var("SIMAG_LOG_LEVEL")
.or_else::<std::env::VarError, _>(|_| Ok("info".to_owned()))
.ok()
.map(|l| LevelFilter::from_str(&l).unwrap_or_else(|_| LevelFilter::Debug))
.unwrap_or_else(|| LevelFilter::Debug);
Config { log_level }
});
pub(super) struct Config {
pub log_level: log::LevelFilter,
}
#[cfg(any(test, debug_assertions))]
pub(super) mod tracing {
use super::*;
#[derive(Clone, Copy)]
pub struct | ;
impl Logger {
pub fn get_logger() -> &'static Logger {
Lazy::force(&LOGGER)
}
}
#[allow(unused_must_use)]
static LOGGER: Lazy<Logger> = Lazy::new(|| {
env_logger::builder()
.format_module_path(true)
.format_timestamp_nanos()
.target(env_logger::Target::Stdout)
.filter(None, CONF.log_level)
.try_init();
Logger
});
}
| Logger | identifier_name |
config.rs | #![allow(dead_code)]
use log::LevelFilter;
use once_cell::sync::Lazy;
use std::str::FromStr;
static CONF: Lazy<Config> = Lazy::new(|| {
let log_level = std::env::var("SIMAG_LOG_LEVEL")
.or_else::<std::env::VarError, _>(|_| Ok("info".to_owned()))
.ok()
.map(|l| LevelFilter::from_str(&l).unwrap_or_else(|_| LevelFilter::Debug))
.unwrap_or_else(|| LevelFilter::Debug);
Config { log_level }
});
pub(super) struct Config {
pub log_level: log::LevelFilter,
}
| pub struct Logger;
impl Logger {
pub fn get_logger() -> &'static Logger {
Lazy::force(&LOGGER)
}
}
#[allow(unused_must_use)]
static LOGGER: Lazy<Logger> = Lazy::new(|| {
env_logger::builder()
.format_module_path(true)
.format_timestamp_nanos()
.target(env_logger::Target::Stdout)
.filter(None, CONF.log_level)
.try_init();
Logger
});
} | #[cfg(any(test, debug_assertions))]
pub(super) mod tracing {
use super::*;
#[derive(Clone, Copy)] | random_line_split |
builder.rs | use crate::enums::{CapStyle, DashStyle, LineJoin};
use crate::factory::IFactory;
use crate::stroke_style::StrokeStyle;
use com_wrapper::ComWrapper;
use dcommon::Error;
use winapi::shared::winerror::SUCCEEDED;
use winapi::um::d2d1::D2D1_STROKE_STYLE_PROPERTIES;
pub struct StrokeStyleBuilder<'a> {
factory: &'a dyn IFactory,
start_cap: CapStyle,
end_cap: CapStyle,
dash_cap: CapStyle,
line_join: LineJoin,
miter_limit: f32,
dash_style: DashStyle,
dash_offset: f32,
dashes: Option<&'a [f32]>,
}
impl<'a> StrokeStyleBuilder<'a> {
pub fn new(factory: &'a dyn IFactory) -> Self {
// default values taken from D2D1::StrokeStyleProperties in d2d1helper.h
StrokeStyleBuilder {
factory,
start_cap: CapStyle::Flat,
end_cap: CapStyle::Flat,
dash_cap: CapStyle::Flat,
line_join: LineJoin::Miter,
miter_limit: 10.0,
dash_style: DashStyle::Solid,
dash_offset: 0.0,
dashes: None,
}
}
pub fn build(self) -> Result<StrokeStyle, Error> {
unsafe {
let properties = self.to_d2d1();
let (dashes, dash_count) = self
.dashes
.map(|d| (d.as_ptr(), d.len() as u32))
.unwrap_or((std::ptr::null(), 0));
let mut ptr = std::ptr::null_mut();
let hr =
self.factory
.raw_f()
.CreateStrokeStyle(&properties, dashes, dash_count, &mut ptr);
if SUCCEEDED(hr) | else {
Err(hr.into())
}
}
}
pub fn with_start_cap(mut self, start_cap: CapStyle) -> Self {
self.start_cap = start_cap;
self
}
pub fn with_end_cap(mut self, end_cap: CapStyle) -> Self {
self.end_cap = end_cap;
self
}
pub fn with_dash_cap(mut self, dash_cap: CapStyle) -> Self {
self.dash_cap = dash_cap;
self
}
pub fn with_line_join(mut self, line_join: LineJoin) -> Self {
self.line_join = line_join;
self
}
pub fn with_miter_limit(mut self, miter_limit: f32) -> Self {
self.miter_limit = miter_limit;
self
}
pub fn with_dash_style(mut self, dash_style: DashStyle) -> Self {
self.dash_style = dash_style;
self
}
pub fn with_dash_offset(mut self, dash_offset: f32) -> Self {
self.dash_offset = dash_offset;
self
}
pub fn with_dashes(mut self, dashes: &'a [f32]) -> Self {
self.dash_style = DashStyle::Custom;
self.dashes = Some(dashes);
self
}
fn to_d2d1(&self) -> D2D1_STROKE_STYLE_PROPERTIES {
D2D1_STROKE_STYLE_PROPERTIES {
startCap: self.start_cap as u32,
endCap: self.end_cap as u32,
dashCap: self.dash_cap as u32,
lineJoin: self.line_join as u32,
miterLimit: self.miter_limit,
dashStyle: self.dash_style as u32,
dashOffset: self.dash_offset,
}
}
}
| {
Ok(StrokeStyle::from_raw(ptr))
} | conditional_block |
builder.rs | use crate::enums::{CapStyle, DashStyle, LineJoin};
use crate::factory::IFactory;
use crate::stroke_style::StrokeStyle;
use com_wrapper::ComWrapper;
use dcommon::Error;
use winapi::shared::winerror::SUCCEEDED;
use winapi::um::d2d1::D2D1_STROKE_STYLE_PROPERTIES;
pub struct StrokeStyleBuilder<'a> {
factory: &'a dyn IFactory,
start_cap: CapStyle,
end_cap: CapStyle,
dash_cap: CapStyle,
line_join: LineJoin,
miter_limit: f32,
dash_style: DashStyle,
dash_offset: f32,
dashes: Option<&'a [f32]>,
}
impl<'a> StrokeStyleBuilder<'a> {
pub fn new(factory: &'a dyn IFactory) -> Self {
// default values taken from D2D1::StrokeStyleProperties in d2d1helper.h
StrokeStyleBuilder {
factory,
start_cap: CapStyle::Flat,
end_cap: CapStyle::Flat,
dash_cap: CapStyle::Flat,
line_join: LineJoin::Miter,
miter_limit: 10.0,
dash_style: DashStyle::Solid,
dash_offset: 0.0,
dashes: None,
}
}
pub fn build(self) -> Result<StrokeStyle, Error> {
unsafe {
let properties = self.to_d2d1();
let (dashes, dash_count) = self
.dashes
.map(|d| (d.as_ptr(), d.len() as u32))
.unwrap_or((std::ptr::null(), 0));
let mut ptr = std::ptr::null_mut();
let hr =
self.factory
.raw_f()
.CreateStrokeStyle(&properties, dashes, dash_count, &mut ptr);
if SUCCEEDED(hr) {
Ok(StrokeStyle::from_raw(ptr))
} else {
Err(hr.into())
}
}
}
pub fn with_start_cap(mut self, start_cap: CapStyle) -> Self {
self.start_cap = start_cap;
self
} | }
pub fn with_dash_cap(mut self, dash_cap: CapStyle) -> Self {
self.dash_cap = dash_cap;
self
}
pub fn with_line_join(mut self, line_join: LineJoin) -> Self {
self.line_join = line_join;
self
}
pub fn with_miter_limit(mut self, miter_limit: f32) -> Self {
self.miter_limit = miter_limit;
self
}
pub fn with_dash_style(mut self, dash_style: DashStyle) -> Self {
self.dash_style = dash_style;
self
}
pub fn with_dash_offset(mut self, dash_offset: f32) -> Self {
self.dash_offset = dash_offset;
self
}
pub fn with_dashes(mut self, dashes: &'a [f32]) -> Self {
self.dash_style = DashStyle::Custom;
self.dashes = Some(dashes);
self
}
fn to_d2d1(&self) -> D2D1_STROKE_STYLE_PROPERTIES {
D2D1_STROKE_STYLE_PROPERTIES {
startCap: self.start_cap as u32,
endCap: self.end_cap as u32,
dashCap: self.dash_cap as u32,
lineJoin: self.line_join as u32,
miterLimit: self.miter_limit,
dashStyle: self.dash_style as u32,
dashOffset: self.dash_offset,
}
}
} |
pub fn with_end_cap(mut self, end_cap: CapStyle) -> Self {
self.end_cap = end_cap;
self | random_line_split |
builder.rs | use crate::enums::{CapStyle, DashStyle, LineJoin};
use crate::factory::IFactory;
use crate::stroke_style::StrokeStyle;
use com_wrapper::ComWrapper;
use dcommon::Error;
use winapi::shared::winerror::SUCCEEDED;
use winapi::um::d2d1::D2D1_STROKE_STYLE_PROPERTIES;
pub struct StrokeStyleBuilder<'a> {
factory: &'a dyn IFactory,
start_cap: CapStyle,
end_cap: CapStyle,
dash_cap: CapStyle,
line_join: LineJoin,
miter_limit: f32,
dash_style: DashStyle,
dash_offset: f32,
dashes: Option<&'a [f32]>,
}
impl<'a> StrokeStyleBuilder<'a> {
pub fn new(factory: &'a dyn IFactory) -> Self {
// default values taken from D2D1::StrokeStyleProperties in d2d1helper.h
StrokeStyleBuilder {
factory,
start_cap: CapStyle::Flat,
end_cap: CapStyle::Flat,
dash_cap: CapStyle::Flat,
line_join: LineJoin::Miter,
miter_limit: 10.0,
dash_style: DashStyle::Solid,
dash_offset: 0.0,
dashes: None,
}
}
pub fn build(self) -> Result<StrokeStyle, Error> {
unsafe {
let properties = self.to_d2d1();
let (dashes, dash_count) = self
.dashes
.map(|d| (d.as_ptr(), d.len() as u32))
.unwrap_or((std::ptr::null(), 0));
let mut ptr = std::ptr::null_mut();
let hr =
self.factory
.raw_f()
.CreateStrokeStyle(&properties, dashes, dash_count, &mut ptr);
if SUCCEEDED(hr) {
Ok(StrokeStyle::from_raw(ptr))
} else {
Err(hr.into())
}
}
}
pub fn with_start_cap(mut self, start_cap: CapStyle) -> Self |
pub fn with_end_cap(mut self, end_cap: CapStyle) -> Self {
self.end_cap = end_cap;
self
}
pub fn with_dash_cap(mut self, dash_cap: CapStyle) -> Self {
self.dash_cap = dash_cap;
self
}
pub fn with_line_join(mut self, line_join: LineJoin) -> Self {
self.line_join = line_join;
self
}
pub fn with_miter_limit(mut self, miter_limit: f32) -> Self {
self.miter_limit = miter_limit;
self
}
pub fn with_dash_style(mut self, dash_style: DashStyle) -> Self {
self.dash_style = dash_style;
self
}
pub fn with_dash_offset(mut self, dash_offset: f32) -> Self {
self.dash_offset = dash_offset;
self
}
pub fn with_dashes(mut self, dashes: &'a [f32]) -> Self {
self.dash_style = DashStyle::Custom;
self.dashes = Some(dashes);
self
}
fn to_d2d1(&self) -> D2D1_STROKE_STYLE_PROPERTIES {
D2D1_STROKE_STYLE_PROPERTIES {
startCap: self.start_cap as u32,
endCap: self.end_cap as u32,
dashCap: self.dash_cap as u32,
lineJoin: self.line_join as u32,
miterLimit: self.miter_limit,
dashStyle: self.dash_style as u32,
dashOffset: self.dash_offset,
}
}
}
| {
self.start_cap = start_cap;
self
} | identifier_body |
builder.rs | use crate::enums::{CapStyle, DashStyle, LineJoin};
use crate::factory::IFactory;
use crate::stroke_style::StrokeStyle;
use com_wrapper::ComWrapper;
use dcommon::Error;
use winapi::shared::winerror::SUCCEEDED;
use winapi::um::d2d1::D2D1_STROKE_STYLE_PROPERTIES;
pub struct StrokeStyleBuilder<'a> {
factory: &'a dyn IFactory,
start_cap: CapStyle,
end_cap: CapStyle,
dash_cap: CapStyle,
line_join: LineJoin,
miter_limit: f32,
dash_style: DashStyle,
dash_offset: f32,
dashes: Option<&'a [f32]>,
}
impl<'a> StrokeStyleBuilder<'a> {
pub fn new(factory: &'a dyn IFactory) -> Self {
// default values taken from D2D1::StrokeStyleProperties in d2d1helper.h
StrokeStyleBuilder {
factory,
start_cap: CapStyle::Flat,
end_cap: CapStyle::Flat,
dash_cap: CapStyle::Flat,
line_join: LineJoin::Miter,
miter_limit: 10.0,
dash_style: DashStyle::Solid,
dash_offset: 0.0,
dashes: None,
}
}
pub fn build(self) -> Result<StrokeStyle, Error> {
unsafe {
let properties = self.to_d2d1();
let (dashes, dash_count) = self
.dashes
.map(|d| (d.as_ptr(), d.len() as u32))
.unwrap_or((std::ptr::null(), 0));
let mut ptr = std::ptr::null_mut();
let hr =
self.factory
.raw_f()
.CreateStrokeStyle(&properties, dashes, dash_count, &mut ptr);
if SUCCEEDED(hr) {
Ok(StrokeStyle::from_raw(ptr))
} else {
Err(hr.into())
}
}
}
pub fn with_start_cap(mut self, start_cap: CapStyle) -> Self {
self.start_cap = start_cap;
self
}
pub fn with_end_cap(mut self, end_cap: CapStyle) -> Self {
self.end_cap = end_cap;
self
}
pub fn with_dash_cap(mut self, dash_cap: CapStyle) -> Self {
self.dash_cap = dash_cap;
self
}
pub fn with_line_join(mut self, line_join: LineJoin) -> Self {
self.line_join = line_join;
self
}
pub fn with_miter_limit(mut self, miter_limit: f32) -> Self {
self.miter_limit = miter_limit;
self
}
pub fn with_dash_style(mut self, dash_style: DashStyle) -> Self {
self.dash_style = dash_style;
self
}
pub fn with_dash_offset(mut self, dash_offset: f32) -> Self {
self.dash_offset = dash_offset;
self
}
pub fn | (mut self, dashes: &'a [f32]) -> Self {
self.dash_style = DashStyle::Custom;
self.dashes = Some(dashes);
self
}
fn to_d2d1(&self) -> D2D1_STROKE_STYLE_PROPERTIES {
D2D1_STROKE_STYLE_PROPERTIES {
startCap: self.start_cap as u32,
endCap: self.end_cap as u32,
dashCap: self.dash_cap as u32,
lineJoin: self.line_join as u32,
miterLimit: self.miter_limit,
dashStyle: self.dash_style as u32,
dashOffset: self.dash_offset,
}
}
}
| with_dashes | identifier_name |
defs.rs | /*
* Copyright (C) 2017 AltOS-Rust Team
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or | * GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
pub const GROUPA_ADDR: *const u32 = 0x4800_0000 as *const _;
pub const GROUPB_ADDR: *const u32 = 0x4800_0400 as *const _;
pub const GROUPC_ADDR: *const u32 = 0x4800_0800 as *const _;
pub const GROUPF_ADDR: *const u32 = 0x4800_1400 as *const _;
pub const OTYPER_OFFSET: u32 = 0x04;
pub const TYPE_PUSHPULL: u32 = 0b0;
pub const TYPE_OPENDRAIN: u32 = 0b1;
pub const OSPEEDR_OFFSET: u32 = 0x08;
pub const SPEED_MASK: u32 = 0b11;
pub const SPEED_LOW: u32 = 0b00;
pub const SPEED_LOW_ALT: u32 = 0b10;
pub const SPEED_MEDIUM: u32 = 0b01;
pub const SPEED_HIGH: u32 = 0b11;
pub const PUPDR_OFFSET: u32 = 0x0C;
pub const PUPD_MASK: u32 = 0b11;
pub const PUPD_NEITHER: u32 = 0b00;
pub const PUPD_UP: u32 = 0b01;
pub const PUPD_DOWN: u32 = 0b10;
pub const BSRR_OFFSET: u32 = 0x18;
pub const BSRR_RESET_OFFSET: u8 = 16;
pub const AFRL_OFFSET: u32 = 0x20;
pub const AFR_MASK: u32 = 0b1111;
pub const AF0: u32 = 0b0000;
pub const AF1: u32 = 0b0001;
pub const AF2: u32 = 0b0010;
pub const AF3: u32 = 0b0011;
pub const AF4: u32 = 0b0100;
pub const AF5: u32 = 0b0101;
pub const AF6: u32 = 0b0110;
pub const AF7: u32 = 0b0111;
pub const AFRH_OFFSET: u32 = 0x24;
pub const MODER_OFFSET: u32 = 0x00;
pub const MODE_MASK: u32 = 0b11;
pub const MODE_INPUT: u32 = 0b00;
pub const MODE_OUTPUT: u32 = 0b01;
pub const MODE_ALTERNATE: u32 = 0b10;
pub const MODE_ANALOG: u32 = 0b11; | * (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | random_line_split |
logger.rs | /* Copyright (C) 2020 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
// Author: Frank Honza <[email protected]>
use std;
use std::fmt::Write;
use super::rfb::{RFBState, RFBTransaction};
use crate::jsonbuilder::{JsonBuilder, JsonError};
fn | (tx: &RFBTransaction, js: &mut JsonBuilder) -> Result<(), JsonError> {
js.open_object("rfb")?;
// Protocol version
if let Some(tx_spv) = &tx.tc_server_protocol_version {
js.open_object("server_protocol_version")?;
js.set_string("major", &tx_spv.major)?;
js.set_string("minor", &tx_spv.minor)?;
js.close()?;
}
if let Some(tx_cpv) = &tx.ts_client_protocol_version {
js.open_object("client_protocol_version")?;
js.set_string("major", &tx_cpv.major)?;
js.set_string("minor", &tx_cpv.minor)?;
js.close()?;
}
// Authentication
js.open_object("authentication")?;
if let Some(chosen_security_type) = tx.chosen_security_type {
js.set_uint("security_type", chosen_security_type as u64)?;
}
match tx.chosen_security_type {
Some(2) => {
js.open_object("vnc")?;
if let Some(ref sc) = tx.tc_vnc_challenge {
let mut s = String::new();
for &byte in &sc.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("challenge", &s)?;
}
if let Some(ref sr) = tx.ts_vnc_response {
let mut s = String::new();
for &byte in &sr.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("response", &s)?;
}
js.close()?;
}
_ => ()
}
if let Some(security_result) = &tx.tc_security_result {
let _ = match security_result.status {
0 => js.set_string("security_result", "OK")?,
1 => js.set_string("security-result", "FAIL")?,
2 => js.set_string("security_result", "TOOMANY")?,
_ => js.set_string("security_result",
&format!("UNKNOWN ({})", security_result.status))?,
};
}
js.close()?; // Close authentication.
if let Some(ref reason) = tx.tc_failure_reason {
js.set_string("server_security_failure_reason", &reason.reason_string)?;
}
// Client/Server init
if let Some(s) = &tx.ts_client_init {
js.set_bool("screen_shared", s.shared!= 0)?;
}
if let Some(tc_server_init) = &tx.tc_server_init {
js.open_object("framebuffer")?;
js.set_uint("width", tc_server_init.width as u64)?;
js.set_uint("height", tc_server_init.height as u64)?;
js.set_string_from_bytes("name", &tc_server_init.name)?;
js.open_object("pixel_format")?;
js.set_uint("bits_per_pixel", tc_server_init.pixel_format.bits_per_pixel as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.set_bool("big_endian", tc_server_init.pixel_format.big_endian_flag!= 0)?;
js.set_bool("true_color", tc_server_init.pixel_format.true_colour_flag!= 0)?;
js.set_uint("red_max", tc_server_init.pixel_format.red_max as u64)?;
js.set_uint("green_max", tc_server_init.pixel_format.green_max as u64)?;
js.set_uint("blue_max", tc_server_init.pixel_format.blue_max as u64)?;
js.set_uint("red_shift", tc_server_init.pixel_format.red_shift as u64)?;
js.set_uint("green_shift", tc_server_init.pixel_format.green_shift as u64)?;
js.set_uint("blue_shift", tc_server_init.pixel_format.blue_shift as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.close()?;
js.close()?;
}
js.close()?;
return Ok(());
}
#[no_mangle]
pub unsafe extern "C" fn rs_rfb_logger_log(_state: &mut RFBState,
tx: *mut std::os::raw::c_void,
js: &mut JsonBuilder) -> bool {
let tx = cast_pointer!(tx, RFBTransaction);
log_rfb(tx, js).is_ok()
}
| log_rfb | identifier_name |
logger.rs | /* Copyright (C) 2020 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
// Author: Frank Honza <[email protected]>
use std;
use std::fmt::Write;
use super::rfb::{RFBState, RFBTransaction};
use crate::jsonbuilder::{JsonBuilder, JsonError};
fn log_rfb(tx: &RFBTransaction, js: &mut JsonBuilder) -> Result<(), JsonError> {
js.open_object("rfb")?;
// Protocol version
if let Some(tx_spv) = &tx.tc_server_protocol_version {
js.open_object("server_protocol_version")?;
js.set_string("major", &tx_spv.major)?;
js.set_string("minor", &tx_spv.minor)?;
js.close()?;
}
if let Some(tx_cpv) = &tx.ts_client_protocol_version {
js.open_object("client_protocol_version")?;
js.set_string("major", &tx_cpv.major)?;
js.set_string("minor", &tx_cpv.minor)?;
js.close()?;
}
// Authentication
js.open_object("authentication")?; | match tx.chosen_security_type {
Some(2) => {
js.open_object("vnc")?;
if let Some(ref sc) = tx.tc_vnc_challenge {
let mut s = String::new();
for &byte in &sc.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("challenge", &s)?;
}
if let Some(ref sr) = tx.ts_vnc_response {
let mut s = String::new();
for &byte in &sr.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("response", &s)?;
}
js.close()?;
}
_ => ()
}
if let Some(security_result) = &tx.tc_security_result {
let _ = match security_result.status {
0 => js.set_string("security_result", "OK")?,
1 => js.set_string("security-result", "FAIL")?,
2 => js.set_string("security_result", "TOOMANY")?,
_ => js.set_string("security_result",
&format!("UNKNOWN ({})", security_result.status))?,
};
}
js.close()?; // Close authentication.
if let Some(ref reason) = tx.tc_failure_reason {
js.set_string("server_security_failure_reason", &reason.reason_string)?;
}
// Client/Server init
if let Some(s) = &tx.ts_client_init {
js.set_bool("screen_shared", s.shared!= 0)?;
}
if let Some(tc_server_init) = &tx.tc_server_init {
js.open_object("framebuffer")?;
js.set_uint("width", tc_server_init.width as u64)?;
js.set_uint("height", tc_server_init.height as u64)?;
js.set_string_from_bytes("name", &tc_server_init.name)?;
js.open_object("pixel_format")?;
js.set_uint("bits_per_pixel", tc_server_init.pixel_format.bits_per_pixel as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.set_bool("big_endian", tc_server_init.pixel_format.big_endian_flag!= 0)?;
js.set_bool("true_color", tc_server_init.pixel_format.true_colour_flag!= 0)?;
js.set_uint("red_max", tc_server_init.pixel_format.red_max as u64)?;
js.set_uint("green_max", tc_server_init.pixel_format.green_max as u64)?;
js.set_uint("blue_max", tc_server_init.pixel_format.blue_max as u64)?;
js.set_uint("red_shift", tc_server_init.pixel_format.red_shift as u64)?;
js.set_uint("green_shift", tc_server_init.pixel_format.green_shift as u64)?;
js.set_uint("blue_shift", tc_server_init.pixel_format.blue_shift as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.close()?;
js.close()?;
}
js.close()?;
return Ok(());
}
#[no_mangle]
pub unsafe extern "C" fn rs_rfb_logger_log(_state: &mut RFBState,
tx: *mut std::os::raw::c_void,
js: &mut JsonBuilder) -> bool {
let tx = cast_pointer!(tx, RFBTransaction);
log_rfb(tx, js).is_ok()
} | if let Some(chosen_security_type) = tx.chosen_security_type {
js.set_uint("security_type", chosen_security_type as u64)?;
} | random_line_split |
logger.rs | /* Copyright (C) 2020 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
// Author: Frank Honza <[email protected]>
use std;
use std::fmt::Write;
use super::rfb::{RFBState, RFBTransaction};
use crate::jsonbuilder::{JsonBuilder, JsonError};
fn log_rfb(tx: &RFBTransaction, js: &mut JsonBuilder) -> Result<(), JsonError> | js.set_uint("security_type", chosen_security_type as u64)?;
}
match tx.chosen_security_type {
Some(2) => {
js.open_object("vnc")?;
if let Some(ref sc) = tx.tc_vnc_challenge {
let mut s = String::new();
for &byte in &sc.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("challenge", &s)?;
}
if let Some(ref sr) = tx.ts_vnc_response {
let mut s = String::new();
for &byte in &sr.secret[..] {
write!(&mut s, "{:02x}", byte).expect("Unable to write");
}
js.set_string("response", &s)?;
}
js.close()?;
}
_ => ()
}
if let Some(security_result) = &tx.tc_security_result {
let _ = match security_result.status {
0 => js.set_string("security_result", "OK")?,
1 => js.set_string("security-result", "FAIL")?,
2 => js.set_string("security_result", "TOOMANY")?,
_ => js.set_string("security_result",
&format!("UNKNOWN ({})", security_result.status))?,
};
}
js.close()?; // Close authentication.
if let Some(ref reason) = tx.tc_failure_reason {
js.set_string("server_security_failure_reason", &reason.reason_string)?;
}
// Client/Server init
if let Some(s) = &tx.ts_client_init {
js.set_bool("screen_shared", s.shared!= 0)?;
}
if let Some(tc_server_init) = &tx.tc_server_init {
js.open_object("framebuffer")?;
js.set_uint("width", tc_server_init.width as u64)?;
js.set_uint("height", tc_server_init.height as u64)?;
js.set_string_from_bytes("name", &tc_server_init.name)?;
js.open_object("pixel_format")?;
js.set_uint("bits_per_pixel", tc_server_init.pixel_format.bits_per_pixel as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.set_bool("big_endian", tc_server_init.pixel_format.big_endian_flag!= 0)?;
js.set_bool("true_color", tc_server_init.pixel_format.true_colour_flag!= 0)?;
js.set_uint("red_max", tc_server_init.pixel_format.red_max as u64)?;
js.set_uint("green_max", tc_server_init.pixel_format.green_max as u64)?;
js.set_uint("blue_max", tc_server_init.pixel_format.blue_max as u64)?;
js.set_uint("red_shift", tc_server_init.pixel_format.red_shift as u64)?;
js.set_uint("green_shift", tc_server_init.pixel_format.green_shift as u64)?;
js.set_uint("blue_shift", tc_server_init.pixel_format.blue_shift as u64)?;
js.set_uint("depth", tc_server_init.pixel_format.depth as u64)?;
js.close()?;
js.close()?;
}
js.close()?;
return Ok(());
}
#[no_mangle]
pub unsafe extern "C" fn rs_rfb_logger_log(_state: &mut RFBState,
tx: *mut std::os::raw::c_void,
js: &mut JsonBuilder) -> bool {
let tx = cast_pointer!(tx, RFBTransaction);
log_rfb(tx, js).is_ok()
}
| {
js.open_object("rfb")?;
// Protocol version
if let Some(tx_spv) = &tx.tc_server_protocol_version {
js.open_object("server_protocol_version")?;
js.set_string("major", &tx_spv.major)?;
js.set_string("minor", &tx_spv.minor)?;
js.close()?;
}
if let Some(tx_cpv) = &tx.ts_client_protocol_version {
js.open_object("client_protocol_version")?;
js.set_string("major", &tx_cpv.major)?;
js.set_string("minor", &tx_cpv.minor)?;
js.close()?;
}
// Authentication
js.open_object("authentication")?;
if let Some(chosen_security_type) = tx.chosen_security_type { | identifier_body |
util.rs | use hyper::header::{Accept, Connection, ContentType, Headers, Quality,
QualityItem, qitem};
use hyper::mime::{Mime, SubLevel, TopLevel};
use protobuf::{self, Message};
use proto::mesos::*;
pub fn protobuf_headers() -> Headers {
let mut headers = Headers::new();
headers.set(Accept(vec![
qitem(Mime(TopLevel::Text, SubLevel::Html, vec![])),
qitem(Mime(TopLevel::Application,
SubLevel::Ext("x-protobuf".to_owned()), vec![])),
]));
headers.set(ContentType(Mime(TopLevel::Application,
SubLevel::Ext("x-protobuf".to_owned()),
vec![])));
headers
}
pub fn framework_id<'a>(id: &'a str) -> FrameworkID {
let mut framework_id = FrameworkID::new();
framework_id.set_value(id.to_string());
framework_id
}
pub fn framework_info<'a>(user: &'a str,
name: &'a str,
failover_timeout: f64)
-> FrameworkInfo {
let mut framework_info = FrameworkInfo::new();
framework_info.set_user(user.to_string());
framework_info.set_name(name.to_string());
framework_info.set_failover_timeout(failover_timeout);
framework_info
}
pub fn task_id<'a>(id: &'a str) -> TaskID {
let mut task_id = TaskID::new();
task_id.set_value(id.to_string());
task_id
}
pub fn task_info<'a>(name: &'a str,
task_id: &TaskID,
agent_id: &AgentID,
command: &CommandInfo,
resources: Vec<Resource>)
-> TaskInfo {
let mut task_info = TaskInfo::new();
task_info.set_name(name.to_string());
task_info.set_task_id(task_id.clone());
task_info.set_agent_id(agent_id.clone());
task_info.set_command(command.clone());
task_info.set_resources(protobuf::RepeatedField::from_vec(resources));
task_info
}
pub fn task_info_for_container<'a>(name: &'a str,
task_id: &TaskID,
agent_id: &AgentID,
command: &CommandInfo,
container: &ContainerInfo,
resources: Vec<Resource>)
-> TaskInfo {
let mut task_info = TaskInfo::new();
task_info.set_name(name.to_string());
task_info.set_task_id(task_id.clone());
task_info.set_agent_id(agent_id.clone());
task_info.set_command(command.clone());
task_info.set_container(container.clone());
task_info.set_resources(protobuf::RepeatedField::from_vec(resources));
task_info
}
pub fn launch_operation(task_infos: Vec<TaskInfo>) -> Operation {
let mut launch = Operation_Launch::new();
launch.set_task_infos(protobuf::RepeatedField::from_vec(task_infos));
let mut operation = Operation::new();
operation.set_field_type(Operation_Type::LAUNCH);
operation.set_launch(launch);
operation
}
pub fn scalar<'a>(name: &'a str, role: &'a str, value: f64) -> Resource {
let mut scalar = Value_Scalar::new();
scalar.set_value(value);
let mut res = Resource::new();
res.set_name(name.to_string());
res.set_role(role.to_string());
res.set_field_type(Value_Type::SCALAR);
res.set_scalar(scalar);
res
}
pub fn get_scalar_resource_sum<'a>(name: &'a str, offers: Vec<&Offer>) -> f64 | {
offers.iter()
.flat_map(|o| o.get_resources())
.filter(|r| r.get_name() == "mem")
.map(|c| c.get_scalar())
.fold(0f64, |acc, mem_res| acc + mem_res.get_value())
} | identifier_body |
|
util.rs | use hyper::header::{Accept, Connection, ContentType, Headers, Quality,
QualityItem, qitem};
use hyper::mime::{Mime, SubLevel, TopLevel};
use protobuf::{self, Message};
use proto::mesos::*;
pub fn protobuf_headers() -> Headers {
let mut headers = Headers::new();
headers.set(Accept(vec![
qitem(Mime(TopLevel::Text, SubLevel::Html, vec![])),
qitem(Mime(TopLevel::Application,
SubLevel::Ext("x-protobuf".to_owned()), vec![])),
]));
headers.set(ContentType(Mime(TopLevel::Application,
SubLevel::Ext("x-protobuf".to_owned()),
vec![])));
headers
}
pub fn framework_id<'a>(id: &'a str) -> FrameworkID {
let mut framework_id = FrameworkID::new();
framework_id.set_value(id.to_string());
framework_id
}
pub fn framework_info<'a>(user: &'a str,
name: &'a str,
failover_timeout: f64)
-> FrameworkInfo {
let mut framework_info = FrameworkInfo::new();
framework_info.set_user(user.to_string());
framework_info.set_name(name.to_string());
framework_info.set_failover_timeout(failover_timeout);
framework_info
}
pub fn task_id<'a>(id: &'a str) -> TaskID {
let mut task_id = TaskID::new();
task_id.set_value(id.to_string());
task_id
}
pub fn task_info<'a>(name: &'a str,
task_id: &TaskID,
agent_id: &AgentID,
command: &CommandInfo,
resources: Vec<Resource>)
-> TaskInfo {
let mut task_info = TaskInfo::new();
task_info.set_name(name.to_string());
task_info.set_task_id(task_id.clone());
task_info.set_agent_id(agent_id.clone());
task_info.set_command(command.clone());
task_info.set_resources(protobuf::RepeatedField::from_vec(resources));
task_info
}
pub fn task_info_for_container<'a>(name: &'a str,
task_id: &TaskID,
agent_id: &AgentID,
command: &CommandInfo,
container: &ContainerInfo,
resources: Vec<Resource>)
-> TaskInfo {
let mut task_info = TaskInfo::new();
task_info.set_name(name.to_string());
task_info.set_task_id(task_id.clone());
task_info.set_agent_id(agent_id.clone());
task_info.set_command(command.clone());
task_info.set_container(container.clone());
task_info.set_resources(protobuf::RepeatedField::from_vec(resources));
task_info
}
pub fn launch_operation(task_infos: Vec<TaskInfo>) -> Operation {
let mut launch = Operation_Launch::new();
launch.set_task_infos(protobuf::RepeatedField::from_vec(task_infos));
let mut operation = Operation::new();
operation.set_field_type(Operation_Type::LAUNCH);
operation.set_launch(launch);
operation
}
pub fn scalar<'a>(name: &'a str, role: &'a str, value: f64) -> Resource {
let mut scalar = Value_Scalar::new();
scalar.set_value(value);
| res.set_role(role.to_string());
res.set_field_type(Value_Type::SCALAR);
res.set_scalar(scalar);
res
}
pub fn get_scalar_resource_sum<'a>(name: &'a str, offers: Vec<&Offer>) -> f64 {
offers.iter()
.flat_map(|o| o.get_resources())
.filter(|r| r.get_name() == "mem")
.map(|c| c.get_scalar())
.fold(0f64, |acc, mem_res| acc + mem_res.get_value())
} | let mut res = Resource::new();
res.set_name(name.to_string()); | random_line_split |
util.rs | use hyper::header::{Accept, Connection, ContentType, Headers, Quality,
QualityItem, qitem};
use hyper::mime::{Mime, SubLevel, TopLevel};
use protobuf::{self, Message};
use proto::mesos::*;
pub fn protobuf_headers() -> Headers {
let mut headers = Headers::new();
headers.set(Accept(vec![
qitem(Mime(TopLevel::Text, SubLevel::Html, vec![])),
qitem(Mime(TopLevel::Application,
SubLevel::Ext("x-protobuf".to_owned()), vec![])),
]));
headers.set(ContentType(Mime(TopLevel::Application,
SubLevel::Ext("x-protobuf".to_owned()),
vec![])));
headers
}
pub fn framework_id<'a>(id: &'a str) -> FrameworkID {
let mut framework_id = FrameworkID::new();
framework_id.set_value(id.to_string());
framework_id
}
pub fn framework_info<'a>(user: &'a str,
name: &'a str,
failover_timeout: f64)
-> FrameworkInfo {
let mut framework_info = FrameworkInfo::new();
framework_info.set_user(user.to_string());
framework_info.set_name(name.to_string());
framework_info.set_failover_timeout(failover_timeout);
framework_info
}
pub fn task_id<'a>(id: &'a str) -> TaskID {
let mut task_id = TaskID::new();
task_id.set_value(id.to_string());
task_id
}
pub fn task_info<'a>(name: &'a str,
task_id: &TaskID,
agent_id: &AgentID,
command: &CommandInfo,
resources: Vec<Resource>)
-> TaskInfo {
let mut task_info = TaskInfo::new();
task_info.set_name(name.to_string());
task_info.set_task_id(task_id.clone());
task_info.set_agent_id(agent_id.clone());
task_info.set_command(command.clone());
task_info.set_resources(protobuf::RepeatedField::from_vec(resources));
task_info
}
pub fn task_info_for_container<'a>(name: &'a str,
task_id: &TaskID,
agent_id: &AgentID,
command: &CommandInfo,
container: &ContainerInfo,
resources: Vec<Resource>)
-> TaskInfo {
let mut task_info = TaskInfo::new();
task_info.set_name(name.to_string());
task_info.set_task_id(task_id.clone());
task_info.set_agent_id(agent_id.clone());
task_info.set_command(command.clone());
task_info.set_container(container.clone());
task_info.set_resources(protobuf::RepeatedField::from_vec(resources));
task_info
}
pub fn | (task_infos: Vec<TaskInfo>) -> Operation {
let mut launch = Operation_Launch::new();
launch.set_task_infos(protobuf::RepeatedField::from_vec(task_infos));
let mut operation = Operation::new();
operation.set_field_type(Operation_Type::LAUNCH);
operation.set_launch(launch);
operation
}
pub fn scalar<'a>(name: &'a str, role: &'a str, value: f64) -> Resource {
let mut scalar = Value_Scalar::new();
scalar.set_value(value);
let mut res = Resource::new();
res.set_name(name.to_string());
res.set_role(role.to_string());
res.set_field_type(Value_Type::SCALAR);
res.set_scalar(scalar);
res
}
pub fn get_scalar_resource_sum<'a>(name: &'a str, offers: Vec<&Offer>) -> f64 {
offers.iter()
.flat_map(|o| o.get_resources())
.filter(|r| r.get_name() == "mem")
.map(|c| c.get_scalar())
.fold(0f64, |acc, mem_res| acc + mem_res.get_value())
}
| launch_operation | identifier_name |
object-safety-issue-22040.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for #22040.
use std::fmt::Debug;
trait Expr: Debug + PartialEq {
fn print_element_count(&self);
}
//#[derive(PartialEq)]
#[derive(Debug)]
struct SExpr<'x> {
elements: Vec<Box<Expr+ 'x>>,
}
impl<'x> PartialEq for SExpr<'x> {
fn eq(&self, other:&SExpr<'x>) -> bool {
println!("L1: {} L2: {}", self.elements.len(), other.elements.len());
//~^ ERROR E0038
let result = self.elements.len() == other.elements.len();
println!("Got compare {}", result);
return result;
}
}
impl <'x> SExpr<'x> {
fn new() -> SExpr<'x> { return SExpr{elements: Vec::new(),}; }
}
impl <'x> Expr for SExpr<'x> {
fn print_element_count(&self) |
}
fn main() {
let a: Box<Expr> = Box::new(SExpr::new()); //~ ERROR E0038
let b: Box<Expr> = Box::new(SExpr::new()); //~ ERROR E0038
// assert_eq!(a, b);
}
| {
println!("element count: {}", self.elements.len());
} | identifier_body |
object-safety-issue-22040.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for #22040.
use std::fmt::Debug;
trait Expr: Debug + PartialEq {
fn print_element_count(&self);
}
//#[derive(PartialEq)]
#[derive(Debug)]
struct SExpr<'x> {
elements: Vec<Box<Expr+ 'x>>,
}
impl<'x> PartialEq for SExpr<'x> {
fn | (&self, other:&SExpr<'x>) -> bool {
println!("L1: {} L2: {}", self.elements.len(), other.elements.len());
//~^ ERROR E0038
let result = self.elements.len() == other.elements.len();
println!("Got compare {}", result);
return result;
}
}
impl <'x> SExpr<'x> {
fn new() -> SExpr<'x> { return SExpr{elements: Vec::new(),}; }
}
impl <'x> Expr for SExpr<'x> {
fn print_element_count(&self) {
println!("element count: {}", self.elements.len());
}
}
fn main() {
let a: Box<Expr> = Box::new(SExpr::new()); //~ ERROR E0038
let b: Box<Expr> = Box::new(SExpr::new()); //~ ERROR E0038
// assert_eq!(a, b);
}
| eq | identifier_name |
object-safety-issue-22040.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for #22040.
use std::fmt::Debug;
trait Expr: Debug + PartialEq {
fn print_element_count(&self);
}
//#[derive(PartialEq)]
#[derive(Debug)]
struct SExpr<'x> {
elements: Vec<Box<Expr+ 'x>>,
}
impl<'x> PartialEq for SExpr<'x> {
fn eq(&self, other:&SExpr<'x>) -> bool {
println!("L1: {} L2: {}", self.elements.len(), other.elements.len());
//~^ ERROR E0038
let result = self.elements.len() == other.elements.len();
|
impl <'x> SExpr<'x> {
fn new() -> SExpr<'x> { return SExpr{elements: Vec::new(),}; }
}
impl <'x> Expr for SExpr<'x> {
fn print_element_count(&self) {
println!("element count: {}", self.elements.len());
}
}
fn main() {
let a: Box<Expr> = Box::new(SExpr::new()); //~ ERROR E0038
let b: Box<Expr> = Box::new(SExpr::new()); //~ ERROR E0038
// assert_eq!(a, b);
} | println!("Got compare {}", result);
return result;
}
} | random_line_split |
cell-does-not-clone.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
struct Foo {
x: int
}
impl Clone for Foo {
fn clone(&self) -> Foo {
// Using Cell in any way should never cause clone() to be
// invoked -- after all, that would permit evil user code to
// abuse `Cell` and trigger crashes.
panic!();
}
}
impl Copy for Foo {}
pub fn main() | {
let x = Cell::new(Foo { x: 22 });
let _y = x.get();
let _z = x.clone();
} | identifier_body |
|
cell-does-not-clone.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
struct Foo {
x: int
}
impl Clone for Foo {
fn | (&self) -> Foo {
// Using Cell in any way should never cause clone() to be
// invoked -- after all, that would permit evil user code to
// abuse `Cell` and trigger crashes.
panic!();
}
}
impl Copy for Foo {}
pub fn main() {
let x = Cell::new(Foo { x: 22 });
let _y = x.get();
let _z = x.clone();
}
| clone | identifier_name |
cell-does-not-clone.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
struct Foo {
x: int
}
impl Clone for Foo { | panic!();
}
}
impl Copy for Foo {}
pub fn main() {
let x = Cell::new(Foo { x: 22 });
let _y = x.get();
let _z = x.clone();
} | fn clone(&self) -> Foo {
// Using Cell in any way should never cause clone() to be
// invoked -- after all, that would permit evil user code to
// abuse `Cell` and trigger crashes.
| random_line_split |
arguments.rs | use std;
pub enum | {
Invalid,
Help,
Create(String, Vec<String>),
Extract(String),
List(String),
}
impl Arguments {
pub fn parseargs() -> Arguments {
enum Action { Create, Extract, List }
let mut action = None;
let mut archive: Option<String> = None;
let mut files: Vec<String> = Vec::new();
let mut args = std::env::args();
args.next();
while let Some(arg) = args.next() {
match arg.as_ref() {
"-h" => return Arguments::Help,
"-c" => action = Some(Action::Create),
"-x" => action = Some(Action::Extract),
"-l" => action = Some(Action::List),
"-f" => archive = args.next(),
_ => files.push(arg),
}
}
let archive = match archive {
None => return Arguments::Invalid,
Some(fname) => fname,
};
return match action {
None => Arguments::Invalid,
Some(Action::Create) => Arguments::Create(archive, files),
Some(Action::Extract) => Arguments::Extract(archive),
Some(Action::List) => Arguments::List(archive),
};
}
}
| Arguments | identifier_name |
arguments.rs | use std;
pub enum Arguments {
Invalid,
Help,
Create(String, Vec<String>),
Extract(String),
List(String),
}
impl Arguments {
pub fn parseargs() -> Arguments {
enum Action { Create, Extract, List }
let mut action = None;
let mut archive: Option<String> = None;
let mut files: Vec<String> = Vec::new();
let mut args = std::env::args();
args.next();
while let Some(arg) = args.next() {
match arg.as_ref() {
"-h" => return Arguments::Help,
"-c" => action = Some(Action::Create),
"-x" => action = Some(Action::Extract),
"-l" => action = Some(Action::List),
"-f" => archive = args.next(),
_ => files.push(arg),
}
} | let archive = match archive {
None => return Arguments::Invalid,
Some(fname) => fname,
};
return match action {
None => Arguments::Invalid,
Some(Action::Create) => Arguments::Create(archive, files),
Some(Action::Extract) => Arguments::Extract(archive),
Some(Action::List) => Arguments::List(archive),
};
}
} | random_line_split |
|
insert.rs | use database::Database;
use database::Errors;
use database::errors::log_n_wrap;
use database::Errors::{NotFound, Conflict};
use std::vec::Vec;
use serde_json::Value;
use serde_json;
use rand;
impl Database {
/// Inserts the record to the given path.
pub fn insert(&mut self, keys: &mut Vec<String>, value: Value) -> Result<Value, Errors> | record at index: {:?}",
&value_with_id,
idx)))
} else {
array.push(value_with_id.clone());
info!(&self.logger, "Insert - Ok id: {:?}", &id);
debug!(&self.logger, "Insert - Value {}", &value_with_id);
Ok(value_with_id.clone())
}
} else {
log_n_wrap(&self.logger,
Conflict(format!("Insert - Error already has an object with the \
given key: {:?}",
keys)))
}
} else {
log_n_wrap(&self.logger,
NotFound(format!("Insert - Error {:?}. No record with the given path:",
keys)))
}
}
} | {
let data = &mut self.data;
if let Ok(obj) = Self::get_object(keys, data) {
// Path Found. It should be an array to accomplish an operation. Otherwise it must be an update not insert.
if let Some(ref mut array) = obj.as_array_mut() {
let mut id = rand::random();
// If id comes with the record use it.
if let Some(id_value) = value.get("id") {
if let Some(parsed) = id_value.as_i64() {
id = parsed;
}
}
let value_with_id = &mut value.clone();
if let Some(obj_id) = value_with_id.as_object_mut() {
obj_id.insert("id".to_string(), serde_json::to_value(id).unwrap());
}
// TODO: random id conflict must be resolved.
if let Some(idx) = Database::find_index(array, &id) {
log_n_wrap(&self.logger,
Conflict(format!("Insert - Error {:?}. \"id\" duplicates \ | identifier_body |
insert.rs | use database::Database;
use database::Errors;
use database::errors::log_n_wrap;
use database::Errors::{NotFound, Conflict};
use std::vec::Vec;
use serde_json::Value;
use serde_json;
use rand;
impl Database {
/// Inserts the record to the given path.
pub fn | (&mut self, keys: &mut Vec<String>, value: Value) -> Result<Value, Errors> {
let data = &mut self.data;
if let Ok(obj) = Self::get_object(keys, data) {
// Path Found. It should be an array to accomplish an operation. Otherwise it must be an update not insert.
if let Some(ref mut array) = obj.as_array_mut() {
let mut id = rand::random();
// If id comes with the record use it.
if let Some(id_value) = value.get("id") {
if let Some(parsed) = id_value.as_i64() {
id = parsed;
}
}
let value_with_id = &mut value.clone();
if let Some(obj_id) = value_with_id.as_object_mut() {
obj_id.insert("id".to_string(), serde_json::to_value(id).unwrap());
}
// TODO: random id conflict must be resolved.
if let Some(idx) = Database::find_index(array, &id) {
log_n_wrap(&self.logger,
Conflict(format!("Insert - Error {:?}. \"id\" duplicates \
record at index: {:?}",
&value_with_id,
idx)))
} else {
array.push(value_with_id.clone());
info!(&self.logger, "Insert - Ok id: {:?}", &id);
debug!(&self.logger, "Insert - Value {}", &value_with_id);
Ok(value_with_id.clone())
}
} else {
log_n_wrap(&self.logger,
Conflict(format!("Insert - Error already has an object with the \
given key: {:?}",
keys)))
}
} else {
log_n_wrap(&self.logger,
NotFound(format!("Insert - Error {:?}. No record with the given path:",
keys)))
}
}
} | insert | identifier_name |
insert.rs | use database::Database;
use database::Errors;
use database::errors::log_n_wrap;
use database::Errors::{NotFound, Conflict};
use std::vec::Vec;
use serde_json::Value;
use serde_json;
use rand;
impl Database {
/// Inserts the record to the given path.
pub fn insert(&mut self, keys: &mut Vec<String>, value: Value) -> Result<Value, Errors> {
let data = &mut self.data;
if let Ok(obj) = Self::get_object(keys, data) {
// Path Found. It should be an array to accomplish an operation. Otherwise it must be an update not insert.
if let Some(ref mut array) = obj.as_array_mut() {
let mut id = rand::random();
// If id comes with the record use it.
if let Some(id_value) = value.get("id") {
if let Some(parsed) = id_value.as_i64() {
id = parsed;
}
}
let value_with_id = &mut value.clone();
if let Some(obj_id) = value_with_id.as_object_mut() {
obj_id.insert("id".to_string(), serde_json::to_value(id).unwrap());
}
// TODO: random id conflict must be resolved.
if let Some(idx) = Database::find_index(array, &id) {
log_n_wrap(&self.logger,
Conflict(format!("Insert - Error {:?}. \"id\" duplicates \
record at index: {:?}",
&value_with_id,
idx)))
} else {
array.push(value_with_id.clone());
info!(&self.logger, "Insert - Ok id: {:?}", &id);
debug!(&self.logger, "Insert - Value {}", &value_with_id);
Ok(value_with_id.clone())
}
} else {
log_n_wrap(&self.logger,
Conflict(format!("Insert - Error already has an object with the \
given key: {:?}",
keys)))
}
} else { | }
} | log_n_wrap(&self.logger,
NotFound(format!("Insert - Error {:?}. No record with the given path:",
keys)))
} | random_line_split |
insert.rs | use database::Database;
use database::Errors;
use database::errors::log_n_wrap;
use database::Errors::{NotFound, Conflict};
use std::vec::Vec;
use serde_json::Value;
use serde_json;
use rand;
impl Database {
/// Inserts the record to the given path.
pub fn insert(&mut self, keys: &mut Vec<String>, value: Value) -> Result<Value, Errors> {
let data = &mut self.data;
if let Ok(obj) = Self::get_object(keys, data) {
// Path Found. It should be an array to accomplish an operation. Otherwise it must be an update not insert.
if let Some(ref mut array) = obj.as_array_mut() {
let mut id = rand::random();
// If id comes with the record use it.
if let Some(id_value) = value.get("id") |
let value_with_id = &mut value.clone();
if let Some(obj_id) = value_with_id.as_object_mut() {
obj_id.insert("id".to_string(), serde_json::to_value(id).unwrap());
}
// TODO: random id conflict must be resolved.
if let Some(idx) = Database::find_index(array, &id) {
log_n_wrap(&self.logger,
Conflict(format!("Insert - Error {:?}. \"id\" duplicates \
record at index: {:?}",
&value_with_id,
idx)))
} else {
array.push(value_with_id.clone());
info!(&self.logger, "Insert - Ok id: {:?}", &id);
debug!(&self.logger, "Insert - Value {}", &value_with_id);
Ok(value_with_id.clone())
}
} else {
log_n_wrap(&self.logger,
Conflict(format!("Insert - Error already has an object with the \
given key: {:?}",
keys)))
}
} else {
log_n_wrap(&self.logger,
NotFound(format!("Insert - Error {:?}. No record with the given path:",
keys)))
}
}
} | {
if let Some(parsed) = id_value.as_i64() {
id = parsed;
}
} | conditional_block |
lib.rs | ();
let add_insts = self.add_insts();
let regex = &*self.original;
quote_expr!(self.cx, {
// When `regex!` is bound to a name that is not used, we have to make sure
// that dead_code warnings don't bubble up to the user from the generated
// code. Therefore, we suppress them by allowing dead_code. The effect is that
// the user is only warned about *their* unused variable/code, and not the
// unused code generated by regex!. See #14185 for an example.
#[allow(dead_code)]
static CAPTURES: &'static [Option<&'static str>] = &$cap_names;
#[allow(dead_code)]
static CAPTURE_NAME_IDX: &'static [(&'static str, usize)] = &$capture_name_idx;
#[allow(dead_code)]
fn exec<'t>(
mut caps: &mut [Option<usize>],
input: &'t str,
start: usize,
) -> bool {
#![allow(unused_imports)]
#![allow(unused_mut)]
use regex::internal::{Char, CharInput, InputAt, Input, Inst};
let input = CharInput::new(input.as_bytes());
let at = input.at(start);
return Nfa {
input: input,
ncaps: caps.len(),
}.exec(&mut NfaThreads::new(), &mut caps, at);
struct Nfa<'t> {
input: CharInput<'t>,
ncaps: usize,
}
impl<'t> Nfa<'t> {
#[allow(unused_variables)]
fn exec(
&mut self,
mut q: &mut NfaThreads,
mut caps: &mut [Option<usize>],
mut at: InputAt,
) -> bool {
let mut matched = false;
let (mut clist, mut nlist) = (&mut q.clist, &mut q.nlist);
clist.empty(); nlist.empty();
'LOOP: loop {
if clist.size == 0 {
if matched || (!at.is_start() && $is_anchored_start) {
break;
}
// TODO: Prefix matching... Hmm.
// Prefix matching now uses a DFA, so I think this is
// going to require encoding that DFA statically.
}
if clist.size == 0 || (!$is_anchored_start &&!matched) {
self.add(clist, &mut caps, 0, at);
}
let at_next = self.input.at(at.next_pos());
for i in 0..clist.size {
let pc = clist.pc(i);
let tcaps = clist.caps(i);
if self.step(nlist, caps, tcaps, pc, at, at_next) {
matched = true;
if caps.len() == 0 {
break 'LOOP;
}
break;
}
}
if at.char().is_none() {
break;
}
at = at_next;
::std::mem::swap(&mut clist, &mut nlist);
nlist.empty();
}
matched
}
// Sometimes `nlist` is never used (for empty regexes).
#[allow(unused_variables)]
#[inline]
fn step(
&self,
nlist: &mut Threads,
caps: &mut [Option<usize>],
thread_caps: &mut [Option<usize>],
pc: usize,
at: InputAt,
at_next: InputAt,
) -> bool {
$step_insts;
false
}
fn add(
&self,
nlist: &mut Threads,
thread_caps: &mut [Option<usize>],
pc: usize,
at: InputAt,
) {
if nlist.contains(pc) {
return;
}
let ti = nlist.add(pc);
$add_insts
}
}
struct NfaThreads {
clist: Threads,
nlist: Threads,
}
struct Threads {
dense: [Thread; $num_insts],
sparse: [usize; $num_insts],
size: usize,
}
struct Thread {
pc: usize,
caps: [Option<usize>; $num_cap_locs],
}
impl NfaThreads {
fn new() -> NfaThreads {
NfaThreads {
clist: Threads::new(),
nlist: Threads::new(),
}
}
fn swap(&mut self) {
::std::mem::swap(&mut self.clist, &mut self.nlist);
}
}
impl Threads {
fn new() -> Threads {
Threads {
// These unsafe blocks are used for performance reasons, as it
// gives us a zero-cost initialization of a sparse set. The
// trick is described in more detail here:
// http://research.swtch.com/sparse
// The idea here is to avoid initializing threads that never
// need to be initialized, particularly for larger regexs with
// a lot of instructions.
dense: unsafe { ::std::mem::uninitialized() },
sparse: unsafe { ::std::mem::uninitialized() },
size: 0,
}
}
#[inline]
fn add(&mut self, pc: usize) -> usize {
let i = self.size;
self.dense[i].pc = pc;
self.sparse[pc] = i;
self.size += 1;
i
}
#[inline]
fn thread(&mut self, i: usize) -> &mut Thread {
&mut self.dense[i]
}
#[inline]
fn contains(&self, pc: usize) -> bool {
let s = unsafe { ::std::ptr::read_volatile(&self.sparse[pc]) };
s < self.size && self.dense[s].pc == pc
}
#[inline]
fn empty(&mut self) {
self.size = 0;
}
#[inline]
fn pc(&self, i: usize) -> usize {
self.dense[i].pc
}
#[inline]
fn caps<'r>(&'r mut self, i: usize) -> &'r mut [Option<usize>] {
&mut self.dense[i].caps
}
}
}
::regex::Regex(::regex::internal::_Regex::Plugin(::regex::internal::Plugin {
original: $regex,
names: &CAPTURES,
groups: &CAPTURE_NAME_IDX,
prog: exec,
}))
})
}
// Generates code for the `add` method, which is responsible for adding
// zero-width states to the next queue of states to visit.
fn add_insts(&self) -> P<ast::Expr> {
let arms = self.prog.iter().enumerate().map(|(pc, inst)| {
let body = match *inst {
Inst::EmptyLook(ref inst) => {
let nextpc = inst.goto;
match inst.look {
EmptyLook::StartLine => {
quote_expr!(self.cx, {
let prev = self.input.previous_char(at);
if prev.is_none() || prev == '\n' {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::EndLine => {
quote_expr!(self.cx, {
if at.char().is_none() || at.char() == '\n' {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::StartText => {
quote_expr!(self.cx, {
let prev = self.input.previous_char(at);
if prev.is_none() {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::EndText => {
quote_expr!(self.cx, {
if at.char().is_none() {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::WordBoundary
| EmptyLook::NotWordBoundary => {
let m = if inst.look == EmptyLook::WordBoundary {
quote_expr!(self.cx, { w1 ^ w2 })
} else {
quote_expr!(self.cx, {!(w1 ^ w2) })
};
quote_expr!(self.cx, {
let prev = self.input.previous_char(at);
let w1 = prev.is_word_char();
let w2 = at.char().is_word_char();
if $m {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::WordBoundaryAscii
| EmptyLook::NotWordBoundaryAscii => {
unreachable!()
}
}
}
Inst::Save(ref inst) => {
let nextpc = inst.goto;
let slot = inst.slot;
quote_expr!(self.cx, {
if $slot >= self.ncaps {
self.add(nlist, thread_caps, $nextpc, at);
} else {
let old = thread_caps[$slot];
thread_caps[$slot] = Some(at.pos());
self.add(nlist, thread_caps, $nextpc, at);
thread_caps[$slot] = old;
}
})
}
Inst::Split(ref inst) => {
let (x, y) = (inst.goto1, inst.goto2);
quote_expr!(self.cx, {
self.add(nlist, thread_caps, $x, at);
self.add(nlist, thread_caps, $y, at);
})
}
// For Match, Char, Ranges
_ => quote_expr!(self.cx, {
let mut t = &mut nlist.thread(ti);
for (slot, val) in t.caps.iter_mut().zip(thread_caps.iter()) {
*slot = *val;
}
}),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Generates the code for the `step` method, which processes all states
// in the current queue that consume a single character.
fn step_insts(&self) -> P<ast::Expr> {
let arms = self.prog.iter().enumerate().map(|(pc, inst)| {
let body = match *inst {
Inst::Match(_) => quote_expr!(self.cx, {
for (slot, val) in caps.iter_mut().zip(thread_caps.iter()) {
*slot = *val;
}
return true;
}),
Inst::Char(ref inst) => {
let nextpc = inst.goto;
let c = inst.c;
quote_expr!(self.cx, {
if $c == at.char() {
self.add(nlist, thread_caps, $nextpc, at_next);
}
return false;
})
}
Inst::Ranges(ref inst) => {
let match_class = self.match_class(&inst.ranges);
let nextpc = inst.goto;
quote_expr!(self.cx, {
let mut c = at.char();
if let Some(c) = c.as_char() {
if $match_class {
self.add(nlist, thread_caps, $nextpc, at_next);
}
}
return false;
})
}
// EmptyLook, Save, Jump, Split
_ => quote_expr!(self.cx, { return false; }),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Translates a character class into a match expression.
// This avoids a binary search (and is hopefully replaced by a jump
// table).
fn match_class(&self, ranges: &[(char, char)]) -> P<ast::Expr> {
let mut arms = ranges.iter().map(|&(start, end)| {
let pat = self.cx.pat(
self.sp, ast::PatKind::Range(
quote_expr!(self.cx, $start), quote_expr!(self.cx, $end)));
self.cx.arm(self.sp, vec!(pat), quote_expr!(self.cx, true))
}).collect::<Vec<ast::Arm>>();
arms.push(self.wild_arm_expr(quote_expr!(self.cx, false)));
let match_on = quote_expr!(self.cx, c);
self.cx.expr_match(self.sp, match_on, arms)
}
// Generates code for checking a literal prefix of the search string.
// The code is only generated if the regex *has* a literal prefix.
// Otherwise, a no-op is returned.
// fn check_prefix(&self) -> P<ast::Expr> {
// if self.prog.prefixes.len() == 0 {
// self.empty_block()
// } else {
// quote_expr!(self.cx,
// if clist.size == 0 {
// let haystack = &self.input.as_bytes()[self.ic..];
// match find_prefix(prefix_bytes, haystack) {
// None => break,
// Some(i) => {
// self.ic += i;
// next_ic = self.chars.set(self.ic);
// }
// }
// }
// )
// }
// }
// Builds a `match pc {... }` expression from a list of arms, specifically
// for matching the current program counter with an instruction.
// A wild-card arm is automatically added that executes a no-op. It will
// never be used, but is added to satisfy the compiler complaining about
// non-exhaustive patterns.
fn match_insts(&self, mut arms: Vec<ast::Arm>) -> P<ast::Expr> | {
arms.push(self.wild_arm_expr(self.empty_block()));
self.cx.expr_match(self.sp, quote_expr!(self.cx, pc), arms)
} | identifier_body |
|
lib.rs | = match Compiler::new().size_limit(usize::MAX).compile(&[expr]) {
Ok(re) => re,
Err(err) => {
cx.span_err(sp, &err.to_string());
return DummyResult::any(sp)
}
};
let names = prog.captures.iter().cloned().collect();
let mut gen = NfaGen {
cx: &*cx,
sp: sp,
prog: prog,
names: names,
original: regex,
};
MacEager::expr(gen.code())
}
struct NfaGen<'a> {
cx: &'a ExtCtxt<'a>,
sp: codemap::Span,
prog: Program,
names: Vec<Option<String>>,
original: String,
}
impl<'a> NfaGen<'a> {
fn code(&mut self) -> P<ast::Expr> {
// Most or all of the following things are used in the quasiquoted
// expression returned.
let num_cap_locs = 2 * self.prog.captures.len();
let num_insts = self.prog.len();
let cap_names = self.vec_expr(self.names.iter(),
&mut |cx, name| match *name {
Some(ref name) => {
let name = &**name;
quote_expr!(cx, Some($name))
}
None => cx.expr_none(self.sp),
}
);
let capture_name_idx = {
let mut capture_name_idx = BTreeMap::new();
for (i, name) in self.names.iter().enumerate() {
if let Some(ref name) = *name {
capture_name_idx.insert(name.to_owned(), i);
}
}
self.vec_expr(capture_name_idx.iter(),
&mut |cx, (name, group_idx)|
quote_expr!(cx, ($name, $group_idx))
)
};
let is_anchored_start = self.prog.is_anchored_start;
let step_insts = self.step_insts();
let add_insts = self.add_insts();
let regex = &*self.original;
quote_expr!(self.cx, {
// When `regex!` is bound to a name that is not used, we have to make sure
// that dead_code warnings don't bubble up to the user from the generated
// code. Therefore, we suppress them by allowing dead_code. The effect is that
// the user is only warned about *their* unused variable/code, and not the
// unused code generated by regex!. See #14185 for an example.
#[allow(dead_code)]
static CAPTURES: &'static [Option<&'static str>] = &$cap_names;
#[allow(dead_code)]
static CAPTURE_NAME_IDX: &'static [(&'static str, usize)] = &$capture_name_idx;
#[allow(dead_code)]
fn exec<'t>(
mut caps: &mut [Option<usize>],
input: &'t str,
start: usize,
) -> bool {
#![allow(unused_imports)]
#![allow(unused_mut)]
use regex::internal::{Char, CharInput, InputAt, Input, Inst};
let input = CharInput::new(input.as_bytes());
let at = input.at(start);
return Nfa {
input: input,
ncaps: caps.len(),
}.exec(&mut NfaThreads::new(), &mut caps, at);
struct Nfa<'t> {
input: CharInput<'t>,
ncaps: usize,
}
impl<'t> Nfa<'t> {
#[allow(unused_variables)]
fn exec(
&mut self,
mut q: &mut NfaThreads,
mut caps: &mut [Option<usize>],
mut at: InputAt,
) -> bool {
let mut matched = false;
let (mut clist, mut nlist) = (&mut q.clist, &mut q.nlist);
clist.empty(); nlist.empty();
'LOOP: loop {
if clist.size == 0 {
if matched || (!at.is_start() && $is_anchored_start) {
break;
}
// TODO: Prefix matching... Hmm.
// Prefix matching now uses a DFA, so I think this is
// going to require encoding that DFA statically.
}
if clist.size == 0 || (!$is_anchored_start &&!matched) {
self.add(clist, &mut caps, 0, at);
}
let at_next = self.input.at(at.next_pos());
for i in 0..clist.size {
let pc = clist.pc(i);
let tcaps = clist.caps(i);
if self.step(nlist, caps, tcaps, pc, at, at_next) {
matched = true;
if caps.len() == 0 {
break 'LOOP;
}
break;
}
}
if at.char().is_none() {
break;
}
at = at_next;
::std::mem::swap(&mut clist, &mut nlist);
nlist.empty();
}
matched
}
// Sometimes `nlist` is never used (for empty regexes).
#[allow(unused_variables)]
#[inline]
fn step(
&self,
nlist: &mut Threads,
caps: &mut [Option<usize>],
thread_caps: &mut [Option<usize>],
pc: usize,
at: InputAt,
at_next: InputAt,
) -> bool {
$step_insts;
false
}
fn add(
&self,
nlist: &mut Threads,
thread_caps: &mut [Option<usize>],
pc: usize,
at: InputAt,
) {
if nlist.contains(pc) {
return;
}
let ti = nlist.add(pc);
$add_insts
}
}
struct NfaThreads {
clist: Threads,
nlist: Threads,
}
struct Threads {
dense: [Thread; $num_insts],
sparse: [usize; $num_insts],
size: usize,
}
struct Thread {
pc: usize,
caps: [Option<usize>; $num_cap_locs],
}
impl NfaThreads {
fn new() -> NfaThreads {
NfaThreads {
clist: Threads::new(),
nlist: Threads::new(),
}
}
fn swap(&mut self) {
::std::mem::swap(&mut self.clist, &mut self.nlist);
}
}
impl Threads {
fn new() -> Threads {
Threads {
// These unsafe blocks are used for performance reasons, as it
// gives us a zero-cost initialization of a sparse set. The
// trick is described in more detail here:
// http://research.swtch.com/sparse
// The idea here is to avoid initializing threads that never
// need to be initialized, particularly for larger regexs with
// a lot of instructions.
dense: unsafe { ::std::mem::uninitialized() },
sparse: unsafe { ::std::mem::uninitialized() },
size: 0,
}
}
#[inline]
fn add(&mut self, pc: usize) -> usize {
let i = self.size;
self.dense[i].pc = pc;
self.sparse[pc] = i;
self.size += 1;
i
}
#[inline]
fn thread(&mut self, i: usize) -> &mut Thread {
&mut self.dense[i]
}
#[inline]
fn contains(&self, pc: usize) -> bool {
let s = unsafe { ::std::ptr::read_volatile(&self.sparse[pc]) };
s < self.size && self.dense[s].pc == pc
}
#[inline]
fn empty(&mut self) {
self.size = 0;
}
#[inline]
fn pc(&self, i: usize) -> usize {
self.dense[i].pc
}
#[inline]
fn caps<'r>(&'r mut self, i: usize) -> &'r mut [Option<usize>] {
&mut self.dense[i].caps
}
}
}
::regex::Regex(::regex::internal::_Regex::Plugin(::regex::internal::Plugin {
original: $regex,
names: &CAPTURES,
groups: &CAPTURE_NAME_IDX,
prog: exec,
}))
})
}
// Generates code for the `add` method, which is responsible for adding
// zero-width states to the next queue of states to visit.
fn add_insts(&self) -> P<ast::Expr> {
let arms = self.prog.iter().enumerate().map(|(pc, inst)| {
let body = match *inst {
Inst::EmptyLook(ref inst) => {
let nextpc = inst.goto;
match inst.look {
EmptyLook::StartLine => {
quote_expr!(self.cx, {
let prev = self.input.previous_char(at);
if prev.is_none() || prev == '\n' {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::EndLine => {
quote_expr!(self.cx, {
if at.char().is_none() || at.char() == '\n' {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::StartText => {
quote_expr!(self.cx, {
let prev = self.input.previous_char(at);
if prev.is_none() {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::EndText => {
quote_expr!(self.cx, {
if at.char().is_none() {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::WordBoundary
| EmptyLook::NotWordBoundary => {
let m = if inst.look == EmptyLook::WordBoundary {
quote_expr!(self.cx, { w1 ^ w2 })
} else {
quote_expr!(self.cx, {!(w1 ^ w2) })
};
quote_expr!(self.cx, {
let prev = self.input.previous_char(at);
let w1 = prev.is_word_char();
let w2 = at.char().is_word_char();
if $m {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::WordBoundaryAscii
| EmptyLook::NotWordBoundaryAscii => {
unreachable!()
}
}
}
Inst::Save(ref inst) => {
let nextpc = inst.goto;
let slot = inst.slot;
quote_expr!(self.cx, {
if $slot >= self.ncaps {
self.add(nlist, thread_caps, $nextpc, at);
} else {
let old = thread_caps[$slot];
thread_caps[$slot] = Some(at.pos());
self.add(nlist, thread_caps, $nextpc, at);
thread_caps[$slot] = old;
}
})
}
Inst::Split(ref inst) => {
let (x, y) = (inst.goto1, inst.goto2);
quote_expr!(self.cx, {
self.add(nlist, thread_caps, $x, at);
self.add(nlist, thread_caps, $y, at);
})
}
// For Match, Char, Ranges
_ => quote_expr!(self.cx, {
let mut t = &mut nlist.thread(ti);
for (slot, val) in t.caps.iter_mut().zip(thread_caps.iter()) {
*slot = *val;
}
}),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Generates the code for the `step` method, which processes all states
// in the current queue that consume a single character.
fn step_insts(&self) -> P<ast::Expr> {
let arms = self.prog.iter().enumerate().map(|(pc, inst)| {
let body = match *inst {
Inst::Match(_) => quote_expr!(self.cx, {
for (slot, val) in caps.iter_mut().zip(thread_caps.iter()) {
*slot = *val;
}
return true;
}),
Inst::Char(ref inst) => {
let nextpc = inst.goto;
let c = inst.c;
quote_expr!(self.cx, {
if $c == at.char() {
self.add(nlist, thread_caps, $nextpc, at_next);
}
return false;
})
}
Inst::Ranges(ref inst) => {
let match_class = self.match_class(&inst.ranges);
let nextpc = inst.goto;
quote_expr!(self.cx, {
let mut c = at.char();
if let Some(c) = c.as_char() {
if $match_class {
self.add(nlist, thread_caps, $nextpc, at_next);
}
}
return false;
})
}
// EmptyLook, Save, Jump, Split
_ => quote_expr!(self.cx, { return false; }),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Translates a character class into a match expression.
// This avoids a binary search (and is hopefully replaced by a jump
// table).
fn | (&self, ranges: &[(char, char)]) -> P<ast:: | match_class | identifier_name |
lib.rs | prog = match Compiler::new().size_limit(usize::MAX).compile(&[expr]) {
Ok(re) => re,
Err(err) => {
cx.span_err(sp, &err.to_string());
return DummyResult::any(sp)
}
};
let names = prog.captures.iter().cloned().collect();
let mut gen = NfaGen {
cx: &*cx,
sp: sp,
prog: prog,
names: names,
original: regex,
};
MacEager::expr(gen.code())
}
struct NfaGen<'a> {
cx: &'a ExtCtxt<'a>,
sp: codemap::Span,
prog: Program,
names: Vec<Option<String>>,
original: String,
}
impl<'a> NfaGen<'a> {
fn code(&mut self) -> P<ast::Expr> {
// Most or all of the following things are used in the quasiquoted
// expression returned.
let num_cap_locs = 2 * self.prog.captures.len();
let num_insts = self.prog.len();
let cap_names = self.vec_expr(self.names.iter(),
&mut |cx, name| match *name {
Some(ref name) => {
let name = &**name;
quote_expr!(cx, Some($name))
}
None => cx.expr_none(self.sp),
}
);
let capture_name_idx = {
let mut capture_name_idx = BTreeMap::new();
for (i, name) in self.names.iter().enumerate() {
if let Some(ref name) = *name {
capture_name_idx.insert(name.to_owned(), i);
}
}
self.vec_expr(capture_name_idx.iter(),
&mut |cx, (name, group_idx)|
quote_expr!(cx, ($name, $group_idx))
)
};
let is_anchored_start = self.prog.is_anchored_start;
let step_insts = self.step_insts();
let add_insts = self.add_insts();
let regex = &*self.original;
quote_expr!(self.cx, {
// When `regex!` is bound to a name that is not used, we have to make sure
// that dead_code warnings don't bubble up to the user from the generated
// code. Therefore, we suppress them by allowing dead_code. The effect is that
// the user is only warned about *their* unused variable/code, and not the
// unused code generated by regex!. See #14185 for an example.
#[allow(dead_code)]
static CAPTURES: &'static [Option<&'static str>] = &$cap_names;
#[allow(dead_code)]
static CAPTURE_NAME_IDX: &'static [(&'static str, usize)] = &$capture_name_idx;
#[allow(dead_code)]
fn exec<'t>(
mut caps: &mut [Option<usize>],
input: &'t str,
start: usize,
) -> bool {
#![allow(unused_imports)]
#![allow(unused_mut)]
use regex::internal::{Char, CharInput, InputAt, Input, Inst};
let input = CharInput::new(input.as_bytes());
let at = input.at(start);
return Nfa {
input: input,
ncaps: caps.len(),
}.exec(&mut NfaThreads::new(), &mut caps, at);
struct Nfa<'t> {
input: CharInput<'t>,
ncaps: usize,
}
impl<'t> Nfa<'t> {
#[allow(unused_variables)]
fn exec(
&mut self,
mut q: &mut NfaThreads,
mut caps: &mut [Option<usize>],
mut at: InputAt,
) -> bool {
let mut matched = false;
let (mut clist, mut nlist) = (&mut q.clist, &mut q.nlist);
clist.empty(); nlist.empty();
'LOOP: loop {
if clist.size == 0 {
if matched || (!at.is_start() && $is_anchored_start) {
break;
}
// TODO: Prefix matching... Hmm.
// Prefix matching now uses a DFA, so I think this is
// going to require encoding that DFA statically.
}
if clist.size == 0 || (!$is_anchored_start &&!matched) {
self.add(clist, &mut caps, 0, at);
}
let at_next = self.input.at(at.next_pos());
for i in 0..clist.size {
let pc = clist.pc(i);
let tcaps = clist.caps(i);
if self.step(nlist, caps, tcaps, pc, at, at_next) {
matched = true;
if caps.len() == 0 {
break 'LOOP;
}
break;
}
}
if at.char().is_none() {
break;
}
at = at_next;
::std::mem::swap(&mut clist, &mut nlist);
nlist.empty();
}
matched
}
// Sometimes `nlist` is never used (for empty regexes).
#[allow(unused_variables)]
#[inline]
fn step(
&self,
nlist: &mut Threads,
caps: &mut [Option<usize>],
thread_caps: &mut [Option<usize>],
pc: usize,
at: InputAt,
at_next: InputAt,
) -> bool {
$step_insts;
false
}
fn add(
&self,
nlist: &mut Threads,
thread_caps: &mut [Option<usize>],
pc: usize,
at: InputAt,
) {
if nlist.contains(pc) {
return;
}
let ti = nlist.add(pc);
$add_insts
}
}
struct NfaThreads {
clist: Threads,
nlist: Threads,
}
struct Threads {
dense: [Thread; $num_insts],
sparse: [usize; $num_insts],
size: usize,
}
struct Thread {
pc: usize,
caps: [Option<usize>; $num_cap_locs],
}
impl NfaThreads {
fn new() -> NfaThreads {
NfaThreads {
clist: Threads::new(),
nlist: Threads::new(),
}
}
fn swap(&mut self) {
::std::mem::swap(&mut self.clist, &mut self.nlist);
}
}
impl Threads {
fn new() -> Threads {
Threads {
// These unsafe blocks are used for performance reasons, as it
// gives us a zero-cost initialization of a sparse set. The
// trick is described in more detail here:
// http://research.swtch.com/sparse
// The idea here is to avoid initializing threads that never
// need to be initialized, particularly for larger regexs with
// a lot of instructions.
dense: unsafe { ::std::mem::uninitialized() },
sparse: unsafe { ::std::mem::uninitialized() },
size: 0,
}
}
#[inline]
fn add(&mut self, pc: usize) -> usize {
let i = self.size;
self.dense[i].pc = pc;
self.sparse[pc] = i;
self.size += 1;
i
}
#[inline]
fn thread(&mut self, i: usize) -> &mut Thread {
&mut self.dense[i]
}
#[inline]
fn contains(&self, pc: usize) -> bool {
let s = unsafe { ::std::ptr::read_volatile(&self.sparse[pc]) };
s < self.size && self.dense[s].pc == pc
}
#[inline]
fn empty(&mut self) {
self.size = 0;
}
#[inline]
fn pc(&self, i: usize) -> usize {
self.dense[i].pc
}
#[inline]
fn caps<'r>(&'r mut self, i: usize) -> &'r mut [Option<usize>] {
&mut self.dense[i].caps
}
}
}
::regex::Regex(::regex::internal::_Regex::Plugin(::regex::internal::Plugin {
original: $regex,
names: &CAPTURES,
groups: &CAPTURE_NAME_IDX,
prog: exec,
}))
})
}
// Generates code for the `add` method, which is responsible for adding
// zero-width states to the next queue of states to visit.
fn add_insts(&self) -> P<ast::Expr> {
let arms = self.prog.iter().enumerate().map(|(pc, inst)| {
let body = match *inst {
Inst::EmptyLook(ref inst) => {
let nextpc = inst.goto;
match inst.look {
EmptyLook::StartLine => {
quote_expr!(self.cx, {
let prev = self.input.previous_char(at);
if prev.is_none() || prev == '\n' {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::EndLine => {
quote_expr!(self.cx, {
if at.char().is_none() || at.char() == '\n' {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::StartText => {
quote_expr!(self.cx, {
let prev = self.input.previous_char(at);
if prev.is_none() {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::EndText => {
quote_expr!(self.cx, {
if at.char().is_none() {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::WordBoundary
| EmptyLook::NotWordBoundary => {
let m = if inst.look == EmptyLook::WordBoundary {
quote_expr!(self.cx, { w1 ^ w2 })
} else {
quote_expr!(self.cx, {!(w1 ^ w2) })
};
quote_expr!(self.cx, {
let prev = self.input.previous_char(at);
let w1 = prev.is_word_char();
let w2 = at.char().is_word_char();
if $m {
self.add(nlist, thread_caps, $nextpc, at);
}
})
}
EmptyLook::WordBoundaryAscii
| EmptyLook::NotWordBoundaryAscii => {
unreachable!()
}
}
}
Inst::Save(ref inst) => {
let nextpc = inst.goto;
let slot = inst.slot;
quote_expr!(self.cx, {
if $slot >= self.ncaps {
self.add(nlist, thread_caps, $nextpc, at);
} else {
let old = thread_caps[$slot];
thread_caps[$slot] = Some(at.pos());
self.add(nlist, thread_caps, $nextpc, at);
thread_caps[$slot] = old;
}
})
}
Inst::Split(ref inst) => {
let (x, y) = (inst.goto1, inst.goto2);
quote_expr!(self.cx, {
self.add(nlist, thread_caps, $x, at);
self.add(nlist, thread_caps, $y, at);
})
}
// For Match, Char, Ranges
_ => quote_expr!(self.cx, {
let mut t = &mut nlist.thread(ti);
for (slot, val) in t.caps.iter_mut().zip(thread_caps.iter()) {
*slot = *val;
}
}),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Generates the code for the `step` method, which processes all states
// in the current queue that consume a single character.
fn step_insts(&self) -> P<ast::Expr> {
let arms = self.prog.iter().enumerate().map(|(pc, inst)| {
let body = match *inst {
Inst::Match(_) => quote_expr!(self.cx, {
for (slot, val) in caps.iter_mut().zip(thread_caps.iter()) {
*slot = *val;
}
return true;
}),
Inst::Char(ref inst) => {
let nextpc = inst.goto;
let c = inst.c;
quote_expr!(self.cx, {
if $c == at.char() {
self.add(nlist, thread_caps, $nextpc, at_next);
}
return false;
})
}
Inst::Ranges(ref inst) => {
let match_class = self.match_class(&inst.ranges);
let nextpc = inst.goto;
quote_expr!(self.cx, {
let mut c = at.char();
if let Some(c) = c.as_char() {
if $match_class { | }
return false;
})
}
// EmptyLook, Save, Jump, Split
_ => quote_expr!(self.cx, { return false; }),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Translates a character class into a match expression.
// This avoids a binary search (and is hopefully replaced by a jump
// table).
fn match_class(&self, ranges: &[(char, char)]) -> P<ast::Expr> | self.add(nlist, thread_caps, $nextpc, at_next);
} | random_line_split |
query19.rs | use timely::dataflow::*;
use timely::dataflow::operators::*;
use timely::dataflow::operators::probe::Handle as ProbeHandle;
use differential_dataflow::AsCollection;
use differential_dataflow::operators::*;
use differential_dataflow::lattice::Lattice;
// use differential_dataflow::difference::DiffPair;
use ::Collections;
// use ::types::create_date;
// -- $ID$
// -- TPC-H/TPC-R Discounted Revenue Query (Q19)
// -- Functional Query Definition
// -- Approved February 1998
// :x
// :o
// select
// sum(l_extendedprice* (1 - l_discount)) as revenue
// from
// lineitem,
// part
// where
// (
// p_partkey = l_partkey
// and p_brand = ':1'
// and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG')
// and l_quantity >= :4 and l_quantity <= :4 + 10
// and p_size between 1 and 5
// and l_shipmode in ('AIR', 'AIR REG')
// and l_shipinstruct = 'DELIVER IN PERSON'
// )
// or
// (
// p_partkey = l_partkey
// and p_brand = ':2'
// and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK')
// and l_quantity >= :5 and l_quantity <= :5 + 10
// and p_size between 1 and 10
// and l_shipmode in ('AIR', 'AIR REG')
// and l_shipinstruct = 'DELIVER IN PERSON'
// )
// or
// (
// p_partkey = l_partkey
// and p_brand = ':3'
// and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG')
// and l_quantity >= :6 and l_quantity <= :6 + 10
// and p_size between 1 and 15
// and l_shipmode in ('AIR', 'AIR REG')
// and l_shipinstruct = 'DELIVER IN PERSON'
// );
// :n -1
fn starts_with(source: &[u8], query: &[u8]) -> bool {
source.len() >= query.len() && &source[..query.len()] == query
} | println!("TODO: query 19 could use some _u attention");
let lineitems =
collections
.lineitems()
.inner
.flat_map(|(x,t,d)|
if (starts_with(&x.ship_mode, b"AIR") || starts_with(&x.ship_mode, b"AIR REG")) && starts_with(&x.ship_instruct, b"DELIVER IN PERSON") {
Some(((x.part_key, x.quantity), t, d * (x.extended_price * (100 - x.discount) / 100) as isize))
}
else { None }
)
.as_collection();
let lines1 = lineitems.filter(|&(_, quant)| quant >= 1 && quant <= 11).map(|x| (x.0, ()));
let lines2 = lineitems.filter(|&(_, quant)| quant >= 10 && quant <= 20).map(|x| (x.0, ()));
let lines3 = lineitems.filter(|&(_, quant)| quant >= 20 && quant <= 30).map(|x| (x.0, ()));
let parts = collections.parts().map(|p| (p.part_key, (p.brand, p.container, p.size)));
let parts1 = parts.filter(|&(_key, (brand, container, size))| starts_with(&brand, b"Brand#12") && 1 <= size && size <= 5 && (starts_with(&container, b"SM CASE") || starts_with(&container, b"SM BOX") || starts_with(&container, b"SM PACK") || starts_with(&container, b"MED PKG"))).map(|x| x.0);
let parts2 = parts.filter(|&(_key, (brand, container, size))| starts_with(&brand, b"Brand#23") && 1 <= size && size <= 10 && (starts_with(&container, b"MED BAG") || starts_with(&container, b"MED BOX") || starts_with(&container, b"MED PKG") || starts_with(&container, b"MED PACK"))).map(|x| x.0);
let parts3 = parts.filter(|&(_key, (brand, container, size))| starts_with(&brand, b"Brand#34") && 1 <= size && size <= 15 && (starts_with(&container, b"LG CASE") || starts_with(&container, b"LG BOX") || starts_with(&container, b"LG PACK") || starts_with(&container, b"LG PCG"))).map(|x| x.0);
let result1 = lines1.semijoin_u(&parts1);
let result2 = lines2.semijoin_u(&parts2);
let result3 = lines3.semijoin_u(&parts3);
result1
.concat(&result2)
.concat(&result3)
.map(|(x,_)| x)
.count_u()
.probe()
} |
pub fn query<G: Scope>(collections: &mut Collections<G>) -> ProbeHandle<G::Timestamp>
where G::Timestamp: Lattice+Ord {
| random_line_split |
query19.rs | use timely::dataflow::*;
use timely::dataflow::operators::*;
use timely::dataflow::operators::probe::Handle as ProbeHandle;
use differential_dataflow::AsCollection;
use differential_dataflow::operators::*;
use differential_dataflow::lattice::Lattice;
// use differential_dataflow::difference::DiffPair;
use ::Collections;
// use ::types::create_date;
// -- $ID$
// -- TPC-H/TPC-R Discounted Revenue Query (Q19)
// -- Functional Query Definition
// -- Approved February 1998
// :x
// :o
// select
// sum(l_extendedprice* (1 - l_discount)) as revenue
// from
// lineitem,
// part
// where
// (
// p_partkey = l_partkey
// and p_brand = ':1'
// and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG')
// and l_quantity >= :4 and l_quantity <= :4 + 10
// and p_size between 1 and 5
// and l_shipmode in ('AIR', 'AIR REG')
// and l_shipinstruct = 'DELIVER IN PERSON'
// )
// or
// (
// p_partkey = l_partkey
// and p_brand = ':2'
// and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK')
// and l_quantity >= :5 and l_quantity <= :5 + 10
// and p_size between 1 and 10
// and l_shipmode in ('AIR', 'AIR REG')
// and l_shipinstruct = 'DELIVER IN PERSON'
// )
// or
// (
// p_partkey = l_partkey
// and p_brand = ':3'
// and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG')
// and l_quantity >= :6 and l_quantity <= :6 + 10
// and p_size between 1 and 15
// and l_shipmode in ('AIR', 'AIR REG')
// and l_shipinstruct = 'DELIVER IN PERSON'
// );
// :n -1
fn starts_with(source: &[u8], query: &[u8]) -> bool {
source.len() >= query.len() && &source[..query.len()] == query
}
pub fn query<G: Scope>(collections: &mut Collections<G>) -> ProbeHandle<G::Timestamp>
where G::Timestamp: Lattice+Ord {
println!("TODO: query 19 could use some _u attention");
let lineitems =
collections
.lineitems()
.inner
.flat_map(|(x,t,d)|
if (starts_with(&x.ship_mode, b"AIR") || starts_with(&x.ship_mode, b"AIR REG")) && starts_with(&x.ship_instruct, b"DELIVER IN PERSON") |
else { None }
)
.as_collection();
let lines1 = lineitems.filter(|&(_, quant)| quant >= 1 && quant <= 11).map(|x| (x.0, ()));
let lines2 = lineitems.filter(|&(_, quant)| quant >= 10 && quant <= 20).map(|x| (x.0, ()));
let lines3 = lineitems.filter(|&(_, quant)| quant >= 20 && quant <= 30).map(|x| (x.0, ()));
let parts = collections.parts().map(|p| (p.part_key, (p.brand, p.container, p.size)));
let parts1 = parts.filter(|&(_key, (brand, container, size))| starts_with(&brand, b"Brand#12") && 1 <= size && size <= 5 && (starts_with(&container, b"SM CASE") || starts_with(&container, b"SM BOX") || starts_with(&container, b"SM PACK") || starts_with(&container, b"MED PKG"))).map(|x| x.0);
let parts2 = parts.filter(|&(_key, (brand, container, size))| starts_with(&brand, b"Brand#23") && 1 <= size && size <= 10 && (starts_with(&container, b"MED BAG") || starts_with(&container, b"MED BOX") || starts_with(&container, b"MED PKG") || starts_with(&container, b"MED PACK"))).map(|x| x.0);
let parts3 = parts.filter(|&(_key, (brand, container, size))| starts_with(&brand, b"Brand#34") && 1 <= size && size <= 15 && (starts_with(&container, b"LG CASE") || starts_with(&container, b"LG BOX") || starts_with(&container, b"LG PACK") || starts_with(&container, b"LG PCG"))).map(|x| x.0);
let result1 = lines1.semijoin_u(&parts1);
let result2 = lines2.semijoin_u(&parts2);
let result3 = lines3.semijoin_u(&parts3);
result1
.concat(&result2)
.concat(&result3)
.map(|(x,_)| x)
.count_u()
.probe()
} | {
Some(((x.part_key, x.quantity), t, d * (x.extended_price * (100 - x.discount) / 100) as isize))
} | conditional_block |
query19.rs | use timely::dataflow::*;
use timely::dataflow::operators::*;
use timely::dataflow::operators::probe::Handle as ProbeHandle;
use differential_dataflow::AsCollection;
use differential_dataflow::operators::*;
use differential_dataflow::lattice::Lattice;
// use differential_dataflow::difference::DiffPair;
use ::Collections;
// use ::types::create_date;
// -- $ID$
// -- TPC-H/TPC-R Discounted Revenue Query (Q19)
// -- Functional Query Definition
// -- Approved February 1998
// :x
// :o
// select
// sum(l_extendedprice* (1 - l_discount)) as revenue
// from
// lineitem,
// part
// where
// (
// p_partkey = l_partkey
// and p_brand = ':1'
// and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG')
// and l_quantity >= :4 and l_quantity <= :4 + 10
// and p_size between 1 and 5
// and l_shipmode in ('AIR', 'AIR REG')
// and l_shipinstruct = 'DELIVER IN PERSON'
// )
// or
// (
// p_partkey = l_partkey
// and p_brand = ':2'
// and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK')
// and l_quantity >= :5 and l_quantity <= :5 + 10
// and p_size between 1 and 10
// and l_shipmode in ('AIR', 'AIR REG')
// and l_shipinstruct = 'DELIVER IN PERSON'
// )
// or
// (
// p_partkey = l_partkey
// and p_brand = ':3'
// and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG')
// and l_quantity >= :6 and l_quantity <= :6 + 10
// and p_size between 1 and 15
// and l_shipmode in ('AIR', 'AIR REG')
// and l_shipinstruct = 'DELIVER IN PERSON'
// );
// :n -1
fn starts_with(source: &[u8], query: &[u8]) -> bool |
pub fn query<G: Scope>(collections: &mut Collections<G>) -> ProbeHandle<G::Timestamp>
where G::Timestamp: Lattice+Ord {
println!("TODO: query 19 could use some _u attention");
let lineitems =
collections
.lineitems()
.inner
.flat_map(|(x,t,d)|
if (starts_with(&x.ship_mode, b"AIR") || starts_with(&x.ship_mode, b"AIR REG")) && starts_with(&x.ship_instruct, b"DELIVER IN PERSON") {
Some(((x.part_key, x.quantity), t, d * (x.extended_price * (100 - x.discount) / 100) as isize))
}
else { None }
)
.as_collection();
let lines1 = lineitems.filter(|&(_, quant)| quant >= 1 && quant <= 11).map(|x| (x.0, ()));
let lines2 = lineitems.filter(|&(_, quant)| quant >= 10 && quant <= 20).map(|x| (x.0, ()));
let lines3 = lineitems.filter(|&(_, quant)| quant >= 20 && quant <= 30).map(|x| (x.0, ()));
let parts = collections.parts().map(|p| (p.part_key, (p.brand, p.container, p.size)));
let parts1 = parts.filter(|&(_key, (brand, container, size))| starts_with(&brand, b"Brand#12") && 1 <= size && size <= 5 && (starts_with(&container, b"SM CASE") || starts_with(&container, b"SM BOX") || starts_with(&container, b"SM PACK") || starts_with(&container, b"MED PKG"))).map(|x| x.0);
let parts2 = parts.filter(|&(_key, (brand, container, size))| starts_with(&brand, b"Brand#23") && 1 <= size && size <= 10 && (starts_with(&container, b"MED BAG") || starts_with(&container, b"MED BOX") || starts_with(&container, b"MED PKG") || starts_with(&container, b"MED PACK"))).map(|x| x.0);
let parts3 = parts.filter(|&(_key, (brand, container, size))| starts_with(&brand, b"Brand#34") && 1 <= size && size <= 15 && (starts_with(&container, b"LG CASE") || starts_with(&container, b"LG BOX") || starts_with(&container, b"LG PACK") || starts_with(&container, b"LG PCG"))).map(|x| x.0);
let result1 = lines1.semijoin_u(&parts1);
let result2 = lines2.semijoin_u(&parts2);
let result3 = lines3.semijoin_u(&parts3);
result1
.concat(&result2)
.concat(&result3)
.map(|(x,_)| x)
.count_u()
.probe()
} | {
source.len() >= query.len() && &source[..query.len()] == query
} | identifier_body |
query19.rs | use timely::dataflow::*;
use timely::dataflow::operators::*;
use timely::dataflow::operators::probe::Handle as ProbeHandle;
use differential_dataflow::AsCollection;
use differential_dataflow::operators::*;
use differential_dataflow::lattice::Lattice;
// use differential_dataflow::difference::DiffPair;
use ::Collections;
// use ::types::create_date;
// -- $ID$
// -- TPC-H/TPC-R Discounted Revenue Query (Q19)
// -- Functional Query Definition
// -- Approved February 1998
// :x
// :o
// select
// sum(l_extendedprice* (1 - l_discount)) as revenue
// from
// lineitem,
// part
// where
// (
// p_partkey = l_partkey
// and p_brand = ':1'
// and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG')
// and l_quantity >= :4 and l_quantity <= :4 + 10
// and p_size between 1 and 5
// and l_shipmode in ('AIR', 'AIR REG')
// and l_shipinstruct = 'DELIVER IN PERSON'
// )
// or
// (
// p_partkey = l_partkey
// and p_brand = ':2'
// and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK')
// and l_quantity >= :5 and l_quantity <= :5 + 10
// and p_size between 1 and 10
// and l_shipmode in ('AIR', 'AIR REG')
// and l_shipinstruct = 'DELIVER IN PERSON'
// )
// or
// (
// p_partkey = l_partkey
// and p_brand = ':3'
// and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG')
// and l_quantity >= :6 and l_quantity <= :6 + 10
// and p_size between 1 and 15
// and l_shipmode in ('AIR', 'AIR REG')
// and l_shipinstruct = 'DELIVER IN PERSON'
// );
// :n -1
fn starts_with(source: &[u8], query: &[u8]) -> bool {
source.len() >= query.len() && &source[..query.len()] == query
}
pub fn | <G: Scope>(collections: &mut Collections<G>) -> ProbeHandle<G::Timestamp>
where G::Timestamp: Lattice+Ord {
println!("TODO: query 19 could use some _u attention");
let lineitems =
collections
.lineitems()
.inner
.flat_map(|(x,t,d)|
if (starts_with(&x.ship_mode, b"AIR") || starts_with(&x.ship_mode, b"AIR REG")) && starts_with(&x.ship_instruct, b"DELIVER IN PERSON") {
Some(((x.part_key, x.quantity), t, d * (x.extended_price * (100 - x.discount) / 100) as isize))
}
else { None }
)
.as_collection();
let lines1 = lineitems.filter(|&(_, quant)| quant >= 1 && quant <= 11).map(|x| (x.0, ()));
let lines2 = lineitems.filter(|&(_, quant)| quant >= 10 && quant <= 20).map(|x| (x.0, ()));
let lines3 = lineitems.filter(|&(_, quant)| quant >= 20 && quant <= 30).map(|x| (x.0, ()));
let parts = collections.parts().map(|p| (p.part_key, (p.brand, p.container, p.size)));
let parts1 = parts.filter(|&(_key, (brand, container, size))| starts_with(&brand, b"Brand#12") && 1 <= size && size <= 5 && (starts_with(&container, b"SM CASE") || starts_with(&container, b"SM BOX") || starts_with(&container, b"SM PACK") || starts_with(&container, b"MED PKG"))).map(|x| x.0);
let parts2 = parts.filter(|&(_key, (brand, container, size))| starts_with(&brand, b"Brand#23") && 1 <= size && size <= 10 && (starts_with(&container, b"MED BAG") || starts_with(&container, b"MED BOX") || starts_with(&container, b"MED PKG") || starts_with(&container, b"MED PACK"))).map(|x| x.0);
let parts3 = parts.filter(|&(_key, (brand, container, size))| starts_with(&brand, b"Brand#34") && 1 <= size && size <= 15 && (starts_with(&container, b"LG CASE") || starts_with(&container, b"LG BOX") || starts_with(&container, b"LG PACK") || starts_with(&container, b"LG PCG"))).map(|x| x.0);
let result1 = lines1.semijoin_u(&parts1);
let result2 = lines2.semijoin_u(&parts2);
let result3 = lines3.semijoin_u(&parts3);
result1
.concat(&result2)
.concat(&result3)
.map(|(x,_)| x)
.count_u()
.probe()
} | query | identifier_name |
any_unique_aliases_generated.rs | // automatically generated by the FlatBuffers compiler, do not modify
extern crate flatbuffers;
use std::mem;
use std::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_ANY_UNIQUE_ALIASES: u8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_ANY_UNIQUE_ALIASES: u8 = 3;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_ANY_UNIQUE_ALIASES: [AnyUniqueAliases; 4] = [
AnyUniqueAliases::NONE,
AnyUniqueAliases::M,
AnyUniqueAliases::TS,
AnyUniqueAliases::M2,
];
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct AnyUniqueAliases(pub u8);
#[allow(non_upper_case_globals)]
impl AnyUniqueAliases {
pub const NONE: Self = Self(0);
pub const M: Self = Self(1);
pub const TS: Self = Self(2);
pub const M2: Self = Self(3);
pub const ENUM_MIN: u8 = 0;
pub const ENUM_MAX: u8 = 3;
pub const ENUM_VALUES: &'static [Self] = &[
Self::NONE,
Self::M,
Self::TS,
Self::M2,
];
/// Returns the variant's name or "" if unknown.
pub fn variant_name(self) -> Option<&'static str> {
match self {
Self::NONE => Some("NONE"),
Self::M => Some("M"),
Self::TS => Some("TS"),
Self::M2 => Some("M2"),
_ => None,
}
}
}
impl std::fmt::Debug for AnyUniqueAliases {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
if let Some(name) = self.variant_name() {
f.write_str(name)
} else {
f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
}
}
}
impl<'a> flatbuffers::Follow<'a> for AnyUniqueAliases {
type Inner = Self;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
let b = unsafe {
flatbuffers::read_scalar_at::<u8>(buf, loc)
};
Self(b)
}
}
impl flatbuffers::Push for AnyUniqueAliases {
type Output = AnyUniqueAliases;
#[inline]
fn push(&self, dst: &mut [u8], _rest: &[u8]) {
unsafe { flatbuffers::emplace_scalar::<u8>(dst, self.0); }
}
}
impl flatbuffers::EndianScalar for AnyUniqueAliases {
#[inline]
fn to_little_endian(self) -> Self {
let b = u8::to_le(self.0);
Self(b)
}
#[inline]
#[allow(clippy::wrong_self_convention)]
fn from_little_endian(self) -> Self |
}
impl<'a> flatbuffers::Verifiable for AnyUniqueAliases {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
u8::run_verifier(v, pos)
}
}
impl flatbuffers::SimpleToVerifyInSlice for AnyUniqueAliases {}
pub struct AnyUniqueAliasesUnionTableOffset {}
#[allow(clippy::upper_case_acronyms)]
#[non_exhaustive]
#[derive(Debug, Clone, PartialEq)]
pub enum AnyUniqueAliasesT {
NONE,
M(Box<MonsterT>),
TS(Box<TestSimpleTableWithEnumT>),
M2(Box<super::example_2::MonsterT>),
}
impl Default for AnyUniqueAliasesT {
fn default() -> Self {
Self::NONE
}
}
impl AnyUniqueAliasesT {
pub fn any_unique_aliases_type(&self) -> AnyUniqueAliases {
match self {
Self::NONE => AnyUniqueAliases::NONE,
Self::M(_) => AnyUniqueAliases::M,
Self::TS(_) => AnyUniqueAliases::TS,
Self::M2(_) => AnyUniqueAliases::M2,
}
}
pub fn pack(&self, fbb: &mut flatbuffers::FlatBufferBuilder) -> Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>> {
match self {
Self::NONE => None,
Self::M(v) => Some(v.pack(fbb).as_union_value()),
Self::TS(v) => Some(v.pack(fbb).as_union_value()),
Self::M2(v) => Some(v.pack(fbb).as_union_value()),
}
}
/// If the union variant matches, return the owned MonsterT, setting the union to NONE.
pub fn take_m(&mut self) -> Option<Box<MonsterT>> {
if let Self::M(_) = self {
let v = std::mem::replace(self, Self::NONE);
if let Self::M(w) = v {
Some(w)
} else {
unreachable!()
}
} else {
None
}
}
/// If the union variant matches, return a reference to the MonsterT.
pub fn as_m(&self) -> Option<&MonsterT> {
if let Self::M(v) = self { Some(v.as_ref()) } else { None }
}
/// If the union variant matches, return a mutable reference to the MonsterT.
pub fn as_m_mut(&mut self) -> Option<&mut MonsterT> {
if let Self::M(v) = self { Some(v.as_mut()) } else { None }
}
/// If the union variant matches, return the owned TestSimpleTableWithEnumT, setting the union to NONE.
pub fn take_ts(&mut self) -> Option<Box<TestSimpleTableWithEnumT>> {
if let Self::TS(_) = self {
let v = std::mem::replace(self, Self::NONE);
if let Self::TS(w) = v {
Some(w)
} else {
unreachable!()
}
} else {
None
}
}
/// If the union variant matches, return a reference to the TestSimpleTableWithEnumT.
pub fn as_ts(&self) -> Option<&TestSimpleTableWithEnumT> {
if let Self::TS(v) = self { Some(v.as_ref()) } else { None }
}
/// If the union variant matches, return a mutable reference to the TestSimpleTableWithEnumT.
pub fn as_ts_mut(&mut self) -> Option<&mut TestSimpleTableWithEnumT> {
if let Self::TS(v) = self { Some(v.as_mut()) } else { None }
}
/// If the union variant matches, return the owned super::example_2::MonsterT, setting the union to NONE.
pub fn take_m2(&mut self) -> Option<Box<super::example_2::MonsterT>> {
if let Self::M2(_) = self {
let v = std::mem::replace(self, Self::NONE);
if let Self::M2(w) = v {
Some(w)
} else {
unreachable!()
}
} else {
None
}
}
/// If the union variant matches, return a reference to the super::example_2::MonsterT.
pub fn as_m2(&self) -> Option<&super::example_2::MonsterT> {
if let Self::M2(v) = self { Some(v.as_ref()) } else { None }
}
/// If the union variant matches, return a mutable reference to the super::example_2::MonsterT.
pub fn as_m2_mut(&mut self) -> Option<&mut super::example_2::MonsterT> {
if let Self::M2(v) = self { Some(v.as_mut()) } else { None }
}
}
| {
let b = u8::from_le(self.0);
Self(b)
} | identifier_body |
any_unique_aliases_generated.rs | // automatically generated by the FlatBuffers compiler, do not modify
extern crate flatbuffers;
use std::mem;
use std::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_ANY_UNIQUE_ALIASES: u8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_ANY_UNIQUE_ALIASES: u8 = 3;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_ANY_UNIQUE_ALIASES: [AnyUniqueAliases; 4] = [
AnyUniqueAliases::NONE,
AnyUniqueAliases::M,
AnyUniqueAliases::TS,
AnyUniqueAliases::M2,
];
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct AnyUniqueAliases(pub u8);
#[allow(non_upper_case_globals)]
impl AnyUniqueAliases {
pub const NONE: Self = Self(0);
pub const M: Self = Self(1);
pub const TS: Self = Self(2);
pub const M2: Self = Self(3);
pub const ENUM_MIN: u8 = 0;
pub const ENUM_MAX: u8 = 3;
pub const ENUM_VALUES: &'static [Self] = &[
Self::NONE,
Self::M,
Self::TS,
Self::M2,
];
/// Returns the variant's name or "" if unknown.
pub fn variant_name(self) -> Option<&'static str> {
match self {
Self::NONE => Some("NONE"),
Self::M => Some("M"),
Self::TS => Some("TS"),
Self::M2 => Some("M2"),
_ => None,
}
}
}
impl std::fmt::Debug for AnyUniqueAliases {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
if let Some(name) = self.variant_name() {
f.write_str(name)
} else {
f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
}
}
}
impl<'a> flatbuffers::Follow<'a> for AnyUniqueAliases {
type Inner = Self;
#[inline]
fn | (buf: &'a [u8], loc: usize) -> Self::Inner {
let b = unsafe {
flatbuffers::read_scalar_at::<u8>(buf, loc)
};
Self(b)
}
}
impl flatbuffers::Push for AnyUniqueAliases {
type Output = AnyUniqueAliases;
#[inline]
fn push(&self, dst: &mut [u8], _rest: &[u8]) {
unsafe { flatbuffers::emplace_scalar::<u8>(dst, self.0); }
}
}
impl flatbuffers::EndianScalar for AnyUniqueAliases {
#[inline]
fn to_little_endian(self) -> Self {
let b = u8::to_le(self.0);
Self(b)
}
#[inline]
#[allow(clippy::wrong_self_convention)]
fn from_little_endian(self) -> Self {
let b = u8::from_le(self.0);
Self(b)
}
}
impl<'a> flatbuffers::Verifiable for AnyUniqueAliases {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
u8::run_verifier(v, pos)
}
}
impl flatbuffers::SimpleToVerifyInSlice for AnyUniqueAliases {}
pub struct AnyUniqueAliasesUnionTableOffset {}
#[allow(clippy::upper_case_acronyms)]
#[non_exhaustive]
#[derive(Debug, Clone, PartialEq)]
pub enum AnyUniqueAliasesT {
NONE,
M(Box<MonsterT>),
TS(Box<TestSimpleTableWithEnumT>),
M2(Box<super::example_2::MonsterT>),
}
impl Default for AnyUniqueAliasesT {
fn default() -> Self {
Self::NONE
}
}
impl AnyUniqueAliasesT {
pub fn any_unique_aliases_type(&self) -> AnyUniqueAliases {
match self {
Self::NONE => AnyUniqueAliases::NONE,
Self::M(_) => AnyUniqueAliases::M,
Self::TS(_) => AnyUniqueAliases::TS,
Self::M2(_) => AnyUniqueAliases::M2,
}
}
pub fn pack(&self, fbb: &mut flatbuffers::FlatBufferBuilder) -> Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>> {
match self {
Self::NONE => None,
Self::M(v) => Some(v.pack(fbb).as_union_value()),
Self::TS(v) => Some(v.pack(fbb).as_union_value()),
Self::M2(v) => Some(v.pack(fbb).as_union_value()),
}
}
/// If the union variant matches, return the owned MonsterT, setting the union to NONE.
pub fn take_m(&mut self) -> Option<Box<MonsterT>> {
if let Self::M(_) = self {
let v = std::mem::replace(self, Self::NONE);
if let Self::M(w) = v {
Some(w)
} else {
unreachable!()
}
} else {
None
}
}
/// If the union variant matches, return a reference to the MonsterT.
pub fn as_m(&self) -> Option<&MonsterT> {
if let Self::M(v) = self { Some(v.as_ref()) } else { None }
}
/// If the union variant matches, return a mutable reference to the MonsterT.
pub fn as_m_mut(&mut self) -> Option<&mut MonsterT> {
if let Self::M(v) = self { Some(v.as_mut()) } else { None }
}
/// If the union variant matches, return the owned TestSimpleTableWithEnumT, setting the union to NONE.
pub fn take_ts(&mut self) -> Option<Box<TestSimpleTableWithEnumT>> {
if let Self::TS(_) = self {
let v = std::mem::replace(self, Self::NONE);
if let Self::TS(w) = v {
Some(w)
} else {
unreachable!()
}
} else {
None
}
}
/// If the union variant matches, return a reference to the TestSimpleTableWithEnumT.
pub fn as_ts(&self) -> Option<&TestSimpleTableWithEnumT> {
if let Self::TS(v) = self { Some(v.as_ref()) } else { None }
}
/// If the union variant matches, return a mutable reference to the TestSimpleTableWithEnumT.
pub fn as_ts_mut(&mut self) -> Option<&mut TestSimpleTableWithEnumT> {
if let Self::TS(v) = self { Some(v.as_mut()) } else { None }
}
/// If the union variant matches, return the owned super::example_2::MonsterT, setting the union to NONE.
pub fn take_m2(&mut self) -> Option<Box<super::example_2::MonsterT>> {
if let Self::M2(_) = self {
let v = std::mem::replace(self, Self::NONE);
if let Self::M2(w) = v {
Some(w)
} else {
unreachable!()
}
} else {
None
}
}
/// If the union variant matches, return a reference to the super::example_2::MonsterT.
pub fn as_m2(&self) -> Option<&super::example_2::MonsterT> {
if let Self::M2(v) = self { Some(v.as_ref()) } else { None }
}
/// If the union variant matches, return a mutable reference to the super::example_2::MonsterT.
pub fn as_m2_mut(&mut self) -> Option<&mut super::example_2::MonsterT> {
if let Self::M2(v) = self { Some(v.as_mut()) } else { None }
}
}
| follow | identifier_name |
any_unique_aliases_generated.rs | // automatically generated by the FlatBuffers compiler, do not modify
extern crate flatbuffers;
use std::mem;
use std::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_ANY_UNIQUE_ALIASES: u8 = 0;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_ANY_UNIQUE_ALIASES: u8 = 3;
#[deprecated(since = "2.0.0", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_ANY_UNIQUE_ALIASES: [AnyUniqueAliases; 4] = [
AnyUniqueAliases::NONE,
AnyUniqueAliases::M,
AnyUniqueAliases::TS,
AnyUniqueAliases::M2,
];
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct AnyUniqueAliases(pub u8);
#[allow(non_upper_case_globals)]
impl AnyUniqueAliases {
pub const NONE: Self = Self(0);
pub const M: Self = Self(1);
pub const TS: Self = Self(2);
pub const M2: Self = Self(3);
pub const ENUM_MIN: u8 = 0;
pub const ENUM_MAX: u8 = 3;
pub const ENUM_VALUES: &'static [Self] = &[
Self::NONE,
Self::M,
Self::TS, | ];
/// Returns the variant's name or "" if unknown.
pub fn variant_name(self) -> Option<&'static str> {
match self {
Self::NONE => Some("NONE"),
Self::M => Some("M"),
Self::TS => Some("TS"),
Self::M2 => Some("M2"),
_ => None,
}
}
}
impl std::fmt::Debug for AnyUniqueAliases {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
if let Some(name) = self.variant_name() {
f.write_str(name)
} else {
f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
}
}
}
impl<'a> flatbuffers::Follow<'a> for AnyUniqueAliases {
type Inner = Self;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
let b = unsafe {
flatbuffers::read_scalar_at::<u8>(buf, loc)
};
Self(b)
}
}
impl flatbuffers::Push for AnyUniqueAliases {
type Output = AnyUniqueAliases;
#[inline]
fn push(&self, dst: &mut [u8], _rest: &[u8]) {
unsafe { flatbuffers::emplace_scalar::<u8>(dst, self.0); }
}
}
impl flatbuffers::EndianScalar for AnyUniqueAliases {
#[inline]
fn to_little_endian(self) -> Self {
let b = u8::to_le(self.0);
Self(b)
}
#[inline]
#[allow(clippy::wrong_self_convention)]
fn from_little_endian(self) -> Self {
let b = u8::from_le(self.0);
Self(b)
}
}
impl<'a> flatbuffers::Verifiable for AnyUniqueAliases {
#[inline]
fn run_verifier(
v: &mut flatbuffers::Verifier, pos: usize
) -> Result<(), flatbuffers::InvalidFlatbuffer> {
use self::flatbuffers::Verifiable;
u8::run_verifier(v, pos)
}
}
impl flatbuffers::SimpleToVerifyInSlice for AnyUniqueAliases {}
pub struct AnyUniqueAliasesUnionTableOffset {}
#[allow(clippy::upper_case_acronyms)]
#[non_exhaustive]
#[derive(Debug, Clone, PartialEq)]
pub enum AnyUniqueAliasesT {
NONE,
M(Box<MonsterT>),
TS(Box<TestSimpleTableWithEnumT>),
M2(Box<super::example_2::MonsterT>),
}
impl Default for AnyUniqueAliasesT {
fn default() -> Self {
Self::NONE
}
}
impl AnyUniqueAliasesT {
pub fn any_unique_aliases_type(&self) -> AnyUniqueAliases {
match self {
Self::NONE => AnyUniqueAliases::NONE,
Self::M(_) => AnyUniqueAliases::M,
Self::TS(_) => AnyUniqueAliases::TS,
Self::M2(_) => AnyUniqueAliases::M2,
}
}
pub fn pack(&self, fbb: &mut flatbuffers::FlatBufferBuilder) -> Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>> {
match self {
Self::NONE => None,
Self::M(v) => Some(v.pack(fbb).as_union_value()),
Self::TS(v) => Some(v.pack(fbb).as_union_value()),
Self::M2(v) => Some(v.pack(fbb).as_union_value()),
}
}
/// If the union variant matches, return the owned MonsterT, setting the union to NONE.
pub fn take_m(&mut self) -> Option<Box<MonsterT>> {
if let Self::M(_) = self {
let v = std::mem::replace(self, Self::NONE);
if let Self::M(w) = v {
Some(w)
} else {
unreachable!()
}
} else {
None
}
}
/// If the union variant matches, return a reference to the MonsterT.
pub fn as_m(&self) -> Option<&MonsterT> {
if let Self::M(v) = self { Some(v.as_ref()) } else { None }
}
/// If the union variant matches, return a mutable reference to the MonsterT.
pub fn as_m_mut(&mut self) -> Option<&mut MonsterT> {
if let Self::M(v) = self { Some(v.as_mut()) } else { None }
}
/// If the union variant matches, return the owned TestSimpleTableWithEnumT, setting the union to NONE.
pub fn take_ts(&mut self) -> Option<Box<TestSimpleTableWithEnumT>> {
if let Self::TS(_) = self {
let v = std::mem::replace(self, Self::NONE);
if let Self::TS(w) = v {
Some(w)
} else {
unreachable!()
}
} else {
None
}
}
/// If the union variant matches, return a reference to the TestSimpleTableWithEnumT.
pub fn as_ts(&self) -> Option<&TestSimpleTableWithEnumT> {
if let Self::TS(v) = self { Some(v.as_ref()) } else { None }
}
/// If the union variant matches, return a mutable reference to the TestSimpleTableWithEnumT.
pub fn as_ts_mut(&mut self) -> Option<&mut TestSimpleTableWithEnumT> {
if let Self::TS(v) = self { Some(v.as_mut()) } else { None }
}
/// If the union variant matches, return the owned super::example_2::MonsterT, setting the union to NONE.
pub fn take_m2(&mut self) -> Option<Box<super::example_2::MonsterT>> {
if let Self::M2(_) = self {
let v = std::mem::replace(self, Self::NONE);
if let Self::M2(w) = v {
Some(w)
} else {
unreachable!()
}
} else {
None
}
}
/// If the union variant matches, return a reference to the super::example_2::MonsterT.
pub fn as_m2(&self) -> Option<&super::example_2::MonsterT> {
if let Self::M2(v) = self { Some(v.as_ref()) } else { None }
}
/// If the union variant matches, return a mutable reference to the super::example_2::MonsterT.
pub fn as_m2_mut(&mut self) -> Option<&mut super::example_2::MonsterT> {
if let Self::M2(v) = self { Some(v.as_mut()) } else { None }
}
} | Self::M2, | random_line_split |
client.rs | //! The modules which contains CDRS Cassandra client.
use std::net;
use std::io;
use std::collections::HashMap;
use query::{Query, QueryParams, QueryBatch};
use frame::{Frame, Opcode, Flag};
use frame::frame_response::ResponseBody;
use IntoBytes;
use frame::parser::parse_frame;
use types::*;
use frame::events::SimpleServerEvent;
use compression::Compression;
use authenticators::Authenticator;
use error;
use transport::CDRSTransport;
use events::{Listener, EventStream, new_listener};
/// CDRS driver structure that provides a basic functionality to work with DB including
/// establishing new connection, getting supported options, preparing and executing CQL
/// queries, using compression and other.
#[derive(Eq, PartialEq, Ord, PartialOrd)]
pub struct CDRS<T: Authenticator, X: CDRSTransport> {
compressor: Compression,
authenticator: T,
transport: X,
}
/// Map of options supported by Cassandra server.
pub type CassandraOptions = HashMap<String, Vec<String>>;
impl<'a, T: Authenticator + 'a, X: CDRSTransport + 'a> CDRS<T, X> {
/// The method creates new instance of CDRS driver. At this step an instance doesn't
/// connected to DB Server. To create new instance two parameters are needed to be
/// provided - `addr` is IP address of DB Server, `authenticator` is a selected authenticator
/// that is supported by particular DB Server. There are few authenticators already
/// provided by this trait.
pub fn new(transport: X, authenticator: T) -> CDRS<T, X> {
CDRS {
compressor: Compression::None,
authenticator: authenticator,
transport: transport,
}
}
/// The method makes an Option request to DB Server. As a response the server returns
/// a map of supported options.
pub fn get_options(&mut self) -> error::Result<CassandraOptions> {
let options_frame = Frame::new_req_options().into_cbytes();
try!(self.transport.write(options_frame.as_slice()));
parse_frame(&mut self.transport, &self.compressor)
.map(|frame| match frame.get_body() {
Ok(ResponseBody::Supported(ref supported_body)) => supported_body.data.clone(),
_ => unreachable!(),
})
}
/// The method establishes connection to the server which address was provided on previous
/// step. To create connection it's required to provide a compression method from a list
/// of supported ones. In 4-th version of Cassandra protocol lz4 (`Compression::Lz4`)
/// and snappy (`Compression::Snappy`) are supported. There is also one special compression
/// method provided by CRDR driver, it's `Compression::None` that tells drivers that it
/// should work without compression. If compression is provided then incomming frames
/// will be decompressed automatically.
pub fn start(mut self, compressor: Compression) -> error::Result<Session<T, X>> {
self.compressor = compressor;
let startup_frame = Frame::new_req_startup(compressor.as_str()).into_cbytes();
try!(self.transport.write(startup_frame.as_slice()));
let start_response = try!(parse_frame(&mut self.transport, &compressor));
if start_response.opcode == Opcode::Ready {
return Ok(Session::start(self));
}
if start_response.opcode == Opcode::Authenticate {
let body = start_response.get_body()?;
let authenticator = body.get_authenticator().expect(
"Cassandra Server did communicate that it needed password
authentication but the auth schema was missing in the body response",
);
// This creates a new scope; avoiding a clone
// and we check whether
// 1. any authenticators has been passed in by client and if not send error back
// 2. authenticator is provided by the client and `auth_scheme` presented by
// the server and client are same if not send error back
// 3. if it falls through it means the preliminary conditions are true
let auth_check = self.authenticator
.get_cassandra_name()
.ok_or(error::Error::General("No authenticator was provided".to_string()))
.map(|auth| {
if authenticator!= auth {
let io_err = io::Error::new(
io::ErrorKind::NotFound,
format!(
"Unsupported type of authenticator. {:?} got,
but {} is supported.",
authenticator,
authenticator
),
);
return Err(error::Error::Io(io_err));
}
Ok(())
});
if let Err(err) = auth_check {
return Err(err);
}
let auth_token_bytes = self.authenticator.get_auth_token().into_cbytes();
try!(self.transport
.write(Frame::new_req_auth_response(auth_token_bytes)
.into_cbytes()
.as_slice()));
try!(parse_frame(&mut self.transport, &compressor));
return Ok(Session::start(self));
}
unimplemented!();
}
fn drop_connection(&mut self) -> error::Result<()> {
self.transport
.close(net::Shutdown::Both)
.map_err(|err| error::Error::Io(err))
}
}
| pub struct Session<T: Authenticator, X: CDRSTransport> {
started: bool,
cdrs: CDRS<T, X>,
compressor: Compression,
}
impl<T: Authenticator, X: CDRSTransport> Session<T, X> {
/// Creates new session basing on CDRS instance.
pub fn start(cdrs: CDRS<T, X>) -> Session<T, X> {
let compressor = cdrs.compressor.clone();
Session {
cdrs: cdrs,
started: true,
compressor: compressor,
}
}
/// The method overrides a compression method of current session
pub fn compressor(&mut self, compressor: Compression) -> &mut Self {
self.compressor = compressor;
self
}
/// Manually ends current session.
/// Apart of that session will be ended automatically when the instance is dropped.
pub fn end(&mut self) {
if self.started {
self.started = false;
match self.cdrs.drop_connection() {
Ok(_) => (),
Err(err) => {
println!("Error occured during dropping CDRS {:?}", err);
}
}
}
}
/// The method makes a request to DB Server to prepare provided query.
pub fn prepare(&mut self,
query: String,
with_tracing: bool,
with_warnings: bool)
-> error::Result<Frame> {
let mut flags = vec![];
if with_tracing {
flags.push(Flag::Tracing);
}
if with_warnings {
flags.push(Flag::Warning);
}
let options_frame = Frame::new_req_prepare(query, flags).into_cbytes();
try!(self.cdrs.transport.write(options_frame.as_slice()));
parse_frame(&mut self.cdrs.transport, &self.compressor)
}
/// The method makes a request to DB Server to execute a query with provided id
/// using provided query parameters. `id` is an ID of a query which Server
/// returns back to a driver as a response to `prepare` request.
pub fn execute(&mut self,
id: &CBytesShort,
query_parameters: QueryParams,
with_tracing: bool,
with_warnings: bool)
-> error::Result<Frame> {
let mut flags = vec![];
if with_tracing {
flags.push(Flag::Tracing);
}
if with_warnings {
flags.push(Flag::Warning);
}
let options_frame = Frame::new_req_execute(id, query_parameters, flags).into_cbytes();
(self.cdrs.transport.write(options_frame.as_slice()))?;
parse_frame(&mut self.cdrs.transport, &self.compressor)
}
/// The method makes a request to DB Server to execute a query provided in `query` argument.
/// you can build the query with QueryBuilder
/// ```
/// use cdrs::query::QueryBuilder;
/// use cdrs::compression::Compression;
/// use cdrs::consistency::Consistency;
///
/// let select_query = QueryBuilder::new("select * from emp").finalize();
/// ```
pub fn query(&mut self,
query: Query,
with_tracing: bool,
with_warnings: bool)
-> error::Result<Frame> {
let mut flags = vec![];
if with_tracing {
flags.push(Flag::Tracing);
}
if with_warnings {
flags.push(Flag::Warning);
}
let query_frame = Frame::new_req_query(query.query,
query.consistency,
query.values,
query.with_names,
query.page_size,
query.paging_state,
query.serial_consistency,
query.timestamp,
flags)
.into_cbytes();
try!(self.cdrs.transport.write(query_frame.as_slice()));
parse_frame(&mut self.cdrs.transport, &self.compressor)
}
pub fn batch(&mut self,
batch_query: QueryBatch,
with_tracing: bool,
with_warnings: bool)
-> error::Result<Frame> {
let mut flags = vec![];
if with_tracing {
flags.push(Flag::Tracing);
}
if with_warnings {
flags.push(Flag::Warning);
}
let query_frame = Frame::new_req_batch(batch_query, flags).into_cbytes();
try!(self.cdrs.transport.write(query_frame.as_slice()));
parse_frame(&mut self.cdrs.transport, &self.compressor)
}
/// It consumes CDRS
pub fn listen_for<'a>(mut self,
events: Vec<SimpleServerEvent>)
-> error::Result<(Listener<X>, EventStream)> {
let query_frame = Frame::new_req_register(events).into_cbytes();
try!(self.cdrs.transport.write(query_frame.as_slice()));
try!(parse_frame(&mut self.cdrs.transport, &self.compressor));
Ok(new_listener(self.cdrs.transport))
}
} | /// The object that provides functionality for communication with Cassandra server. | random_line_split |
client.rs | //! The modules which contains CDRS Cassandra client.
use std::net;
use std::io;
use std::collections::HashMap;
use query::{Query, QueryParams, QueryBatch};
use frame::{Frame, Opcode, Flag};
use frame::frame_response::ResponseBody;
use IntoBytes;
use frame::parser::parse_frame;
use types::*;
use frame::events::SimpleServerEvent;
use compression::Compression;
use authenticators::Authenticator;
use error;
use transport::CDRSTransport;
use events::{Listener, EventStream, new_listener};
/// CDRS driver structure that provides a basic functionality to work with DB including
/// establishing new connection, getting supported options, preparing and executing CQL
/// queries, using compression and other.
#[derive(Eq, PartialEq, Ord, PartialOrd)]
pub struct CDRS<T: Authenticator, X: CDRSTransport> {
compressor: Compression,
authenticator: T,
transport: X,
}
/// Map of options supported by Cassandra server.
pub type CassandraOptions = HashMap<String, Vec<String>>;
impl<'a, T: Authenticator + 'a, X: CDRSTransport + 'a> CDRS<T, X> {
/// The method creates new instance of CDRS driver. At this step an instance doesn't
/// connected to DB Server. To create new instance two parameters are needed to be
/// provided - `addr` is IP address of DB Server, `authenticator` is a selected authenticator
/// that is supported by particular DB Server. There are few authenticators already
/// provided by this trait.
pub fn new(transport: X, authenticator: T) -> CDRS<T, X> {
CDRS {
compressor: Compression::None,
authenticator: authenticator,
transport: transport,
}
}
/// The method makes an Option request to DB Server. As a response the server returns
/// a map of supported options.
pub fn get_options(&mut self) -> error::Result<CassandraOptions> {
let options_frame = Frame::new_req_options().into_cbytes();
try!(self.transport.write(options_frame.as_slice()));
parse_frame(&mut self.transport, &self.compressor)
.map(|frame| match frame.get_body() {
Ok(ResponseBody::Supported(ref supported_body)) => supported_body.data.clone(),
_ => unreachable!(),
})
}
/// The method establishes connection to the server which address was provided on previous
/// step. To create connection it's required to provide a compression method from a list
/// of supported ones. In 4-th version of Cassandra protocol lz4 (`Compression::Lz4`)
/// and snappy (`Compression::Snappy`) are supported. There is also one special compression
/// method provided by CRDR driver, it's `Compression::None` that tells drivers that it
/// should work without compression. If compression is provided then incomming frames
/// will be decompressed automatically.
pub fn start(mut self, compressor: Compression) -> error::Result<Session<T, X>> {
self.compressor = compressor;
let startup_frame = Frame::new_req_startup(compressor.as_str()).into_cbytes();
try!(self.transport.write(startup_frame.as_slice()));
let start_response = try!(parse_frame(&mut self.transport, &compressor));
if start_response.opcode == Opcode::Ready {
return Ok(Session::start(self));
}
if start_response.opcode == Opcode::Authenticate | io::ErrorKind::NotFound,
format!(
"Unsupported type of authenticator. {:?} got,
but {} is supported.",
authenticator,
authenticator
),
);
return Err(error::Error::Io(io_err));
}
Ok(())
});
if let Err(err) = auth_check {
return Err(err);
}
let auth_token_bytes = self.authenticator.get_auth_token().into_cbytes();
try!(self.transport
.write(Frame::new_req_auth_response(auth_token_bytes)
.into_cbytes()
.as_slice()));
try!(parse_frame(&mut self.transport, &compressor));
return Ok(Session::start(self));
}
unimplemented!();
}
fn drop_connection(&mut self) -> error::Result<()> {
self.transport
.close(net::Shutdown::Both)
.map_err(|err| error::Error::Io(err))
}
}
/// The object that provides functionality for communication with Cassandra server.
pub struct Session<T: Authenticator, X: CDRSTransport> {
started: bool,
cdrs: CDRS<T, X>,
compressor: Compression,
}
impl<T: Authenticator, X: CDRSTransport> Session<T, X> {
/// Creates new session basing on CDRS instance.
pub fn start(cdrs: CDRS<T, X>) -> Session<T, X> {
let compressor = cdrs.compressor.clone();
Session {
cdrs: cdrs,
started: true,
compressor: compressor,
}
}
/// The method overrides a compression method of current session
pub fn compressor(&mut self, compressor: Compression) -> &mut Self {
self.compressor = compressor;
self
}
/// Manually ends current session.
/// Apart of that session will be ended automatically when the instance is dropped.
pub fn end(&mut self) {
if self.started {
self.started = false;
match self.cdrs.drop_connection() {
Ok(_) => (),
Err(err) => {
println!("Error occured during dropping CDRS {:?}", err);
}
}
}
}
/// The method makes a request to DB Server to prepare provided query.
pub fn prepare(&mut self,
query: String,
with_tracing: bool,
with_warnings: bool)
-> error::Result<Frame> {
let mut flags = vec![];
if with_tracing {
flags.push(Flag::Tracing);
}
if with_warnings {
flags.push(Flag::Warning);
}
let options_frame = Frame::new_req_prepare(query, flags).into_cbytes();
try!(self.cdrs.transport.write(options_frame.as_slice()));
parse_frame(&mut self.cdrs.transport, &self.compressor)
}
/// The method makes a request to DB Server to execute a query with provided id
/// using provided query parameters. `id` is an ID of a query which Server
/// returns back to a driver as a response to `prepare` request.
pub fn execute(&mut self,
id: &CBytesShort,
query_parameters: QueryParams,
with_tracing: bool,
with_warnings: bool)
-> error::Result<Frame> {
let mut flags = vec![];
if with_tracing {
flags.push(Flag::Tracing);
}
if with_warnings {
flags.push(Flag::Warning);
}
let options_frame = Frame::new_req_execute(id, query_parameters, flags).into_cbytes();
(self.cdrs.transport.write(options_frame.as_slice()))?;
parse_frame(&mut self.cdrs.transport, &self.compressor)
}
/// The method makes a request to DB Server to execute a query provided in `query` argument.
/// you can build the query with QueryBuilder
/// ```
/// use cdrs::query::QueryBuilder;
/// use cdrs::compression::Compression;
/// use cdrs::consistency::Consistency;
///
/// let select_query = QueryBuilder::new("select * from emp").finalize();
/// ```
pub fn query(&mut self,
query: Query,
with_tracing: bool,
with_warnings: bool)
-> error::Result<Frame> {
let mut flags = vec![];
if with_tracing {
flags.push(Flag::Tracing);
}
if with_warnings {
flags.push(Flag::Warning);
}
let query_frame = Frame::new_req_query(query.query,
query.consistency,
query.values,
query.with_names,
query.page_size,
query.paging_state,
query.serial_consistency,
query.timestamp,
flags)
.into_cbytes();
try!(self.cdrs.transport.write(query_frame.as_slice()));
parse_frame(&mut self.cdrs.transport, &self.compressor)
}
pub fn batch(&mut self,
batch_query: QueryBatch,
with_tracing: bool,
with_warnings: bool)
-> error::Result<Frame> {
let mut flags = vec![];
if with_tracing {
flags.push(Flag::Tracing);
}
if with_warnings {
flags.push(Flag::Warning);
}
let query_frame = Frame::new_req_batch(batch_query, flags).into_cbytes();
try!(self.cdrs.transport.write(query_frame.as_slice()));
parse_frame(&mut self.cdrs.transport, &self.compressor)
}
/// It consumes CDRS
pub fn listen_for<'a>(mut self,
events: Vec<SimpleServerEvent>)
-> error::Result<(Listener<X>, EventStream)> {
let query_frame = Frame::new_req_register(events).into_cbytes();
try!(self.cdrs.transport.write(query_frame.as_slice()));
try!(parse_frame(&mut self.cdrs.transport, &self.compressor));
Ok(new_listener(self.cdrs.transport))
}
}
| {
let body = start_response.get_body()?;
let authenticator = body.get_authenticator().expect(
"Cassandra Server did communicate that it needed password
authentication but the auth schema was missing in the body response",
);
// This creates a new scope; avoiding a clone
// and we check whether
// 1. any authenticators has been passed in by client and if not send error back
// 2. authenticator is provided by the client and `auth_scheme` presented by
// the server and client are same if not send error back
// 3. if it falls through it means the preliminary conditions are true
let auth_check = self.authenticator
.get_cassandra_name()
.ok_or(error::Error::General("No authenticator was provided".to_string()))
.map(|auth| {
if authenticator != auth {
let io_err = io::Error::new( | conditional_block |
client.rs | //! The modules which contains CDRS Cassandra client.
use std::net;
use std::io;
use std::collections::HashMap;
use query::{Query, QueryParams, QueryBatch};
use frame::{Frame, Opcode, Flag};
use frame::frame_response::ResponseBody;
use IntoBytes;
use frame::parser::parse_frame;
use types::*;
use frame::events::SimpleServerEvent;
use compression::Compression;
use authenticators::Authenticator;
use error;
use transport::CDRSTransport;
use events::{Listener, EventStream, new_listener};
/// CDRS driver structure that provides a basic functionality to work with DB including
/// establishing new connection, getting supported options, preparing and executing CQL
/// queries, using compression and other.
#[derive(Eq, PartialEq, Ord, PartialOrd)]
pub struct CDRS<T: Authenticator, X: CDRSTransport> {
compressor: Compression,
authenticator: T,
transport: X,
}
/// Map of options supported by Cassandra server.
pub type CassandraOptions = HashMap<String, Vec<String>>;
impl<'a, T: Authenticator + 'a, X: CDRSTransport + 'a> CDRS<T, X> {
/// The method creates new instance of CDRS driver. At this step an instance doesn't
/// connected to DB Server. To create new instance two parameters are needed to be
/// provided - `addr` is IP address of DB Server, `authenticator` is a selected authenticator
/// that is supported by particular DB Server. There are few authenticators already
/// provided by this trait.
pub fn new(transport: X, authenticator: T) -> CDRS<T, X> {
CDRS {
compressor: Compression::None,
authenticator: authenticator,
transport: transport,
}
}
/// The method makes an Option request to DB Server. As a response the server returns
/// a map of supported options.
pub fn get_options(&mut self) -> error::Result<CassandraOptions> {
let options_frame = Frame::new_req_options().into_cbytes();
try!(self.transport.write(options_frame.as_slice()));
parse_frame(&mut self.transport, &self.compressor)
.map(|frame| match frame.get_body() {
Ok(ResponseBody::Supported(ref supported_body)) => supported_body.data.clone(),
_ => unreachable!(),
})
}
/// The method establishes connection to the server which address was provided on previous
/// step. To create connection it's required to provide a compression method from a list
/// of supported ones. In 4-th version of Cassandra protocol lz4 (`Compression::Lz4`)
/// and snappy (`Compression::Snappy`) are supported. There is also one special compression
/// method provided by CRDR driver, it's `Compression::None` that tells drivers that it
/// should work without compression. If compression is provided then incomming frames
/// will be decompressed automatically.
pub fn start(mut self, compressor: Compression) -> error::Result<Session<T, X>> {
self.compressor = compressor;
let startup_frame = Frame::new_req_startup(compressor.as_str()).into_cbytes();
try!(self.transport.write(startup_frame.as_slice()));
let start_response = try!(parse_frame(&mut self.transport, &compressor));
if start_response.opcode == Opcode::Ready {
return Ok(Session::start(self));
}
if start_response.opcode == Opcode::Authenticate {
let body = start_response.get_body()?;
let authenticator = body.get_authenticator().expect(
"Cassandra Server did communicate that it needed password
authentication but the auth schema was missing in the body response",
);
// This creates a new scope; avoiding a clone
// and we check whether
// 1. any authenticators has been passed in by client and if not send error back
// 2. authenticator is provided by the client and `auth_scheme` presented by
// the server and client are same if not send error back
// 3. if it falls through it means the preliminary conditions are true
let auth_check = self.authenticator
.get_cassandra_name()
.ok_or(error::Error::General("No authenticator was provided".to_string()))
.map(|auth| {
if authenticator!= auth {
let io_err = io::Error::new(
io::ErrorKind::NotFound,
format!(
"Unsupported type of authenticator. {:?} got,
but {} is supported.",
authenticator,
authenticator
),
);
return Err(error::Error::Io(io_err));
}
Ok(())
});
if let Err(err) = auth_check {
return Err(err);
}
let auth_token_bytes = self.authenticator.get_auth_token().into_cbytes();
try!(self.transport
.write(Frame::new_req_auth_response(auth_token_bytes)
.into_cbytes()
.as_slice()));
try!(parse_frame(&mut self.transport, &compressor));
return Ok(Session::start(self));
}
unimplemented!();
}
fn drop_connection(&mut self) -> error::Result<()> {
self.transport
.close(net::Shutdown::Both)
.map_err(|err| error::Error::Io(err))
}
}
/// The object that provides functionality for communication with Cassandra server.
pub struct Session<T: Authenticator, X: CDRSTransport> {
started: bool,
cdrs: CDRS<T, X>,
compressor: Compression,
}
impl<T: Authenticator, X: CDRSTransport> Session<T, X> {
/// Creates new session basing on CDRS instance.
pub fn start(cdrs: CDRS<T, X>) -> Session<T, X> {
let compressor = cdrs.compressor.clone();
Session {
cdrs: cdrs,
started: true,
compressor: compressor,
}
}
/// The method overrides a compression method of current session
pub fn compressor(&mut self, compressor: Compression) -> &mut Self {
self.compressor = compressor;
self
}
/// Manually ends current session.
/// Apart of that session will be ended automatically when the instance is dropped.
pub fn end(&mut self) {
if self.started {
self.started = false;
match self.cdrs.drop_connection() {
Ok(_) => (),
Err(err) => {
println!("Error occured during dropping CDRS {:?}", err);
}
}
}
}
/// The method makes a request to DB Server to prepare provided query.
pub fn prepare(&mut self,
query: String,
with_tracing: bool,
with_warnings: bool)
-> error::Result<Frame> {
let mut flags = vec![];
if with_tracing {
flags.push(Flag::Tracing);
}
if with_warnings {
flags.push(Flag::Warning);
}
let options_frame = Frame::new_req_prepare(query, flags).into_cbytes();
try!(self.cdrs.transport.write(options_frame.as_slice()));
parse_frame(&mut self.cdrs.transport, &self.compressor)
}
/// The method makes a request to DB Server to execute a query with provided id
/// using provided query parameters. `id` is an ID of a query which Server
/// returns back to a driver as a response to `prepare` request.
pub fn execute(&mut self,
id: &CBytesShort,
query_parameters: QueryParams,
with_tracing: bool,
with_warnings: bool)
-> error::Result<Frame> {
let mut flags = vec![];
if with_tracing {
flags.push(Flag::Tracing);
}
if with_warnings {
flags.push(Flag::Warning);
}
let options_frame = Frame::new_req_execute(id, query_parameters, flags).into_cbytes();
(self.cdrs.transport.write(options_frame.as_slice()))?;
parse_frame(&mut self.cdrs.transport, &self.compressor)
}
/// The method makes a request to DB Server to execute a query provided in `query` argument.
/// you can build the query with QueryBuilder
/// ```
/// use cdrs::query::QueryBuilder;
/// use cdrs::compression::Compression;
/// use cdrs::consistency::Consistency;
///
/// let select_query = QueryBuilder::new("select * from emp").finalize();
/// ```
pub fn query(&mut self,
query: Query,
with_tracing: bool,
with_warnings: bool)
-> error::Result<Frame> {
let mut flags = vec![];
if with_tracing {
flags.push(Flag::Tracing);
}
if with_warnings {
flags.push(Flag::Warning);
}
let query_frame = Frame::new_req_query(query.query,
query.consistency,
query.values,
query.with_names,
query.page_size,
query.paging_state,
query.serial_consistency,
query.timestamp,
flags)
.into_cbytes();
try!(self.cdrs.transport.write(query_frame.as_slice()));
parse_frame(&mut self.cdrs.transport, &self.compressor)
}
pub fn batch(&mut self,
batch_query: QueryBatch,
with_tracing: bool,
with_warnings: bool)
-> error::Result<Frame> {
let mut flags = vec![];
if with_tracing {
flags.push(Flag::Tracing);
}
if with_warnings {
flags.push(Flag::Warning);
}
let query_frame = Frame::new_req_batch(batch_query, flags).into_cbytes();
try!(self.cdrs.transport.write(query_frame.as_slice()));
parse_frame(&mut self.cdrs.transport, &self.compressor)
}
/// It consumes CDRS
pub fn | <'a>(mut self,
events: Vec<SimpleServerEvent>)
-> error::Result<(Listener<X>, EventStream)> {
let query_frame = Frame::new_req_register(events).into_cbytes();
try!(self.cdrs.transport.write(query_frame.as_slice()));
try!(parse_frame(&mut self.cdrs.transport, &self.compressor));
Ok(new_listener(self.cdrs.transport))
}
}
| listen_for | identifier_name |
manual_non_exhaustive.rs | use clippy_utils::attrs::is_doc_hidden;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet_opt;
use clippy_utils::{meets_msrv, msrvs};
use if_chain::if_chain;
use rustc_ast::ast::{FieldDef, Item, ItemKind, Variant, VariantData, VisibilityKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_semver::RustcVersion;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::{sym, Span};
declare_clippy_lint! {
/// ### What it does
/// Checks for manual implementations of the non-exhaustive pattern.
///
/// ### Why is this bad?
/// Using the #[non_exhaustive] attribute expresses better the intent
/// and allows possible optimizations when applied to enums.
///
/// ### Example
/// ```rust
/// struct S {
/// pub a: i32,
/// pub b: i32,
/// _c: (),
/// }
///
/// enum E {
/// A,
/// B,
/// #[doc(hidden)]
/// _C,
/// }
///
/// struct T(pub i32, pub i32, ());
/// ```
/// Use instead:
/// ```rust
/// #[non_exhaustive]
/// struct S {
/// pub a: i32,
/// pub b: i32,
/// }
///
/// #[non_exhaustive]
/// enum E {
/// A,
/// B,
/// }
///
/// #[non_exhaustive]
/// struct T(pub i32, pub i32);
/// ```
pub MANUAL_NON_EXHAUSTIVE,
style,
"manual implementations of the non-exhaustive pattern can be simplified using #[non_exhaustive]"
}
#[derive(Clone)]
pub struct ManualNonExhaustive {
msrv: Option<RustcVersion>,
}
impl ManualNonExhaustive {
#[must_use]
pub fn new(msrv: Option<RustcVersion>) -> Self {
Self { msrv }
}
}
impl_lint_pass!(ManualNonExhaustive => [MANUAL_NON_EXHAUSTIVE]);
impl EarlyLintPass for ManualNonExhaustive {
fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
if!meets_msrv(self.msrv.as_ref(), &msrvs::NON_EXHAUSTIVE) {
return;
}
match &item.kind {
ItemKind::Enum(def, _) => {
check_manual_non_exhaustive_enum(cx, item, &def.variants);
},
ItemKind::Struct(variant_data, _) => {
if let VariantData::Unit(..) = variant_data {
return;
}
check_manual_non_exhaustive_struct(cx, item, variant_data);
},
_ => {},
}
}
extract_msrv_attr!(EarlyContext);
}
fn check_manual_non_exhaustive_enum(cx: &EarlyContext<'_>, item: &Item, variants: &[Variant]) {
fn is_non_exhaustive_marker(variant: &Variant) -> bool {
matches!(variant.data, VariantData::Unit(_))
&& variant.ident.as_str().starts_with('_')
&& is_doc_hidden(&variant.attrs)
}
let mut markers = variants.iter().filter(|v| is_non_exhaustive_marker(v));
if_chain! {
if let Some(marker) = markers.next();
if markers.count() == 0 && variants.len() > 1;
then {
span_lint_and_then(
cx,
MANUAL_NON_EXHAUSTIVE,
item.span,
"this seems like a manual implementation of the non-exhaustive pattern",
|diag| {
if_chain! {
if!item.attrs.iter().any(|attr| attr.has_name(sym::non_exhaustive));
let header_span = cx.sess.source_map().span_until_char(item.span, '{');
if let Some(snippet) = snippet_opt(cx, header_span);
then {
diag.span_suggestion(
header_span,
"add the attribute",
format!("#[non_exhaustive] {}", snippet),
Applicability::Unspecified,
);
}
}
diag.span_help(marker.span, "remove this variant");
});
}
}
}
fn check_manual_non_exhaustive_struct(cx: &EarlyContext<'_>, item: &Item, data: &VariantData) {
fn | (field: &FieldDef) -> bool {
matches!(field.vis.kind, VisibilityKind::Inherited)
}
fn is_non_exhaustive_marker(field: &FieldDef) -> bool {
is_private(field) && field.ty.kind.is_unit() && field.ident.map_or(true, |n| n.as_str().starts_with('_'))
}
fn find_header_span(cx: &EarlyContext<'_>, item: &Item, data: &VariantData) -> Span {
let delimiter = match data {
VariantData::Struct(..) => '{',
VariantData::Tuple(..) => '(',
VariantData::Unit(_) => unreachable!("`VariantData::Unit` is already handled above"),
};
cx.sess.source_map().span_until_char(item.span, delimiter)
}
let fields = data.fields();
let private_fields = fields.iter().filter(|f| is_private(f)).count();
let public_fields = fields.iter().filter(|f| f.vis.kind.is_pub()).count();
if_chain! {
if private_fields == 1 && public_fields >= 1 && public_fields == fields.len() - 1;
if let Some(marker) = fields.iter().find(|f| is_non_exhaustive_marker(f));
then {
span_lint_and_then(
cx,
MANUAL_NON_EXHAUSTIVE,
item.span,
"this seems like a manual implementation of the non-exhaustive pattern",
|diag| {
if_chain! {
if!item.attrs.iter().any(|attr| attr.has_name(sym::non_exhaustive));
let header_span = find_header_span(cx, item, data);
if let Some(snippet) = snippet_opt(cx, header_span);
then {
diag.span_suggestion(
header_span,
"add the attribute",
format!("#[non_exhaustive] {}", snippet),
Applicability::Unspecified,
);
}
}
diag.span_help(marker.span, "remove this field");
});
}
}
}
| is_private | identifier_name |
manual_non_exhaustive.rs | use clippy_utils::attrs::is_doc_hidden;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet_opt;
use clippy_utils::{meets_msrv, msrvs};
use if_chain::if_chain;
use rustc_ast::ast::{FieldDef, Item, ItemKind, Variant, VariantData, VisibilityKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_semver::RustcVersion;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::{sym, Span};
declare_clippy_lint! {
/// ### What it does
/// Checks for manual implementations of the non-exhaustive pattern.
///
/// ### Why is this bad?
/// Using the #[non_exhaustive] attribute expresses better the intent
/// and allows possible optimizations when applied to enums.
///
/// ### Example
/// ```rust
/// struct S {
/// pub a: i32,
/// pub b: i32,
/// _c: (),
/// }
///
/// enum E {
/// A,
/// B,
/// #[doc(hidden)]
/// _C,
/// }
///
/// struct T(pub i32, pub i32, ());
/// ```
/// Use instead:
/// ```rust
/// #[non_exhaustive]
/// struct S {
/// pub a: i32,
/// pub b: i32,
/// }
///
/// #[non_exhaustive]
/// enum E {
/// A,
/// B,
/// }
///
/// #[non_exhaustive]
/// struct T(pub i32, pub i32);
/// ```
pub MANUAL_NON_EXHAUSTIVE,
style,
"manual implementations of the non-exhaustive pattern can be simplified using #[non_exhaustive]"
}
#[derive(Clone)]
pub struct ManualNonExhaustive {
msrv: Option<RustcVersion>,
}
impl ManualNonExhaustive {
#[must_use]
pub fn new(msrv: Option<RustcVersion>) -> Self {
Self { msrv }
}
}
impl_lint_pass!(ManualNonExhaustive => [MANUAL_NON_EXHAUSTIVE]);
impl EarlyLintPass for ManualNonExhaustive {
fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
if!meets_msrv(self.msrv.as_ref(), &msrvs::NON_EXHAUSTIVE) {
return;
}
match &item.kind {
ItemKind::Enum(def, _) => {
check_manual_non_exhaustive_enum(cx, item, &def.variants);
},
ItemKind::Struct(variant_data, _) => {
if let VariantData::Unit(..) = variant_data {
return;
}
check_manual_non_exhaustive_struct(cx, item, variant_data);
},
_ => {},
}
}
extract_msrv_attr!(EarlyContext);
}
fn check_manual_non_exhaustive_enum(cx: &EarlyContext<'_>, item: &Item, variants: &[Variant]) {
fn is_non_exhaustive_marker(variant: &Variant) -> bool {
matches!(variant.data, VariantData::Unit(_))
&& variant.ident.as_str().starts_with('_')
&& is_doc_hidden(&variant.attrs)
}
let mut markers = variants.iter().filter(|v| is_non_exhaustive_marker(v));
if_chain! {
if let Some(marker) = markers.next();
if markers.count() == 0 && variants.len() > 1;
then {
span_lint_and_then(
cx,
MANUAL_NON_EXHAUSTIVE,
item.span,
"this seems like a manual implementation of the non-exhaustive pattern",
|diag| {
if_chain! {
if!item.attrs.iter().any(|attr| attr.has_name(sym::non_exhaustive));
let header_span = cx.sess.source_map().span_until_char(item.span, '{');
if let Some(snippet) = snippet_opt(cx, header_span);
then {
diag.span_suggestion(
header_span,
"add the attribute",
format!("#[non_exhaustive] {}", snippet),
Applicability::Unspecified,
);
}
}
diag.span_help(marker.span, "remove this variant");
});
} | }
}
fn check_manual_non_exhaustive_struct(cx: &EarlyContext<'_>, item: &Item, data: &VariantData) {
fn is_private(field: &FieldDef) -> bool {
matches!(field.vis.kind, VisibilityKind::Inherited)
}
fn is_non_exhaustive_marker(field: &FieldDef) -> bool {
is_private(field) && field.ty.kind.is_unit() && field.ident.map_or(true, |n| n.as_str().starts_with('_'))
}
fn find_header_span(cx: &EarlyContext<'_>, item: &Item, data: &VariantData) -> Span {
let delimiter = match data {
VariantData::Struct(..) => '{',
VariantData::Tuple(..) => '(',
VariantData::Unit(_) => unreachable!("`VariantData::Unit` is already handled above"),
};
cx.sess.source_map().span_until_char(item.span, delimiter)
}
let fields = data.fields();
let private_fields = fields.iter().filter(|f| is_private(f)).count();
let public_fields = fields.iter().filter(|f| f.vis.kind.is_pub()).count();
if_chain! {
if private_fields == 1 && public_fields >= 1 && public_fields == fields.len() - 1;
if let Some(marker) = fields.iter().find(|f| is_non_exhaustive_marker(f));
then {
span_lint_and_then(
cx,
MANUAL_NON_EXHAUSTIVE,
item.span,
"this seems like a manual implementation of the non-exhaustive pattern",
|diag| {
if_chain! {
if!item.attrs.iter().any(|attr| attr.has_name(sym::non_exhaustive));
let header_span = find_header_span(cx, item, data);
if let Some(snippet) = snippet_opt(cx, header_span);
then {
diag.span_suggestion(
header_span,
"add the attribute",
format!("#[non_exhaustive] {}", snippet),
Applicability::Unspecified,
);
}
}
diag.span_help(marker.span, "remove this field");
});
}
}
} | random_line_split |
|
manual_non_exhaustive.rs | use clippy_utils::attrs::is_doc_hidden;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet_opt;
use clippy_utils::{meets_msrv, msrvs};
use if_chain::if_chain;
use rustc_ast::ast::{FieldDef, Item, ItemKind, Variant, VariantData, VisibilityKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_semver::RustcVersion;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::{sym, Span};
declare_clippy_lint! {
/// ### What it does
/// Checks for manual implementations of the non-exhaustive pattern.
///
/// ### Why is this bad?
/// Using the #[non_exhaustive] attribute expresses better the intent
/// and allows possible optimizations when applied to enums.
///
/// ### Example
/// ```rust
/// struct S {
/// pub a: i32,
/// pub b: i32,
/// _c: (),
/// }
///
/// enum E {
/// A,
/// B,
/// #[doc(hidden)]
/// _C,
/// }
///
/// struct T(pub i32, pub i32, ());
/// ```
/// Use instead:
/// ```rust
/// #[non_exhaustive]
/// struct S {
/// pub a: i32,
/// pub b: i32,
/// }
///
/// #[non_exhaustive]
/// enum E {
/// A,
/// B,
/// }
///
/// #[non_exhaustive]
/// struct T(pub i32, pub i32);
/// ```
pub MANUAL_NON_EXHAUSTIVE,
style,
"manual implementations of the non-exhaustive pattern can be simplified using #[non_exhaustive]"
}
#[derive(Clone)]
pub struct ManualNonExhaustive {
msrv: Option<RustcVersion>,
}
impl ManualNonExhaustive {
#[must_use]
pub fn new(msrv: Option<RustcVersion>) -> Self {
Self { msrv }
}
}
impl_lint_pass!(ManualNonExhaustive => [MANUAL_NON_EXHAUSTIVE]);
impl EarlyLintPass for ManualNonExhaustive {
fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
if!meets_msrv(self.msrv.as_ref(), &msrvs::NON_EXHAUSTIVE) {
return;
}
match &item.kind {
ItemKind::Enum(def, _) => {
check_manual_non_exhaustive_enum(cx, item, &def.variants);
},
ItemKind::Struct(variant_data, _) => | ,
_ => {},
}
}
extract_msrv_attr!(EarlyContext);
}
fn check_manual_non_exhaustive_enum(cx: &EarlyContext<'_>, item: &Item, variants: &[Variant]) {
fn is_non_exhaustive_marker(variant: &Variant) -> bool {
matches!(variant.data, VariantData::Unit(_))
&& variant.ident.as_str().starts_with('_')
&& is_doc_hidden(&variant.attrs)
}
let mut markers = variants.iter().filter(|v| is_non_exhaustive_marker(v));
if_chain! {
if let Some(marker) = markers.next();
if markers.count() == 0 && variants.len() > 1;
then {
span_lint_and_then(
cx,
MANUAL_NON_EXHAUSTIVE,
item.span,
"this seems like a manual implementation of the non-exhaustive pattern",
|diag| {
if_chain! {
if!item.attrs.iter().any(|attr| attr.has_name(sym::non_exhaustive));
let header_span = cx.sess.source_map().span_until_char(item.span, '{');
if let Some(snippet) = snippet_opt(cx, header_span);
then {
diag.span_suggestion(
header_span,
"add the attribute",
format!("#[non_exhaustive] {}", snippet),
Applicability::Unspecified,
);
}
}
diag.span_help(marker.span, "remove this variant");
});
}
}
}
fn check_manual_non_exhaustive_struct(cx: &EarlyContext<'_>, item: &Item, data: &VariantData) {
fn is_private(field: &FieldDef) -> bool {
matches!(field.vis.kind, VisibilityKind::Inherited)
}
fn is_non_exhaustive_marker(field: &FieldDef) -> bool {
is_private(field) && field.ty.kind.is_unit() && field.ident.map_or(true, |n| n.as_str().starts_with('_'))
}
fn find_header_span(cx: &EarlyContext<'_>, item: &Item, data: &VariantData) -> Span {
let delimiter = match data {
VariantData::Struct(..) => '{',
VariantData::Tuple(..) => '(',
VariantData::Unit(_) => unreachable!("`VariantData::Unit` is already handled above"),
};
cx.sess.source_map().span_until_char(item.span, delimiter)
}
let fields = data.fields();
let private_fields = fields.iter().filter(|f| is_private(f)).count();
let public_fields = fields.iter().filter(|f| f.vis.kind.is_pub()).count();
if_chain! {
if private_fields == 1 && public_fields >= 1 && public_fields == fields.len() - 1;
if let Some(marker) = fields.iter().find(|f| is_non_exhaustive_marker(f));
then {
span_lint_and_then(
cx,
MANUAL_NON_EXHAUSTIVE,
item.span,
"this seems like a manual implementation of the non-exhaustive pattern",
|diag| {
if_chain! {
if!item.attrs.iter().any(|attr| attr.has_name(sym::non_exhaustive));
let header_span = find_header_span(cx, item, data);
if let Some(snippet) = snippet_opt(cx, header_span);
then {
diag.span_suggestion(
header_span,
"add the attribute",
format!("#[non_exhaustive] {}", snippet),
Applicability::Unspecified,
);
}
}
diag.span_help(marker.span, "remove this field");
});
}
}
}
| {
if let VariantData::Unit(..) = variant_data {
return;
}
check_manual_non_exhaustive_struct(cx, item, variant_data);
} | conditional_block |
manual_non_exhaustive.rs | use clippy_utils::attrs::is_doc_hidden;
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet_opt;
use clippy_utils::{meets_msrv, msrvs};
use if_chain::if_chain;
use rustc_ast::ast::{FieldDef, Item, ItemKind, Variant, VariantData, VisibilityKind};
use rustc_errors::Applicability;
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_semver::RustcVersion;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::{sym, Span};
declare_clippy_lint! {
/// ### What it does
/// Checks for manual implementations of the non-exhaustive pattern.
///
/// ### Why is this bad?
/// Using the #[non_exhaustive] attribute expresses better the intent
/// and allows possible optimizations when applied to enums.
///
/// ### Example
/// ```rust
/// struct S {
/// pub a: i32,
/// pub b: i32,
/// _c: (),
/// }
///
/// enum E {
/// A,
/// B,
/// #[doc(hidden)]
/// _C,
/// }
///
/// struct T(pub i32, pub i32, ());
/// ```
/// Use instead:
/// ```rust
/// #[non_exhaustive]
/// struct S {
/// pub a: i32,
/// pub b: i32,
/// }
///
/// #[non_exhaustive]
/// enum E {
/// A,
/// B,
/// }
///
/// #[non_exhaustive]
/// struct T(pub i32, pub i32);
/// ```
pub MANUAL_NON_EXHAUSTIVE,
style,
"manual implementations of the non-exhaustive pattern can be simplified using #[non_exhaustive]"
}
#[derive(Clone)]
pub struct ManualNonExhaustive {
msrv: Option<RustcVersion>,
}
impl ManualNonExhaustive {
#[must_use]
pub fn new(msrv: Option<RustcVersion>) -> Self {
Self { msrv }
}
}
impl_lint_pass!(ManualNonExhaustive => [MANUAL_NON_EXHAUSTIVE]);
impl EarlyLintPass for ManualNonExhaustive {
fn check_item(&mut self, cx: &EarlyContext<'_>, item: &Item) {
if!meets_msrv(self.msrv.as_ref(), &msrvs::NON_EXHAUSTIVE) {
return;
}
match &item.kind {
ItemKind::Enum(def, _) => {
check_manual_non_exhaustive_enum(cx, item, &def.variants);
},
ItemKind::Struct(variant_data, _) => {
if let VariantData::Unit(..) = variant_data {
return;
}
check_manual_non_exhaustive_struct(cx, item, variant_data);
},
_ => {},
}
}
extract_msrv_attr!(EarlyContext);
}
fn check_manual_non_exhaustive_enum(cx: &EarlyContext<'_>, item: &Item, variants: &[Variant]) {
fn is_non_exhaustive_marker(variant: &Variant) -> bool {
matches!(variant.data, VariantData::Unit(_))
&& variant.ident.as_str().starts_with('_')
&& is_doc_hidden(&variant.attrs)
}
let mut markers = variants.iter().filter(|v| is_non_exhaustive_marker(v));
if_chain! {
if let Some(marker) = markers.next();
if markers.count() == 0 && variants.len() > 1;
then {
span_lint_and_then(
cx,
MANUAL_NON_EXHAUSTIVE,
item.span,
"this seems like a manual implementation of the non-exhaustive pattern",
|diag| {
if_chain! {
if!item.attrs.iter().any(|attr| attr.has_name(sym::non_exhaustive));
let header_span = cx.sess.source_map().span_until_char(item.span, '{');
if let Some(snippet) = snippet_opt(cx, header_span);
then {
diag.span_suggestion(
header_span,
"add the attribute",
format!("#[non_exhaustive] {}", snippet),
Applicability::Unspecified,
);
}
}
diag.span_help(marker.span, "remove this variant");
});
}
}
}
fn check_manual_non_exhaustive_struct(cx: &EarlyContext<'_>, item: &Item, data: &VariantData) {
fn is_private(field: &FieldDef) -> bool {
matches!(field.vis.kind, VisibilityKind::Inherited)
}
fn is_non_exhaustive_marker(field: &FieldDef) -> bool |
fn find_header_span(cx: &EarlyContext<'_>, item: &Item, data: &VariantData) -> Span {
let delimiter = match data {
VariantData::Struct(..) => '{',
VariantData::Tuple(..) => '(',
VariantData::Unit(_) => unreachable!("`VariantData::Unit` is already handled above"),
};
cx.sess.source_map().span_until_char(item.span, delimiter)
}
let fields = data.fields();
let private_fields = fields.iter().filter(|f| is_private(f)).count();
let public_fields = fields.iter().filter(|f| f.vis.kind.is_pub()).count();
if_chain! {
if private_fields == 1 && public_fields >= 1 && public_fields == fields.len() - 1;
if let Some(marker) = fields.iter().find(|f| is_non_exhaustive_marker(f));
then {
span_lint_and_then(
cx,
MANUAL_NON_EXHAUSTIVE,
item.span,
"this seems like a manual implementation of the non-exhaustive pattern",
|diag| {
if_chain! {
if!item.attrs.iter().any(|attr| attr.has_name(sym::non_exhaustive));
let header_span = find_header_span(cx, item, data);
if let Some(snippet) = snippet_opt(cx, header_span);
then {
diag.span_suggestion(
header_span,
"add the attribute",
format!("#[non_exhaustive] {}", snippet),
Applicability::Unspecified,
);
}
}
diag.span_help(marker.span, "remove this field");
});
}
}
}
| {
is_private(field) && field.ty.kind.is_unit() && field.ident.map_or(true, |n| n.as_str().starts_with('_'))
} | identifier_body |
test_sync.rs | use std::sync::Arc;
use std::thread;
// use protobuf::CodedInputStream;
// use protobuf::Message;
use quick_protobuf::*;
use super::basic::*;
// test messages are sync
#[test]
fn test_sync() | })
.collect();
let results = threads
.into_iter()
.map(|t| t.join().unwrap())
.collect::<Vec<_>>();
assert_eq!(&[Some(23), Some(23), Some(23), Some(23)], &results[..]);
}
| {
let m = Arc::new({
let mut r = TestTypesSingular::default();
r.int32_field = Some(23);
r
});
let threads: Vec<_> = (0..4)
.map(|_| {
let m_copy = m.clone();
thread::spawn(move || {
let mut bytes = Vec::new();
{
let mut writer = Writer::new(&mut bytes);
m_copy.write_message(&mut writer).unwrap();
}
let mut reader = BytesReader::from_bytes(&bytes);
let read = TestTypesSingular::from_reader(&mut reader, &bytes).unwrap();
read.int32_field
}) | identifier_body |
test_sync.rs | use std::sync::Arc;
use std::thread;
// use protobuf::CodedInputStream;
// use protobuf::Message;
use quick_protobuf::*;
use super::basic::*;
// test messages are sync
#[test]
fn | () {
let m = Arc::new({
let mut r = TestTypesSingular::default();
r.int32_field = Some(23);
r
});
let threads: Vec<_> = (0..4)
.map(|_| {
let m_copy = m.clone();
thread::spawn(move || {
let mut bytes = Vec::new();
{
let mut writer = Writer::new(&mut bytes);
m_copy.write_message(&mut writer).unwrap();
}
let mut reader = BytesReader::from_bytes(&bytes);
let read = TestTypesSingular::from_reader(&mut reader, &bytes).unwrap();
read.int32_field
})
})
.collect();
let results = threads
.into_iter()
.map(|t| t.join().unwrap())
.collect::<Vec<_>>();
assert_eq!(&[Some(23), Some(23), Some(23), Some(23)], &results[..]);
}
| test_sync | identifier_name |
test_sync.rs | use std::sync::Arc;
use std::thread;
// use protobuf::CodedInputStream;
// use protobuf::Message;
use quick_protobuf::*;
use super::basic::*;
// test messages are sync
#[test]
fn test_sync() {
let m = Arc::new({
let mut r = TestTypesSingular::default();
r.int32_field = Some(23);
r
});
let threads: Vec<_> = (0..4)
.map(|_| {
let m_copy = m.clone();
thread::spawn(move || {
let mut bytes = Vec::new(); | let mut reader = BytesReader::from_bytes(&bytes);
let read = TestTypesSingular::from_reader(&mut reader, &bytes).unwrap();
read.int32_field
})
})
.collect();
let results = threads
.into_iter()
.map(|t| t.join().unwrap())
.collect::<Vec<_>>();
assert_eq!(&[Some(23), Some(23), Some(23), Some(23)], &results[..]);
} | {
let mut writer = Writer::new(&mut bytes);
m_copy.write_message(&mut writer).unwrap();
} | random_line_split |
boost_query.rs | use crate::fastfield::AliveBitSet;
use crate::query::explanation::does_not_match;
use crate::query::{Explanation, Query, Scorer, Weight};
use crate::{DocId, DocSet, Score, Searcher, SegmentReader, Term};
use std::collections::BTreeMap;
use std::fmt;
/// `BoostQuery` is a wrapper over a query used to boost its score.
///
/// The document set matched by the `BoostQuery` is strictly the same as the underlying query.
/// The score of each document, is the score of the underlying query multiplied by the `boost`
/// factor.
pub struct BoostQuery {
query: Box<dyn Query>,
boost: Score,
}
impl BoostQuery {
/// Builds a boost query.
pub fn new(query: Box<dyn Query>, boost: Score) -> BoostQuery {
BoostQuery { query, boost }
}
}
impl Clone for BoostQuery {
fn clone(&self) -> Self {
BoostQuery {
query: self.query.box_clone(),
boost: self.boost,
}
}
}
impl fmt::Debug for BoostQuery {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Boost(query={:?}, boost={})", self.query, self.boost)
}
}
impl Query for BoostQuery {
fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
let weight_without_boost = self.query.weight(searcher, scoring_enabled)?;
let boosted_weight = if scoring_enabled {
Box::new(BoostWeight::new(weight_without_boost, self.boost)) | }
fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
self.query.query_terms(terms)
}
}
pub(crate) struct BoostWeight {
weight: Box<dyn Weight>,
boost: Score,
}
impl BoostWeight {
pub fn new(weight: Box<dyn Weight>, boost: Score) -> Self {
BoostWeight { weight, boost }
}
}
impl Weight for BoostWeight {
fn scorer(&self, reader: &SegmentReader, boost: Score) -> crate::Result<Box<dyn Scorer>> {
self.weight.scorer(reader, boost * self.boost)
}
fn explain(&self, reader: &SegmentReader, doc: u32) -> crate::Result<Explanation> {
let mut scorer = self.scorer(reader, 1.0)?;
if scorer.seek(doc)!= doc {
return Err(does_not_match(doc));
}
let mut explanation =
Explanation::new(format!("Boost x{} of...", self.boost), scorer.score());
let underlying_explanation = self.weight.explain(reader, doc)?;
explanation.add_detail(underlying_explanation);
Ok(explanation)
}
fn count(&self, reader: &SegmentReader) -> crate::Result<u32> {
self.weight.count(reader)
}
}
pub(crate) struct BoostScorer<S: Scorer> {
underlying: S,
boost: Score,
}
impl<S: Scorer> BoostScorer<S> {
pub fn new(underlying: S, boost: Score) -> BoostScorer<S> {
BoostScorer { underlying, boost }
}
}
impl<S: Scorer> DocSet for BoostScorer<S> {
fn advance(&mut self) -> DocId {
self.underlying.advance()
}
fn seek(&mut self, target: DocId) -> DocId {
self.underlying.seek(target)
}
fn fill_buffer(&mut self, buffer: &mut [DocId]) -> usize {
self.underlying.fill_buffer(buffer)
}
fn doc(&self) -> u32 {
self.underlying.doc()
}
fn size_hint(&self) -> u32 {
self.underlying.size_hint()
}
fn count(&mut self, alive_bitset: &AliveBitSet) -> u32 {
self.underlying.count(alive_bitset)
}
fn count_including_deleted(&mut self) -> u32 {
self.underlying.count_including_deleted()
}
}
impl<S: Scorer> Scorer for BoostScorer<S> {
fn score(&mut self) -> Score {
self.underlying.score() * self.boost
}
}
#[cfg(test)]
mod tests {
use super::BoostQuery;
use crate::query::{AllQuery, Query};
use crate::schema::Schema;
use crate::{DocAddress, Document, Index};
#[test]
fn test_boost_query_explain() -> crate::Result<()> {
let schema = Schema::builder().build();
let index = Index::create_in_ram(schema);
let mut index_writer = index.writer_for_tests()?;
index_writer.add_document(Document::new())?;
index_writer.commit()?;
let reader = index.reader()?;
let searcher = reader.searcher();
let query = BoostQuery::new(Box::new(AllQuery), 0.2);
let explanation = query.explain(&searcher, DocAddress::new(0, 0u32)).unwrap();
assert_eq!(
explanation.to_pretty_json(),
"{\n \"value\": 0.2,\n \"description\": \"Boost x0.2 of...\",\n \"details\": [\n {\n \"value\": 1.0,\n \"description\": \"AllQuery\",\n \"context\": []\n }\n ],\n \"context\": []\n}"
);
Ok(())
}
} | } else {
weight_without_boost
};
Ok(boosted_weight) | random_line_split |
boost_query.rs | use crate::fastfield::AliveBitSet;
use crate::query::explanation::does_not_match;
use crate::query::{Explanation, Query, Scorer, Weight};
use crate::{DocId, DocSet, Score, Searcher, SegmentReader, Term};
use std::collections::BTreeMap;
use std::fmt;
/// `BoostQuery` is a wrapper over a query used to boost its score.
///
/// The document set matched by the `BoostQuery` is strictly the same as the underlying query.
/// The score of each document, is the score of the underlying query multiplied by the `boost`
/// factor.
pub struct BoostQuery {
query: Box<dyn Query>,
boost: Score,
}
impl BoostQuery {
/// Builds a boost query.
pub fn new(query: Box<dyn Query>, boost: Score) -> BoostQuery {
BoostQuery { query, boost }
}
}
impl Clone for BoostQuery {
fn clone(&self) -> Self {
BoostQuery {
query: self.query.box_clone(),
boost: self.boost,
}
}
}
impl fmt::Debug for BoostQuery {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Boost(query={:?}, boost={})", self.query, self.boost)
}
}
impl Query for BoostQuery {
fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
let weight_without_boost = self.query.weight(searcher, scoring_enabled)?;
let boosted_weight = if scoring_enabled {
Box::new(BoostWeight::new(weight_without_boost, self.boost))
} else {
weight_without_boost
};
Ok(boosted_weight)
}
fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
self.query.query_terms(terms)
}
}
pub(crate) struct BoostWeight {
weight: Box<dyn Weight>,
boost: Score,
}
impl BoostWeight {
pub fn new(weight: Box<dyn Weight>, boost: Score) -> Self {
BoostWeight { weight, boost }
}
}
impl Weight for BoostWeight {
fn scorer(&self, reader: &SegmentReader, boost: Score) -> crate::Result<Box<dyn Scorer>> {
self.weight.scorer(reader, boost * self.boost)
}
fn explain(&self, reader: &SegmentReader, doc: u32) -> crate::Result<Explanation> {
let mut scorer = self.scorer(reader, 1.0)?;
if scorer.seek(doc)!= doc |
let mut explanation =
Explanation::new(format!("Boost x{} of...", self.boost), scorer.score());
let underlying_explanation = self.weight.explain(reader, doc)?;
explanation.add_detail(underlying_explanation);
Ok(explanation)
}
fn count(&self, reader: &SegmentReader) -> crate::Result<u32> {
self.weight.count(reader)
}
}
pub(crate) struct BoostScorer<S: Scorer> {
underlying: S,
boost: Score,
}
impl<S: Scorer> BoostScorer<S> {
pub fn new(underlying: S, boost: Score) -> BoostScorer<S> {
BoostScorer { underlying, boost }
}
}
impl<S: Scorer> DocSet for BoostScorer<S> {
fn advance(&mut self) -> DocId {
self.underlying.advance()
}
fn seek(&mut self, target: DocId) -> DocId {
self.underlying.seek(target)
}
fn fill_buffer(&mut self, buffer: &mut [DocId]) -> usize {
self.underlying.fill_buffer(buffer)
}
fn doc(&self) -> u32 {
self.underlying.doc()
}
fn size_hint(&self) -> u32 {
self.underlying.size_hint()
}
fn count(&mut self, alive_bitset: &AliveBitSet) -> u32 {
self.underlying.count(alive_bitset)
}
fn count_including_deleted(&mut self) -> u32 {
self.underlying.count_including_deleted()
}
}
impl<S: Scorer> Scorer for BoostScorer<S> {
fn score(&mut self) -> Score {
self.underlying.score() * self.boost
}
}
#[cfg(test)]
mod tests {
use super::BoostQuery;
use crate::query::{AllQuery, Query};
use crate::schema::Schema;
use crate::{DocAddress, Document, Index};
#[test]
fn test_boost_query_explain() -> crate::Result<()> {
let schema = Schema::builder().build();
let index = Index::create_in_ram(schema);
let mut index_writer = index.writer_for_tests()?;
index_writer.add_document(Document::new())?;
index_writer.commit()?;
let reader = index.reader()?;
let searcher = reader.searcher();
let query = BoostQuery::new(Box::new(AllQuery), 0.2);
let explanation = query.explain(&searcher, DocAddress::new(0, 0u32)).unwrap();
assert_eq!(
explanation.to_pretty_json(),
"{\n \"value\": 0.2,\n \"description\": \"Boost x0.2 of...\",\n \"details\": [\n {\n \"value\": 1.0,\n \"description\": \"AllQuery\",\n \"context\": []\n }\n ],\n \"context\": []\n}"
);
Ok(())
}
}
| {
return Err(does_not_match(doc));
} | conditional_block |
boost_query.rs | use crate::fastfield::AliveBitSet;
use crate::query::explanation::does_not_match;
use crate::query::{Explanation, Query, Scorer, Weight};
use crate::{DocId, DocSet, Score, Searcher, SegmentReader, Term};
use std::collections::BTreeMap;
use std::fmt;
/// `BoostQuery` is a wrapper over a query used to boost its score.
///
/// The document set matched by the `BoostQuery` is strictly the same as the underlying query.
/// The score of each document, is the score of the underlying query multiplied by the `boost`
/// factor.
pub struct BoostQuery {
query: Box<dyn Query>,
boost: Score,
}
impl BoostQuery {
/// Builds a boost query.
pub fn new(query: Box<dyn Query>, boost: Score) -> BoostQuery {
BoostQuery { query, boost }
}
}
impl Clone for BoostQuery {
fn clone(&self) -> Self {
BoostQuery {
query: self.query.box_clone(),
boost: self.boost,
}
}
}
impl fmt::Debug for BoostQuery {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Boost(query={:?}, boost={})", self.query, self.boost)
}
}
impl Query for BoostQuery {
fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
let weight_without_boost = self.query.weight(searcher, scoring_enabled)?;
let boosted_weight = if scoring_enabled {
Box::new(BoostWeight::new(weight_without_boost, self.boost))
} else {
weight_without_boost
};
Ok(boosted_weight)
}
fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
self.query.query_terms(terms)
}
}
pub(crate) struct BoostWeight {
weight: Box<dyn Weight>,
boost: Score,
}
impl BoostWeight {
pub fn new(weight: Box<dyn Weight>, boost: Score) -> Self {
BoostWeight { weight, boost }
}
}
impl Weight for BoostWeight {
fn scorer(&self, reader: &SegmentReader, boost: Score) -> crate::Result<Box<dyn Scorer>> {
self.weight.scorer(reader, boost * self.boost)
}
fn explain(&self, reader: &SegmentReader, doc: u32) -> crate::Result<Explanation> {
let mut scorer = self.scorer(reader, 1.0)?;
if scorer.seek(doc)!= doc {
return Err(does_not_match(doc));
}
let mut explanation =
Explanation::new(format!("Boost x{} of...", self.boost), scorer.score());
let underlying_explanation = self.weight.explain(reader, doc)?;
explanation.add_detail(underlying_explanation);
Ok(explanation)
}
fn count(&self, reader: &SegmentReader) -> crate::Result<u32> {
self.weight.count(reader)
}
}
pub(crate) struct BoostScorer<S: Scorer> {
underlying: S,
boost: Score,
}
impl<S: Scorer> BoostScorer<S> {
pub fn new(underlying: S, boost: Score) -> BoostScorer<S> {
BoostScorer { underlying, boost }
}
}
impl<S: Scorer> DocSet for BoostScorer<S> {
fn advance(&mut self) -> DocId {
self.underlying.advance()
}
fn seek(&mut self, target: DocId) -> DocId {
self.underlying.seek(target)
}
fn fill_buffer(&mut self, buffer: &mut [DocId]) -> usize {
self.underlying.fill_buffer(buffer)
}
fn doc(&self) -> u32 |
fn size_hint(&self) -> u32 {
self.underlying.size_hint()
}
fn count(&mut self, alive_bitset: &AliveBitSet) -> u32 {
self.underlying.count(alive_bitset)
}
fn count_including_deleted(&mut self) -> u32 {
self.underlying.count_including_deleted()
}
}
impl<S: Scorer> Scorer for BoostScorer<S> {
fn score(&mut self) -> Score {
self.underlying.score() * self.boost
}
}
#[cfg(test)]
mod tests {
use super::BoostQuery;
use crate::query::{AllQuery, Query};
use crate::schema::Schema;
use crate::{DocAddress, Document, Index};
#[test]
fn test_boost_query_explain() -> crate::Result<()> {
let schema = Schema::builder().build();
let index = Index::create_in_ram(schema);
let mut index_writer = index.writer_for_tests()?;
index_writer.add_document(Document::new())?;
index_writer.commit()?;
let reader = index.reader()?;
let searcher = reader.searcher();
let query = BoostQuery::new(Box::new(AllQuery), 0.2);
let explanation = query.explain(&searcher, DocAddress::new(0, 0u32)).unwrap();
assert_eq!(
explanation.to_pretty_json(),
"{\n \"value\": 0.2,\n \"description\": \"Boost x0.2 of...\",\n \"details\": [\n {\n \"value\": 1.0,\n \"description\": \"AllQuery\",\n \"context\": []\n }\n ],\n \"context\": []\n}"
);
Ok(())
}
}
| {
self.underlying.doc()
} | identifier_body |
boost_query.rs | use crate::fastfield::AliveBitSet;
use crate::query::explanation::does_not_match;
use crate::query::{Explanation, Query, Scorer, Weight};
use crate::{DocId, DocSet, Score, Searcher, SegmentReader, Term};
use std::collections::BTreeMap;
use std::fmt;
/// `BoostQuery` is a wrapper over a query used to boost its score.
///
/// The document set matched by the `BoostQuery` is strictly the same as the underlying query.
/// The score of each document, is the score of the underlying query multiplied by the `boost`
/// factor.
pub struct BoostQuery {
query: Box<dyn Query>,
boost: Score,
}
impl BoostQuery {
/// Builds a boost query.
pub fn new(query: Box<dyn Query>, boost: Score) -> BoostQuery {
BoostQuery { query, boost }
}
}
impl Clone for BoostQuery {
fn clone(&self) -> Self {
BoostQuery {
query: self.query.box_clone(),
boost: self.boost,
}
}
}
impl fmt::Debug for BoostQuery {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Boost(query={:?}, boost={})", self.query, self.boost)
}
}
impl Query for BoostQuery {
fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> crate::Result<Box<dyn Weight>> {
let weight_without_boost = self.query.weight(searcher, scoring_enabled)?;
let boosted_weight = if scoring_enabled {
Box::new(BoostWeight::new(weight_without_boost, self.boost))
} else {
weight_without_boost
};
Ok(boosted_weight)
}
fn query_terms(&self, terms: &mut BTreeMap<Term, bool>) {
self.query.query_terms(terms)
}
}
pub(crate) struct BoostWeight {
weight: Box<dyn Weight>,
boost: Score,
}
impl BoostWeight {
pub fn new(weight: Box<dyn Weight>, boost: Score) -> Self {
BoostWeight { weight, boost }
}
}
impl Weight for BoostWeight {
fn scorer(&self, reader: &SegmentReader, boost: Score) -> crate::Result<Box<dyn Scorer>> {
self.weight.scorer(reader, boost * self.boost)
}
fn explain(&self, reader: &SegmentReader, doc: u32) -> crate::Result<Explanation> {
let mut scorer = self.scorer(reader, 1.0)?;
if scorer.seek(doc)!= doc {
return Err(does_not_match(doc));
}
let mut explanation =
Explanation::new(format!("Boost x{} of...", self.boost), scorer.score());
let underlying_explanation = self.weight.explain(reader, doc)?;
explanation.add_detail(underlying_explanation);
Ok(explanation)
}
fn count(&self, reader: &SegmentReader) -> crate::Result<u32> {
self.weight.count(reader)
}
}
pub(crate) struct BoostScorer<S: Scorer> {
underlying: S,
boost: Score,
}
impl<S: Scorer> BoostScorer<S> {
pub fn new(underlying: S, boost: Score) -> BoostScorer<S> {
BoostScorer { underlying, boost }
}
}
impl<S: Scorer> DocSet for BoostScorer<S> {
fn | (&mut self) -> DocId {
self.underlying.advance()
}
fn seek(&mut self, target: DocId) -> DocId {
self.underlying.seek(target)
}
fn fill_buffer(&mut self, buffer: &mut [DocId]) -> usize {
self.underlying.fill_buffer(buffer)
}
fn doc(&self) -> u32 {
self.underlying.doc()
}
fn size_hint(&self) -> u32 {
self.underlying.size_hint()
}
fn count(&mut self, alive_bitset: &AliveBitSet) -> u32 {
self.underlying.count(alive_bitset)
}
fn count_including_deleted(&mut self) -> u32 {
self.underlying.count_including_deleted()
}
}
impl<S: Scorer> Scorer for BoostScorer<S> {
fn score(&mut self) -> Score {
self.underlying.score() * self.boost
}
}
#[cfg(test)]
mod tests {
use super::BoostQuery;
use crate::query::{AllQuery, Query};
use crate::schema::Schema;
use crate::{DocAddress, Document, Index};
#[test]
fn test_boost_query_explain() -> crate::Result<()> {
let schema = Schema::builder().build();
let index = Index::create_in_ram(schema);
let mut index_writer = index.writer_for_tests()?;
index_writer.add_document(Document::new())?;
index_writer.commit()?;
let reader = index.reader()?;
let searcher = reader.searcher();
let query = BoostQuery::new(Box::new(AllQuery), 0.2);
let explanation = query.explain(&searcher, DocAddress::new(0, 0u32)).unwrap();
assert_eq!(
explanation.to_pretty_json(),
"{\n \"value\": 0.2,\n \"description\": \"Boost x0.2 of...\",\n \"details\": [\n {\n \"value\": 1.0,\n \"description\": \"AllQuery\",\n \"context\": []\n }\n ],\n \"context\": []\n}"
);
Ok(())
}
}
| advance | identifier_name |
comparator.rs | #[cfg(test)]
mod comparator {
use libc::c_char;
use utils::{tmpdir, db_put_simple};
use leveldb::database::{Database};
use leveldb::iterator::Iterable;
use leveldb::options::{Options,ReadOptions};
use leveldb::comparator::{Comparator,OrdComparator};
use std::cmp::Ordering;
struct ReverseComparator {}
impl Comparator for ReverseComparator { | }
fn compare(&self, a: &[u8], b: &[u8]) -> Ordering {
b.cmp(a)
}
}
#[test]
fn test_comparator() {
let comparator: ReverseComparator = ReverseComparator {};
let mut opts = Options::new();
opts.create_if_missing = true;
let tmp = tmpdir("reverse_comparator");
let database = &mut Database::open_with_comparator(tmp.path(), opts, comparator).unwrap();
db_put_simple(database, b"1", &[1]);
db_put_simple(database, b"2", &[2]);
let read_opts = ReadOptions::new();
let mut iter = database.iter(read_opts);
assert_eq!((b"2".to_vec().as_slice(), vec![2]), iter.next().unwrap());
assert_eq!((b"1".to_vec().as_slice(), vec![1]), iter.next().unwrap());
}
#[test]
fn test_ord_comparator() {
let comparator: OrdComparator = OrdComparator::new("foo");
let mut opts = Options::new();
opts.create_if_missing = true;
let tmp = tmpdir("ord_comparator");
let database = &mut Database::open_with_comparator(tmp.path(), opts, comparator).unwrap();
db_put_simple(database, b"1", &[1]);
db_put_simple(database, b"2", &[2]);
let read_opts = ReadOptions::new();
let mut iter = database.iter(read_opts);
assert_eq!((b"1".to_vec().as_slice(), vec![1]), iter.next().unwrap());
assert_eq!((b"2".to_vec().as_slice(), vec![2]), iter.next().unwrap());
}
} |
fn name(&self) -> *const c_char {
"reverse".as_ptr() as *const c_char | random_line_split |
comparator.rs | #[cfg(test)]
mod comparator {
use libc::c_char;
use utils::{tmpdir, db_put_simple};
use leveldb::database::{Database};
use leveldb::iterator::Iterable;
use leveldb::options::{Options,ReadOptions};
use leveldb::comparator::{Comparator,OrdComparator};
use std::cmp::Ordering;
struct ReverseComparator {}
impl Comparator for ReverseComparator {
fn name(&self) -> *const c_char {
"reverse".as_ptr() as *const c_char
}
fn compare(&self, a: &[u8], b: &[u8]) -> Ordering {
b.cmp(a)
}
}
#[test]
fn | () {
let comparator: ReverseComparator = ReverseComparator {};
let mut opts = Options::new();
opts.create_if_missing = true;
let tmp = tmpdir("reverse_comparator");
let database = &mut Database::open_with_comparator(tmp.path(), opts, comparator).unwrap();
db_put_simple(database, b"1", &[1]);
db_put_simple(database, b"2", &[2]);
let read_opts = ReadOptions::new();
let mut iter = database.iter(read_opts);
assert_eq!((b"2".to_vec().as_slice(), vec![2]), iter.next().unwrap());
assert_eq!((b"1".to_vec().as_slice(), vec![1]), iter.next().unwrap());
}
#[test]
fn test_ord_comparator() {
let comparator: OrdComparator = OrdComparator::new("foo");
let mut opts = Options::new();
opts.create_if_missing = true;
let tmp = tmpdir("ord_comparator");
let database = &mut Database::open_with_comparator(tmp.path(), opts, comparator).unwrap();
db_put_simple(database, b"1", &[1]);
db_put_simple(database, b"2", &[2]);
let read_opts = ReadOptions::new();
let mut iter = database.iter(read_opts);
assert_eq!((b"1".to_vec().as_slice(), vec![1]), iter.next().unwrap());
assert_eq!((b"2".to_vec().as_slice(), vec![2]), iter.next().unwrap());
}
}
| test_comparator | identifier_name |
manticore_protocol_spdm_GetVersion__req_to_wire.rs | // Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details. | //!! DO NOT EDIT!!
// To regenerate this file, run `fuzz/generate_proto_tests.py`.
#![no_main]
#![allow(non_snake_case)]
use libfuzzer_sys::fuzz_target;
use manticore::protocol::Command;
use manticore::protocol::wire::ToWire;
use manticore::protocol::borrowed::AsStatic;
use manticore::protocol::borrowed::Borrowed;
use manticore::protocol::spdm::GetVersion as C;
type Req<'a> = <C as Command<'a>>::Req;
fuzz_target!(|data: AsStatic<'static, Req<'static>>| {
let mut out = [0u8; 1024];
let _ = Req::borrow(&data).to_wire(&mut &mut out[..]);
}); | // SPDX-License-Identifier: Apache-2.0
| random_line_split |
loader.rs |
use collections::{HashMap, HashSet};
use flate;
use time;
pub static MACOS_DLL_PREFIX: &'static str = "lib";
pub static MACOS_DLL_SUFFIX: &'static str = ".dylib";
pub static WIN32_DLL_PREFIX: &'static str = "";
pub static WIN32_DLL_SUFFIX: &'static str = ".dll";
pub static LINUX_DLL_PREFIX: &'static str = "lib";
pub static LINUX_DLL_SUFFIX: &'static str = ".so";
pub static FREEBSD_DLL_PREFIX: &'static str = "lib";
pub static FREEBSD_DLL_SUFFIX: &'static str = ".so";
pub static ANDROID_DLL_PREFIX: &'static str = "lib";
pub static ANDROID_DLL_SUFFIX: &'static str = ".so";
pub enum Os {
OsMacos,
OsWin32,
OsLinux,
OsAndroid,
OsFreebsd
}
pub struct CrateMismatch {
path: Path,
got: ~str,
}
pub struct Context<'a> {
pub sess: &'a Session,
pub span: Span,
pub ident: &'a str,
pub crate_id: &'a CrateId,
pub id_hash: &'a str,
pub hash: Option<&'a Svh>,
pub triple: &'a str,
pub os: Os,
pub filesearch: FileSearch<'a>,
pub root: &'a Option<CratePaths>,
pub rejected_via_hash: Vec<CrateMismatch>,
pub rejected_via_triple: Vec<CrateMismatch>,
}
pub struct Library {
pub dylib: Option<Path>,
pub rlib: Option<Path>,
pub metadata: MetadataBlob,
}
pub struct ArchiveMetadata {
archive: ArchiveRO,
// See comments in ArchiveMetadata::new for why this is static
data: &'static [u8],
}
pub struct CratePaths {
pub ident: ~str,
pub dylib: Option<Path>,
pub rlib: Option<Path>
}
impl CratePaths {
fn paths(&self) -> Vec<Path> {
match (&self.dylib, &self.rlib) {
(&None, &None) => vec!(),
(&Some(ref p), &None) |
(&None, &Some(ref p)) => vec!(p.clone()),
(&Some(ref p1), &Some(ref p2)) => vec!(p1.clone(), p2.clone()),
}
}
}
impl<'a> Context<'a> {
pub fn maybe_load_library_crate(&mut self) -> Option<Library> {
self.find_library_crate()
}
pub fn load_library_crate(&mut self) -> Library {
match self.find_library_crate() {
Some(t) => t,
None => {
self.report_load_errs();
unreachable!()
}
}
}
pub fn report_load_errs(&mut self) {
let message = if self.rejected_via_hash.len() > 0 {
format!("found possibly newer version of crate `{}`",
self.ident)
} else if self.rejected_via_triple.len() > 0 {
format!("found incorrect triple for crate `{}`", self.ident)
} else {
format!("can't find crate for `{}`", self.ident)
};
let message = match self.root {
&None => message,
&Some(ref r) => format!("{} which `{}` depends on",
message, r.ident)
};
self.sess.span_err(self.span, message);
let mismatches = self.rejected_via_triple.iter();
if self.rejected_via_triple.len() > 0 {
self.sess.span_note(self.span, format!("expected triple of {}", self.triple));
for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}, triple {}: {}",
self.ident, i+1, got, path.display()));
}
}
if self.rejected_via_hash.len() > 0 {
self.sess.span_note(self.span, "perhaps this crate needs \
to be recompiled?");
let mismatches = self.rejected_via_hash.iter();
for (i, &CrateMismatch{ ref path,.. }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
self.ident, i+1, path.display()));
}
match self.root {
&None => {}
&Some(ref r) => {
for (i, path) in r.paths().iter().enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
r.ident, i+1, path.display()));
}
}
}
}
self.sess.abort_if_errors();
}
fn find_library_crate(&mut self) -> Option<Library> {
let (dyprefix, dysuffix) = self.dylibname();
// want: crate_name.dir_part() + prefix + crate_name.file_part + "-"
let dylib_prefix = format!("{}{}-", dyprefix, self.crate_id.name);
let rlib_prefix = format!("lib{}-", self.crate_id.name);
let mut candidates = HashMap::new();
// First, find all possible candidate rlibs and dylibs purely based on
// the name of the files themselves. We're trying to match against an
// exact crate_id and a possibly an exact hash.
//
// During this step, we can filter all found libraries based on the
// name and id found in the crate id (we ignore the path portion for
// filename matching), as well as the exact hash (if specified). If we
// end up having many candidates, we must look at the metadata to
// perform exact matches against hashes/crate ids. Note that opening up
// the metadata is where we do an exact match against the full contents
// of the crate id (path/name/id).
//
// The goal of this step is to look at as little metadata as possible.
self.filesearch.search(|path| {
let file = match path.filename_str() {
None => return FileDoesntMatch,
Some(file) => file,
};
if file.starts_with(rlib_prefix) && file.ends_with(".rlib") {
info!("rlib candidate: {}", path.display());
match self.try_match(file, rlib_prefix, ".rlib") {
Some(hash) => {
info!("rlib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (ref mut rlibs, _) = *slot;
rlibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("rlib rejected");
FileDoesntMatch
}
}
} else if file.starts_with(dylib_prefix) && file.ends_with(dysuffix){
info!("dylib candidate: {}", path.display());
match self.try_match(file, dylib_prefix, dysuffix) {
Some(hash) => {
info!("dylib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (_, ref mut dylibs) = *slot;
dylibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("dylib rejected");
FileDoesntMatch
}
}
} else {
FileDoesntMatch
}
});
// We have now collected all known libraries into a set of candidates
// keyed of the filename hash listed. For each filename, we also have a
// list of rlibs/dylibs that apply. Here, we map each of these lists
// (per hash), to a Library candidate for returning.
//
// A Library candidate is created if the metadata for the set of
// libraries corresponds to the crate id and hash criteria that this
// search is being performed for.
let mut libraries = Vec::new();
for (_hash, (rlibs, dylibs)) in candidates.move_iter() {
let mut metadata = None;
let rlib = self.extract_one(rlibs, "rlib", &mut metadata);
let dylib = self.extract_one(dylibs, "dylib", &mut metadata);
match metadata {
Some(metadata) => {
libraries.push(Library {
dylib: dylib,
rlib: rlib,
metadata: metadata,
})
}
None => {}
}
}
// Having now translated all relevant found hashes into libraries, see
// what we've got and figure out if we found multiple candidates for
// libraries or not.
match libraries.len() {
0 => None,
1 => Some(libraries.move_iter().next().unwrap()),
_ => {
self.sess.span_err(self.span,
format!("multiple matching crates for `{}`",
self.crate_id.name));
self.sess.note("candidates:");
for lib in libraries.iter() {
match lib.dylib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
match lib.rlib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
let data = lib.metadata.as_slice();
let crate_id = decoder::get_crate_id(data);
note_crateid_attr(self.sess.diagnostic(), &crate_id);
}
None
}
}
}
// Attempts to match the requested version of a library against the file
// specified. The prefix/suffix are specified (disambiguates between
// rlib/dylib).
//
// The return value is `None` if `file` doesn't look like a rust-generated
// library, or if a specific version was requested and it doesn't match the
// apparent file's version.
//
// If everything checks out, then `Some(hash)` is returned where `hash` is
// the listed hash in the filename itself.
fn try_match(&self, file: &str, prefix: &str, suffix: &str) -> Option<~str>{
let middle = file.slice(prefix.len(), file.len() - suffix.len());
debug!("matching -- {}, middle: {}", file, middle);
let mut parts = middle.splitn('-', 1);
let hash = match parts.next() { Some(h) => h, None => return None };
debug!("matching -- {}, hash: {} (want {})", file, hash, self.id_hash);
let vers = match parts.next() { Some(v) => v, None => return None };
debug!("matching -- {}, vers: {} (want {})", file, vers,
self.crate_id.version);
match self.crate_id.version {
Some(ref version) if version.as_slice()!= vers => return None,
Some(..) => {} // check the hash
// hash is irrelevant, no version specified
None => return Some(hash.to_owned())
}
debug!("matching -- {}, vers ok", file);
// hashes in filenames are prefixes of the "true hash"
if self.id_hash == hash.as_slice() {
debug!("matching -- {}, hash ok", file);
Some(hash.to_owned())
} else {
None
}
}
// Attempts to extract *one* library from the set `m`. If the set has no
// elements, `None` is returned. If the set has more than one element, then
// the errors and notes are emitted about the set of libraries.
//
// With only one library in the set, this function will extract it, and then
// read the metadata from it if `*slot` is `None`. If the metadata couldn't
// be read, it is assumed that the file isn't a valid rust library (no
// errors are emitted).
fn extract_one(&mut self, m: HashSet<Path>, flavor: &str,
slot: &mut Option<MetadataBlob>) -> Option<Path> {
let mut ret = None::<Path>;
let mut error = 0;
if slot.is_some() {
// FIXME(#10786): for an optimization, we only read one of the
// library's metadata sections. In theory we should
// read both, but reading dylib metadata is quite
// slow.
if m.len() == 0 {
return None
} else if m.len() == 1 {
return Some(m.move_iter().next().unwrap())
}
}
for lib in m.move_iter() {
info!("{} reading metadata from: {}", flavor, lib.display());
let metadata = match get_metadata_section(self.os, &lib) {
Ok(blob) => {
if self.crate_matches(blob.as_slice(), &lib) {
blob
} else {
info!("metadata mismatch");
continue
}
}
Err(_) => {
info!("no metadata found");
continue
}
};
if ret.is_some() {
self.sess.span_err(self.span,
format!("multiple {} candidates for `{}` \
found", flavor, self.crate_id.name));
self.sess.span_note(self.span,
format!(r"candidate \#1: {}",
ret.get_ref().display()));
error = 1;
ret = None;
}
if error > 0 {
error += 1;
self.sess.span_note(self.span,
format!(r"candidate \#{}: {}", error,
lib.display()));
continue
}
*slot = Some(metadata);
ret = Some(lib);
}
return if error > 0 {None} else {ret}
}
fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> bool {
match decoder::maybe_get_crate_id(crate_data) {
Some(ref id) if self.crate_id.matches(id) => {}
_ => { info!("Rejecting via crate_id"); return false }
}
let hash = match decoder::maybe_get_crate_hash(crate_data) {
Some(hash) => hash, None => {
info!("Rejecting via lack of crate hash");
return false;
}
};
let triple = decoder::get_crate_triple(crate_data);
if triple.as_slice()!= self.triple {
info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
self.rejected_via_triple.push(CrateMismatch{ path: libpath.clone(),
got: triple.to_owned() });
return false;
}
match self.hash {
None => true,
Some(myhash) => {
if *myhash!= hash {
info!("Rejecting via hash: expected {} got {}", *myhash, hash);
self.rejected_via_hash.push(CrateMismatch{ path: libpath.clone(),
got: myhash.as_str().to_owned() });
false
} else {
true
}
}
}
}
// Returns the corresponding (prefix, suffix) that files need to have for
// dynamic libraries
fn dylibname(&self) -> (&'static str, &'static str) {
match self.os {
OsWin32 => (WIN32_DLL_PREFIX, WIN32_DLL_SUFFIX),
OsMacos => (MACOS_DLL_PREFIX, MACOS_DLL_SUFFIX),
OsLinux => (LINUX_DLL_PREFIX, LINUX_DLL_SUFFIX),
OsAndroid => (ANDROID_DLL_PREFIX, ANDROID_DLL_SUFFIX),
OsFreebsd => (FREEBSD_DLL_PREFIX, FREEBSD_DLL_SUFFIX),
}
}
}
pub fn note_crateid_attr(diag: &SpanHandler, crateid: &CrateId) {
diag.handler().note(format!("crate_id: {}", crateid.to_str()));
}
impl ArchiveMetadata {
fn new(ar: ArchiveRO) -> Option<ArchiveMetadata> {
let data: &'static [u8] = {
let data = match ar.read(METADATA_FILENAME) {
Some(data) => data,
| random_line_split |
||
loader.rs | (&None, &None) => vec!(),
(&Some(ref p), &None) |
(&None, &Some(ref p)) => vec!(p.clone()),
(&Some(ref p1), &Some(ref p2)) => vec!(p1.clone(), p2.clone()),
}
}
}
impl<'a> Context<'a> {
pub fn maybe_load_library_crate(&mut self) -> Option<Library> {
self.find_library_crate()
}
pub fn load_library_crate(&mut self) -> Library {
match self.find_library_crate() {
Some(t) => t,
None => {
self.report_load_errs();
unreachable!()
}
}
}
pub fn report_load_errs(&mut self) {
let message = if self.rejected_via_hash.len() > 0 {
format!("found possibly newer version of crate `{}`",
self.ident)
} else if self.rejected_via_triple.len() > 0 {
format!("found incorrect triple for crate `{}`", self.ident)
} else {
format!("can't find crate for `{}`", self.ident)
};
let message = match self.root {
&None => message,
&Some(ref r) => format!("{} which `{}` depends on",
message, r.ident)
};
self.sess.span_err(self.span, message);
let mismatches = self.rejected_via_triple.iter();
if self.rejected_via_triple.len() > 0 {
self.sess.span_note(self.span, format!("expected triple of {}", self.triple));
for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}, triple {}: {}",
self.ident, i+1, got, path.display()));
}
}
if self.rejected_via_hash.len() > 0 {
self.sess.span_note(self.span, "perhaps this crate needs \
to be recompiled?");
let mismatches = self.rejected_via_hash.iter();
for (i, &CrateMismatch{ ref path,.. }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
self.ident, i+1, path.display()));
}
match self.root {
&None => {}
&Some(ref r) => {
for (i, path) in r.paths().iter().enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
r.ident, i+1, path.display()));
}
}
}
}
self.sess.abort_if_errors();
}
fn find_library_crate(&mut self) -> Option<Library> {
let (dyprefix, dysuffix) = self.dylibname();
// want: crate_name.dir_part() + prefix + crate_name.file_part + "-"
let dylib_prefix = format!("{}{}-", dyprefix, self.crate_id.name);
let rlib_prefix = format!("lib{}-", self.crate_id.name);
let mut candidates = HashMap::new();
// First, find all possible candidate rlibs and dylibs purely based on
// the name of the files themselves. We're trying to match against an
// exact crate_id and a possibly an exact hash.
//
// During this step, we can filter all found libraries based on the
// name and id found in the crate id (we ignore the path portion for
// filename matching), as well as the exact hash (if specified). If we
// end up having many candidates, we must look at the metadata to
// perform exact matches against hashes/crate ids. Note that opening up
// the metadata is where we do an exact match against the full contents
// of the crate id (path/name/id).
//
// The goal of this step is to look at as little metadata as possible.
self.filesearch.search(|path| {
let file = match path.filename_str() {
None => return FileDoesntMatch,
Some(file) => file,
};
if file.starts_with(rlib_prefix) && file.ends_with(".rlib") {
info!("rlib candidate: {}", path.display());
match self.try_match(file, rlib_prefix, ".rlib") {
Some(hash) => {
info!("rlib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (ref mut rlibs, _) = *slot;
rlibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("rlib rejected");
FileDoesntMatch
}
}
} else if file.starts_with(dylib_prefix) && file.ends_with(dysuffix){
info!("dylib candidate: {}", path.display());
match self.try_match(file, dylib_prefix, dysuffix) {
Some(hash) => {
info!("dylib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (_, ref mut dylibs) = *slot;
dylibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("dylib rejected");
FileDoesntMatch
}
}
} else {
FileDoesntMatch
}
});
// We have now collected all known libraries into a set of candidates
// keyed of the filename hash listed. For each filename, we also have a
// list of rlibs/dylibs that apply. Here, we map each of these lists
// (per hash), to a Library candidate for returning.
//
// A Library candidate is created if the metadata for the set of
// libraries corresponds to the crate id and hash criteria that this
// search is being performed for.
let mut libraries = Vec::new();
for (_hash, (rlibs, dylibs)) in candidates.move_iter() {
let mut metadata = None;
let rlib = self.extract_one(rlibs, "rlib", &mut metadata);
let dylib = self.extract_one(dylibs, "dylib", &mut metadata);
match metadata {
Some(metadata) => {
libraries.push(Library {
dylib: dylib,
rlib: rlib,
metadata: metadata,
})
}
None => {}
}
}
// Having now translated all relevant found hashes into libraries, see
// what we've got and figure out if we found multiple candidates for
// libraries or not.
match libraries.len() {
0 => None,
1 => Some(libraries.move_iter().next().unwrap()),
_ => {
self.sess.span_err(self.span,
format!("multiple matching crates for `{}`",
self.crate_id.name));
self.sess.note("candidates:");
for lib in libraries.iter() {
match lib.dylib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
match lib.rlib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => |
}
let data = lib.metadata.as_slice();
let crate_id = decoder::get_crate_id(data);
note_crateid_attr(self.sess.diagnostic(), &crate_id);
}
None
}
}
}
// Attempts to match the requested version of a library against the file
// specified. The prefix/suffix are specified (disambiguates between
// rlib/dylib).
//
// The return value is `None` if `file` doesn't look like a rust-generated
// library, or if a specific version was requested and it doesn't match the
// apparent file's version.
//
// If everything checks out, then `Some(hash)` is returned where `hash` is
// the listed hash in the filename itself.
fn try_match(&self, file: &str, prefix: &str, suffix: &str) -> Option<~str>{
let middle = file.slice(prefix.len(), file.len() - suffix.len());
debug!("matching -- {}, middle: {}", file, middle);
let mut parts = middle.splitn('-', 1);
let hash = match parts.next() { Some(h) => h, None => return None };
debug!("matching -- {}, hash: {} (want {})", file, hash, self.id_hash);
let vers = match parts.next() { Some(v) => v, None => return None };
debug!("matching -- {}, vers: {} (want {})", file, vers,
self.crate_id.version);
match self.crate_id.version {
Some(ref version) if version.as_slice()!= vers => return None,
Some(..) => {} // check the hash
// hash is irrelevant, no version specified
None => return Some(hash.to_owned())
}
debug!("matching -- {}, vers ok", file);
// hashes in filenames are prefixes of the "true hash"
if self.id_hash == hash.as_slice() {
debug!("matching -- {}, hash ok", file);
Some(hash.to_owned())
} else {
None
}
}
// Attempts to extract *one* library from the set `m`. If the set has no
// elements, `None` is returned. If the set has more than one element, then
// the errors and notes are emitted about the set of libraries.
//
// With only one library in the set, this function will extract it, and then
// read the metadata from it if `*slot` is `None`. If the metadata couldn't
// be read, it is assumed that the file isn't a valid rust library (no
// errors are emitted).
fn extract_one(&mut self, m: HashSet<Path>, flavor: &str,
slot: &mut Option<MetadataBlob>) -> Option<Path> {
let mut ret = None::<Path>;
let mut error = 0;
if slot.is_some() {
// FIXME(#10786): for an optimization, we only read one of the
// library's metadata sections. In theory we should
// read both, but reading dylib metadata is quite
// slow.
if m.len() == 0 {
return None
} else if m.len() == 1 {
return Some(m.move_iter().next().unwrap())
}
}
for lib in m.move_iter() {
info!("{} reading metadata from: {}", flavor, lib.display());
let metadata = match get_metadata_section(self.os, &lib) {
Ok(blob) => {
if self.crate_matches(blob.as_slice(), &lib) {
blob
} else {
info!("metadata mismatch");
continue
}
}
Err(_) => {
info!("no metadata found");
continue
}
};
if ret.is_some() {
self.sess.span_err(self.span,
format!("multiple {} candidates for `{}` \
found", flavor, self.crate_id.name));
self.sess.span_note(self.span,
format!(r"candidate \#1: {}",
ret.get_ref().display()));
error = 1;
ret = None;
}
if error > 0 {
error += 1;
self.sess.span_note(self.span,
format!(r"candidate \#{}: {}", error,
lib.display()));
continue
}
*slot = Some(metadata);
ret = Some(lib);
}
return if error > 0 {None} else {ret}
}
fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> bool {
match decoder::maybe_get_crate_id(crate_data) {
Some(ref id) if self.crate_id.matches(id) => {}
_ => { info!("Rejecting via crate_id"); return false }
}
let hash = match decoder::maybe_get_crate_hash(crate_data) {
Some(hash) => hash, None => {
info!("Rejecting via lack of crate hash");
return false;
}
};
let triple = decoder::get_crate_triple(crate_data);
if triple.as_slice()!= self.triple {
info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
self.rejected_via_triple.push(CrateMismatch{ path: libpath.clone(),
got: triple.to_owned() });
return false;
}
match self.hash {
None => true,
Some(myhash) => {
if *myhash!= hash {
info!("Rejecting via hash: expected {} got {}", *myhash, hash);
self.rejected_via_hash.push(CrateMismatch{ path: libpath.clone(),
got: myhash.as_str().to_owned() });
false
} else {
true
}
}
}
}
// Returns the corresponding (prefix, suffix) that files need to have for
// dynamic libraries
fn dylibname(&self) -> (&'static str, &'static str) {
match self.os {
OsWin32 => (WIN32_DLL_PREFIX, WIN32_DLL_SUFFIX),
OsMacos => (MACOS_DLL_PREFIX, MACOS_DLL_SUFFIX),
OsLinux => (LINUX_DLL_PREFIX, LINUX_DLL_SUFFIX),
OsAndroid => (ANDROID_DLL_PREFIX, ANDROID_DLL_SUFFIX),
OsFreebsd => (FREEBSD_DLL_PREFIX, FREEBSD_DLL_SUFFIX),
}
}
}
pub fn note_crateid_attr(diag: &SpanHandler, crateid: &CrateId) {
diag.handler().note(format!("crate_id: {}", crateid.to_str()));
}
impl ArchiveMetadata {
fn new(ar: ArchiveRO) -> Option<ArchiveMetadata> {
let data: &'static [u8] = {
let data = match ar.read(METADATA_FILENAME) {
Some(data) => data,
None => {
debug!("didn't find '{}' in the archive", METADATA_FILENAME);
return None;
}
};
// This data is actually a pointer inside of the archive itself, but
// we essentially want to cache it because the lookup inside the
// archive is a fairly expensive operation (and it's queried for
// *very* frequently). For this reason, we transmute it to the
// static lifetime to put into the struct. Note that the buffer is
// never actually handed out with a static lifetime, but rather the
// buffer is loaned with the lifetime of this containing object.
// Hence, we're guaranteed that the buffer will never be used after
// this object is dead, so this is a safe operation to transmute and
// store the data as a static buffer.
unsafe { cast::transmute(data) }
};
Some(ArchiveMetadata {
archive: ar,
data: data,
})
}
pub fn as_slice<'a>(&'a self) -> &'a [u8] { self.data }
}
// Just a small wrapper to time how long reading metadata takes.
fn get_metadata_section(os: Os, filename: &Path) -> Result<MetadataBlob, ~str> {
let start = time::precise_time_ns();
let ret = get_metadata_section_imp(os, filename);
info!("reading {} => {}ms", filename.filename_display(),
(time::precise_time_ns() - start) / 1000000);
return ret;
| {} | conditional_block |
loader.rs | {
pub ident: ~str,
pub dylib: Option<Path>,
pub rlib: Option<Path>
}
impl CratePaths {
fn paths(&self) -> Vec<Path> {
match (&self.dylib, &self.rlib) {
(&None, &None) => vec!(),
(&Some(ref p), &None) |
(&None, &Some(ref p)) => vec!(p.clone()),
(&Some(ref p1), &Some(ref p2)) => vec!(p1.clone(), p2.clone()),
}
}
}
impl<'a> Context<'a> {
pub fn maybe_load_library_crate(&mut self) -> Option<Library> {
self.find_library_crate()
}
pub fn load_library_crate(&mut self) -> Library {
match self.find_library_crate() {
Some(t) => t,
None => {
self.report_load_errs();
unreachable!()
}
}
}
pub fn report_load_errs(&mut self) {
let message = if self.rejected_via_hash.len() > 0 {
format!("found possibly newer version of crate `{}`",
self.ident)
} else if self.rejected_via_triple.len() > 0 {
format!("found incorrect triple for crate `{}`", self.ident)
} else {
format!("can't find crate for `{}`", self.ident)
};
let message = match self.root {
&None => message,
&Some(ref r) => format!("{} which `{}` depends on",
message, r.ident)
};
self.sess.span_err(self.span, message);
let mismatches = self.rejected_via_triple.iter();
if self.rejected_via_triple.len() > 0 {
self.sess.span_note(self.span, format!("expected triple of {}", self.triple));
for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}, triple {}: {}",
self.ident, i+1, got, path.display()));
}
}
if self.rejected_via_hash.len() > 0 {
self.sess.span_note(self.span, "perhaps this crate needs \
to be recompiled?");
let mismatches = self.rejected_via_hash.iter();
for (i, &CrateMismatch{ ref path,.. }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
self.ident, i+1, path.display()));
}
match self.root {
&None => {}
&Some(ref r) => {
for (i, path) in r.paths().iter().enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
r.ident, i+1, path.display()));
}
}
}
}
self.sess.abort_if_errors();
}
fn find_library_crate(&mut self) -> Option<Library> {
let (dyprefix, dysuffix) = self.dylibname();
// want: crate_name.dir_part() + prefix + crate_name.file_part + "-"
let dylib_prefix = format!("{}{}-", dyprefix, self.crate_id.name);
let rlib_prefix = format!("lib{}-", self.crate_id.name);
let mut candidates = HashMap::new();
// First, find all possible candidate rlibs and dylibs purely based on
// the name of the files themselves. We're trying to match against an
// exact crate_id and a possibly an exact hash.
//
// During this step, we can filter all found libraries based on the
// name and id found in the crate id (we ignore the path portion for
// filename matching), as well as the exact hash (if specified). If we
// end up having many candidates, we must look at the metadata to
// perform exact matches against hashes/crate ids. Note that opening up
// the metadata is where we do an exact match against the full contents
// of the crate id (path/name/id).
//
// The goal of this step is to look at as little metadata as possible.
self.filesearch.search(|path| {
let file = match path.filename_str() {
None => return FileDoesntMatch,
Some(file) => file,
};
if file.starts_with(rlib_prefix) && file.ends_with(".rlib") {
info!("rlib candidate: {}", path.display());
match self.try_match(file, rlib_prefix, ".rlib") {
Some(hash) => {
info!("rlib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (ref mut rlibs, _) = *slot;
rlibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("rlib rejected");
FileDoesntMatch
}
}
} else if file.starts_with(dylib_prefix) && file.ends_with(dysuffix){
info!("dylib candidate: {}", path.display());
match self.try_match(file, dylib_prefix, dysuffix) {
Some(hash) => {
info!("dylib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (_, ref mut dylibs) = *slot;
dylibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("dylib rejected");
FileDoesntMatch
}
}
} else {
FileDoesntMatch
}
});
// We have now collected all known libraries into a set of candidates
// keyed of the filename hash listed. For each filename, we also have a
// list of rlibs/dylibs that apply. Here, we map each of these lists
// (per hash), to a Library candidate for returning.
//
// A Library candidate is created if the metadata for the set of
// libraries corresponds to the crate id and hash criteria that this
// search is being performed for.
let mut libraries = Vec::new();
for (_hash, (rlibs, dylibs)) in candidates.move_iter() {
let mut metadata = None;
let rlib = self.extract_one(rlibs, "rlib", &mut metadata);
let dylib = self.extract_one(dylibs, "dylib", &mut metadata);
match metadata {
Some(metadata) => {
libraries.push(Library {
dylib: dylib,
rlib: rlib,
metadata: metadata,
})
}
None => {}
}
}
// Having now translated all relevant found hashes into libraries, see
// what we've got and figure out if we found multiple candidates for
// libraries or not.
match libraries.len() {
0 => None,
1 => Some(libraries.move_iter().next().unwrap()),
_ => {
self.sess.span_err(self.span,
format!("multiple matching crates for `{}`",
self.crate_id.name));
self.sess.note("candidates:");
for lib in libraries.iter() {
match lib.dylib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
match lib.rlib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
let data = lib.metadata.as_slice();
let crate_id = decoder::get_crate_id(data);
note_crateid_attr(self.sess.diagnostic(), &crate_id);
}
None
}
}
}
// Attempts to match the requested version of a library against the file
// specified. The prefix/suffix are specified (disambiguates between
// rlib/dylib).
//
// The return value is `None` if `file` doesn't look like a rust-generated
// library, or if a specific version was requested and it doesn't match the
// apparent file's version.
//
// If everything checks out, then `Some(hash)` is returned where `hash` is
// the listed hash in the filename itself.
fn try_match(&self, file: &str, prefix: &str, suffix: &str) -> Option<~str>{
let middle = file.slice(prefix.len(), file.len() - suffix.len());
debug!("matching -- {}, middle: {}", file, middle);
let mut parts = middle.splitn('-', 1);
let hash = match parts.next() { Some(h) => h, None => return None };
debug!("matching -- {}, hash: {} (want {})", file, hash, self.id_hash);
let vers = match parts.next() { Some(v) => v, None => return None };
debug!("matching -- {}, vers: {} (want {})", file, vers,
self.crate_id.version);
match self.crate_id.version {
Some(ref version) if version.as_slice()!= vers => return None,
Some(..) => {} // check the hash
// hash is irrelevant, no version specified
None => return Some(hash.to_owned())
}
debug!("matching -- {}, vers ok", file);
// hashes in filenames are prefixes of the "true hash"
if self.id_hash == hash.as_slice() {
debug!("matching -- {}, hash ok", file);
Some(hash.to_owned())
} else {
None
}
}
// Attempts to extract *one* library from the set `m`. If the set has no
// elements, `None` is returned. If the set has more than one element, then
// the errors and notes are emitted about the set of libraries.
//
// With only one library in the set, this function will extract it, and then
// read the metadata from it if `*slot` is `None`. If the metadata couldn't
// be read, it is assumed that the file isn't a valid rust library (no
// errors are emitted).
fn extract_one(&mut self, m: HashSet<Path>, flavor: &str,
slot: &mut Option<MetadataBlob>) -> Option<Path> {
let mut ret = None::<Path>;
let mut error = 0;
if slot.is_some() {
// FIXME(#10786): for an optimization, we only read one of the
// library's metadata sections. In theory we should
// read both, but reading dylib metadata is quite
// slow.
if m.len() == 0 {
return None
} else if m.len() == 1 {
return Some(m.move_iter().next().unwrap())
}
}
for lib in m.move_iter() {
info!("{} reading metadata from: {}", flavor, lib.display());
let metadata = match get_metadata_section(self.os, &lib) {
Ok(blob) => {
if self.crate_matches(blob.as_slice(), &lib) {
blob
} else {
info!("metadata mismatch");
continue
}
}
Err(_) => {
info!("no metadata found");
continue
}
};
if ret.is_some() {
self.sess.span_err(self.span,
format!("multiple {} candidates for `{}` \
found", flavor, self.crate_id.name));
self.sess.span_note(self.span,
format!(r"candidate \#1: {}",
ret.get_ref().display()));
error = 1;
ret = None;
}
if error > 0 {
error += 1;
self.sess.span_note(self.span,
format!(r"candidate \#{}: {}", error,
lib.display()));
continue
}
*slot = Some(metadata);
ret = Some(lib);
}
return if error > 0 {None} else {ret}
}
fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> bool {
match decoder::maybe_get_crate_id(crate_data) {
Some(ref id) if self.crate_id.matches(id) => {}
_ => { info!("Rejecting via crate_id"); return false }
}
let hash = match decoder::maybe_get_crate_hash(crate_data) {
Some(hash) => hash, None => {
info!("Rejecting via lack of crate hash");
return false;
}
};
let triple = decoder::get_crate_triple(crate_data);
if triple.as_slice()!= self.triple {
info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
self.rejected_via_triple.push(CrateMismatch{ path: libpath.clone(),
got: triple.to_owned() });
return false;
}
match self.hash {
None => true,
Some(myhash) => {
if *myhash!= hash {
info!("Rejecting via hash: expected {} got {}", *myhash, hash);
self.rejected_via_hash.push(CrateMismatch{ path: libpath.clone(),
got: myhash.as_str().to_owned() });
false
} else {
true
}
}
}
}
// Returns the corresponding (prefix, suffix) that files need to have for
// dynamic libraries
fn dylibname(&self) -> (&'static str, &'static str) {
match self.os {
OsWin32 => (WIN32_DLL_PREFIX, WIN32_DLL_SUFFIX),
OsMacos => (MACOS_DLL_PREFIX, MACOS_DLL_SUFFIX),
OsLinux => (LINUX_DLL_PREFIX, LINUX_DLL_SUFFIX),
OsAndroid => (ANDROID_DLL_PREFIX, ANDROID_DLL_SUFFIX),
OsFreebsd => (FREEBSD_DLL_PREFIX, FREEBSD_DLL_SUFFIX),
}
}
}
pub fn note_crateid_attr(diag: &SpanHandler, crateid: &CrateId) {
diag.handler().note(format!("crate_id: {}", crateid.to_str()));
}
impl ArchiveMetadata {
fn new(ar: ArchiveRO) -> Option<ArchiveMetadata> {
let data: &'static [u8] = {
let data = match ar.read(METADATA_FILENAME) {
Some(data) => data,
None => {
debug!("didn't find '{}' in the archive", METADATA_FILENAME);
return None;
}
};
// This data is actually a pointer inside of the archive itself, but
// we essentially want to cache it because the lookup inside the
// archive is a fairly expensive operation (and it's queried for
// *very* frequently). For this reason, we transmute it to the
// static lifetime to put into the struct. Note that the buffer is
// never actually handed out with a static lifetime, but rather the
// buffer is loaned with the lifetime of this containing object.
// Hence, we're guaranteed that the buffer will never be used after
// this object is dead, so this is a safe operation to transmute and
// store the data as a static buffer.
unsafe { cast::transmute(data) }
};
Some(ArchiveMetadata {
archive: ar,
data: data,
})
}
pub fn as_slice<'a>(&'a self) -> &'a [u8] { self.data }
}
// Just a small wrapper to time how long reading metadata takes.
fn get_metadata_section(os: Os, filename: &Path) -> Result<MetadataBlob, ~str> { | CratePaths | identifier_name |
|
loader.rs |
}
impl<'a> Context<'a> {
pub fn maybe_load_library_crate(&mut self) -> Option<Library> {
self.find_library_crate()
}
pub fn load_library_crate(&mut self) -> Library {
match self.find_library_crate() {
Some(t) => t,
None => {
self.report_load_errs();
unreachable!()
}
}
}
pub fn report_load_errs(&mut self) {
let message = if self.rejected_via_hash.len() > 0 {
format!("found possibly newer version of crate `{}`",
self.ident)
} else if self.rejected_via_triple.len() > 0 {
format!("found incorrect triple for crate `{}`", self.ident)
} else {
format!("can't find crate for `{}`", self.ident)
};
let message = match self.root {
&None => message,
&Some(ref r) => format!("{} which `{}` depends on",
message, r.ident)
};
self.sess.span_err(self.span, message);
let mismatches = self.rejected_via_triple.iter();
if self.rejected_via_triple.len() > 0 {
self.sess.span_note(self.span, format!("expected triple of {}", self.triple));
for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}, triple {}: {}",
self.ident, i+1, got, path.display()));
}
}
if self.rejected_via_hash.len() > 0 {
self.sess.span_note(self.span, "perhaps this crate needs \
to be recompiled?");
let mismatches = self.rejected_via_hash.iter();
for (i, &CrateMismatch{ ref path,.. }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
self.ident, i+1, path.display()));
}
match self.root {
&None => {}
&Some(ref r) => {
for (i, path) in r.paths().iter().enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
r.ident, i+1, path.display()));
}
}
}
}
self.sess.abort_if_errors();
}
fn find_library_crate(&mut self) -> Option<Library> {
let (dyprefix, dysuffix) = self.dylibname();
// want: crate_name.dir_part() + prefix + crate_name.file_part + "-"
let dylib_prefix = format!("{}{}-", dyprefix, self.crate_id.name);
let rlib_prefix = format!("lib{}-", self.crate_id.name);
let mut candidates = HashMap::new();
// First, find all possible candidate rlibs and dylibs purely based on
// the name of the files themselves. We're trying to match against an
// exact crate_id and a possibly an exact hash.
//
// During this step, we can filter all found libraries based on the
// name and id found in the crate id (we ignore the path portion for
// filename matching), as well as the exact hash (if specified). If we
// end up having many candidates, we must look at the metadata to
// perform exact matches against hashes/crate ids. Note that opening up
// the metadata is where we do an exact match against the full contents
// of the crate id (path/name/id).
//
// The goal of this step is to look at as little metadata as possible.
self.filesearch.search(|path| {
let file = match path.filename_str() {
None => return FileDoesntMatch,
Some(file) => file,
};
if file.starts_with(rlib_prefix) && file.ends_with(".rlib") {
info!("rlib candidate: {}", path.display());
match self.try_match(file, rlib_prefix, ".rlib") {
Some(hash) => {
info!("rlib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (ref mut rlibs, _) = *slot;
rlibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("rlib rejected");
FileDoesntMatch
}
}
} else if file.starts_with(dylib_prefix) && file.ends_with(dysuffix){
info!("dylib candidate: {}", path.display());
match self.try_match(file, dylib_prefix, dysuffix) {
Some(hash) => {
info!("dylib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (_, ref mut dylibs) = *slot;
dylibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("dylib rejected");
FileDoesntMatch
}
}
} else {
FileDoesntMatch
}
});
// We have now collected all known libraries into a set of candidates
// keyed of the filename hash listed. For each filename, we also have a
// list of rlibs/dylibs that apply. Here, we map each of these lists
// (per hash), to a Library candidate for returning.
//
// A Library candidate is created if the metadata for the set of
// libraries corresponds to the crate id and hash criteria that this
// search is being performed for.
let mut libraries = Vec::new();
for (_hash, (rlibs, dylibs)) in candidates.move_iter() {
let mut metadata = None;
let rlib = self.extract_one(rlibs, "rlib", &mut metadata);
let dylib = self.extract_one(dylibs, "dylib", &mut metadata);
match metadata {
Some(metadata) => {
libraries.push(Library {
dylib: dylib,
rlib: rlib,
metadata: metadata,
})
}
None => {}
}
}
// Having now translated all relevant found hashes into libraries, see
// what we've got and figure out if we found multiple candidates for
// libraries or not.
match libraries.len() {
0 => None,
1 => Some(libraries.move_iter().next().unwrap()),
_ => {
self.sess.span_err(self.span,
format!("multiple matching crates for `{}`",
self.crate_id.name));
self.sess.note("candidates:");
for lib in libraries.iter() {
match lib.dylib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
match lib.rlib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
let data = lib.metadata.as_slice();
let crate_id = decoder::get_crate_id(data);
note_crateid_attr(self.sess.diagnostic(), &crate_id);
}
None
}
}
}
// Attempts to match the requested version of a library against the file
// specified. The prefix/suffix are specified (disambiguates between
// rlib/dylib).
//
// The return value is `None` if `file` doesn't look like a rust-generated
// library, or if a specific version was requested and it doesn't match the
// apparent file's version.
//
// If everything checks out, then `Some(hash)` is returned where `hash` is
// the listed hash in the filename itself.
fn try_match(&self, file: &str, prefix: &str, suffix: &str) -> Option<~str>{
let middle = file.slice(prefix.len(), file.len() - suffix.len());
debug!("matching -- {}, middle: {}", file, middle);
let mut parts = middle.splitn('-', 1);
let hash = match parts.next() { Some(h) => h, None => return None };
debug!("matching -- {}, hash: {} (want {})", file, hash, self.id_hash);
let vers = match parts.next() { Some(v) => v, None => return None };
debug!("matching -- {}, vers: {} (want {})", file, vers,
self.crate_id.version);
match self.crate_id.version {
Some(ref version) if version.as_slice()!= vers => return None,
Some(..) => {} // check the hash
// hash is irrelevant, no version specified
None => return Some(hash.to_owned())
}
debug!("matching -- {}, vers ok", file);
// hashes in filenames are prefixes of the "true hash"
if self.id_hash == hash.as_slice() {
debug!("matching -- {}, hash ok", file);
Some(hash.to_owned())
} else {
None
}
}
// Attempts to extract *one* library from the set `m`. If the set has no
// elements, `None` is returned. If the set has more than one element, then
// the errors and notes are emitted about the set of libraries.
//
// With only one library in the set, this function will extract it, and then
// read the metadata from it if `*slot` is `None`. If the metadata couldn't
// be read, it is assumed that the file isn't a valid rust library (no
// errors are emitted).
fn extract_one(&mut self, m: HashSet<Path>, flavor: &str,
slot: &mut Option<MetadataBlob>) -> Option<Path> {
let mut ret = None::<Path>;
let mut error = 0;
if slot.is_some() {
// FIXME(#10786): for an optimization, we only read one of the
// library's metadata sections. In theory we should
// read both, but reading dylib metadata is quite
// slow.
if m.len() == 0 {
return None
} else if m.len() == 1 {
return Some(m.move_iter().next().unwrap())
}
}
for lib in m.move_iter() {
info!("{} reading metadata from: {}", flavor, lib.display());
let metadata = match get_metadata_section(self.os, &lib) {
Ok(blob) => {
if self.crate_matches(blob.as_slice(), &lib) {
blob
} else {
info!("metadata mismatch");
continue
}
}
Err(_) => {
info!("no metadata found");
continue
}
};
if ret.is_some() {
self.sess.span_err(self.span,
format!("multiple {} candidates for `{}` \
found", flavor, self.crate_id.name));
self.sess.span_note(self.span,
format!(r"candidate \#1: {}",
ret.get_ref().display()));
error = 1;
ret = None;
}
if error > 0 {
error += 1;
self.sess.span_note(self.span,
format!(r"candidate \#{}: {}", error,
lib.display()));
continue
}
*slot = Some(metadata);
ret = Some(lib);
}
return if error > 0 {None} else {ret}
}
fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> bool {
match decoder::maybe_get_crate_id(crate_data) {
Some(ref id) if self.crate_id.matches(id) => {}
_ => { info!("Rejecting via crate_id"); return false }
}
let hash = match decoder::maybe_get_crate_hash(crate_data) {
Some(hash) => hash, None => {
info!("Rejecting via lack of crate hash");
return false;
}
};
let triple = decoder::get_crate_triple(crate_data);
if triple.as_slice()!= self.triple {
info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
self.rejected_via_triple.push(CrateMismatch{ path: libpath.clone(),
got: triple.to_owned() });
return false;
}
match self.hash {
None => true,
Some(myhash) => {
if *myhash!= hash {
info!("Rejecting via hash: expected {} got {}", *myhash, hash);
self.rejected_via_hash.push(CrateMismatch{ path: libpath.clone(),
got: myhash.as_str().to_owned() });
false
} else {
true
}
}
}
}
// Returns the corresponding (prefix, suffix) that files need to have for
// dynamic libraries
fn dylibname(&self) -> (&'static str, &'static str) {
match self.os {
OsWin32 => (WIN32_DLL_PREFIX, WIN32_DLL_SUFFIX),
OsMacos => (MACOS_DLL_PREFIX, MACOS_DLL_SUFFIX),
OsLinux => (LINUX_DLL_PREFIX, LINUX_DLL_SUFFIX),
OsAndroid => (ANDROID_DLL_PREFIX, ANDROID_DLL_SUFFIX),
OsFreebsd => (FREEBSD_DLL_PREFIX, FREEBSD_DLL_SUFFIX),
}
}
}
pub fn note_crateid_attr(diag: &SpanHandler, crateid: &CrateId) {
diag.handler().note(format!("crate_id: {}", crateid.to_str()));
}
impl ArchiveMetadata {
fn new(ar: ArchiveRO) -> Option<ArchiveMetadata> {
let data: &'static [u8] = {
let data = match ar.read(METADATA_FILENAME) {
Some(data) => data,
None => {
debug!("didn't find '{}' in the archive", METADATA_FILENAME);
return None;
}
};
// This data is actually a pointer inside of the archive itself, but
// we essentially want to cache it because the lookup inside the
// archive is a fairly expensive operation (and it's queried for
// *very* frequently). For this reason, we transmute it to the
// static lifetime to put into the struct. Note that the buffer is
// never actually handed out with a static lifetime, but rather the
// buffer is loaned with the lifetime of this containing object.
// Hence, we're guaranteed that the buffer will never be used after
// this object is dead, so this is a safe operation to transmute and
// store the data as a static buffer.
unsafe { cast::transmute(data) }
};
Some(ArchiveMetadata {
archive: ar,
data: data,
})
}
pub fn as_slice<'a>(&'a self) -> &'a [u8] { self.data }
}
// Just a small wrapper to time how long reading metadata takes.
fn get_metadata_section(os: Os, filename: &Path) -> Result<MetadataBlob, ~str> {
let start = time::precise_time_ns();
let ret = get_metadata_section_imp(os, filename);
info!("reading {} => {}ms", filename.filename_display(),
| {
match (&self.dylib, &self.rlib) {
(&None, &None) => vec!(),
(&Some(ref p), &None) |
(&None, &Some(ref p)) => vec!(p.clone()),
(&Some(ref p1), &Some(ref p2)) => vec!(p1.clone(), p2.clone()),
}
} | identifier_body |
|
generic_type_does_not_live_long_enough.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(existential_type)]
fn main() {
let y = 42;
let x = wrong_generic(&y);
let z: i32 = x; //~ ERROR mismatched types
}
existential type WrongGeneric<T>:'static;
//~^ ERROR the parameter type `T` may not live long enough
fn wrong_generic<T>(t: T) -> WrongGeneric<T> {
t | } | random_line_split |
|
generic_type_does_not_live_long_enough.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(existential_type)]
fn main() |
existential type WrongGeneric<T>:'static;
//~^ ERROR the parameter type `T` may not live long enough
fn wrong_generic<T>(t: T) -> WrongGeneric<T> {
t
}
| {
let y = 42;
let x = wrong_generic(&y);
let z: i32 = x; //~ ERROR mismatched types
} | identifier_body |
generic_type_does_not_live_long_enough.rs | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(existential_type)]
fn | () {
let y = 42;
let x = wrong_generic(&y);
let z: i32 = x; //~ ERROR mismatched types
}
existential type WrongGeneric<T>:'static;
//~^ ERROR the parameter type `T` may not live long enough
fn wrong_generic<T>(t: T) -> WrongGeneric<T> {
t
}
| main | identifier_name |
sectionalize_pass.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Breaks rustdocs into sections according to their headers
use astsrv;
use doc::ItemUtils;
use doc;
use fold::Fold;
use fold;
use pass::Pass;
use std::iterator::IteratorUtil;
pub fn mk_pass() -> Pass {
Pass {
name: ~"sectionalize",
f: run
}
}
pub fn run(_srv: astsrv::Srv, doc: doc::Doc) -> doc::Doc {
let fold = Fold {
fold_item: fold_item,
fold_trait: fold_trait,
fold_impl: fold_impl,
.. fold::default_any_fold(())
};
(fold.fold_doc)(&fold, doc)
}
fn fold_item(fold: &fold::Fold<()>, doc: doc::ItemDoc) -> doc::ItemDoc {
let doc = fold::default_seq_fold_item(fold, doc);
let (desc, sections) = sectionalize(copy doc.desc);
doc::ItemDoc {
desc: desc,
sections: sections,
.. doc
}
}
fn fold_trait(fold: &fold::Fold<()>, doc: doc::TraitDoc) -> doc::TraitDoc {
let doc = fold::default_seq_fold_trait(fold, doc);
doc::TraitDoc {
methods: do doc.methods.map |method| {
let (desc, sections) = sectionalize(copy method.desc);
doc::MethodDoc {
desc: desc,
sections: sections,
.. copy *method
}
},
.. doc
}
}
fn fold_impl(fold: &fold::Fold<()>, doc: doc::ImplDoc) -> doc::ImplDoc {
let doc = fold::default_seq_fold_impl(fold, doc);
doc::ImplDoc {
methods: do doc.methods.map |method| {
let (desc, sections) = sectionalize(copy method.desc);
doc::MethodDoc {
desc: desc,
sections: sections,
.. copy *method
}
},
.. doc
}
}
fn sectionalize(desc: Option<~str>) -> (Option<~str>, ~[doc::Section]) {
/*!
* Take a description of the form
*
* General text
*
* # Section header
*
* Section text
*
* # Section header
*
* Section text
*
* and remove each header and accompanying text into section records.
*/
if desc.is_none() {
return (None, ~[]);
}
let mut new_desc = None::<~str>;
let mut current_section = None;
let mut sections = ~[];
for desc.get_ref().any_line_iter().advance |line| {
match parse_header(line) {
Some(header) => {
if current_section.is_some() {
sections.push(copy *current_section.get_ref());
}
current_section = Some(doc::Section {
header: header.to_owned(),
body: ~""
});
}
None => {
match copy current_section {
Some(section) => {
current_section = Some(doc::Section {
body: fmt!("%s\n%s", section.body, line),
.. section
});
}
None => {
new_desc = match copy new_desc {
Some(desc) => {
Some(fmt!("%s\n%s", desc, line))
}
None => {
Some(line.to_owned())
}
};
}
}
}
}
}
if current_section.is_some() {
sections.push(current_section.unwrap());
}
(new_desc, sections)
}
fn parse_header<'a>(line: &'a str) -> Option<&'a str> {
if line.starts_with("# ") {
Some(line.slice_from(2))
} else {
None
}
}
#[cfg(test)]
mod test {
use astsrv;
use attr_pass;
use doc;
use extract;
use prune_hidden_pass;
use sectionalize_pass::run;
fn mk_doc(source: ~str) -> doc::Doc {
do astsrv::from_str(copy source) |srv| {
let doc = extract::from_srv(srv.clone(), ~"");
let doc = (attr_pass::mk_pass().f)(srv.clone(), doc);
let doc = (prune_hidden_pass::mk_pass().f)(srv.clone(), doc);
run(srv.clone(), doc)
}
}
#[test]
fn should_create_section_headers() {
let doc = mk_doc(
~"#[doc = \"\
# Header\n\
Body\"]\
mod a {
}");
assert!(doc.cratemod().mods()[0].item.sections[0].header.contains("Header"));
}
#[test]
fn should_create_section_bodies() {
let doc = mk_doc(
~"#[doc = \"\
# Header\n\
Body\"]\
mod a { | fn should_not_create_sections_from_indented_headers() {
let doc = mk_doc(
~"#[doc = \"\n\
Text\n # Header\n\
Body\"]\
mod a {
}");
assert!(doc.cratemod().mods()[0].item.sections.is_empty());
}
#[test]
fn should_remove_section_text_from_main_desc() {
let doc = mk_doc(
~"#[doc = \"\
Description\n\n\
# Header\n\
Body\"]\
mod a {
}");
assert!(!doc.cratemod().mods()[0].desc().get().contains("Header"));
assert!(!doc.cratemod().mods()[0].desc().get().contains("Body"));
}
#[test]
fn should_eliminate_desc_if_it_is_just_whitespace() {
let doc = mk_doc(
~"#[doc = \"\
# Header\n\
Body\"]\
mod a {
}");
assert_eq!(doc.cratemod().mods()[0].desc(), None);
}
#[test]
fn should_sectionalize_trait_methods() {
let doc = mk_doc(
~"trait i {
#[doc = \"\
# Header\n\
Body\"]\
fn a(); }");
assert_eq!(doc.cratemod().traits()[0].methods[0].sections.len(), 1u);
}
#[test]
fn should_sectionalize_impl_methods() {
let doc = mk_doc(
~"impl bool {
#[doc = \"\
# Header\n\
Body\"]\
fn a() { } }");
assert_eq!(doc.cratemod().impls()[0].methods[0].sections.len(), 1u);
}
} | }");
assert!(doc.cratemod().mods()[0].item.sections[0].body.contains("Body"));
}
#[test] | random_line_split |
sectionalize_pass.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Breaks rustdocs into sections according to their headers
use astsrv;
use doc::ItemUtils;
use doc;
use fold::Fold;
use fold;
use pass::Pass;
use std::iterator::IteratorUtil;
pub fn mk_pass() -> Pass {
Pass {
name: ~"sectionalize",
f: run
}
}
pub fn run(_srv: astsrv::Srv, doc: doc::Doc) -> doc::Doc {
let fold = Fold {
fold_item: fold_item,
fold_trait: fold_trait,
fold_impl: fold_impl,
.. fold::default_any_fold(())
};
(fold.fold_doc)(&fold, doc)
}
fn fold_item(fold: &fold::Fold<()>, doc: doc::ItemDoc) -> doc::ItemDoc |
fn fold_trait(fold: &fold::Fold<()>, doc: doc::TraitDoc) -> doc::TraitDoc {
let doc = fold::default_seq_fold_trait(fold, doc);
doc::TraitDoc {
methods: do doc.methods.map |method| {
let (desc, sections) = sectionalize(copy method.desc);
doc::MethodDoc {
desc: desc,
sections: sections,
.. copy *method
}
},
.. doc
}
}
fn fold_impl(fold: &fold::Fold<()>, doc: doc::ImplDoc) -> doc::ImplDoc {
let doc = fold::default_seq_fold_impl(fold, doc);
doc::ImplDoc {
methods: do doc.methods.map |method| {
let (desc, sections) = sectionalize(copy method.desc);
doc::MethodDoc {
desc: desc,
sections: sections,
.. copy *method
}
},
.. doc
}
}
fn sectionalize(desc: Option<~str>) -> (Option<~str>, ~[doc::Section]) {
/*!
* Take a description of the form
*
* General text
*
* # Section header
*
* Section text
*
* # Section header
*
* Section text
*
* and remove each header and accompanying text into section records.
*/
if desc.is_none() {
return (None, ~[]);
}
let mut new_desc = None::<~str>;
let mut current_section = None;
let mut sections = ~[];
for desc.get_ref().any_line_iter().advance |line| {
match parse_header(line) {
Some(header) => {
if current_section.is_some() {
sections.push(copy *current_section.get_ref());
}
current_section = Some(doc::Section {
header: header.to_owned(),
body: ~""
});
}
None => {
match copy current_section {
Some(section) => {
current_section = Some(doc::Section {
body: fmt!("%s\n%s", section.body, line),
.. section
});
}
None => {
new_desc = match copy new_desc {
Some(desc) => {
Some(fmt!("%s\n%s", desc, line))
}
None => {
Some(line.to_owned())
}
};
}
}
}
}
}
if current_section.is_some() {
sections.push(current_section.unwrap());
}
(new_desc, sections)
}
fn parse_header<'a>(line: &'a str) -> Option<&'a str> {
if line.starts_with("# ") {
Some(line.slice_from(2))
} else {
None
}
}
#[cfg(test)]
mod test {
use astsrv;
use attr_pass;
use doc;
use extract;
use prune_hidden_pass;
use sectionalize_pass::run;
fn mk_doc(source: ~str) -> doc::Doc {
do astsrv::from_str(copy source) |srv| {
let doc = extract::from_srv(srv.clone(), ~"");
let doc = (attr_pass::mk_pass().f)(srv.clone(), doc);
let doc = (prune_hidden_pass::mk_pass().f)(srv.clone(), doc);
run(srv.clone(), doc)
}
}
#[test]
fn should_create_section_headers() {
let doc = mk_doc(
~"#[doc = \"\
# Header\n\
Body\"]\
mod a {
}");
assert!(doc.cratemod().mods()[0].item.sections[0].header.contains("Header"));
}
#[test]
fn should_create_section_bodies() {
let doc = mk_doc(
~"#[doc = \"\
# Header\n\
Body\"]\
mod a {
}");
assert!(doc.cratemod().mods()[0].item.sections[0].body.contains("Body"));
}
#[test]
fn should_not_create_sections_from_indented_headers() {
let doc = mk_doc(
~"#[doc = \"\n\
Text\n # Header\n\
Body\"]\
mod a {
}");
assert!(doc.cratemod().mods()[0].item.sections.is_empty());
}
#[test]
fn should_remove_section_text_from_main_desc() {
let doc = mk_doc(
~"#[doc = \"\
Description\n\n\
# Header\n\
Body\"]\
mod a {
}");
assert!(!doc.cratemod().mods()[0].desc().get().contains("Header"));
assert!(!doc.cratemod().mods()[0].desc().get().contains("Body"));
}
#[test]
fn should_eliminate_desc_if_it_is_just_whitespace() {
let doc = mk_doc(
~"#[doc = \"\
# Header\n\
Body\"]\
mod a {
}");
assert_eq!(doc.cratemod().mods()[0].desc(), None);
}
#[test]
fn should_sectionalize_trait_methods() {
let doc = mk_doc(
~"trait i {
#[doc = \"\
# Header\n\
Body\"]\
fn a(); }");
assert_eq!(doc.cratemod().traits()[0].methods[0].sections.len(), 1u);
}
#[test]
fn should_sectionalize_impl_methods() {
let doc = mk_doc(
~"impl bool {
#[doc = \"\
# Header\n\
Body\"]\
fn a() { } }");
assert_eq!(doc.cratemod().impls()[0].methods[0].sections.len(), 1u);
}
}
| {
let doc = fold::default_seq_fold_item(fold, doc);
let (desc, sections) = sectionalize(copy doc.desc);
doc::ItemDoc {
desc: desc,
sections: sections,
.. doc
}
} | identifier_body |
sectionalize_pass.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Breaks rustdocs into sections according to their headers
use astsrv;
use doc::ItemUtils;
use doc;
use fold::Fold;
use fold;
use pass::Pass;
use std::iterator::IteratorUtil;
pub fn mk_pass() -> Pass {
Pass {
name: ~"sectionalize",
f: run
}
}
pub fn run(_srv: astsrv::Srv, doc: doc::Doc) -> doc::Doc {
let fold = Fold {
fold_item: fold_item,
fold_trait: fold_trait,
fold_impl: fold_impl,
.. fold::default_any_fold(())
};
(fold.fold_doc)(&fold, doc)
}
fn fold_item(fold: &fold::Fold<()>, doc: doc::ItemDoc) -> doc::ItemDoc {
let doc = fold::default_seq_fold_item(fold, doc);
let (desc, sections) = sectionalize(copy doc.desc);
doc::ItemDoc {
desc: desc,
sections: sections,
.. doc
}
}
fn fold_trait(fold: &fold::Fold<()>, doc: doc::TraitDoc) -> doc::TraitDoc {
let doc = fold::default_seq_fold_trait(fold, doc);
doc::TraitDoc {
methods: do doc.methods.map |method| {
let (desc, sections) = sectionalize(copy method.desc);
doc::MethodDoc {
desc: desc,
sections: sections,
.. copy *method
}
},
.. doc
}
}
fn fold_impl(fold: &fold::Fold<()>, doc: doc::ImplDoc) -> doc::ImplDoc {
let doc = fold::default_seq_fold_impl(fold, doc);
doc::ImplDoc {
methods: do doc.methods.map |method| {
let (desc, sections) = sectionalize(copy method.desc);
doc::MethodDoc {
desc: desc,
sections: sections,
.. copy *method
}
},
.. doc
}
}
fn | (desc: Option<~str>) -> (Option<~str>, ~[doc::Section]) {
/*!
* Take a description of the form
*
* General text
*
* # Section header
*
* Section text
*
* # Section header
*
* Section text
*
* and remove each header and accompanying text into section records.
*/
if desc.is_none() {
return (None, ~[]);
}
let mut new_desc = None::<~str>;
let mut current_section = None;
let mut sections = ~[];
for desc.get_ref().any_line_iter().advance |line| {
match parse_header(line) {
Some(header) => {
if current_section.is_some() {
sections.push(copy *current_section.get_ref());
}
current_section = Some(doc::Section {
header: header.to_owned(),
body: ~""
});
}
None => {
match copy current_section {
Some(section) => {
current_section = Some(doc::Section {
body: fmt!("%s\n%s", section.body, line),
.. section
});
}
None => {
new_desc = match copy new_desc {
Some(desc) => {
Some(fmt!("%s\n%s", desc, line))
}
None => {
Some(line.to_owned())
}
};
}
}
}
}
}
if current_section.is_some() {
sections.push(current_section.unwrap());
}
(new_desc, sections)
}
fn parse_header<'a>(line: &'a str) -> Option<&'a str> {
if line.starts_with("# ") {
Some(line.slice_from(2))
} else {
None
}
}
#[cfg(test)]
mod test {
use astsrv;
use attr_pass;
use doc;
use extract;
use prune_hidden_pass;
use sectionalize_pass::run;
fn mk_doc(source: ~str) -> doc::Doc {
do astsrv::from_str(copy source) |srv| {
let doc = extract::from_srv(srv.clone(), ~"");
let doc = (attr_pass::mk_pass().f)(srv.clone(), doc);
let doc = (prune_hidden_pass::mk_pass().f)(srv.clone(), doc);
run(srv.clone(), doc)
}
}
#[test]
fn should_create_section_headers() {
let doc = mk_doc(
~"#[doc = \"\
# Header\n\
Body\"]\
mod a {
}");
assert!(doc.cratemod().mods()[0].item.sections[0].header.contains("Header"));
}
#[test]
fn should_create_section_bodies() {
let doc = mk_doc(
~"#[doc = \"\
# Header\n\
Body\"]\
mod a {
}");
assert!(doc.cratemod().mods()[0].item.sections[0].body.contains("Body"));
}
#[test]
fn should_not_create_sections_from_indented_headers() {
let doc = mk_doc(
~"#[doc = \"\n\
Text\n # Header\n\
Body\"]\
mod a {
}");
assert!(doc.cratemod().mods()[0].item.sections.is_empty());
}
#[test]
fn should_remove_section_text_from_main_desc() {
let doc = mk_doc(
~"#[doc = \"\
Description\n\n\
# Header\n\
Body\"]\
mod a {
}");
assert!(!doc.cratemod().mods()[0].desc().get().contains("Header"));
assert!(!doc.cratemod().mods()[0].desc().get().contains("Body"));
}
#[test]
fn should_eliminate_desc_if_it_is_just_whitespace() {
let doc = mk_doc(
~"#[doc = \"\
# Header\n\
Body\"]\
mod a {
}");
assert_eq!(doc.cratemod().mods()[0].desc(), None);
}
#[test]
fn should_sectionalize_trait_methods() {
let doc = mk_doc(
~"trait i {
#[doc = \"\
# Header\n\
Body\"]\
fn a(); }");
assert_eq!(doc.cratemod().traits()[0].methods[0].sections.len(), 1u);
}
#[test]
fn should_sectionalize_impl_methods() {
let doc = mk_doc(
~"impl bool {
#[doc = \"\
# Header\n\
Body\"]\
fn a() { } }");
assert_eq!(doc.cratemod().impls()[0].methods[0].sections.len(), 1u);
}
}
| sectionalize | identifier_name |
sectionalize_pass.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Breaks rustdocs into sections according to their headers
use astsrv;
use doc::ItemUtils;
use doc;
use fold::Fold;
use fold;
use pass::Pass;
use std::iterator::IteratorUtil;
pub fn mk_pass() -> Pass {
Pass {
name: ~"sectionalize",
f: run
}
}
pub fn run(_srv: astsrv::Srv, doc: doc::Doc) -> doc::Doc {
let fold = Fold {
fold_item: fold_item,
fold_trait: fold_trait,
fold_impl: fold_impl,
.. fold::default_any_fold(())
};
(fold.fold_doc)(&fold, doc)
}
fn fold_item(fold: &fold::Fold<()>, doc: doc::ItemDoc) -> doc::ItemDoc {
let doc = fold::default_seq_fold_item(fold, doc);
let (desc, sections) = sectionalize(copy doc.desc);
doc::ItemDoc {
desc: desc,
sections: sections,
.. doc
}
}
fn fold_trait(fold: &fold::Fold<()>, doc: doc::TraitDoc) -> doc::TraitDoc {
let doc = fold::default_seq_fold_trait(fold, doc);
doc::TraitDoc {
methods: do doc.methods.map |method| {
let (desc, sections) = sectionalize(copy method.desc);
doc::MethodDoc {
desc: desc,
sections: sections,
.. copy *method
}
},
.. doc
}
}
fn fold_impl(fold: &fold::Fold<()>, doc: doc::ImplDoc) -> doc::ImplDoc {
let doc = fold::default_seq_fold_impl(fold, doc);
doc::ImplDoc {
methods: do doc.methods.map |method| {
let (desc, sections) = sectionalize(copy method.desc);
doc::MethodDoc {
desc: desc,
sections: sections,
.. copy *method
}
},
.. doc
}
}
fn sectionalize(desc: Option<~str>) -> (Option<~str>, ~[doc::Section]) {
/*!
* Take a description of the form
*
* General text
*
* # Section header
*
* Section text
*
* # Section header
*
* Section text
*
* and remove each header and accompanying text into section records.
*/
if desc.is_none() {
return (None, ~[]);
}
let mut new_desc = None::<~str>;
let mut current_section = None;
let mut sections = ~[];
for desc.get_ref().any_line_iter().advance |line| {
match parse_header(line) {
Some(header) => {
if current_section.is_some() {
sections.push(copy *current_section.get_ref());
}
current_section = Some(doc::Section {
header: header.to_owned(),
body: ~""
});
}
None => {
match copy current_section {
Some(section) => {
current_section = Some(doc::Section {
body: fmt!("%s\n%s", section.body, line),
.. section
});
}
None => {
new_desc = match copy new_desc {
Some(desc) => {
Some(fmt!("%s\n%s", desc, line))
}
None => {
Some(line.to_owned())
}
};
}
}
}
}
}
if current_section.is_some() |
(new_desc, sections)
}
fn parse_header<'a>(line: &'a str) -> Option<&'a str> {
if line.starts_with("# ") {
Some(line.slice_from(2))
} else {
None
}
}
#[cfg(test)]
mod test {
use astsrv;
use attr_pass;
use doc;
use extract;
use prune_hidden_pass;
use sectionalize_pass::run;
fn mk_doc(source: ~str) -> doc::Doc {
do astsrv::from_str(copy source) |srv| {
let doc = extract::from_srv(srv.clone(), ~"");
let doc = (attr_pass::mk_pass().f)(srv.clone(), doc);
let doc = (prune_hidden_pass::mk_pass().f)(srv.clone(), doc);
run(srv.clone(), doc)
}
}
#[test]
fn should_create_section_headers() {
let doc = mk_doc(
~"#[doc = \"\
# Header\n\
Body\"]\
mod a {
}");
assert!(doc.cratemod().mods()[0].item.sections[0].header.contains("Header"));
}
#[test]
fn should_create_section_bodies() {
let doc = mk_doc(
~"#[doc = \"\
# Header\n\
Body\"]\
mod a {
}");
assert!(doc.cratemod().mods()[0].item.sections[0].body.contains("Body"));
}
#[test]
fn should_not_create_sections_from_indented_headers() {
let doc = mk_doc(
~"#[doc = \"\n\
Text\n # Header\n\
Body\"]\
mod a {
}");
assert!(doc.cratemod().mods()[0].item.sections.is_empty());
}
#[test]
fn should_remove_section_text_from_main_desc() {
let doc = mk_doc(
~"#[doc = \"\
Description\n\n\
# Header\n\
Body\"]\
mod a {
}");
assert!(!doc.cratemod().mods()[0].desc().get().contains("Header"));
assert!(!doc.cratemod().mods()[0].desc().get().contains("Body"));
}
#[test]
fn should_eliminate_desc_if_it_is_just_whitespace() {
let doc = mk_doc(
~"#[doc = \"\
# Header\n\
Body\"]\
mod a {
}");
assert_eq!(doc.cratemod().mods()[0].desc(), None);
}
#[test]
fn should_sectionalize_trait_methods() {
let doc = mk_doc(
~"trait i {
#[doc = \"\
# Header\n\
Body\"]\
fn a(); }");
assert_eq!(doc.cratemod().traits()[0].methods[0].sections.len(), 1u);
}
#[test]
fn should_sectionalize_impl_methods() {
let doc = mk_doc(
~"impl bool {
#[doc = \"\
# Header\n\
Body\"]\
fn a() { } }");
assert_eq!(doc.cratemod().impls()[0].methods[0].sections.len(), 1u);
}
}
| {
sections.push(current_section.unwrap());
} | conditional_block |
sepcomp-unwind.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-bitrig
// compile-flags: -C codegen-units=3
// Test unwinding through multiple compilation units.
// According to acrichto, in the distant past `ld -r` (which is used during
// linking when codegen-units > 1) was known to produce object files with
// damaged unwinding tables. This may be related to GNU binutils bug #6893
// ("Partial linking results in corrupt.eh_frame_hdr"), but I'm not certain.
// In any case, this test should let us know if enabling parallel codegen ever
// breaks unwinding.
use std::thread;
fn | () -> usize { 0 }
mod a {
pub fn f() {
panic!();
}
}
mod b {
pub fn g() {
::a::f();
}
}
fn main() {
thread::spawn(move|| { ::b::g() }).join().err().unwrap();
}
| pad | identifier_name |
sepcomp-unwind.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-bitrig
// compile-flags: -C codegen-units=3
// Test unwinding through multiple compilation units.
// According to acrichto, in the distant past `ld -r` (which is used during
// linking when codegen-units > 1) was known to produce object files with
// damaged unwinding tables. This may be related to GNU binutils bug #6893
// ("Partial linking results in corrupt.eh_frame_hdr"), but I'm not certain.
// In any case, this test should let us know if enabling parallel codegen ever
// breaks unwinding.
use std::thread;
fn pad() -> usize { 0 }
mod a {
pub fn f() {
panic!();
}
}
mod b {
pub fn g() {
::a::f();
}
}
| fn main() {
thread::spawn(move|| { ::b::g() }).join().err().unwrap();
} | random_line_split |
|
sepcomp-unwind.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-bitrig
// compile-flags: -C codegen-units=3
// Test unwinding through multiple compilation units.
// According to acrichto, in the distant past `ld -r` (which is used during
// linking when codegen-units > 1) was known to produce object files with
// damaged unwinding tables. This may be related to GNU binutils bug #6893
// ("Partial linking results in corrupt.eh_frame_hdr"), but I'm not certain.
// In any case, this test should let us know if enabling parallel codegen ever
// breaks unwinding.
use std::thread;
fn pad() -> usize { 0 }
mod a {
pub fn f() {
panic!();
}
}
mod b {
pub fn g() {
::a::f();
}
}
fn main() | {
thread::spawn(move|| { ::b::g() }).join().err().unwrap();
} | identifier_body |
|
storageevent.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::StorageEventBinding;
use dom::bindings::codegen::Bindings::StorageEventBinding::{StorageEventMethods};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{JS, MutNullableHeap, Root, RootedReference};
use dom::bindings::reflector::reflect_dom_object;
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::storage::Storage;
use string_cache::Atom; | use util::str::DOMString;
#[dom_struct]
pub struct StorageEvent {
event: Event,
key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: MutNullableHeap<JS<Storage>>
}
impl StorageEvent {
pub fn new_inherited(key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: Option<&Storage>) -> StorageEvent {
StorageEvent {
event: Event::new_inherited(),
key: key,
oldValue: oldValue,
newValue: newValue,
url: url,
storageArea: MutNullableHeap::new(storageArea)
}
}
pub fn new(global: GlobalRef,
type_: Atom,
bubbles: EventBubbles,
cancelable: EventCancelable,
key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: Option<&Storage>) -> Root<StorageEvent> {
let ev = reflect_dom_object(box StorageEvent::new_inherited(key, oldValue, newValue,
url, storageArea),
global,
StorageEventBinding::Wrap);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable);
}
ev
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
init: &StorageEventBinding::StorageEventInit) -> Fallible<Root<StorageEvent>> {
let key = init.key.clone();
let oldValue = init.oldValue.clone();
let newValue = init.newValue.clone();
let url = init.url.clone();
let storageArea = init.storageArea.r();
let bubbles = if init.parent.bubbles { EventBubbles::Bubbles } else { EventBubbles::DoesNotBubble };
let cancelable = if init.parent.cancelable {
EventCancelable::Cancelable
} else {
EventCancelable::NotCancelable
};
let event = StorageEvent::new(global, Atom::from(&*type_),
bubbles, cancelable,
key, oldValue, newValue,
url, storageArea);
Ok(event)
}
}
impl StorageEventMethods for StorageEvent {
// https://html.spec.whatwg.org/multipage/#dom-storageevent-key
fn GetKey(&self) -> Option<DOMString> {
self.key.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-oldvalue
fn GetOldValue(&self) -> Option<DOMString> {
self.oldValue.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-newvalue
fn GetNewValue(&self) -> Option<DOMString> {
self.newValue.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-url
fn Url(&self) -> DOMString {
self.url.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-storagearea
fn GetStorageArea(&self) -> Option<Root<Storage>> {
self.storageArea.get()
}
} | random_line_split |
|
storageevent.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::StorageEventBinding;
use dom::bindings::codegen::Bindings::StorageEventBinding::{StorageEventMethods};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{JS, MutNullableHeap, Root, RootedReference};
use dom::bindings::reflector::reflect_dom_object;
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::storage::Storage;
use string_cache::Atom;
use util::str::DOMString;
#[dom_struct]
pub struct StorageEvent {
event: Event,
key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: MutNullableHeap<JS<Storage>>
}
impl StorageEvent {
pub fn new_inherited(key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: Option<&Storage>) -> StorageEvent {
StorageEvent {
event: Event::new_inherited(),
key: key,
oldValue: oldValue,
newValue: newValue,
url: url,
storageArea: MutNullableHeap::new(storageArea)
}
}
pub fn new(global: GlobalRef,
type_: Atom,
bubbles: EventBubbles,
cancelable: EventCancelable,
key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: Option<&Storage>) -> Root<StorageEvent> {
let ev = reflect_dom_object(box StorageEvent::new_inherited(key, oldValue, newValue,
url, storageArea),
global,
StorageEventBinding::Wrap);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable);
}
ev
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
init: &StorageEventBinding::StorageEventInit) -> Fallible<Root<StorageEvent>> {
let key = init.key.clone();
let oldValue = init.oldValue.clone();
let newValue = init.newValue.clone();
let url = init.url.clone();
let storageArea = init.storageArea.r();
let bubbles = if init.parent.bubbles { EventBubbles::Bubbles } else { EventBubbles::DoesNotBubble };
let cancelable = if init.parent.cancelable {
EventCancelable::Cancelable
} else {
EventCancelable::NotCancelable
};
let event = StorageEvent::new(global, Atom::from(&*type_),
bubbles, cancelable,
key, oldValue, newValue,
url, storageArea);
Ok(event)
}
}
impl StorageEventMethods for StorageEvent {
// https://html.spec.whatwg.org/multipage/#dom-storageevent-key
fn GetKey(&self) -> Option<DOMString> {
self.key.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-oldvalue
fn GetOldValue(&self) -> Option<DOMString> {
self.oldValue.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-newvalue
fn | (&self) -> Option<DOMString> {
self.newValue.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-url
fn Url(&self) -> DOMString {
self.url.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-storagearea
fn GetStorageArea(&self) -> Option<Root<Storage>> {
self.storageArea.get()
}
}
| GetNewValue | identifier_name |
storageevent.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::StorageEventBinding;
use dom::bindings::codegen::Bindings::StorageEventBinding::{StorageEventMethods};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{JS, MutNullableHeap, Root, RootedReference};
use dom::bindings::reflector::reflect_dom_object;
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::storage::Storage;
use string_cache::Atom;
use util::str::DOMString;
#[dom_struct]
pub struct StorageEvent {
event: Event,
key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: MutNullableHeap<JS<Storage>>
}
impl StorageEvent {
pub fn new_inherited(key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: Option<&Storage>) -> StorageEvent {
StorageEvent {
event: Event::new_inherited(),
key: key,
oldValue: oldValue,
newValue: newValue,
url: url,
storageArea: MutNullableHeap::new(storageArea)
}
}
pub fn new(global: GlobalRef,
type_: Atom,
bubbles: EventBubbles,
cancelable: EventCancelable,
key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: Option<&Storage>) -> Root<StorageEvent> {
let ev = reflect_dom_object(box StorageEvent::new_inherited(key, oldValue, newValue,
url, storageArea),
global,
StorageEventBinding::Wrap);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable);
}
ev
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
init: &StorageEventBinding::StorageEventInit) -> Fallible<Root<StorageEvent>> {
let key = init.key.clone();
let oldValue = init.oldValue.clone();
let newValue = init.newValue.clone();
let url = init.url.clone();
let storageArea = init.storageArea.r();
let bubbles = if init.parent.bubbles { EventBubbles::Bubbles } else | ;
let cancelable = if init.parent.cancelable {
EventCancelable::Cancelable
} else {
EventCancelable::NotCancelable
};
let event = StorageEvent::new(global, Atom::from(&*type_),
bubbles, cancelable,
key, oldValue, newValue,
url, storageArea);
Ok(event)
}
}
impl StorageEventMethods for StorageEvent {
// https://html.spec.whatwg.org/multipage/#dom-storageevent-key
fn GetKey(&self) -> Option<DOMString> {
self.key.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-oldvalue
fn GetOldValue(&self) -> Option<DOMString> {
self.oldValue.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-newvalue
fn GetNewValue(&self) -> Option<DOMString> {
self.newValue.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-url
fn Url(&self) -> DOMString {
self.url.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-storagearea
fn GetStorageArea(&self) -> Option<Root<Storage>> {
self.storageArea.get()
}
}
| { EventBubbles::DoesNotBubble } | conditional_block |
storageevent.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::StorageEventBinding;
use dom::bindings::codegen::Bindings::StorageEventBinding::{StorageEventMethods};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{JS, MutNullableHeap, Root, RootedReference};
use dom::bindings::reflector::reflect_dom_object;
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::storage::Storage;
use string_cache::Atom;
use util::str::DOMString;
#[dom_struct]
pub struct StorageEvent {
event: Event,
key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: MutNullableHeap<JS<Storage>>
}
impl StorageEvent {
pub fn new_inherited(key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: Option<&Storage>) -> StorageEvent {
StorageEvent {
event: Event::new_inherited(),
key: key,
oldValue: oldValue,
newValue: newValue,
url: url,
storageArea: MutNullableHeap::new(storageArea)
}
}
pub fn new(global: GlobalRef,
type_: Atom,
bubbles: EventBubbles,
cancelable: EventCancelable,
key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: Option<&Storage>) -> Root<StorageEvent> {
let ev = reflect_dom_object(box StorageEvent::new_inherited(key, oldValue, newValue,
url, storageArea),
global,
StorageEventBinding::Wrap);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable);
}
ev
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
init: &StorageEventBinding::StorageEventInit) -> Fallible<Root<StorageEvent>> {
let key = init.key.clone();
let oldValue = init.oldValue.clone();
let newValue = init.newValue.clone();
let url = init.url.clone();
let storageArea = init.storageArea.r();
let bubbles = if init.parent.bubbles { EventBubbles::Bubbles } else { EventBubbles::DoesNotBubble };
let cancelable = if init.parent.cancelable {
EventCancelable::Cancelable
} else {
EventCancelable::NotCancelable
};
let event = StorageEvent::new(global, Atom::from(&*type_),
bubbles, cancelable,
key, oldValue, newValue,
url, storageArea);
Ok(event)
}
}
impl StorageEventMethods for StorageEvent {
// https://html.spec.whatwg.org/multipage/#dom-storageevent-key
fn GetKey(&self) -> Option<DOMString> {
self.key.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-oldvalue
fn GetOldValue(&self) -> Option<DOMString> {
self.oldValue.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-newvalue
fn GetNewValue(&self) -> Option<DOMString> {
self.newValue.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-url
fn Url(&self) -> DOMString |
// https://html.spec.whatwg.org/multipage/#dom-storageevent-storagearea
fn GetStorageArea(&self) -> Option<Root<Storage>> {
self.storageArea.get()
}
}
| {
self.url.clone()
} | identifier_body |
task-perf-spawnalot.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::os;
use std::task;
use std::uint;
fn f(n: uint) {
let mut i = 0u;
while i < n {
task::try(|| g() );
i += 1u;
}
}
fn | () { }
fn main() {
let args = os::args();
let args = if os::getenv(~"RUST_BENCH").is_some() {
~[~"", ~"400"]
} else if args.len() <= 1u {
~[~"", ~"10"]
} else {
args
};
let n = uint::from_str(args[1]).get();
let mut i = 0u;
while i < n { task::spawn(|| f(n) ); i += 1u; }
}
| g | identifier_name |
task-perf-spawnalot.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::os;
use std::task;
use std::uint;
fn f(n: uint) {
let mut i = 0u;
while i < n {
task::try(|| g() );
i += 1u;
}
}
fn g() { }
fn main() {
let args = os::args();
let args = if os::getenv(~"RUST_BENCH").is_some() | else if args.len() <= 1u {
~[~"", ~"10"]
} else {
args
};
let n = uint::from_str(args[1]).get();
let mut i = 0u;
while i < n { task::spawn(|| f(n) ); i += 1u; }
}
| {
~[~"", ~"400"]
} | conditional_block |
task-perf-spawnalot.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at | // option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::os;
use std::task;
use std::uint;
fn f(n: uint) {
let mut i = 0u;
while i < n {
task::try(|| g() );
i += 1u;
}
}
fn g() { }
fn main() {
let args = os::args();
let args = if os::getenv(~"RUST_BENCH").is_some() {
~[~"", ~"400"]
} else if args.len() <= 1u {
~[~"", ~"10"]
} else {
args
};
let n = uint::from_str(args[1]).get();
let mut i = 0u;
while i < n { task::spawn(|| f(n) ); i += 1u; }
} | // http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your | random_line_split |
task-perf-spawnalot.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::os;
use std::task;
use std::uint;
fn f(n: uint) {
let mut i = 0u;
while i < n {
task::try(|| g() );
i += 1u;
}
}
fn g() |
fn main() {
let args = os::args();
let args = if os::getenv(~"RUST_BENCH").is_some() {
~[~"", ~"400"]
} else if args.len() <= 1u {
~[~"", ~"10"]
} else {
args
};
let n = uint::from_str(args[1]).get();
let mut i = 0u;
while i < n { task::spawn(|| f(n) ); i += 1u; }
}
| { } | identifier_body |
archive.rs | use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt::Debug;
use std::fs::File;
use std::io::{Seek, SeekFrom};
use std::mem;
use std::path::Path;
use std::path::PathBuf;
use std::slice;
use std::vec::Vec;
use std::result::Result as StdResult;
use common::ReadExt;
use meta::WadMetadata;
use types::{WadLump, WadInfo, WadName, WadNameCast};
use util::wad_type_from_info;
use error::{Result, Error, ErrorKind, InFile};
use error::ErrorKind::{BadWadHeader, MissingRequiredLump};
pub struct Archive {
file: RefCell<File>,
index_map: HashMap<WadName, usize>,
lumps: Vec<LumpInfo>,
levels: Vec<usize>,
meta: WadMetadata,
path: PathBuf,
}
impl Archive {
pub fn open<W, M>(wad_path: &W, meta_path: &M) -> Result<Archive>
where W: AsRef<Path> + Debug,
M: AsRef<Path> + Debug {
let wad_path = wad_path.as_ref().to_owned();
info!("Loading wad file '{:?}'...", wad_path);
// Open file, read and check header.
let mut file = try!(File::open(&wad_path).in_file(&wad_path));
let header = try!(file.read_binary::<WadInfo>().in_file(&wad_path));
try!(wad_type_from_info(&header).ok_or_else(|| BadWadHeader.in_file(&wad_path)));
// Read lump info.
let mut lumps = Vec::with_capacity(header.num_lumps as usize);
let mut levels = Vec::with_capacity(64);
let mut index_map = HashMap::new();
try!(file.seek(SeekFrom::Start(header.info_table_offset as u64)).in_file(&wad_path));
for i_lump in 0.. header.num_lumps {
let mut fileinfo = try!(file.read_binary::<WadLump>().in_file(&wad_path));
fileinfo.name.canonicalise();
index_map.insert(fileinfo.name, lumps.len());
lumps.push(LumpInfo { name: fileinfo.name,
offset: fileinfo.file_pos as u64,
size: fileinfo.size as usize });
// Our heuristic for level lumps is that they are preceeded by the "THINGS" lump.
if fileinfo.name == b"THINGS\0\0".to_wad_name() |
}
// Read metadata.
let meta = try!(WadMetadata::from_file(meta_path));
Ok(Archive {
meta: meta,
file: RefCell::new(file),
lumps: lumps,
index_map: index_map,
levels: levels,
path: wad_path,
})
}
pub fn num_levels(&self) -> usize { self.levels.len() }
pub fn level_lump_index(&self, level_index: usize) -> usize {
self.levels[level_index]
}
pub fn level_name(&self, level_index: usize) -> &WadName {
self.lump_name(self.levels[level_index])
}
pub fn num_lumps(&self) -> usize { self.lumps.len() }
pub fn named_lump_index(&self, name: &WadName) -> Option<usize> {
self.index_map.get(name).map(|x| *x)
}
pub fn required_named_lump_index(&self, name: &WadName) -> Result<usize> {
self.named_lump_index(name).ok_or(MissingRequiredLump(*name)).in_archive(self)
}
pub fn lump_name(&self, lump_index: usize) -> &WadName {
&self.lumps[lump_index].name
}
pub fn is_virtual_lump(&self, lump_index: usize) -> bool {
self.lumps[lump_index].size == 0
}
pub fn read_required_named_lump<T: Copy>(&self, name: &WadName) -> Result<Vec<T>> {
self.read_named_lump(name)
.unwrap_or_else(|| Err(MissingRequiredLump(*name).in_archive(self)))
}
pub fn read_named_lump<T: Copy>(&self, name: &WadName) -> Option<Result<Vec<T>>> {
self.named_lump_index(name).map(|index| self.read_lump(index))
}
pub fn read_lump<T: Copy>(&self, index: usize) -> Result<Vec<T>> {
let mut file = self.file.borrow_mut();
let info = self.lumps[index];
assert!(info.size > 0);
assert!(info.size % mem::size_of::<T>() == 0);
let num_elems = info.size / mem::size_of::<T>();
let mut buf = Vec::with_capacity(num_elems);
try!(file.seek(SeekFrom::Start(info.offset)).in_archive(self));
unsafe {
buf.set_len(num_elems);
try!(file.read_at_least(slice::from_raw_parts_mut(
(buf.as_mut_ptr() as *mut u8), info.size)).in_archive(self))
}
Ok(buf)
}
pub fn read_lump_single<T: Copy>(&self, index: usize) -> Result<T> {
let mut file = self.file.borrow_mut();
let info = self.lumps[index];
assert!(info.size == mem::size_of::<T>());
try!(file.seek(SeekFrom::Start(info.offset)).in_archive(self));
Ok(try!(file.read_binary().in_archive(self)))
}
pub fn metadata(&self) -> &WadMetadata { &self.meta }
}
pub trait InArchive {
type Output;
fn in_archive(self, archive: &Archive) -> Self::Output;
}
impl InArchive for Error {
type Output = Error;
fn in_archive(self, archive: &Archive) -> Error {
self.in_file(&archive.path)
}
}
impl InArchive for ErrorKind {
type Output = Error;
fn in_archive(self, archive: &Archive) -> Error {
self.in_file(&archive.path)
}
}
impl<S, E: Into<Error>> InArchive for StdResult<S, E> {
type Output = Result<S>;
fn in_archive(self, archive: &Archive) -> Result<S> {
self.map_err(|e| e.into().in_archive(archive))
}
}
#[derive(Copy, Clone)]
struct LumpInfo {
name: WadName,
offset: u64,
size: usize,
}
| {
assert!(i_lump > 0);
levels.push((i_lump - 1) as usize);
} | conditional_block |
archive.rs | use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt::Debug;
use std::fs::File;
use std::io::{Seek, SeekFrom};
use std::mem;
use std::path::Path;
use std::path::PathBuf;
use std::slice;
use std::vec::Vec;
use std::result::Result as StdResult;
use common::ReadExt;
use meta::WadMetadata;
use types::{WadLump, WadInfo, WadName, WadNameCast};
use util::wad_type_from_info;
use error::{Result, Error, ErrorKind, InFile};
use error::ErrorKind::{BadWadHeader, MissingRequiredLump};
pub struct Archive {
file: RefCell<File>,
index_map: HashMap<WadName, usize>,
lumps: Vec<LumpInfo>,
levels: Vec<usize>,
meta: WadMetadata,
path: PathBuf,
}
impl Archive {
pub fn open<W, M>(wad_path: &W, meta_path: &M) -> Result<Archive>
where W: AsRef<Path> + Debug,
M: AsRef<Path> + Debug {
let wad_path = wad_path.as_ref().to_owned();
info!("Loading wad file '{:?}'...", wad_path);
// Open file, read and check header.
let mut file = try!(File::open(&wad_path).in_file(&wad_path));
let header = try!(file.read_binary::<WadInfo>().in_file(&wad_path));
try!(wad_type_from_info(&header).ok_or_else(|| BadWadHeader.in_file(&wad_path)));
// Read lump info.
let mut lumps = Vec::with_capacity(header.num_lumps as usize);
let mut levels = Vec::with_capacity(64);
let mut index_map = HashMap::new();
try!(file.seek(SeekFrom::Start(header.info_table_offset as u64)).in_file(&wad_path));
for i_lump in 0.. header.num_lumps {
let mut fileinfo = try!(file.read_binary::<WadLump>().in_file(&wad_path));
fileinfo.name.canonicalise();
index_map.insert(fileinfo.name, lumps.len());
lumps.push(LumpInfo { name: fileinfo.name,
offset: fileinfo.file_pos as u64,
size: fileinfo.size as usize });
// Our heuristic for level lumps is that they are preceeded by the "THINGS" lump.
if fileinfo.name == b"THINGS\0\0".to_wad_name() {
assert!(i_lump > 0);
levels.push((i_lump - 1) as usize);
}
}
// Read metadata.
let meta = try!(WadMetadata::from_file(meta_path));
Ok(Archive {
meta: meta,
file: RefCell::new(file),
lumps: lumps,
index_map: index_map,
levels: levels,
path: wad_path,
})
}
pub fn num_levels(&self) -> usize { self.levels.len() }
pub fn level_lump_index(&self, level_index: usize) -> usize {
self.levels[level_index]
}
pub fn level_name(&self, level_index: usize) -> &WadName {
self.lump_name(self.levels[level_index])
}
pub fn num_lumps(&self) -> usize { self.lumps.len() }
pub fn named_lump_index(&self, name: &WadName) -> Option<usize> {
self.index_map.get(name).map(|x| *x)
}
pub fn required_named_lump_index(&self, name: &WadName) -> Result<usize> {
self.named_lump_index(name).ok_or(MissingRequiredLump(*name)).in_archive(self)
}
pub fn lump_name(&self, lump_index: usize) -> &WadName {
&self.lumps[lump_index].name
}
pub fn is_virtual_lump(&self, lump_index: usize) -> bool {
self.lumps[lump_index].size == 0
}
pub fn read_required_named_lump<T: Copy>(&self, name: &WadName) -> Result<Vec<T>> |
pub fn read_named_lump<T: Copy>(&self, name: &WadName) -> Option<Result<Vec<T>>> {
self.named_lump_index(name).map(|index| self.read_lump(index))
}
pub fn read_lump<T: Copy>(&self, index: usize) -> Result<Vec<T>> {
let mut file = self.file.borrow_mut();
let info = self.lumps[index];
assert!(info.size > 0);
assert!(info.size % mem::size_of::<T>() == 0);
let num_elems = info.size / mem::size_of::<T>();
let mut buf = Vec::with_capacity(num_elems);
try!(file.seek(SeekFrom::Start(info.offset)).in_archive(self));
unsafe {
buf.set_len(num_elems);
try!(file.read_at_least(slice::from_raw_parts_mut(
(buf.as_mut_ptr() as *mut u8), info.size)).in_archive(self))
}
Ok(buf)
}
pub fn read_lump_single<T: Copy>(&self, index: usize) -> Result<T> {
let mut file = self.file.borrow_mut();
let info = self.lumps[index];
assert!(info.size == mem::size_of::<T>());
try!(file.seek(SeekFrom::Start(info.offset)).in_archive(self));
Ok(try!(file.read_binary().in_archive(self)))
}
pub fn metadata(&self) -> &WadMetadata { &self.meta }
}
pub trait InArchive {
type Output;
fn in_archive(self, archive: &Archive) -> Self::Output;
}
impl InArchive for Error {
type Output = Error;
fn in_archive(self, archive: &Archive) -> Error {
self.in_file(&archive.path)
}
}
impl InArchive for ErrorKind {
type Output = Error;
fn in_archive(self, archive: &Archive) -> Error {
self.in_file(&archive.path)
}
}
impl<S, E: Into<Error>> InArchive for StdResult<S, E> {
type Output = Result<S>;
fn in_archive(self, archive: &Archive) -> Result<S> {
self.map_err(|e| e.into().in_archive(archive))
}
}
#[derive(Copy, Clone)]
struct LumpInfo {
name: WadName,
offset: u64,
size: usize,
}
| {
self.read_named_lump(name)
.unwrap_or_else(|| Err(MissingRequiredLump(*name).in_archive(self)))
} | identifier_body |
archive.rs | use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt::Debug;
use std::fs::File;
use std::io::{Seek, SeekFrom};
use std::mem;
use std::path::Path;
use std::path::PathBuf;
use std::slice;
use std::vec::Vec;
use std::result::Result as StdResult;
use common::ReadExt;
use meta::WadMetadata;
use types::{WadLump, WadInfo, WadName, WadNameCast};
use util::wad_type_from_info;
use error::{Result, Error, ErrorKind, InFile};
use error::ErrorKind::{BadWadHeader, MissingRequiredLump};
pub struct Archive {
file: RefCell<File>,
index_map: HashMap<WadName, usize>,
lumps: Vec<LumpInfo>,
levels: Vec<usize>,
meta: WadMetadata,
path: PathBuf,
}
impl Archive {
pub fn open<W, M>(wad_path: &W, meta_path: &M) -> Result<Archive>
where W: AsRef<Path> + Debug,
M: AsRef<Path> + Debug {
let wad_path = wad_path.as_ref().to_owned();
info!("Loading wad file '{:?}'...", wad_path);
// Open file, read and check header.
let mut file = try!(File::open(&wad_path).in_file(&wad_path));
let header = try!(file.read_binary::<WadInfo>().in_file(&wad_path));
try!(wad_type_from_info(&header).ok_or_else(|| BadWadHeader.in_file(&wad_path)));
// Read lump info.
let mut lumps = Vec::with_capacity(header.num_lumps as usize);
let mut levels = Vec::with_capacity(64);
let mut index_map = HashMap::new();
try!(file.seek(SeekFrom::Start(header.info_table_offset as u64)).in_file(&wad_path));
for i_lump in 0.. header.num_lumps {
let mut fileinfo = try!(file.read_binary::<WadLump>().in_file(&wad_path));
fileinfo.name.canonicalise();
index_map.insert(fileinfo.name, lumps.len());
lumps.push(LumpInfo { name: fileinfo.name,
offset: fileinfo.file_pos as u64,
size: fileinfo.size as usize });
// Our heuristic for level lumps is that they are preceeded by the "THINGS" lump.
if fileinfo.name == b"THINGS\0\0".to_wad_name() {
assert!(i_lump > 0);
levels.push((i_lump - 1) as usize);
}
}
// Read metadata.
let meta = try!(WadMetadata::from_file(meta_path));
Ok(Archive { | levels: levels,
path: wad_path,
})
}
pub fn num_levels(&self) -> usize { self.levels.len() }
pub fn level_lump_index(&self, level_index: usize) -> usize {
self.levels[level_index]
}
pub fn level_name(&self, level_index: usize) -> &WadName {
self.lump_name(self.levels[level_index])
}
pub fn num_lumps(&self) -> usize { self.lumps.len() }
pub fn named_lump_index(&self, name: &WadName) -> Option<usize> {
self.index_map.get(name).map(|x| *x)
}
pub fn required_named_lump_index(&self, name: &WadName) -> Result<usize> {
self.named_lump_index(name).ok_or(MissingRequiredLump(*name)).in_archive(self)
}
pub fn lump_name(&self, lump_index: usize) -> &WadName {
&self.lumps[lump_index].name
}
pub fn is_virtual_lump(&self, lump_index: usize) -> bool {
self.lumps[lump_index].size == 0
}
pub fn read_required_named_lump<T: Copy>(&self, name: &WadName) -> Result<Vec<T>> {
self.read_named_lump(name)
.unwrap_or_else(|| Err(MissingRequiredLump(*name).in_archive(self)))
}
pub fn read_named_lump<T: Copy>(&self, name: &WadName) -> Option<Result<Vec<T>>> {
self.named_lump_index(name).map(|index| self.read_lump(index))
}
pub fn read_lump<T: Copy>(&self, index: usize) -> Result<Vec<T>> {
let mut file = self.file.borrow_mut();
let info = self.lumps[index];
assert!(info.size > 0);
assert!(info.size % mem::size_of::<T>() == 0);
let num_elems = info.size / mem::size_of::<T>();
let mut buf = Vec::with_capacity(num_elems);
try!(file.seek(SeekFrom::Start(info.offset)).in_archive(self));
unsafe {
buf.set_len(num_elems);
try!(file.read_at_least(slice::from_raw_parts_mut(
(buf.as_mut_ptr() as *mut u8), info.size)).in_archive(self))
}
Ok(buf)
}
pub fn read_lump_single<T: Copy>(&self, index: usize) -> Result<T> {
let mut file = self.file.borrow_mut();
let info = self.lumps[index];
assert!(info.size == mem::size_of::<T>());
try!(file.seek(SeekFrom::Start(info.offset)).in_archive(self));
Ok(try!(file.read_binary().in_archive(self)))
}
pub fn metadata(&self) -> &WadMetadata { &self.meta }
}
pub trait InArchive {
type Output;
fn in_archive(self, archive: &Archive) -> Self::Output;
}
impl InArchive for Error {
type Output = Error;
fn in_archive(self, archive: &Archive) -> Error {
self.in_file(&archive.path)
}
}
impl InArchive for ErrorKind {
type Output = Error;
fn in_archive(self, archive: &Archive) -> Error {
self.in_file(&archive.path)
}
}
impl<S, E: Into<Error>> InArchive for StdResult<S, E> {
type Output = Result<S>;
fn in_archive(self, archive: &Archive) -> Result<S> {
self.map_err(|e| e.into().in_archive(archive))
}
}
#[derive(Copy, Clone)]
struct LumpInfo {
name: WadName,
offset: u64,
size: usize,
} | meta: meta,
file: RefCell::new(file),
lumps: lumps,
index_map: index_map, | random_line_split |
archive.rs | use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt::Debug;
use std::fs::File;
use std::io::{Seek, SeekFrom};
use std::mem;
use std::path::Path;
use std::path::PathBuf;
use std::slice;
use std::vec::Vec;
use std::result::Result as StdResult;
use common::ReadExt;
use meta::WadMetadata;
use types::{WadLump, WadInfo, WadName, WadNameCast};
use util::wad_type_from_info;
use error::{Result, Error, ErrorKind, InFile};
use error::ErrorKind::{BadWadHeader, MissingRequiredLump};
pub struct Archive {
file: RefCell<File>,
index_map: HashMap<WadName, usize>,
lumps: Vec<LumpInfo>,
levels: Vec<usize>,
meta: WadMetadata,
path: PathBuf,
}
impl Archive {
pub fn open<W, M>(wad_path: &W, meta_path: &M) -> Result<Archive>
where W: AsRef<Path> + Debug,
M: AsRef<Path> + Debug {
let wad_path = wad_path.as_ref().to_owned();
info!("Loading wad file '{:?}'...", wad_path);
// Open file, read and check header.
let mut file = try!(File::open(&wad_path).in_file(&wad_path));
let header = try!(file.read_binary::<WadInfo>().in_file(&wad_path));
try!(wad_type_from_info(&header).ok_or_else(|| BadWadHeader.in_file(&wad_path)));
// Read lump info.
let mut lumps = Vec::with_capacity(header.num_lumps as usize);
let mut levels = Vec::with_capacity(64);
let mut index_map = HashMap::new();
try!(file.seek(SeekFrom::Start(header.info_table_offset as u64)).in_file(&wad_path));
for i_lump in 0.. header.num_lumps {
let mut fileinfo = try!(file.read_binary::<WadLump>().in_file(&wad_path));
fileinfo.name.canonicalise();
index_map.insert(fileinfo.name, lumps.len());
lumps.push(LumpInfo { name: fileinfo.name,
offset: fileinfo.file_pos as u64,
size: fileinfo.size as usize });
// Our heuristic for level lumps is that they are preceeded by the "THINGS" lump.
if fileinfo.name == b"THINGS\0\0".to_wad_name() {
assert!(i_lump > 0);
levels.push((i_lump - 1) as usize);
}
}
// Read metadata.
let meta = try!(WadMetadata::from_file(meta_path));
Ok(Archive {
meta: meta,
file: RefCell::new(file),
lumps: lumps,
index_map: index_map,
levels: levels,
path: wad_path,
})
}
pub fn num_levels(&self) -> usize { self.levels.len() }
pub fn | (&self, level_index: usize) -> usize {
self.levels[level_index]
}
pub fn level_name(&self, level_index: usize) -> &WadName {
self.lump_name(self.levels[level_index])
}
pub fn num_lumps(&self) -> usize { self.lumps.len() }
pub fn named_lump_index(&self, name: &WadName) -> Option<usize> {
self.index_map.get(name).map(|x| *x)
}
pub fn required_named_lump_index(&self, name: &WadName) -> Result<usize> {
self.named_lump_index(name).ok_or(MissingRequiredLump(*name)).in_archive(self)
}
pub fn lump_name(&self, lump_index: usize) -> &WadName {
&self.lumps[lump_index].name
}
pub fn is_virtual_lump(&self, lump_index: usize) -> bool {
self.lumps[lump_index].size == 0
}
pub fn read_required_named_lump<T: Copy>(&self, name: &WadName) -> Result<Vec<T>> {
self.read_named_lump(name)
.unwrap_or_else(|| Err(MissingRequiredLump(*name).in_archive(self)))
}
pub fn read_named_lump<T: Copy>(&self, name: &WadName) -> Option<Result<Vec<T>>> {
self.named_lump_index(name).map(|index| self.read_lump(index))
}
pub fn read_lump<T: Copy>(&self, index: usize) -> Result<Vec<T>> {
let mut file = self.file.borrow_mut();
let info = self.lumps[index];
assert!(info.size > 0);
assert!(info.size % mem::size_of::<T>() == 0);
let num_elems = info.size / mem::size_of::<T>();
let mut buf = Vec::with_capacity(num_elems);
try!(file.seek(SeekFrom::Start(info.offset)).in_archive(self));
unsafe {
buf.set_len(num_elems);
try!(file.read_at_least(slice::from_raw_parts_mut(
(buf.as_mut_ptr() as *mut u8), info.size)).in_archive(self))
}
Ok(buf)
}
pub fn read_lump_single<T: Copy>(&self, index: usize) -> Result<T> {
let mut file = self.file.borrow_mut();
let info = self.lumps[index];
assert!(info.size == mem::size_of::<T>());
try!(file.seek(SeekFrom::Start(info.offset)).in_archive(self));
Ok(try!(file.read_binary().in_archive(self)))
}
pub fn metadata(&self) -> &WadMetadata { &self.meta }
}
pub trait InArchive {
type Output;
fn in_archive(self, archive: &Archive) -> Self::Output;
}
impl InArchive for Error {
type Output = Error;
fn in_archive(self, archive: &Archive) -> Error {
self.in_file(&archive.path)
}
}
impl InArchive for ErrorKind {
type Output = Error;
fn in_archive(self, archive: &Archive) -> Error {
self.in_file(&archive.path)
}
}
impl<S, E: Into<Error>> InArchive for StdResult<S, E> {
type Output = Result<S>;
fn in_archive(self, archive: &Archive) -> Result<S> {
self.map_err(|e| e.into().in_archive(archive))
}
}
#[derive(Copy, Clone)]
struct LumpInfo {
name: WadName,
offset: u64,
size: usize,
}
| level_lump_index | identifier_name |
issue-19340-1.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(unused_variables)] | // pretty-expanded FIXME #23616
extern crate issue_19340_1 as lib;
use lib::Homura;
fn main() {
let homura = Homura::Madoka { name: "Kaname".to_string() };
match homura {
Homura::Madoka { name } => (),
};
} | // aux-build:issue-19340-1.rs
| random_line_split |
issue-19340-1.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(unused_variables)]
// aux-build:issue-19340-1.rs
// pretty-expanded FIXME #23616
extern crate issue_19340_1 as lib;
use lib::Homura;
fn main() | {
let homura = Homura::Madoka { name: "Kaname".to_string() };
match homura {
Homura::Madoka { name } => (),
};
} | identifier_body |
|
issue-19340-1.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(unused_variables)]
// aux-build:issue-19340-1.rs
// pretty-expanded FIXME #23616
extern crate issue_19340_1 as lib;
use lib::Homura;
fn | () {
let homura = Homura::Madoka { name: "Kaname".to_string() };
match homura {
Homura::Madoka { name } => (),
};
}
| main | identifier_name |
element_wrapper.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A wrapper over an element and a snapshot, that allows us to selector-match
//! against a past state of the element.
use {Atom, CaseSensitivityExt, LocalName, Namespace, WeakAtom};
use dom::TElement;
use element_state::ElementState;
use selector_parser::{NonTSPseudoClass, PseudoElement, SelectorImpl, Snapshot, SnapshotMap, AttrValue};
use selectors::{Element, OpaqueElement};
use selectors::attr::{AttrSelectorOperation, CaseSensitivity, NamespaceConstraint};
use selectors::matching::{ElementSelectorFlags, MatchingContext};
use std::cell::Cell;
use std::fmt;
/// In order to compute restyle hints, we perform a selector match against a
/// list of partial selectors whose rightmost simple selector may be sensitive
/// to the thing being changed. We do this matching twice, once for the element
/// as it exists now and once for the element as it existed at the time of the
/// last restyle. If the results of the selector match differ, that means that
/// the given partial selector is sensitive to the change, and we compute a
/// restyle hint based on its combinator.
///
/// In order to run selector matching against the old element state, we generate
/// a wrapper for the element which claims to have the old state. This is the
/// ElementWrapper logic below.
///
/// Gecko does this differently for element states, and passes a mask called
/// mStateMask, which indicates the states that need to be ignored during
/// selector matching. This saves an ElementWrapper allocation and an additional
/// selector match call at the expense of additional complexity inside the
/// selector matching logic. This only works for boolean states though, so we
/// still need to take the ElementWrapper approach for attribute-dependent
/// style. So we do it the same both ways for now to reduce complexity, but it's
/// worth measuring the performance impact (if any) of the mStateMask approach.
pub trait ElementSnapshot : Sized {
/// The state of the snapshot, if any.
fn state(&self) -> Option<ElementState>;
/// If this snapshot contains attribute information.
fn has_attrs(&self) -> bool;
/// The ID attribute per this snapshot. Should only be called if
/// `has_attrs()` returns true.
fn id_attr(&self) -> Option<&WeakAtom>;
/// Whether this snapshot contains the class `name`. Should only be called
/// if `has_attrs()` returns true.
fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool;
/// A callback that should be called for each class of the snapshot. Should
/// only be called if `has_attrs()` returns true.
fn each_class<F>(&self, F)
where
F: FnMut(&Atom);
/// The `xml:lang=""` or `lang=""` attribute value per this snapshot.
fn lang_attr(&self) -> Option<AttrValue>;
}
/// A simple wrapper over an element and a snapshot, that allows us to
/// selector-match against a past state of the element.
#[derive(Clone)]
pub struct ElementWrapper<'a, E>
where
E: TElement,
{
element: E,
cached_snapshot: Cell<Option<&'a Snapshot>>,
snapshot_map: &'a SnapshotMap,
}
impl<'a, E> ElementWrapper<'a, E>
where
E: TElement,
{
/// Trivially constructs an `ElementWrapper`.
pub fn new(el: E, snapshot_map: &'a SnapshotMap) -> Self {
ElementWrapper {
element: el,
cached_snapshot: Cell::new(None),
snapshot_map: snapshot_map,
}
}
/// Gets the snapshot associated with this element, if any.
pub fn snapshot(&self) -> Option<&'a Snapshot> {
if!self.element.has_snapshot() {
return None;
}
if let Some(s) = self.cached_snapshot.get() {
return Some(s);
}
let snapshot = self.snapshot_map.get(&self.element);
debug_assert!(snapshot.is_some(), "has_snapshot lied!");
self.cached_snapshot.set(snapshot);
snapshot
}
/// Returns the states that have changed since the element was snapshotted.
pub fn state_changes(&self) -> ElementState {
let snapshot = match self.snapshot() {
Some(s) => s,
None => return ElementState::empty(),
};
match snapshot.state() {
Some(state) => state ^ self.element.state(),
None => ElementState::empty(),
}
}
/// Returns the value of the `xml:lang=""` (or, if appropriate, `lang=""`)
/// attribute from this element's snapshot or the closest ancestor
/// element snapshot with the attribute specified.
fn get_lang(&self) -> Option<AttrValue> {
let mut current = self.clone();
loop {
let lang = match self.snapshot() {
Some(snapshot) if snapshot.has_attrs() => snapshot.lang_attr(),
_ => current.element.lang_attr(),
};
if lang.is_some() {
return lang;
}
current = current.parent_element()?;
}
}
}
impl<'a, E> fmt::Debug for ElementWrapper<'a, E>
where
E: TElement,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Ignore other fields for now, can change later if needed.
self.element.fmt(f)
}
}
impl<'a, E> Element for ElementWrapper<'a, E>
where
E: TElement,
{
type Impl = SelectorImpl;
fn match_non_ts_pseudo_class<F>(
&self,
pseudo_class: &NonTSPseudoClass,
context: &mut MatchingContext<Self::Impl>,
_setter: &mut F,
) -> bool
where
F: FnMut(&Self, ElementSelectorFlags),
{
// Some pseudo-classes need special handling to evaluate them against
// the snapshot.
match *pseudo_class {
#[cfg(feature = "gecko")]
NonTSPseudoClass::MozAny(ref selectors) => {
use selectors::matching::matches_complex_selector;
return context.nest(|context| {
selectors.iter().any(|s| {
matches_complex_selector(s.iter(), self, context, _setter)
})
});
}
// :dir is implemented in terms of state flags, but which state flag
// it maps to depends on the argument to :dir. That means we can't
// just add its state flags to the NonTSPseudoClass, because if we
// added all of them there, and tested via intersects() here, we'd
// get incorrect behavior for :not(:dir()) cases.
//
// FIXME(bz): How can I set this up so once Servo adds :dir()
// support we don't forget to update this code?
#[cfg(feature = "gecko")]
NonTSPseudoClass::Dir(ref dir) => {
use invalidation::element::invalidation_map::dir_selector_to_state;
let selector_flag = dir_selector_to_state(dir);
if selector_flag.is_empty() {
// :dir() with some random argument; does not match.
return false;
}
let state = match self.snapshot().and_then(|s| s.state()) {
Some(snapshot_state) => snapshot_state,
None => self.element.state(),
};
return state.contains(selector_flag);
}
// For :link and :visited, we don't actually want to test the
// element state directly.
//
// Instead, we use the `visited_handling` to determine if they
// match.
NonTSPseudoClass::Link => {
return self.is_link() && context.visited_handling().matches_unvisited()
}
NonTSPseudoClass::Visited => {
return self.is_link() && context.visited_handling().matches_visited()
}
#[cfg(feature = "gecko")]
NonTSPseudoClass::MozTableBorderNonzero => {
if let Some(snapshot) = self.snapshot() {
if snapshot.has_other_pseudo_class_state() {
return snapshot.mIsTableBorderNonzero();
}
}
}
#[cfg(feature = "gecko")]
NonTSPseudoClass::MozBrowserFrame => {
if let Some(snapshot) = self.snapshot() {
if snapshot.has_other_pseudo_class_state() {
return snapshot.mIsMozBrowserFrame();
}
}
}
// :lang() needs to match using the closest ancestor xml:lang="" or
// lang="" attribtue from snapshots.
NonTSPseudoClass::Lang(ref lang_arg) => {
return self.element.match_element_lang(Some(self.get_lang()), lang_arg);
}
_ => {}
}
let flag = pseudo_class.state_flag();
if flag.is_empty() {
return self.element.match_non_ts_pseudo_class(
pseudo_class,
context,
&mut |_, _| {},
)
}
match self.snapshot().and_then(|s| s.state()) {
Some(snapshot_state) => snapshot_state.intersects(flag),
None => {
self.element.match_non_ts_pseudo_class(
pseudo_class,
context,
&mut |_, _| {},
)
}
}
}
fn match_pseudo_element(
&self,
pseudo_element: &PseudoElement,
context: &mut MatchingContext<Self::Impl>,
) -> bool {
self.element.match_pseudo_element(pseudo_element, context)
}
fn is_link(&self) -> bool {
self.element.is_link()
}
fn opaque(&self) -> OpaqueElement {
self.element.opaque()
}
fn parent_element(&self) -> Option<Self> {
self.element.parent_element()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
fn first_child_element(&self) -> Option<Self> {
self.element.first_child_element()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
fn last_child_element(&self) -> Option<Self> {
self.element.last_child_element()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
fn prev_sibling_element(&self) -> Option<Self> |
fn next_sibling_element(&self) -> Option<Self> {
self.element.next_sibling_element()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
#[inline]
fn is_html_element_in_html_document(&self) -> bool {
self.element.is_html_element_in_html_document()
}
#[inline]
fn is_html_slot_element(&self) -> bool {
self.element.is_html_slot_element()
}
#[inline]
fn local_name(&self) -> &<Self::Impl as ::selectors::SelectorImpl>::BorrowedLocalName {
self.element.local_name()
}
#[inline]
fn namespace(&self) -> &<Self::Impl as ::selectors::SelectorImpl>::BorrowedNamespaceUrl {
self.element.namespace()
}
fn attr_matches(
&self,
ns: &NamespaceConstraint<&Namespace>,
local_name: &LocalName,
operation: &AttrSelectorOperation<&AttrValue>,
) -> bool {
match self.snapshot() {
Some(snapshot) if snapshot.has_attrs() => {
snapshot.attr_matches(ns, local_name, operation)
}
_ => self.element.attr_matches(ns, local_name, operation)
}
}
fn has_id(&self, id: &Atom, case_sensitivity: CaseSensitivity) -> bool {
match self.snapshot() {
Some(snapshot) if snapshot.has_attrs() => {
snapshot.id_attr().map_or(false, |atom| case_sensitivity.eq_atom(&atom, id))
}
_ => self.element.has_id(id, case_sensitivity)
}
}
fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool {
match self.snapshot() {
Some(snapshot) if snapshot.has_attrs() => {
snapshot.has_class(name, case_sensitivity)
}
_ => self.element.has_class(name, case_sensitivity)
}
}
fn is_empty(&self) -> bool {
self.element.is_empty()
}
fn is_root(&self) -> bool {
self.element.is_root()
}
fn pseudo_element_originating_element(&self) -> Option<Self> {
self.element.pseudo_element_originating_element()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
fn assigned_slot(&self) -> Option<Self> {
self.element.assigned_slot()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
fn blocks_ancestor_combinators(&self) -> bool {
self.element.blocks_ancestor_combinators()
}
}
| {
self.element.prev_sibling_element()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
} | identifier_body |
element_wrapper.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A wrapper over an element and a snapshot, that allows us to selector-match
//! against a past state of the element.
use {Atom, CaseSensitivityExt, LocalName, Namespace, WeakAtom};
use dom::TElement;
use element_state::ElementState;
use selector_parser::{NonTSPseudoClass, PseudoElement, SelectorImpl, Snapshot, SnapshotMap, AttrValue};
use selectors::{Element, OpaqueElement};
use selectors::attr::{AttrSelectorOperation, CaseSensitivity, NamespaceConstraint};
use selectors::matching::{ElementSelectorFlags, MatchingContext};
use std::cell::Cell;
use std::fmt;
/// In order to compute restyle hints, we perform a selector match against a
/// list of partial selectors whose rightmost simple selector may be sensitive
/// to the thing being changed. We do this matching twice, once for the element
/// as it exists now and once for the element as it existed at the time of the
/// last restyle. If the results of the selector match differ, that means that
/// the given partial selector is sensitive to the change, and we compute a
/// restyle hint based on its combinator.
///
/// In order to run selector matching against the old element state, we generate
/// a wrapper for the element which claims to have the old state. This is the
/// ElementWrapper logic below.
///
/// Gecko does this differently for element states, and passes a mask called
/// mStateMask, which indicates the states that need to be ignored during
/// selector matching. This saves an ElementWrapper allocation and an additional
/// selector match call at the expense of additional complexity inside the
/// selector matching logic. This only works for boolean states though, so we
/// still need to take the ElementWrapper approach for attribute-dependent
/// style. So we do it the same both ways for now to reduce complexity, but it's
/// worth measuring the performance impact (if any) of the mStateMask approach.
pub trait ElementSnapshot: Sized {
    /// The state of the snapshot, if any.
    fn state(&self) -> Option<ElementState>;

    /// If this snapshot contains attribute information.
    fn has_attrs(&self) -> bool;

    /// The ID attribute per this snapshot. Should only be called if
    /// `has_attrs()` returns true.
    fn id_attr(&self) -> Option<&WeakAtom>;

    /// Whether this snapshot contains the class `name`. Should only be called
    /// if `has_attrs()` returns true.
    fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool;

    /// A callback that should be called for each class of the snapshot. Should
    /// only be called if `has_attrs()` returns true.
    ///
    /// (The parameter is named here; anonymous trait-method parameters are a
    /// deprecated 2015-edition style and are rejected in Rust 2018.)
    fn each_class<F>(&self, callback: F)
    where
        F: FnMut(&Atom);

    /// The `xml:lang=""` or `lang=""` attribute value per this snapshot.
    fn lang_attr(&self) -> Option<AttrValue>;
}
/// A simple wrapper over an element and a snapshot, that allows us to
/// selector-match against a past state of the element.
#[derive(Clone)]
pub struct ElementWrapper<'a, E>
where
    E: TElement,
{
    /// The live (post-change) element being wrapped.
    element: E,
    /// Lazily-populated cache of the snapshot lookup, so repeated
    /// `snapshot()` calls only pay for the map lookup once.
    cached_snapshot: Cell<Option<&'a Snapshot>>,
    /// Map from elements to their pre-change snapshots.
    snapshot_map: &'a SnapshotMap,
}
impl<'a, E> ElementWrapper<'a, E>
where
    E: TElement,
{
    /// Trivially constructs an `ElementWrapper`.
    pub fn new(el: E, snapshot_map: &'a SnapshotMap) -> Self {
        ElementWrapper {
            element: el,
            cached_snapshot: Cell::new(None),
            // Field-init shorthand (clippy: redundant_field_names).
            snapshot_map,
        }
    }

    /// Gets the snapshot associated with this element, if any.
    ///
    /// The result of the map lookup is memoized in `cached_snapshot`, so only
    /// the first call pays for the hash lookup.
    pub fn snapshot(&self) -> Option<&'a Snapshot> {
        if !self.element.has_snapshot() {
            return None;
        }

        // Fast path: we already resolved the snapshot earlier.
        if let Some(s) = self.cached_snapshot.get() {
            return Some(s);
        }

        let snapshot = self.snapshot_map.get(&self.element);
        debug_assert!(snapshot.is_some(), "has_snapshot lied!");
        self.cached_snapshot.set(snapshot);

        snapshot
    }

    /// Returns the states that have changed since the element was snapshotted.
    pub fn state_changes(&self) -> ElementState {
        let snapshot = match self.snapshot() {
            Some(s) => s,
            None => return ElementState::empty(),
        };

        // XOR yields exactly the bits that differ between then and now.
        match snapshot.state() {
            Some(state) => state ^ self.element.state(),
            None => ElementState::empty(),
        }
    }

    /// Returns the value of the `xml:lang=""` (or, if appropriate, `lang=""`)
    /// attribute from this element's snapshot or the closest ancestor
    /// element snapshot with the attribute specified.
    fn get_lang(&self) -> Option<AttrValue> {
        let mut current = self.clone();
        loop {
            // BUGFIX: consult the snapshot of the element currently being
            // inspected (`current`), not `self`. The original checked
            // `self.snapshot()` on every iteration, so once `self` had an
            // attribute-bearing snapshot without a lang attribute, ancestor
            // lang attributes were never consulted and the loop walked to the
            // root returning None — contradicting this function's contract.
            let lang = match current.snapshot() {
                Some(snapshot) if snapshot.has_attrs() => snapshot.lang_attr(),
                _ => current.element.lang_attr(),
            };
            if lang.is_some() {
                return lang;
            }
            current = current.parent_element()?;
        }
    }
}
impl<'a, E> fmt::Debug for ElementWrapper<'a, E>
where
    E: TElement,
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Only the wrapped element is printed; the snapshot bookkeeping
        // fields are deliberately omitted. Can be extended later if needed.
        fmt::Debug::fmt(&self.element, f)
    }
}
impl<'a, E> Element for ElementWrapper<'a, E>
where
E: TElement,
{
type Impl = SelectorImpl;
    /// Matches a non-tree-structural pseudo-class against the *snapshotted*
    /// (pre-change) state where a snapshot is available, falling back to the
    /// live element otherwise.
    fn match_non_ts_pseudo_class<F>(
        &self,
        pseudo_class: &NonTSPseudoClass,
        context: &mut MatchingContext<Self::Impl>,
        _setter: &mut F,
    ) -> bool
    where
        F: FnMut(&Self, ElementSelectorFlags),
    {
        // Some pseudo-classes need special handling to evaluate them against
        // the snapshot.
        match *pseudo_class {
            // :-moz-any() recurses: each inner selector is matched against
            // this same wrapper so snapshots stay in effect.
            #[cfg(feature = "gecko")]
            NonTSPseudoClass::MozAny(ref selectors) => {
                use selectors::matching::matches_complex_selector;
                return context.nest(|context| {
                    selectors.iter().any(|s| {
                        matches_complex_selector(s.iter(), self, context, _setter)
                    })
                });
            }

            // :dir is implemented in terms of state flags, but which state flag
            // it maps to depends on the argument to :dir. That means we can't
            // just add its state flags to the NonTSPseudoClass, because if we
            // added all of them there, and tested via intersects() here, we'd
            // get incorrect behavior for :not(:dir()) cases.
            //
            // FIXME(bz): How can I set this up so once Servo adds :dir()
            // support we don't forget to update this code?
            #[cfg(feature = "gecko")]
            NonTSPseudoClass::Dir(ref dir) => {
                use invalidation::element::invalidation_map::dir_selector_to_state;
                let selector_flag = dir_selector_to_state(dir);
                if selector_flag.is_empty() {
                    // :dir() with some random argument; does not match.
                    return false;
                }
                // Prefer the snapshotted state bits when present.
                let state = match self.snapshot().and_then(|s| s.state()) {
                    Some(snapshot_state) => snapshot_state,
                    None => self.element.state(),
                };
                return state.contains(selector_flag);
            }

            // For :link and :visited, we don't actually want to test the
            // element state directly.
            //
            // Instead, we use the `visited_handling` to determine if they
            // match.
            NonTSPseudoClass::Link => {
                return self.is_link() && context.visited_handling().matches_unvisited()
            }
            NonTSPseudoClass::Visited => {
                return self.is_link() && context.visited_handling().matches_visited()
            }

            #[cfg(feature = "gecko")]
            NonTSPseudoClass::MozTableBorderNonzero => {
                if let Some(snapshot) = self.snapshot() {
                    if snapshot.has_other_pseudo_class_state() {
                        return snapshot.mIsTableBorderNonzero();
                    }
                }
            }

            #[cfg(feature = "gecko")]
            NonTSPseudoClass::MozBrowserFrame => {
                if let Some(snapshot) = self.snapshot() {
                    if snapshot.has_other_pseudo_class_state() {
                        return snapshot.mIsMozBrowserFrame();
                    }
                }
            }

            // :lang() needs to match using the closest ancestor xml:lang="" or
            // lang="" attribute from snapshots.
            NonTSPseudoClass::Lang(ref lang_arg) => {
                return self.element.match_element_lang(Some(self.get_lang()), lang_arg);
            }

            _ => {}
        }

        // Everything else is driven purely by element-state bits: match the
        // snapshotted state when one exists, otherwise defer to the element.
        let flag = pseudo_class.state_flag();
        if flag.is_empty() {
            return self.element.match_non_ts_pseudo_class(
                pseudo_class,
                context,
                &mut |_, _| {},
            )
        }
        match self.snapshot().and_then(|s| s.state()) {
            Some(snapshot_state) => snapshot_state.intersects(flag),
            None => {
                self.element.match_non_ts_pseudo_class(
                    pseudo_class,
                    context,
                    &mut |_, _| {},
                )
            }
        }
    }
    /// Pseudo-elements are unaffected by snapshots; forward to the element.
    fn match_pseudo_element(
        &self,
        pseudo_element: &PseudoElement,
        context: &mut MatchingContext<Self::Impl>,
    ) -> bool {
        self.element.match_pseudo_element(pseudo_element, context)
    }
    /// Forwarded to the wrapped element.
    fn is_link(&self) -> bool {
        self.element.is_link()
    }
    /// Forwarded to the wrapped element.
    fn opaque(&self) -> OpaqueElement {
        self.element.opaque()
    }
    /// Forwarded to the wrapped element; the result is re-wrapped with the
    /// same snapshot map so ancestor matching also sees snapshotted state.
    fn parent_element(&self) -> Option<Self> {
        self.element.parent_element()
            .map(|e| ElementWrapper::new(e, self.snapshot_map))
    }
    /// Forwarded to the wrapped element; the result is re-wrapped with the
    /// same snapshot map.
    fn first_child_element(&self) -> Option<Self> {
        self.element.first_child_element()
            .map(|e| ElementWrapper::new(e, self.snapshot_map))
    }
fn | (&self) -> Option<Self> {
self.element.last_child_element()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
    /// Forwarded to the wrapped element; the result is re-wrapped with the
    /// same snapshot map.
    fn prev_sibling_element(&self) -> Option<Self> {
        self.element.prev_sibling_element()
            .map(|e| ElementWrapper::new(e, self.snapshot_map))
    }
    /// Forwarded to the wrapped element; the result is re-wrapped with the
    /// same snapshot map.
    fn next_sibling_element(&self) -> Option<Self> {
        self.element.next_sibling_element()
            .map(|e| ElementWrapper::new(e, self.snapshot_map))
    }
    /// Forwarded to the wrapped element.
    #[inline]
    fn is_html_element_in_html_document(&self) -> bool {
        self.element.is_html_element_in_html_document()
    }
    /// Forwarded to the wrapped element.
    #[inline]
    fn is_html_slot_element(&self) -> bool {
        self.element.is_html_slot_element()
    }
    /// Forwarded to the wrapped element.
    #[inline]
    fn local_name(&self) -> &<Self::Impl as ::selectors::SelectorImpl>::BorrowedLocalName {
        self.element.local_name()
    }
    /// Forwarded to the wrapped element.
    #[inline]
    fn namespace(&self) -> &<Self::Impl as ::selectors::SelectorImpl>::BorrowedNamespaceUrl {
        self.element.namespace()
    }
    /// Attribute selectors match against the snapshotted attributes when the
    /// snapshot recorded any, and against the live element otherwise.
    fn attr_matches(
        &self,
        ns: &NamespaceConstraint<&Namespace>,
        local_name: &LocalName,
        operation: &AttrSelectorOperation<&AttrValue>,
    ) -> bool {
        match self.snapshot() {
            Some(snapshot) if snapshot.has_attrs() => {
                snapshot.attr_matches(ns, local_name, operation)
            }
            _ => self.element.attr_matches(ns, local_name, operation)
        }
    }
    /// ID matching against the snapshotted `id` attribute when attributes
    /// were recorded, otherwise against the live element.
    fn has_id(&self, id: &Atom, case_sensitivity: CaseSensitivity) -> bool {
        match self.snapshot() {
            Some(snapshot) if snapshot.has_attrs() => {
                snapshot.id_attr().map_or(false, |atom| case_sensitivity.eq_atom(&atom, id))
            }
            _ => self.element.has_id(id, case_sensitivity)
        }
    }
    /// Class matching against the snapshotted class list when attributes
    /// were recorded, otherwise against the live element.
    fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool {
        match self.snapshot() {
            Some(snapshot) if snapshot.has_attrs() => {
                snapshot.has_class(name, case_sensitivity)
            }
            _ => self.element.has_class(name, case_sensitivity)
        }
    }
    /// Forwarded to the wrapped element.
    fn is_empty(&self) -> bool {
        self.element.is_empty()
    }
    /// Forwarded to the wrapped element.
    fn is_root(&self) -> bool {
        self.element.is_root()
    }
    /// Forwarded to the wrapped element; the result is re-wrapped with the
    /// same snapshot map.
    fn pseudo_element_originating_element(&self) -> Option<Self> {
        self.element.pseudo_element_originating_element()
            .map(|e| ElementWrapper::new(e, self.snapshot_map))
    }
    /// Forwarded to the wrapped element; the result is re-wrapped with the
    /// same snapshot map.
    fn assigned_slot(&self) -> Option<Self> {
        self.element.assigned_slot()
            .map(|e| ElementWrapper::new(e, self.snapshot_map))
    }
    /// Forwarded to the wrapped element.
    fn blocks_ancestor_combinators(&self) -> bool {
        self.element.blocks_ancestor_combinators()
    }
}
| last_child_element | identifier_name |
element_wrapper.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A wrapper over an element and a snapshot, that allows us to selector-match
//! against a past state of the element.
use {Atom, CaseSensitivityExt, LocalName, Namespace, WeakAtom};
use dom::TElement;
use element_state::ElementState;
use selector_parser::{NonTSPseudoClass, PseudoElement, SelectorImpl, Snapshot, SnapshotMap, AttrValue};
use selectors::{Element, OpaqueElement};
use selectors::attr::{AttrSelectorOperation, CaseSensitivity, NamespaceConstraint};
use selectors::matching::{ElementSelectorFlags, MatchingContext};
use std::cell::Cell;
use std::fmt;
/// In order to compute restyle hints, we perform a selector match against a
/// list of partial selectors whose rightmost simple selector may be sensitive
/// to the thing being changed. We do this matching twice, once for the element
/// as it exists now and once for the element as it existed at the time of the
/// last restyle. If the results of the selector match differ, that means that
/// the given partial selector is sensitive to the change, and we compute a
/// restyle hint based on its combinator.
///
/// In order to run selector matching against the old element state, we generate
/// a wrapper for the element which claims to have the old state. This is the
/// ElementWrapper logic below.
///
/// Gecko does this differently for element states, and passes a mask called
/// mStateMask, which indicates the states that need to be ignored during
/// selector matching. This saves an ElementWrapper allocation and an additional
/// selector match call at the expense of additional complexity inside the
/// selector matching logic. This only works for boolean states though, so we
/// still need to take the ElementWrapper approach for attribute-dependent
/// style. So we do it the same both ways for now to reduce complexity, but it's
/// worth measuring the performance impact (if any) of the mStateMask approach.
pub trait ElementSnapshot : Sized {
/// The state of the snapshot, if any.
fn state(&self) -> Option<ElementState>;
/// If this snapshot contains attribute information.
fn has_attrs(&self) -> bool;
/// The ID attribute per this snapshot. Should only be called if
/// `has_attrs()` returns true.
fn id_attr(&self) -> Option<&WeakAtom>;
/// Whether this snapshot contains the class `name`. Should only be called
/// if `has_attrs()` returns true.
fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool;
/// A callback that should be called for each class of the snapshot. Should
/// only be called if `has_attrs()` returns true.
fn each_class<F>(&self, F)
where
F: FnMut(&Atom);
/// The `xml:lang=""` or `lang=""` attribute value per this snapshot.
fn lang_attr(&self) -> Option<AttrValue>;
}
/// A simple wrapper over an element and a snapshot, that allows us to
/// selector-match against a past state of the element.
#[derive(Clone)]
pub struct ElementWrapper<'a, E>
where
E: TElement,
{
element: E,
cached_snapshot: Cell<Option<&'a Snapshot>>,
snapshot_map: &'a SnapshotMap,
}
impl<'a, E> ElementWrapper<'a, E>
where
E: TElement,
{
/// Trivially constructs an `ElementWrapper`.
pub fn new(el: E, snapshot_map: &'a SnapshotMap) -> Self {
ElementWrapper {
element: el,
cached_snapshot: Cell::new(None),
snapshot_map: snapshot_map,
}
}
/// Gets the snapshot associated with this element, if any.
pub fn snapshot(&self) -> Option<&'a Snapshot> {
if!self.element.has_snapshot() {
return None;
}
| return Some(s);
}
let snapshot = self.snapshot_map.get(&self.element);
debug_assert!(snapshot.is_some(), "has_snapshot lied!");
self.cached_snapshot.set(snapshot);
snapshot
}
/// Returns the states that have changed since the element was snapshotted.
pub fn state_changes(&self) -> ElementState {
let snapshot = match self.snapshot() {
Some(s) => s,
None => return ElementState::empty(),
};
match snapshot.state() {
Some(state) => state ^ self.element.state(),
None => ElementState::empty(),
}
}
/// Returns the value of the `xml:lang=""` (or, if appropriate, `lang=""`)
/// attribute from this element's snapshot or the closest ancestor
/// element snapshot with the attribute specified.
fn get_lang(&self) -> Option<AttrValue> {
let mut current = self.clone();
loop {
let lang = match self.snapshot() {
Some(snapshot) if snapshot.has_attrs() => snapshot.lang_attr(),
_ => current.element.lang_attr(),
};
if lang.is_some() {
return lang;
}
current = current.parent_element()?;
}
}
}
impl<'a, E> fmt::Debug for ElementWrapper<'a, E>
where
E: TElement,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Ignore other fields for now, can change later if needed.
self.element.fmt(f)
}
}
impl<'a, E> Element for ElementWrapper<'a, E>
where
E: TElement,
{
type Impl = SelectorImpl;
fn match_non_ts_pseudo_class<F>(
&self,
pseudo_class: &NonTSPseudoClass,
context: &mut MatchingContext<Self::Impl>,
_setter: &mut F,
) -> bool
where
F: FnMut(&Self, ElementSelectorFlags),
{
// Some pseudo-classes need special handling to evaluate them against
// the snapshot.
match *pseudo_class {
#[cfg(feature = "gecko")]
NonTSPseudoClass::MozAny(ref selectors) => {
use selectors::matching::matches_complex_selector;
return context.nest(|context| {
selectors.iter().any(|s| {
matches_complex_selector(s.iter(), self, context, _setter)
})
});
}
// :dir is implemented in terms of state flags, but which state flag
// it maps to depends on the argument to :dir. That means we can't
// just add its state flags to the NonTSPseudoClass, because if we
// added all of them there, and tested via intersects() here, we'd
// get incorrect behavior for :not(:dir()) cases.
//
// FIXME(bz): How can I set this up so once Servo adds :dir()
// support we don't forget to update this code?
#[cfg(feature = "gecko")]
NonTSPseudoClass::Dir(ref dir) => {
use invalidation::element::invalidation_map::dir_selector_to_state;
let selector_flag = dir_selector_to_state(dir);
if selector_flag.is_empty() {
// :dir() with some random argument; does not match.
return false;
}
let state = match self.snapshot().and_then(|s| s.state()) {
Some(snapshot_state) => snapshot_state,
None => self.element.state(),
};
return state.contains(selector_flag);
}
// For :link and :visited, we don't actually want to test the
// element state directly.
//
// Instead, we use the `visited_handling` to determine if they
// match.
NonTSPseudoClass::Link => {
return self.is_link() && context.visited_handling().matches_unvisited()
}
NonTSPseudoClass::Visited => {
return self.is_link() && context.visited_handling().matches_visited()
}
#[cfg(feature = "gecko")]
NonTSPseudoClass::MozTableBorderNonzero => {
if let Some(snapshot) = self.snapshot() {
if snapshot.has_other_pseudo_class_state() {
return snapshot.mIsTableBorderNonzero();
}
}
}
#[cfg(feature = "gecko")]
NonTSPseudoClass::MozBrowserFrame => {
if let Some(snapshot) = self.snapshot() {
if snapshot.has_other_pseudo_class_state() {
return snapshot.mIsMozBrowserFrame();
}
}
}
// :lang() needs to match using the closest ancestor xml:lang="" or
// lang="" attribtue from snapshots.
NonTSPseudoClass::Lang(ref lang_arg) => {
return self.element.match_element_lang(Some(self.get_lang()), lang_arg);
}
_ => {}
}
let flag = pseudo_class.state_flag();
if flag.is_empty() {
return self.element.match_non_ts_pseudo_class(
pseudo_class,
context,
&mut |_, _| {},
)
}
match self.snapshot().and_then(|s| s.state()) {
Some(snapshot_state) => snapshot_state.intersects(flag),
None => {
self.element.match_non_ts_pseudo_class(
pseudo_class,
context,
&mut |_, _| {},
)
}
}
}
fn match_pseudo_element(
&self,
pseudo_element: &PseudoElement,
context: &mut MatchingContext<Self::Impl>,
) -> bool {
self.element.match_pseudo_element(pseudo_element, context)
}
fn is_link(&self) -> bool {
self.element.is_link()
}
fn opaque(&self) -> OpaqueElement {
self.element.opaque()
}
fn parent_element(&self) -> Option<Self> {
self.element.parent_element()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
fn first_child_element(&self) -> Option<Self> {
self.element.first_child_element()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
fn last_child_element(&self) -> Option<Self> {
self.element.last_child_element()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
fn prev_sibling_element(&self) -> Option<Self> {
self.element.prev_sibling_element()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
fn next_sibling_element(&self) -> Option<Self> {
self.element.next_sibling_element()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
#[inline]
fn is_html_element_in_html_document(&self) -> bool {
self.element.is_html_element_in_html_document()
}
#[inline]
fn is_html_slot_element(&self) -> bool {
self.element.is_html_slot_element()
}
#[inline]
fn local_name(&self) -> &<Self::Impl as ::selectors::SelectorImpl>::BorrowedLocalName {
self.element.local_name()
}
#[inline]
fn namespace(&self) -> &<Self::Impl as ::selectors::SelectorImpl>::BorrowedNamespaceUrl {
self.element.namespace()
}
fn attr_matches(
&self,
ns: &NamespaceConstraint<&Namespace>,
local_name: &LocalName,
operation: &AttrSelectorOperation<&AttrValue>,
) -> bool {
match self.snapshot() {
Some(snapshot) if snapshot.has_attrs() => {
snapshot.attr_matches(ns, local_name, operation)
}
_ => self.element.attr_matches(ns, local_name, operation)
}
}
fn has_id(&self, id: &Atom, case_sensitivity: CaseSensitivity) -> bool {
match self.snapshot() {
Some(snapshot) if snapshot.has_attrs() => {
snapshot.id_attr().map_or(false, |atom| case_sensitivity.eq_atom(&atom, id))
}
_ => self.element.has_id(id, case_sensitivity)
}
}
fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool {
match self.snapshot() {
Some(snapshot) if snapshot.has_attrs() => {
snapshot.has_class(name, case_sensitivity)
}
_ => self.element.has_class(name, case_sensitivity)
}
}
fn is_empty(&self) -> bool {
self.element.is_empty()
}
fn is_root(&self) -> bool {
self.element.is_root()
}
fn pseudo_element_originating_element(&self) -> Option<Self> {
self.element.pseudo_element_originating_element()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
fn assigned_slot(&self) -> Option<Self> {
self.element.assigned_slot()
.map(|e| ElementWrapper::new(e, self.snapshot_map))
}
fn blocks_ancestor_combinators(&self) -> bool {
self.element.blocks_ancestor_combinators()
}
} | if let Some(s) = self.cached_snapshot.get() { | random_line_split |
lib.rs | #![deny(missing_debug_implementations)]
use janus_plugin_sys as ffi;
use bitflags::bitflags;
pub use debug::LogLevel;
pub use debug::log;
pub use jansson::{JanssonDecodingFlags, JanssonEncodingFlags, JanssonValue, RawJanssonValue};
pub use session::SessionWrapper;
pub use ffi::events::janus_eventhandler as EventHandler;
pub use ffi::plugin::janus_callbacks as PluginCallbacks;
pub use ffi::plugin::janus_plugin as Plugin;
pub use ffi::plugin::janus_plugin_result as RawPluginResult;
pub use ffi::plugin::janus_plugin_session as PluginSession;
pub use ffi::plugin::janus_plugin_rtp as PluginRtpPacket;
pub use ffi::plugin::janus_plugin_rtcp as PluginRtcpPacket;
pub use ffi::plugin::janus_plugin_data as PluginDataPacket;
pub use ffi::plugin::janus_plugin_rtp_extensions as PluginRtpExtensions;
use ffi::plugin::janus_plugin_result_type as PluginResultType;
use std::error::Error;
use std::fmt;
use std::ffi::CStr;
use std::mem;
use std::ops::Deref;
use std::os::raw::{c_char, c_int};
use std::ptr;
pub mod debug;
pub mod rtcp;
pub mod sdp;
pub mod session;
pub mod jansson;
pub mod utils;
pub mod refcount;
bitflags! {
    /// Flags that control which events an event handler receives.
    ///
    /// Presumably mirrors the `JANUS_EVENT_TYPE_*` constants of the Janus C
    /// API (note bit 2 is unused, which is why JSEP starts at bit 3) —
    /// TODO(review): confirm against the upstream headers.
    pub struct JanusEventType: u32 {
        const JANUS_EVENT_TYPE_SESSION = 1 << 0;
        const JANUS_EVENT_TYPE_HANDLE = 1 << 1;
        const JANUS_EVENT_TYPE_JSEP = 1 << 3; // yes, really
        const JANUS_EVENT_TYPE_WEBRTC = 1 << 4;
        const JANUS_EVENT_TYPE_MEDIA = 1 << 5;
        const JANUS_EVENT_TYPE_PLUGIN = 1 << 6;
        const JANUS_EVENT_TYPE_TRANSPORT = 1 << 7;
        const JANUS_EVENT_TYPE_CORE = 1 << 8;
    }
}
/// An error emitted by the Janus core in response to a plugin pushing an event.
#[derive(Debug, Clone, Copy)]
pub struct JanusError {
    /// Raw Janus error code; nonzero when produced by `JanusError::from`.
    pub code: i32
}
/// A result from pushing an event to Janus core.
pub type JanusResult = Result<(), JanusError>;
impl JanusError {
/// Returns Janus's description text for this error.
pub fn to_cstr(self) -> &'static CStr {
unsafe { CStr::from_ptr(ffi::janus_get_api_error(self.code)) }
}
/// Converts a Janus result code to either success or a potential error.
pub fn | (val: i32) -> JanusResult {
match val {
0 => Ok(()),
e => Err(JanusError { code: e })
}
}
}
// `JanusError` implements `Debug` and `Display`, so the default methods of
// `std::error::Error` suffice; no overrides are needed.
impl Error for JanusError {}
impl fmt::Display for JanusError {
    /// Formats as `"<description> (code: <code>)"`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Use `to_string_lossy` rather than `to_str().unwrap()`: a `Display`
        // implementation should not be able to panic just because the C
        // error string is not valid UTF-8.
        write!(f, "{} (code: {})", self.to_cstr().to_string_lossy(), self.code)
    }
}
/// A Janus plugin result; what a plugin returns to the gateway as a direct response to a signalling message.
#[derive(Debug)]
pub struct PluginResult {
    /// Owned raw result; freed in `Drop` via `janus_plugin_result_destroy`
    /// unless relinquished with `into_raw`.
    ptr: *mut RawPluginResult,
}
impl PluginResult {
    /// Creates a new plugin result.
    ///
    /// # Safety
    ///
    /// The raw pointers are forwarded to `janus_plugin_result_new` untouched.
    /// NOTE(review): `text` presumably must be null or a NUL-terminated string
    /// outliving the result, and `content` null or an owned Jansson value whose
    /// ownership transfers to the result — confirm against the Janus C API.
    pub unsafe fn new(type_: PluginResultType, text: *const c_char, content: *mut RawJanssonValue) -> Self {
        Self { ptr: ffi::plugin::janus_plugin_result_new(type_, text, content) }
    }

    /// Creates a plugin result indicating a synchronously successful request. The provided response
    /// JSON will be passed back to the client.
    pub fn ok(response: JanssonValue) -> Self {
        // `into_raw` transfers ownership of the JSON value to the result.
        unsafe { Self::new(PluginResultType::JANUS_PLUGIN_OK, ptr::null(), response.into_raw()) }
    }

    /// Creates a plugin result indicating an asynchronous request in progress. If provided, the hint text
    /// will be synchronously passed back to the client in the acknowledgement.
    pub fn ok_wait(hint: Option<&'static CStr>) -> Self {
        // The 'static bound keeps the borrowed pointer valid for however long
        // the C core may hold it.
        let hint_ptr = hint.map(|x| x.as_ptr()).unwrap_or_else(ptr::null);
        unsafe { Self::new(PluginResultType::JANUS_PLUGIN_OK_WAIT, hint_ptr, ptr::null_mut()) }
    }

    /// Creates a plugin result indicating an error. The provided error text will be synchronously passed
    /// back to the client.
    pub fn error(msg: &'static CStr) -> Self {
        unsafe { Self::new(PluginResultType::JANUS_PLUGIN_ERROR, msg.as_ptr(), ptr::null_mut()) }
    }

    /// Transfers ownership of this result to the wrapped raw pointer. The consumer is responsible for calling
    /// `janus_plugin_result_destroy` on the pointer when finished.
    pub fn into_raw(self) -> *mut RawPluginResult {
        let ptr = self.ptr;
        // Skip `Drop` so the result is not destroyed while the caller owns it.
        mem::forget(self);
        ptr
    }
}
impl Deref for PluginResult {
    type Target = RawPluginResult;

    /// Borrows the underlying raw result.
    /// NOTE(review): assumes `janus_plugin_result_new` never returns null —
    /// confirm, otherwise this dereference is unsound.
    fn deref(&self) -> &RawPluginResult {
        unsafe { &*self.ptr }
    }
}
impl Drop for PluginResult {
    fn drop(&mut self) {
        // Frees the underlying C result; `into_raw` bypasses this by
        // `mem::forget`ting the wrapper.
        unsafe { ffi::plugin::janus_plugin_result_destroy(self.ptr) }
    }
}
// SAFETY: `PluginResult` holds an exclusively-owned raw pointer.
// NOTE(review): this also assumes the `janus_plugin_result_*` functions may
// be called from any thread — confirm against the Janus core's threading
// rules before relying on cross-thread use.
unsafe impl Send for PluginResult {}
#[derive(Debug)]
/// Represents metadata about this library which Janus can query at runtime.
pub struct LibraryMetadata<'a> {
    /// Janus plugin API version this library targets.
    pub api_version: c_int,
    /// Numeric version of the plugin itself.
    pub version: c_int,
    /// Human-readable version string.
    pub version_str: &'a CStr,
    /// Human-readable description.
    pub description: &'a CStr,
    /// Human-readable plugin name.
    pub name: &'a CStr,
    /// Author string.
    pub author: &'a CStr,
    /// Package identifier string.
    pub package: &'a CStr,
}
/// Helper macro to produce a Janus plugin instance. Should be called with
/// a `LibraryMetadata` instance and a series of exported plugin callbacks.
#[macro_export]
macro_rules! build_plugin {
    ($md:expr, $($cb:ident),*) => {{
        // Each metadata accessor Janus expects is synthesized as a tiny
        // `extern "C"` shim reading from the provided `LibraryMetadata`.
        extern "C" fn get_api_compatibility() -> c_int { $md.api_version }
        extern "C" fn get_version() -> c_int { $md.version }
        extern "C" fn get_version_string() -> *const c_char { $md.version_str.as_ptr() }
        extern "C" fn get_description() -> *const c_char { $md.description.as_ptr() }
        extern "C" fn get_name() -> *const c_char { $md.name.as_ptr() }
        extern "C" fn get_author() -> *const c_char { $md.author.as_ptr() }
        extern "C" fn get_package() -> *const c_char { $md.package.as_ptr() }
        $crate::Plugin {
            get_api_compatibility,
            get_version,
            get_version_string,
            get_description,
            get_name,
            get_author,
            get_package,
            // Caller-supplied plugin callbacks fill the remaining fields.
            $($cb,)*
        }
    }}
}
/// Macro to export a Janus plugin instance from this module.
///
/// Expands to an unmangled `create` symbol — presumably the entry point the
/// Janus core looks up after loading the shared library.
#[macro_export]
macro_rules! export_plugin {
    ($pl:expr) => {
        /// Called by Janus to create an instance of this plugin, using the provided callbacks to dispatch events.
        #[no_mangle]
        pub extern "C" fn create() -> *const $crate::Plugin { $pl }
    }
}
/// Helper macro to produce a Janus event handler instance. Should be called with
/// a `LibraryMetadata` instance and a series of exported event handler callbacks.
#[macro_export]
macro_rules! build_eventhandler {
    ($md:expr, $mask:expr, $($cb:ident),*) => {{
        // Same shape as `build_plugin!`, plus the `events_mask` selecting
        // which `JanusEventType`s this handler wants to receive.
        extern "C" fn get_api_compatibility() -> c_int { $md.api_version }
        extern "C" fn get_version() -> c_int { $md.version }
        extern "C" fn get_version_string() -> *const c_char { $md.version_str.as_ptr() }
        extern "C" fn get_description() -> *const c_char { $md.description.as_ptr() }
        extern "C" fn get_name() -> *const c_char { $md.name.as_ptr() }
        extern "C" fn get_author() -> *const c_char { $md.author.as_ptr() }
        extern "C" fn get_package() -> *const c_char { $md.package.as_ptr() }
        $crate::EventHandler {
            events_mask: $mask,
            get_api_compatibility,
            get_version,
            get_version_string,
            get_description,
            get_name,
            get_author,
            get_package,
            $($cb,)*
        }
    }}
}
/// Macro to export a Janus event handler instance from this module.
///
/// Expands to an unmangled `create` symbol — presumably the entry point the
/// Janus core looks up after loading the shared library.
#[macro_export]
macro_rules! export_eventhandler {
    ($evh:expr) => {
        /// Called by Janus to create an instance of this event handler, using the provided callbacks to dispatch events.
        #[no_mangle]
        pub extern "C" fn create() -> *const $crate::EventHandler { $evh }
    }
}
| from | identifier_name |
lib.rs | #![deny(missing_debug_implementations)]
use janus_plugin_sys as ffi;
use bitflags::bitflags;
pub use debug::LogLevel;
pub use debug::log;
pub use jansson::{JanssonDecodingFlags, JanssonEncodingFlags, JanssonValue, RawJanssonValue};
pub use session::SessionWrapper;
pub use ffi::events::janus_eventhandler as EventHandler;
pub use ffi::plugin::janus_callbacks as PluginCallbacks;
pub use ffi::plugin::janus_plugin as Plugin;
pub use ffi::plugin::janus_plugin_result as RawPluginResult;
pub use ffi::plugin::janus_plugin_session as PluginSession;
pub use ffi::plugin::janus_plugin_rtp as PluginRtpPacket;
pub use ffi::plugin::janus_plugin_rtcp as PluginRtcpPacket;
pub use ffi::plugin::janus_plugin_data as PluginDataPacket;
pub use ffi::plugin::janus_plugin_rtp_extensions as PluginRtpExtensions;
use ffi::plugin::janus_plugin_result_type as PluginResultType;
use std::error::Error;
use std::fmt;
use std::ffi::CStr;
use std::mem;
use std::ops::Deref;
use std::os::raw::{c_char, c_int};
use std::ptr;
pub mod debug;
pub mod rtcp;
pub mod sdp;
pub mod session;
pub mod jansson;
pub mod utils;
pub mod refcount;
bitflags! {
/// Flags that control which events an event handler receives.
pub struct JanusEventType: u32 {
const JANUS_EVENT_TYPE_SESSION = 1 << 0;
const JANUS_EVENT_TYPE_HANDLE = 1 << 1;
const JANUS_EVENT_TYPE_JSEP = 1 << 3; // yes, really
const JANUS_EVENT_TYPE_WEBRTC = 1 << 4;
const JANUS_EVENT_TYPE_MEDIA = 1 << 5;
const JANUS_EVENT_TYPE_PLUGIN = 1 << 6;
const JANUS_EVENT_TYPE_TRANSPORT = 1 << 7;
const JANUS_EVENT_TYPE_CORE = 1 << 8;
}
}
/// An error emitted by the Janus core in response to a plugin pushing an event.
#[derive(Debug, Clone, Copy)]
pub struct JanusError {
pub code: i32
}
/// A result from pushing an event to Janus core.
pub type JanusResult = Result<(), JanusError>;
impl JanusError {
/// Returns Janus's description text for this error.
pub fn to_cstr(self) -> &'static CStr {
unsafe { CStr::from_ptr(ffi::janus_get_api_error(self.code)) }
}
/// Converts a Janus result code to either success or a potential error.
pub fn from(val: i32) -> JanusResult {
match val {
0 => Ok(()),
e => Err(JanusError { code: e })
}
}
}
impl Error for JanusError {}
impl fmt::Display for JanusError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} (code: {})", self.to_cstr().to_str().unwrap()
, self.code)
}
}
/// A Janus plugin result; what a plugin returns to the gateway as a direct response to a signalling message.
#[derive(Debug)]
pub struct PluginResult {
ptr: *mut RawPluginResult,
}
impl PluginResult {
/// Creates a new plugin result.
pub unsafe fn new(type_: PluginResultType, text: *const c_char, content: *mut RawJanssonValue) -> Self {
Self { ptr: ffi::plugin::janus_plugin_result_new(type_, text, content) }
}
/// Creates a plugin result indicating a synchronously successful request. The provided response
/// JSON will be passed back to the client.
pub fn ok(response: JanssonValue) -> Self {
unsafe { Self::new(PluginResultType::JANUS_PLUGIN_OK, ptr::null(), response.into_raw()) }
}
/// Creates a plugin result indicating an asynchronous request in progress. If provided, the hint text
/// will be synchronously passed back to the client in the acknowledgement.
pub fn ok_wait(hint: Option<&'static CStr>) -> Self {
let hint_ptr = hint.map(|x| x.as_ptr()).unwrap_or_else(ptr::null);
unsafe { Self::new(PluginResultType::JANUS_PLUGIN_OK_WAIT, hint_ptr, ptr::null_mut()) }
}
/// Creates a plugin result indicating an error. The provided error text will be synchronously passed
/// back to the client.
pub fn error(msg: &'static CStr) -> Self {
unsafe { Self::new(PluginResultType::JANUS_PLUGIN_ERROR, msg.as_ptr(), ptr::null_mut()) }
}
/// Transfers ownership of this result to the wrapped raw pointer. The consumer is responsible for calling
/// `janus_plugin_result_destroy` on the pointer when finished.
pub fn into_raw(self) -> *mut RawPluginResult |
}
impl Deref for PluginResult {
type Target = RawPluginResult;
fn deref(&self) -> &RawPluginResult {
unsafe { &*self.ptr }
}
}
impl Drop for PluginResult {
fn drop(&mut self) {
unsafe { ffi::plugin::janus_plugin_result_destroy(self.ptr) }
}
}
unsafe impl Send for PluginResult {}
#[derive(Debug)]
/// Represents metadata about this library which Janus can query at runtime.
pub struct LibraryMetadata<'a> {
pub api_version: c_int,
pub version: c_int,
pub version_str: &'a CStr,
pub description: &'a CStr,
pub name: &'a CStr,
pub author: &'a CStr,
pub package: &'a CStr,
}
/// Helper macro to produce a Janus plugin instance. Should be called with
/// a `LibraryMetadata` instance and a series of exported plugin callbacks.
#[macro_export]
macro_rules! build_plugin {
($md:expr, $($cb:ident),*) => {{
extern "C" fn get_api_compatibility() -> c_int { $md.api_version }
extern "C" fn get_version() -> c_int { $md.version }
extern "C" fn get_version_string() -> *const c_char { $md.version_str.as_ptr() }
extern "C" fn get_description() -> *const c_char { $md.description.as_ptr() }
extern "C" fn get_name() -> *const c_char { $md.name.as_ptr() }
extern "C" fn get_author() -> *const c_char { $md.author.as_ptr() }
extern "C" fn get_package() -> *const c_char { $md.package.as_ptr() }
$crate::Plugin {
get_api_compatibility,
get_version,
get_version_string,
get_description,
get_name,
get_author,
get_package,
$($cb,)*
}
}}
}
/// Macro to export a Janus plugin instance from this module.
#[macro_export]
macro_rules! export_plugin {
($pl:expr) => {
/// Called by Janus to create an instance of this plugin, using the provided callbacks to dispatch events.
#[no_mangle]
pub extern "C" fn create() -> *const $crate::Plugin { $pl }
}
}
/// Helper macro to produce a Janus event handler instance. Should be called with
/// a `LibraryMetadata` instance and a series of exported event handler callbacks.
#[macro_export]
macro_rules! build_eventhandler {
($md:expr, $mask:expr, $($cb:ident),*) => {{
extern "C" fn get_api_compatibility() -> c_int { $md.api_version }
extern "C" fn get_version() -> c_int { $md.version }
extern "C" fn get_version_string() -> *const c_char { $md.version_str.as_ptr() }
extern "C" fn get_description() -> *const c_char { $md.description.as_ptr() }
extern "C" fn get_name() -> *const c_char { $md.name.as_ptr() }
extern "C" fn get_author() -> *const c_char { $md.author.as_ptr() }
extern "C" fn get_package() -> *const c_char { $md.package.as_ptr() }
$crate::EventHandler {
events_mask: $mask,
get_api_compatibility,
get_version,
get_version_string,
get_description,
get_name,
get_author,
get_package,
$($cb,)*
}
}}
}
/// Macro to export a Janus event handler instance from this module.
#[macro_export]
macro_rules! export_eventhandler {
($evh:expr) => {
/// Called by Janus to create an instance of this event handler, using the provided callbacks to dispatch events.
#[no_mangle]
pub extern "C" fn create() -> *const $crate::EventHandler { $evh }
}
}
| {
let ptr = self.ptr;
mem::forget(self);
ptr
} | identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.