file_name (large_string, lengths 4 to 69) | prefix (large_string, lengths 0 to 26.7k) | suffix (large_string, lengths 0 to 24.8k) | middle (large_string, lengths 0 to 2.12k) | fim_type (large_string, 4 values) |
---|---|---|---|---|
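Each row below is one fill-in-the-middle (FIM) example: a `prefix` and `suffix` drawn from the named Rust file, the held-out `middle` span, and a `fim_type` naming the split strategy (`random_line_split`, `identifier_name`, `identifier_body`, or `conditional_block`); the bare `|` characters inside a row mark the column boundaries. As a rough sketch only, using placeholder sentinel strings that this dump does not specify, a row could be assembled into a training pair like this:

```rust
/// Hypothetical helper, not part of the dataset: turn one row into a
/// (prompt, target) pair. The `<fim_*>` sentinels are placeholders; a real
/// setup would use whatever special tokens its target model defines.
fn build_fim_pair(prefix: &str, middle: &str, suffix: &str) -> (String, String) {
    // The prompt shows prefix and suffix; the model is trained to emit `middle`.
    let prompt = format!("<fim_prefix>{prefix}<fim_suffix>{suffix}<fim_middle>");
    (prompt, middle.to_string())
}

fn main() {
    // Values taken from the second init.rs row (fim_type = identifier_name).
    let (prompt, target) = build_fim_pair(
        "pub fn ",
        "init",
        "(args: args::Args) -> Result<(), ()> {",
    );
    println!("prompt: {prompt}");
    println!("target: {target}");
}
```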
init.rs | use std::sync::{atomic, mpsc};
use std::{io, process, thread};
use ctrlc;
use crate::control::acio;
use crate::{args, control, disk, log, rpc, throttle, tracker};
use crate::{CONFIG, SHUTDOWN, THROT_TOKS};
pub fn init(args: args::Args) -> Result<(), ()> {
if let Some(level) = args.level {
log::log_init(level);
} else if cfg!(debug_assertions) {
log::log_init(log::LogLevel::Debug);
} else {
log::log_init(log::LogLevel::Info);
}
|
if let Err(e) = init_signals() {
error!("Failed to initialize signal handlers: {}", e);
return Err(());
}
Ok(())
}
pub fn run() -> Result<(), ()> {
match init_threads() {
Ok(threads) => {
for thread in threads {
if thread.join().is_err() {
error!("Unclean shutdown detected, terminating");
return Err(());
}
}
info!("Shutdown complete");
Ok(())
}
Err(e) => {
error!("Couldn't initialize synapse: {}", e);
Err(())
}
}
}
fn init_threads() -> io::Result<Vec<thread::JoinHandle<()>>> {
let cpoll = amy::Poller::new()?;
let mut creg = cpoll.get_registrar();
let (dh, disk_broadcast, dhj) = disk::start(&mut creg)?;
let (rh, rhj) = rpc::RPC::start(&mut creg, disk_broadcast.clone())?;
let (th, thj) = tracker::Tracker::start(&mut creg, disk_broadcast.clone())?;
let chans = acio::ACChans {
disk_tx: dh.tx,
disk_rx: dh.rx,
rpc_tx: rh.tx,
rpc_rx: rh.rx,
trk_tx: th.tx,
trk_rx: th.rx,
};
let (tx, rx) = mpsc::channel();
let cdb = disk_broadcast.clone();
let chj = thread::Builder::new()
.name("control".to_string())
.spawn(move || {
let throttler = throttle::Throttler::new(None, None, THROT_TOKS, &creg).unwrap();
let acio = acio::ACIO::new(cpoll, creg, chans).expect("Could not initialize IO");
match control::Control::new(acio, throttler, cdb) {
Ok(mut c) => {
tx.send(Ok(())).unwrap();
c.run();
}
Err(e) => {
tx.send(Err(e)).unwrap();
}
}
})
.unwrap();
rx.recv().unwrap()?;
Ok(vec![chj, dhj, rhj, thj])
}
fn init_signals() -> Result<(), ctrlc::Error> {
ctrlc::set_handler(move || {
if SHUTDOWN.load(atomic::Ordering::SeqCst) {
info!("Terminating process!");
process::abort();
} else {
info!("Shutting down cleanly. Interrupt again to shut down immediately.");
SHUTDOWN.store(true, atomic::Ordering::SeqCst);
}
})
} | info!("Initializing");
// Since the config is lazy loaded, dereference now to check it.
CONFIG.port; | random_line_split |
init.rs | use std::sync::{atomic, mpsc};
use std::{io, process, thread};
use ctrlc;
use crate::control::acio;
use crate::{args, control, disk, log, rpc, throttle, tracker};
use crate::{CONFIG, SHUTDOWN, THROT_TOKS};
pub fn | (args: args::Args) -> Result<(), ()> {
if let Some(level) = args.level {
log::log_init(level);
} else if cfg!(debug_assertions) {
log::log_init(log::LogLevel::Debug);
} else {
log::log_init(log::LogLevel::Info);
}
info!("Initializing");
// Since the config is lazy loaded, dereference now to check it.
CONFIG.port;
if let Err(e) = init_signals() {
error!("Failed to initialize signal handlers: {}", e);
return Err(());
}
Ok(())
}
pub fn run() -> Result<(), ()> {
match init_threads() {
Ok(threads) => {
for thread in threads {
if thread.join().is_err() {
error!("Unclean shutdown detected, terminating");
return Err(());
}
}
info!("Shutdown complete");
Ok(())
}
Err(e) => {
error!("Couldn't initialize synapse: {}", e);
Err(())
}
}
}
fn init_threads() -> io::Result<Vec<thread::JoinHandle<()>>> {
let cpoll = amy::Poller::new()?;
let mut creg = cpoll.get_registrar();
let (dh, disk_broadcast, dhj) = disk::start(&mut creg)?;
let (rh, rhj) = rpc::RPC::start(&mut creg, disk_broadcast.clone())?;
let (th, thj) = tracker::Tracker::start(&mut creg, disk_broadcast.clone())?;
let chans = acio::ACChans {
disk_tx: dh.tx,
disk_rx: dh.rx,
rpc_tx: rh.tx,
rpc_rx: rh.rx,
trk_tx: th.tx,
trk_rx: th.rx,
};
let (tx, rx) = mpsc::channel();
let cdb = disk_broadcast.clone();
let chj = thread::Builder::new()
.name("control".to_string())
.spawn(move || {
let throttler = throttle::Throttler::new(None, None, THROT_TOKS, &creg).unwrap();
let acio = acio::ACIO::new(cpoll, creg, chans).expect("Could not initialize IO");
match control::Control::new(acio, throttler, cdb) {
Ok(mut c) => {
tx.send(Ok(())).unwrap();
c.run();
}
Err(e) => {
tx.send(Err(e)).unwrap();
}
}
})
.unwrap();
rx.recv().unwrap()?;
Ok(vec![chj, dhj, rhj, thj])
}
fn init_signals() -> Result<(), ctrlc::Error> {
ctrlc::set_handler(move || {
if SHUTDOWN.load(atomic::Ordering::SeqCst) {
info!("Terminating process!");
process::abort();
} else {
info!("Shutting down cleanly. Interrupt again to shut down immediately.");
SHUTDOWN.store(true, atomic::Ordering::SeqCst);
}
})
}
| init | identifier_name |
text.rs | extern crate graphics;
use piston_window::{Transformed};
use widget::{Widget, State};
use appearance;
use layout;
use renderer::{self, geometry};
use graphics::character::CharacterCache;
#[derive(Default, Clone, Debug)]
pub struct Text{
pub text: &'static str,
}
impl Widget for Text{
fn layout(&self, cartographer: &mut layout::Cartographer, appearance: &appearance::Appearance) -> geometry::Xy {
let mut size = 20.0;
if let Some(ref font) = appearance.font {
size = font.size;
}
geometry::Xy{
x: cartographer.glyphs.width(size as u32, self.text),
y: size
}
}
fn render<'a>(&self, renderer: &mut renderer::Renderer, appearance: &appearance::Appearance, geometry: &geometry::Geometry, _state: &'a State) {
// Determine font y-position related to size...
// Unsure if magic numbers, specific to a font, or related to font system in rust.
// 400:(-94..<97.249>..-100.4980)
// 400 / 97.249 = 4.113152834
let mut size = 20.0;
let mut color = [0.0, 0.0, 0.0, 1.0];
if let Some(ref font) = appearance.font |
// TODO - Font's don't render very nicely, seems partly related to sub-pixel positioning
graphics::text(color,
size as u32,
self.text,
renderer.glyphs,
renderer.context.transform.trans(geometry.position.x, geometry.position.y + (size - (size / 4.113152834))),
renderer.graphics);
}
}
| {
size = font.size;
color = font.color;
} | conditional_block |
text.rs | extern crate graphics;
use piston_window::{Transformed};
use widget::{Widget, State};
use appearance;
use layout;
use renderer::{self, geometry};
use graphics::character::CharacterCache;
#[derive(Default, Clone, Debug)]
pub struct Text{
pub text: &'static str,
}
impl Widget for Text{
fn layout(&self, cartographer: &mut layout::Cartographer, appearance: &appearance::Appearance) -> geometry::Xy {
let mut size = 20.0;
if let Some(ref font) = appearance.font {
size = font.size;
}
geometry::Xy{
x: cartographer.glyphs.width(size as u32, self.text),
y: size
}
}
fn | <'a>(&self, renderer: &mut renderer::Renderer, appearance: &appearance::Appearance, geometry: &geometry::Geometry, _state: &'a State) {
// Determine font y-position related to size...
// Unsure if magic numbers, specific to a font, or related to font system in rust.
// 400:(-94..<97.249>..-100.4980)
// 400 / 97.249 = 4.113152834
let mut size = 20.0;
let mut color = [0.0, 0.0, 0.0, 1.0];
if let Some(ref font) = appearance.font {
size = font.size;
color = font.color;
}
// TODO - Font's don't render very nicely, seems partly related to sub-pixel positioning
graphics::text(color,
size as u32,
self.text,
renderer.glyphs,
renderer.context.transform.trans(geometry.position.x, geometry.position.y + (size - (size / 4.113152834))),
renderer.graphics);
}
}
| render | identifier_name |
text.rs | extern crate graphics;
use piston_window::{Transformed};
use widget::{Widget, State};
use appearance;
use layout;
use renderer::{self, geometry};
use graphics::character::CharacterCache;
#[derive(Default, Clone, Debug)]
pub struct Text{
pub text: &'static str, | impl Widget for Text{
fn layout(&self, cartographer: &mut layout::Cartographer, appearance: &appearance::Appearance) -> geometry::Xy {
let mut size = 20.0;
if let Some(ref font) = appearance.font {
size = font.size;
}
geometry::Xy{
x: cartographer.glyphs.width(size as u32, self.text),
y: size
}
}
fn render<'a>(&self, renderer: &mut renderer::Renderer, appearance: &appearance::Appearance, geometry: &geometry::Geometry, _state: &'a State) {
// Determine font y-position related to size...
// Unsure if magic numbers, specific to a font, or related to font system in rust.
// 400:(-94..<97.249>..-100.4980)
// 400 / 97.249 = 4.113152834
let mut size = 20.0;
let mut color = [0.0, 0.0, 0.0, 1.0];
if let Some(ref font) = appearance.font {
size = font.size;
color = font.color;
}
// TODO - Font's don't render very nicely, seems partly related to sub-pixel positioning
graphics::text(color,
size as u32,
self.text,
renderer.glyphs,
renderer.context.transform.trans(geometry.position.x, geometry.position.y + (size - (size / 4.113152834))),
renderer.graphics);
}
} | }
| random_line_split |
text.rs | extern crate graphics;
use piston_window::{Transformed};
use widget::{Widget, State};
use appearance;
use layout;
use renderer::{self, geometry};
use graphics::character::CharacterCache;
#[derive(Default, Clone, Debug)]
pub struct Text{
pub text: &'static str,
}
impl Widget for Text{
fn layout(&self, cartographer: &mut layout::Cartographer, appearance: &appearance::Appearance) -> geometry::Xy |
fn render<'a>(&self, renderer: &mut renderer::Renderer, appearance: &appearance::Appearance, geometry: &geometry::Geometry, _state: &'a State) {
// Determine font y-position related to size...
// Unsure if magic numbers, specific to a font, or related to font system in rust.
// 400:(-94..<97.249>..-100.4980)
// 400 / 97.249 = 4.113152834
let mut size = 20.0;
let mut color = [0.0, 0.0, 0.0, 1.0];
if let Some(ref font) = appearance.font {
size = font.size;
color = font.color;
}
// TODO - Font's don't render very nicely, seems partly related to sub-pixel positioning
graphics::text(color,
size as u32,
self.text,
renderer.glyphs,
renderer.context.transform.trans(geometry.position.x, geometry.position.y + (size - (size / 4.113152834))),
renderer.graphics);
}
}
| {
let mut size = 20.0;
if let Some(ref font) = appearance.font {
size = font.size;
}
geometry::Xy{
x: cartographer.glyphs.width(size as u32, self.text),
y: size
}
} | identifier_body |
issue-13853.rs | trait Node {
fn zomg();
}
trait Graph<N: Node> {
fn nodes<'a, I: Iterator<Item=&'a N>>(&'a self) -> I
where N: 'a;
}
impl<N: Node> Graph<N> for Vec<N> {
fn nodes<'a, I: Iterator<Item=&'a N>>(&self) -> I
where N: 'a
{
self.iter() //~ ERROR mismatched types
}
}
struct Stuff;
impl Node for Stuff {
fn zomg() {
println!("zomg");
}
}
fn | <N: Node, G: Graph<N>>(graph: &G) {
for node in graph.iter() { //~ ERROR no method named `iter` found
node.zomg();
}
}
pub fn main() {
let graph = Vec::new();
graph.push(Stuff);
iterate(graph); //~ ERROR mismatched types
}
| iterate | identifier_name |
issue-13853.rs | trait Node {
fn zomg();
}
trait Graph<N: Node> {
fn nodes<'a, I: Iterator<Item=&'a N>>(&'a self) -> I
where N: 'a; |
impl<N: Node> Graph<N> for Vec<N> {
fn nodes<'a, I: Iterator<Item=&'a N>>(&self) -> I
where N: 'a
{
self.iter() //~ ERROR mismatched types
}
}
struct Stuff;
impl Node for Stuff {
fn zomg() {
println!("zomg");
}
}
fn iterate<N: Node, G: Graph<N>>(graph: &G) {
for node in graph.iter() { //~ ERROR no method named `iter` found
node.zomg();
}
}
pub fn main() {
let graph = Vec::new();
graph.push(Stuff);
iterate(graph); //~ ERROR mismatched types
} | } | random_line_split |
issue-13853.rs | trait Node {
fn zomg();
}
trait Graph<N: Node> {
fn nodes<'a, I: Iterator<Item=&'a N>>(&'a self) -> I
where N: 'a;
}
impl<N: Node> Graph<N> for Vec<N> {
fn nodes<'a, I: Iterator<Item=&'a N>>(&self) -> I
where N: 'a
{
self.iter() //~ ERROR mismatched types
}
}
struct Stuff;
impl Node for Stuff {
fn zomg() |
}
fn iterate<N: Node, G: Graph<N>>(graph: &G) {
for node in graph.iter() { //~ ERROR no method named `iter` found
node.zomg();
}
}
pub fn main() {
let graph = Vec::new();
graph.push(Stuff);
iterate(graph); //~ ERROR mismatched types
}
| {
println!("zomg");
} | identifier_body |
pat.rs | extern crate std;
#[derive(Debug)]
pub struct ProgramAssociationTable {
pub table_id: u8,
pub transport_stream_id: u16,
pub version_number: u8,
pub current_next_indicator: bool,
pub section_number: u8,
pub last_section_number: u8,
pub program_map: std::collections::HashMap<u16, u16>,
pub crc32: u32,
}
impl ProgramAssociationTable {
pub fn parse(payload: &[u8]) -> Result<Self, super::psi::ParseError> {
// ISO/IEC 13818-1 2.4.4.1 Table 2-29
// ISO/IEC 13818-1 2.4.4.2
let pointer_field = payload[0] as usize;
let payload = &payload[(1 + pointer_field)..];
// ISO/IEC 13818-1 2.4.4.3 Table 2-30
// ISO/IEC 13818-1 2.4.4.4
let table_id = payload[0];
if table_id != 0x00 {
return Err(super::psi::ParseError::IncorrectTableId {
expected: 0x00,
actual: table_id,
});
}
// ISO/IEC 13818-1 2.4.4.5
let section_syntax_indicator = (payload[1] & 0b10000000) != 0;
if !section_syntax_indicator {
return Err(super::psi::ParseError::IncorrectSectionSyntaxIndicator);
}
let section_length = ((payload[1] & 0b00001111) as usize) << 8 | payload[2] as usize;
let transport_stream_id = ((payload[3] as u16) << 8) | payload[4] as u16;
let version_number = (payload[5] & 0b00111110) >> 1;
let current_next_indicator = (payload[5] & 0b00000001) != 0;
let section_number = payload[6];
let last_section_number = payload[7];
let n = (section_length - 5) / 4;
let mut program_map = std::collections::HashMap::new();
for i in 0..n {
let index = 8 + i * 4;
let program_number = (payload[index] as u16) << 8 | payload[index + 1] as u16;
let pid = ((payload[index + 2] & 0b00011111) as u16) << 8 | payload[index + 3] as u16;
if program_number == 0 {
// Network_PID
} else {
program_map.insert(pid, program_number);
}
}
let index = 8 + n * 4;
let crc32 = (payload[index] as u32) << 24 | (payload[index + 1] as u32) << 16 | | table_id: table_id,
transport_stream_id: transport_stream_id,
version_number: version_number,
current_next_indicator: current_next_indicator,
section_number: section_number,
last_section_number: last_section_number,
program_map: program_map,
crc32: crc32,
})
}
} | (payload[index + 2] as u32) << 8 |
payload[index + 3] as u32;
Ok(ProgramAssociationTable { | random_line_split |
pat.rs | extern crate std;
#[derive(Debug)]
pub struct ProgramAssociationTable {
pub table_id: u8,
pub transport_stream_id: u16,
pub version_number: u8,
pub current_next_indicator: bool,
pub section_number: u8,
pub last_section_number: u8,
pub program_map: std::collections::HashMap<u16, u16>,
pub crc32: u32,
}
impl ProgramAssociationTable {
pub fn parse(payload: &[u8]) -> Result<Self, super::psi::ParseError> | }
let section_length = ((payload[1] & 0b00001111) as usize) << 8 | payload[2] as usize;
let transport_stream_id = ((payload[3] as u16) << 8) | payload[4] as u16;
let version_number = (payload[5] & 0b00111110) >> 1;
let current_next_indicator = (payload[5] & 0b00000001) != 0;
let section_number = payload[6];
let last_section_number = payload[7];
let n = (section_length - 5) / 4;
let mut program_map = std::collections::HashMap::new();
for i in 0..n {
let index = 8 + i * 4;
let program_number = (payload[index] as u16) << 8 | payload[index + 1] as u16;
let pid = ((payload[index + 2] & 0b00011111) as u16) << 8 | payload[index + 3] as u16;
if program_number == 0 {
// Network_PID
} else {
program_map.insert(pid, program_number);
}
}
let index = 8 + n * 4;
let crc32 = (payload[index] as u32) << 24 | (payload[index + 1] as u32) << 16 |
(payload[index + 2] as u32) << 8 |
payload[index + 3] as u32;
Ok(ProgramAssociationTable {
table_id: table_id,
transport_stream_id: transport_stream_id,
version_number: version_number,
current_next_indicator: current_next_indicator,
section_number: section_number,
last_section_number: last_section_number,
program_map: program_map,
crc32: crc32,
})
}
}
| {
// ISO/IEC 13818-1 2.4.4.1 Table 2-29
// ISO/IEC 13818-1 2.4.4.2
let pointer_field = payload[0] as usize;
let payload = &payload[(1 + pointer_field)..];
// ISO/IEC 13818-1 2.4.4.3 Table 2-30
// ISO/IEC 13818-1 2.4.4.4
let table_id = payload[0];
if table_id != 0x00 {
return Err(super::psi::ParseError::IncorrectTableId {
expected: 0x00,
actual: table_id,
});
}
// ISO/IEC 13818-1 2.4.4.5
let section_syntax_indicator = (payload[1] & 0b10000000) != 0;
if !section_syntax_indicator {
return Err(super::psi::ParseError::IncorrectSectionSyntaxIndicator); | identifier_body |
pat.rs | extern crate std;
#[derive(Debug)]
pub struct | {
pub table_id: u8,
pub transport_stream_id: u16,
pub version_number: u8,
pub current_next_indicator: bool,
pub section_number: u8,
pub last_section_number: u8,
pub program_map: std::collections::HashMap<u16, u16>,
pub crc32: u32,
}
impl ProgramAssociationTable {
pub fn parse(payload: &[u8]) -> Result<Self, super::psi::ParseError> {
// ISO/IEC 13818-1 2.4.4.1 Table 2-29
// ISO/IEC 13818-1 2.4.4.2
let pointer_field = payload[0] as usize;
let payload = &payload[(1 + pointer_field)..];
// ISO/IEC 13818-1 2.4.4.3 Table 2-30
// ISO/IEC 13818-1 2.4.4.4
let table_id = payload[0];
if table_id != 0x00 {
return Err(super::psi::ParseError::IncorrectTableId {
expected: 0x00,
actual: table_id,
});
}
// ISO/IEC 13818-1 2.4.4.5
let section_syntax_indicator = (payload[1] & 0b10000000) != 0;
if !section_syntax_indicator {
return Err(super::psi::ParseError::IncorrectSectionSyntaxIndicator);
}
let section_length = ((payload[1] & 0b00001111) as usize) << 8 | payload[2] as usize;
let transport_stream_id = ((payload[3] as u16) << 8) | payload[4] as u16;
let version_number = (payload[5] & 0b00111110) >> 1;
let current_next_indicator = (payload[5] & 0b00000001) != 0;
let section_number = payload[6];
let last_section_number = payload[7];
let n = (section_length - 5) / 4;
let mut program_map = std::collections::HashMap::new();
for i in 0..n {
let index = 8 + i * 4;
let program_number = (payload[index] as u16) << 8 | payload[index + 1] as u16;
let pid = ((payload[index + 2] & 0b00011111) as u16) << 8 | payload[index + 3] as u16;
if program_number == 0 {
// Network_PID
} else {
program_map.insert(pid, program_number);
}
}
let index = 8 + n * 4;
let crc32 = (payload[index] as u32) << 24 | (payload[index + 1] as u32) << 16 |
(payload[index + 2] as u32) << 8 |
payload[index + 3] as u32;
Ok(ProgramAssociationTable {
table_id: table_id,
transport_stream_id: transport_stream_id,
version_number: version_number,
current_next_indicator: current_next_indicator,
section_number: section_number,
last_section_number: last_section_number,
program_map: program_map,
crc32: crc32,
})
}
}
| ProgramAssociationTable | identifier_name |
pat.rs | extern crate std;
#[derive(Debug)]
pub struct ProgramAssociationTable {
pub table_id: u8,
pub transport_stream_id: u16,
pub version_number: u8,
pub current_next_indicator: bool,
pub section_number: u8,
pub last_section_number: u8,
pub program_map: std::collections::HashMap<u16, u16>,
pub crc32: u32,
}
impl ProgramAssociationTable {
pub fn parse(payload: &[u8]) -> Result<Self, super::psi::ParseError> {
// ISO/IEC 13818-1 2.4.4.1 Table 2-29
// ISO/IEC 13818-1 2.4.4.2
let pointer_field = payload[0] as usize;
let payload = &payload[(1 + pointer_field)..];
// ISO/IEC 13818-1 2.4.4.3 Table 2-30
// ISO/IEC 13818-1 2.4.4.4
let table_id = payload[0];
if table_id != 0x00 {
return Err(super::psi::ParseError::IncorrectTableId {
expected: 0x00,
actual: table_id,
});
}
// ISO/IEC 13818-1 2.4.4.5
let section_syntax_indicator = (payload[1] & 0b10000000) != 0;
if !section_syntax_indicator {
return Err(super::psi::ParseError::IncorrectSectionSyntaxIndicator);
}
let section_length = ((payload[1] & 0b00001111) as usize) << 8 | payload[2] as usize;
let transport_stream_id = ((payload[3] as u16) << 8) | payload[4] as u16;
let version_number = (payload[5] & 0b00111110) >> 1;
let current_next_indicator = (payload[5] & 0b00000001) != 0;
let section_number = payload[6];
let last_section_number = payload[7];
let n = (section_length - 5) / 4;
let mut program_map = std::collections::HashMap::new();
for i in 0..n {
let index = 8 + i * 4;
let program_number = (payload[index] as u16) << 8 | payload[index + 1] as u16;
let pid = ((payload[index + 2] & 0b00011111) as u16) << 8 | payload[index + 3] as u16;
if program_number == 0 | else {
program_map.insert(pid, program_number);
}
}
let index = 8 + n * 4;
let crc32 = (payload[index] as u32) << 24 | (payload[index + 1] as u32) << 16 |
(payload[index + 2] as u32) << 8 |
payload[index + 3] as u32;
Ok(ProgramAssociationTable {
table_id: table_id,
transport_stream_id: transport_stream_id,
version_number: version_number,
current_next_indicator: current_next_indicator,
section_number: section_number,
last_section_number: last_section_number,
program_map: program_map,
crc32: crc32,
})
}
}
| {
// Network_PID
} | conditional_block |
lib.rs | #![feature(custom_derive, plugin)]
#![plugin(serde_macros)]
#![cfg_attr(test, plugin(stainless))]
extern crate chrono;
extern crate metrics_controller;
extern crate serde_json;
extern crate timer;
extern crate uuid;
extern crate time;
extern crate hyper;
#[allow(unused_imports)]
use std::env;
#[allow(unused_imports)]
use hyper::Client;
#[allow(unused_imports)]
use metrics_controller::MetricsController;
#[allow(unused_imports)]
use std::error::Error;
#[allow(unused_imports)]
use std::fs;
#[allow(unused_imports)]
use std::fs::File;
#[allow(unused_imports)]
use std::io::prelude::*;
#[allow(unused_imports)]
use std::path::Path;
#[allow(unused_imports)]
use std::thread;
#[cfg(feature = "integration")]
use metrics_controller::config::Config;
#[allow(unused_imports)]
use self::serde_json::Value;
#[allow(unused_imports)]
use self::uuid::Uuid;
#[allow(dead_code)]
const KEY_CID:&'static str = "cid";
#[cfg(feature = "integration")]
#[test]
fn test_thread_timer() {
// make sure we are starting with no files created.
delete_file("integration1.dat");
delete_file("cid.dat");
create_config("metricsconfig.json");
let mut controller = MetricsController::new(
"foxbox",
"1.0",
"default",
"rust",
"en-us",
"raspberry-pi",
"arm",
"linux",
"1.2.3.");
controller.start_metrics();
thread::sleep(std::time::Duration::from_secs(10));
controller.stop_collecting();
drop(controller);
// this sleep is needed for the file to get flushed out and saved to the
// disk properly. Otherwise, you get a file not found error.
thread::sleep(std::time::Duration::from_secs(2));
let path = Path::new("thread.dat");
let display = path.display();
// Open the path in read-only mode.
let mut file = match File::open(&path) {
Err(why) => panic!("couldn't open {}: {}", display,
Error::description(&why)),
Ok(file) => file,
};
// Read the file contents into a string, returns `io::Result<usize>`
let mut buffer = [0; 1];
match file.read_exact(&mut buffer) {
Err(why) => panic!("couldn't read {}: {}", display,
Error::description(&why)),
Ok(_) => println!("value is:{}", buffer[0]),
}
// The timer should have been called 5 times.
assert_eq!(buffer[0], 5);
// Now remove the file
match fs::remove_file("thread.dat") {
Err(why) => panic!("couldn't delete: {}", Error::description(&why)),
Ok(_) => println!("deleted"),
}
// Clean up any side effects of the test.
delete_file("integration1.dat");
delete_file("cid.dat");
}
#[cfg(feature = "integration")]
struct MockEventInfo<'a> {
pub app_name: &'a str,
pub app_version: &'a str,
pub app_update_channel: &'a str,
pub app_build_id: &'a str,
pub app_platform: &'a str,
pub locale: &'a str,
pub device: &'a str,
pub arch: &'a str,
pub os: &'a str,
pub os_version: &'a str
}
#[cfg(feature = "integration")]
fn get_event_info<'a>() -> MockEventInfo<'a> {
let ei = MockEventInfo {
app_name : "foxbox",
app_version : "1.0",
app_update_channel : "default",
app_build_id : "20160305",
app_platform : "rust",
locale : "en-us",
device : "raspberry-pi",
arch : "arm",
os : "linux",
os_version : "1.2.3.",
};
ei
}
#[cfg(feature = "integration")]
#[test]
fn test_cid_file_creation_and_proper_reuse() {
// make sure we are starting with no files created.
delete_file("integration1.dat");
delete_file("cid.dat");
let event_category = "event category";
let event_action = "event action";
let event_label = "event label";
let event_value = 999999;
let ei = get_event_info();
create_config("metricsconfig.json");
let mut metrics_controller = MetricsController::new(
ei.app_name, ei.app_version, ei.app_update_channel,
ei.app_platform, ei.locale, ei.device, ei.arch, ei.os,
ei.os_version);
metrics_controller.record_event(event_category, event_action, event_label, event_value);
let cid1 = read_client_id();
// This sleep is necessary there is no file system interactions.
thread::sleep(std::time::Duration::from_secs(3));
{
let mut metrics_controller2 = MetricsController::new(
ei.app_name, ei.app_version, ei.app_update_channel,
ei.app_platform, ei.locale, ei.device, ei.arch, ei.os,
ei.os_version);
metrics_controller2.record_event(event_category, event_action, event_label, event_value);
let cid2 = read_client_id();
// The same client id should be used for both metrics controllers on the same device.
assert_eq!(cid1, cid2);
drop(metrics_controller2);
}
// This sleep is necessary so the main thread does not exit.
thread::sleep(std::time::Duration::from_secs(20));
let expected_body = format!(
"v=1&t=event&tid=UA-77033033-1&cid={0}&ec=event%20category&ea=event%20action&el=event%20label&ev={1}\
&an={2}&av={3}&ul={4}&cd1={5}&cd2={6}&cd3={7}&cd4={8}&cd5={9}",
cid1, event_value, ei.app_name, ei.app_version, ei.locale, ei.os, ei.os_version,
ei.device, ei.arch, ei.app_platform
);
let path = Path::new("integration1.dat");
let display = path.display();
// Open the path in read-only mode.
let mut file = match File::open(&path) {
Err(why) => panic!("couldn't open {}: {}",
display, Error::description(&why)),
Ok(file) => file
};
// Read the file contents into a string, returns `io::Result<usize>`
let mut s = String::new();
match file.read_to_string(&mut s) {
Err(why) => panic!("couldn't read {}: {}",
display, Error::description(&why)),
Ok(_) => (),
}
let s_slice: &str = &s[..];
let e_slice: &str = &expected_body[..];
println!("s_slice: {}", s_slice);
println!("e_slice: {}", e_slice);
assert_eq!(s_slice.find(e_slice), Some(0));
// Clean up any side effects of the test.
delete_file("integration1.dat");
delete_file("cid.dat");
}
// If this test fails, make sure to run integration tests in serial,
// run |RUST_TEST_THREADS=1 cargo test --features integration|
#[cfg(feature = "integration")]
#[test]
fn test_max_body_size() {
// make sure we are starting with no files created.
delete_file("integration1.dat");
delete_file("cid.dat");
let event_category = "event category";
let event_action = "event action";
let event_label = "event label";
let event_value = 999999;
let ei = get_event_info();
create_config("metricsconfig.json");
let mut metrics_controller = MetricsController::new(
ei.app_name, ei.app_version, ei.app_update_channel,
ei.app_platform, ei.locale, ei.device, ei.arch, ei.os,
ei.os_version);
for _ in 0 .. 20 {
metrics_controller.record_event(event_category, event_action, event_label, event_value);
}
// This sleep is necessary so the main thread does not exit.
thread::sleep(std::time::Duration::from_secs(20));
let path = Path::new("integration1.dat");
let display = path.display();
// Open the path in read-only mode.
let mut file = match File::open(&path) {
Err(why) => panic!("couldn't open {}: {}", display,
Error::description(&why)),
Ok(file) => file
};
// Read the file contents into a string, returns `io::Result<usize>`
let mut s = String::new();
match file.read_to_string(&mut s) {
Err(why) => panic!("couldn't read {}: {}", display,
Error::description(&why)),
Ok(_) => (),
}
let s_slice: &str = &s[..];
let v: Vec<&str> = s_slice.split("UA-77033033-1").collect();
// 21 chunks since split on the property id.
assert_eq!(v.len(), 21);
// Clean up any side effects of the test.
delete_file("integration1.dat");
delete_file("cid.dat");
}
// This test is being ignored as it requires you to setup an environment variable
// called GOOGLE_ACCESS_TOKEN. The token can be obtained from the Metrics Explorer.
#[ignore]
#[cfg(feature = "integration")]
#[test]
fn test_google_analytics_received() {
let event_category = "test";
let event_action = "integration";
let event_label = &Uuid::new_v4().to_simple_string().to_string();
let event_value = 5;
let ei = get_event_info();
create_config("metricsconfig.json");
let mut metrics_controller = MetricsController::new(
ei.app_name, ei.app_version, ei.app_update_channel,
ei.app_platform, ei.locale, ei.device, ei.arch, ei.os,
ei.os_version);
// Test with the max payload number of events (20 hits can go in one POST request).
for _ in 0 .. 20 {
metrics_controller.record_event(event_category, event_action, event_label, event_value);
}
// This sleep is necessary so the main thread does not exit.
thread::sleep(std::time::Duration::from_secs(30));
// Read the environment variable for the Google Access Token... Obtain
// this from Query Explorer. It is good for an hour.
let access_token:String;
let key = "GOOGLE_ACCESS_TOKEN";
match env::var_os(key) {
Some(val) => {
access_token = val.to_str().unwrap().to_string();
println!("{}", access_token);
},
None => panic!("GOOGLE_ACCESS_TOKEN is not defined in the environment. \
Retrieve this value from the query explorer")
}
// Get the time so we can filter by it.
let ts = time::now();
let filter_time = format!("{0:4}-{1:02}-{2:02}", ts.tm_year + 1900, ts.tm_mon + 1, ts.tm_mday);
let report_url = format!("https://www.googleapis.com/analytics/v3/data/ga?ids=ga%3A121095747&\
start-date={0}&end-date={1}&metrics=ga%3AeventValue&\
dimensions=ga%3AeventCategory%2Cga%3AeventAction%2Cga%3AeventLabel&\
filters=ga%3AeventLabel%3D%3D{2}&access_token={3}",
filter_time, filter_time, event_label, access_token);
println!("REPORT URL: {}", report_url);
// This is set to success only when the eventValue matches what we sent above.
let mut success: bool = false;
// Loop 10 times to give the data time to be queryable by the reporting API.
// As an observation it seems to take about 2.5 minutes for the data to arrive.
for _ in 0 .. 10 {
thread::sleep(std::time::Duration::from_secs(30));
let client = Client::new();
let mut res = client.get(&report_url.to_string()).send().unwrap();
if hyper::status::StatusCode::Unauthorized == res.status {
println!("Access Token missing or expired. Set environment /
variable GOOGLE_ACCESS_TOKEN to access token");
}
// Read the Response Code.
assert_eq!(res.status, hyper::Ok);
let mut s = String::new();
res.read_to_string(&mut s).unwrap();
let data: Value = serde_json::from_str(&s).unwrap();
let obj = data.as_object().unwrap();
let val = obj.get(&"totalsForAllResults".to_string()).unwrap().clone();
match val {
Value::Object(v) => {
let event_val = v.get(&"ga:eventValue".to_string()).unwrap().clone();
match event_val {
Value::String(v) => {
println!("String is: {}", v);
// When the eventValue is 100, that means all 20 events
// have been processed by GA with a value of 5 (5*20=100).
// It make take a couple of iterations for this to reach 100.
if v == "100".to_string() {
println!("success");
success = true;
break;
}
},
_ => panic!("Sth else"),
}
},
_ => panic!("Expected an object"),
}
println!("RESPONSE: {}", s);
}
// Clean up any side effects of the test.
delete_file("integration1.dat");
delete_file("cid.dat");
assert_eq!(success, true);
}
#[cfg(feature = "integration")]
fn | () -> String {
let mut cid = String::new();
let mut cfg = Config::new();
if cfg.init("cid.dat") {
let val: Option<Value> = cfg.get(KEY_CID);
match val {
Some(_) => cid.push_str(&cfg.get_string(KEY_CID).to_string()),
None => panic!("Error: no cid written")
}
} else {
panic!("Failed. no cid created.");
}
cid
}
#[cfg(feature = "integration")]
fn delete_file(file_name: &str) {
match fs::remove_file(file_name) {
Err(why) => println!("couldn't delete: {}", Error::description(&why)),
Ok(_) => println!("deleted"),
}
}
#[cfg(feature = "integration")]
fn create_config(file_name: &str) {
let json = "{\"sendInterval\": 10, \"saveInterval\": 2, \"analytics\": \"UA-77033033-1\"}";
let mut cfg = Config::new();
cfg.create_and_write_json(file_name, json);
}
| read_client_id | identifier_name |
lib.rs | #![feature(custom_derive, plugin)]
#![plugin(serde_macros)]
#![cfg_attr(test, plugin(stainless))]
extern crate chrono;
extern crate metrics_controller;
extern crate serde_json;
extern crate timer;
extern crate uuid;
extern crate time;
extern crate hyper;
#[allow(unused_imports)]
use std::env;
#[allow(unused_imports)]
use hyper::Client;
#[allow(unused_imports)]
use metrics_controller::MetricsController;
#[allow(unused_imports)]
use std::error::Error;
#[allow(unused_imports)]
use std::fs;
#[allow(unused_imports)]
use std::fs::File;
#[allow(unused_imports)]
use std::io::prelude::*;
#[allow(unused_imports)]
use std::path::Path;
#[allow(unused_imports)]
use std::thread;
#[cfg(feature = "integration")]
use metrics_controller::config::Config;
#[allow(unused_imports)]
use self::serde_json::Value;
#[allow(unused_imports)]
use self::uuid::Uuid;
#[allow(dead_code)]
const KEY_CID:&'static str = "cid";
#[cfg(feature = "integration")]
#[test]
fn test_thread_timer() {
// make sure we are starting with no files created.
delete_file("integration1.dat");
delete_file("cid.dat");
create_config("metricsconfig.json");
let mut controller = MetricsController::new(
"foxbox",
"1.0",
"default",
"rust",
"en-us",
"raspberry-pi",
"arm",
"linux",
"1.2.3.");
controller.start_metrics();
thread::sleep(std::time::Duration::from_secs(10));
controller.stop_collecting();
drop(controller);
// this sleep is needed for the file to get flushed out and saved to the
// disk properly. Otherwise, you get a file not found error.
thread::sleep(std::time::Duration::from_secs(2));
let path = Path::new("thread.dat");
let display = path.display();
// Open the path in read-only mode.
let mut file = match File::open(&path) {
Err(why) => panic!("couldn't open {}: {}", display,
Error::description(&why)),
Ok(file) => file,
};
// Read the file contents into a string, returns `io::Result<usize>`
let mut buffer = [0; 1];
match file.read_exact(&mut buffer) {
Err(why) => panic!("couldn't read {}: {}", display,
Error::description(&why)),
Ok(_) => println!("value is:{}", buffer[0]),
}
// The timer should have been called 5 times.
assert_eq!(buffer[0], 5);
// Now remove the file
match fs::remove_file("thread.dat") {
Err(why) => panic!("couldn't delete: {}", Error::description(&why)),
Ok(_) => println!("deleted"),
}
// Clean up any side effects of the test.
delete_file("integration1.dat");
delete_file("cid.dat");
}
#[cfg(feature = "integration")]
struct MockEventInfo<'a> {
pub app_name: &'a str,
pub app_version: &'a str,
pub app_update_channel: &'a str,
pub app_build_id: &'a str,
pub app_platform: &'a str,
pub locale: &'a str,
pub device: &'a str,
pub arch: &'a str,
pub os: &'a str,
pub os_version: &'a str
}
#[cfg(feature = "integration")]
fn get_event_info<'a>() -> MockEventInfo<'a> {
let ei = MockEventInfo {
app_name : "foxbox",
app_version : "1.0",
app_update_channel : "default",
app_build_id : "20160305",
app_platform : "rust",
locale : "en-us",
device : "raspberry-pi",
arch : "arm",
os : "linux",
os_version : "1.2.3.",
};
ei
}
#[cfg(feature = "integration")]
#[test]
fn test_cid_file_creation_and_proper_reuse() {
// make sure we are starting with no files created.
delete_file("integration1.dat");
delete_file("cid.dat");
let event_category = "event category";
let event_action = "event action";
let event_label = "event label";
let event_value = 999999;
let ei = get_event_info();
create_config("metricsconfig.json");
let mut metrics_controller = MetricsController::new(
ei.app_name, ei.app_version, ei.app_update_channel,
ei.app_platform, ei.locale, ei.device, ei.arch, ei.os,
ei.os_version);
metrics_controller.record_event(event_category, event_action, event_label, event_value);
let cid1 = read_client_id();
// This sleep is necessary there is no file system interactions.
thread::sleep(std::time::Duration::from_secs(3));
{
let mut metrics_controller2 = MetricsController::new(
ei.app_name, ei.app_version, ei.app_update_channel,
ei.app_platform, ei.locale, ei.device, ei.arch, ei.os,
ei.os_version);
metrics_controller2.record_event(event_category, event_action, event_label, event_value);
let cid2 = read_client_id();
// The same client id should be used for both metrics controllers on the same device.
assert_eq!(cid1, cid2);
drop(metrics_controller2);
}
// This sleep is necessary so the main thread does not exit.
thread::sleep(std::time::Duration::from_secs(20));
let expected_body = format!(
"v=1&t=event&tid=UA-77033033-1&cid={0}&ec=event%20category&ea=event%20action&el=event%20label&ev={1}\
&an={2}&av={3}&ul={4}&cd1={5}&cd2={6}&cd3={7}&cd4={8}&cd5={9}",
cid1, event_value, ei.app_name, ei.app_version, ei.locale, ei.os, ei.os_version,
ei.device, ei.arch, ei.app_platform
);
let path = Path::new("integration1.dat");
let display = path.display();
// Open the path in read-only mode.
let mut file = match File::open(&path) {
Err(why) => panic!("couldn't open {}: {}",
display, Error::description(&why)),
Ok(file) => file
};
// Read the file contents into a string, returns `io::Result<usize>`
let mut s = String::new();
match file.read_to_string(&mut s) {
Err(why) => panic!("couldn't read {}: {}",
display, Error::description(&why)),
Ok(_) => (),
}
let s_slice: &str = &s[..];
let e_slice: &str = &expected_body[..];
println!("s_slice: {}", s_slice);
println!("e_slice: {}", e_slice);
assert_eq!(s_slice.find(e_slice), Some(0));
// Clean up any side effects of the test.
delete_file("integration1.dat");
delete_file("cid.dat");
}
// If this test fails, make sure to run integration tests in serial,
// run |RUST_TEST_THREADS=1 cargo test --features integration|
#[cfg(feature = "integration")]
#[test]
fn test_max_body_size() {
// make sure we are starting with no files created.
delete_file("integration1.dat");
delete_file("cid.dat");
let event_category = "event category";
let event_action = "event action";
let event_label = "event label";
let event_value = 999999;
let ei = get_event_info();
create_config("metricsconfig.json");
let mut metrics_controller = MetricsController::new(
ei.app_name, ei.app_version, ei.app_update_channel,
ei.app_platform, ei.locale, ei.device, ei.arch, ei.os,
ei.os_version);
for _ in 0 .. 20 {
metrics_controller.record_event(event_category, event_action, event_label, event_value);
}
// This sleep is necessary so the main thread does not exit.
thread::sleep(std::time::Duration::from_secs(20));
let path = Path::new("integration1.dat");
let display = path.display();
// Open the path in read-only mode.
let mut file = match File::open(&path) {
Err(why) => panic!("couldn't open {}: {}", display,
Error::description(&why)),
Ok(file) => file
};
// Read the file contents into a string, returns `io::Result<usize>`
let mut s = String::new();
match file.read_to_string(&mut s) {
Err(why) => panic!("couldn't read {}: {}", display,
Error::description(&why)),
Ok(_) => (),
}
let s_slice: &str = &s[..];
let v: Vec<&str> = s_slice.split("UA-77033033-1").collect();
// 21 chunks since split on the property id.
assert_eq!(v.len(), 21);
// Clean up any side effects of the test.
delete_file("integration1.dat");
delete_file("cid.dat");
}
// This test is being ignored as it requires you to setup an environment variable
// called GOOGLE_ACCESS_TOKEN. The token can be obtained from the Metrics Explorer.
#[ignore]
#[cfg(feature = "integration")]
#[test]
fn test_google_analytics_received() |
// Read the environment variable for the Google Access Token... Obtain
// this from Query Explorer. It is good for an hour.
let access_token:String;
let key = "GOOGLE_ACCESS_TOKEN";
match env::var_os(key) {
Some(val) => {
access_token = val.to_str().unwrap().to_string();
println!("{}", access_token);
},
None => panic!("GOOGLE_ACCESS_TOKEN is not defined in the environment. \
Retrieve this value from the query explorer")
}
// Get the time so we can filter by it.
let ts = time::now();
let filter_time = format!("{0:4}-{1:02}-{2:02}", ts.tm_year + 1900, ts.tm_mon + 1, ts.tm_mday);
let report_url = format!("https://www.googleapis.com/analytics/v3/data/ga?ids=ga%3A121095747&\
start-date={0}&end-date={1}&metrics=ga%3AeventValue&\
dimensions=ga%3AeventCategory%2Cga%3AeventAction%2Cga%3AeventLabel&\
filters=ga%3AeventLabel%3D%3D{2}&access_token={3}",
filter_time, filter_time, event_label, access_token);
println!("REPORT URL: {}", report_url);
// This is set to success only when the eventValue matches what we sent above.
let mut success: bool = false;
// Loop 10 times to give the data time to be queryable by the reporting API.
// As an observation it seems to take about 2.5 minutes for the data to arrive.
for _ in 0 .. 10 {
thread::sleep(std::time::Duration::from_secs(30));
let client = Client::new();
let mut res = client.get(&report_url.to_string()).send().unwrap();
if hyper::status::StatusCode::Unauthorized == res.status {
println!("Access Token missing or expired. Set environment /
variable GOOGLE_ACCESS_TOKEN to access token");
}
// Read the Response Code.
assert_eq!(res.status, hyper::Ok);
let mut s = String::new();
res.read_to_string(&mut s).unwrap();
let data: Value = serde_json::from_str(&s).unwrap();
let obj = data.as_object().unwrap();
let val = obj.get(&"totalsForAllResults".to_string()).unwrap().clone();
match val {
Value::Object(v) => {
let event_val = v.get(&"ga:eventValue".to_string()).unwrap().clone();
match event_val {
Value::String(v) => {
println!("String is: {}", v);
// When the eventValue is 100, that means all 20 events
// have been processed by GA with a value of 5 (5*20=100).
// It make take a couple of iterations for this to reach 100.
if v == "100".to_string() {
println!("success");
success = true;
break;
}
},
_ => panic!("Sth else"),
}
},
_ => panic!("Expected an object"),
}
println!("RESPONSE: {}", s);
}
// Clean up any side effects of the test.
delete_file("integration1.dat");
delete_file("cid.dat");
assert_eq!(success, true);
}
#[cfg(feature = "integration")]
fn read_client_id() -> String {
let mut cid = String::new();
let mut cfg = Config::new();
if cfg.init("cid.dat") {
let val: Option<Value> = cfg.get(KEY_CID);
match val {
Some(_) => cid.push_str(&cfg.get_string(KEY_CID).to_string()),
None => panic!("Error: no cid written")
}
} else {
panic!("Failed. no cid created.");
}
cid
}
#[cfg(feature = "integration")]
fn delete_file(file_name: &str) {
match fs::remove_file(file_name) {
Err(why) => println!("couldn't delete: {}", Error::description(&why)),
Ok(_) => println!("deleted"),
}
}
#[cfg(feature = "integration")]
fn create_config(file_name: &str) {
let json = "{\"sendInterval\": 10, \"saveInterval\": 2, \"analytics\": \"UA-77033033-1\"}";
let mut cfg = Config::new();
cfg.create_and_write_json(file_name, json);
}
| {
let event_category = "test";
let event_action = "integration";
let event_label = &Uuid::new_v4().to_simple_string().to_string();
let event_value = 5;
let ei = get_event_info();
create_config("metricsconfig.json");
let mut metrics_controller = MetricsController::new(
ei.app_name, ei.app_version, ei.app_update_channel,
ei.app_platform, ei.locale, ei.device, ei.arch, ei.os,
ei.os_version);
// Test with the max payload number of events (20 hits can go in one POST request).
for _ in 0 .. 20 {
metrics_controller.record_event(event_category, event_action, event_label, event_value);
}
// This sleep is necessary so the main thread does not exit.
thread::sleep(std::time::Duration::from_secs(30)); | identifier_body |
lib.rs | #![feature(custom_derive, plugin)]
#![plugin(serde_macros)]
#![cfg_attr(test, plugin(stainless))]
extern crate chrono;
extern crate metrics_controller;
extern crate serde_json;
extern crate timer;
extern crate uuid;
extern crate time;
extern crate hyper;
#[allow(unused_imports)]
use std::env;
#[allow(unused_imports)]
use hyper::Client;
#[allow(unused_imports)]
use metrics_controller::MetricsController;
#[allow(unused_imports)]
use std::error::Error;
#[allow(unused_imports)]
use std::fs;
#[allow(unused_imports)]
use std::fs::File;
#[allow(unused_imports)]
use std::io::prelude::*;
#[allow(unused_imports)]
use std::path::Path;
#[allow(unused_imports)]
use std::thread;
#[cfg(feature = "integration")]
use metrics_controller::config::Config;
#[allow(unused_imports)]
use self::serde_json::Value;
#[allow(unused_imports)]
use self::uuid::Uuid;
#[allow(dead_code)]
const KEY_CID:&'static str = "cid";
#[cfg(feature = "integration")]
#[test]
fn test_thread_timer() {
// make sure we are starting with no files created.
delete_file("integration1.dat");
delete_file("cid.dat");
create_config("metricsconfig.json");
let mut controller = MetricsController::new(
"foxbox",
"1.0",
"default",
"rust",
"en-us",
"raspberry-pi",
"arm",
"linux",
"1.2.3.");
controller.start_metrics();
thread::sleep(std::time::Duration::from_secs(10));
controller.stop_collecting();
drop(controller);
// this sleep is needed for the file to get flushed out and saved to the
// disk properly. Otherwise, you get a file not found error.
thread::sleep(std::time::Duration::from_secs(2));
let path = Path::new("thread.dat");
let display = path.display();
// Open the path in read-only mode.
let mut file = match File::open(&path) {
Err(why) => panic!("couldn't open {}: {}", display,
Error::description(&why)),
Ok(file) => file,
};
// Read the file contents into a string, returns `io::Result<usize>`
let mut buffer = [0; 1];
match file.read_exact(&mut buffer) {
Err(why) => panic!("couldn't read {}: {}", display,
Error::description(&why)),
Ok(_) => println!("value is:{}", buffer[0]),
}
// The timer should have been called 5 times.
assert_eq!(buffer[0], 5);
// Now remove the file
match fs::remove_file("thread.dat") {
Err(why) => panic!("couldn't delete: {}", Error::description(&why)),
Ok(_) => println!("deleted"),
}
// Clean up any side effects of the test.
delete_file("integration1.dat");
delete_file("cid.dat");
} | pub app_name: &'a str,
pub app_version: &'a str,
pub app_update_channel: &'a str,
pub app_build_id: &'a str,
pub app_platform: &'a str,
pub locale: &'a str,
pub device: &'a str,
pub arch: &'a str,
pub os: &'a str,
pub os_version: &'a str
}
#[cfg(feature = "integration")]
fn get_event_info<'a>() -> MockEventInfo<'a> {
let ei = MockEventInfo {
app_name : "foxbox",
app_version : "1.0",
app_update_channel : "default",
app_build_id : "20160305",
app_platform : "rust",
locale : "en-us",
device : "raspberry-pi",
arch : "arm",
os : "linux",
os_version : "1.2.3.",
};
ei
}
#[cfg(feature = "integration")]
#[test]
fn test_cid_file_creation_and_proper_reuse() {
// make sure we are starting with no files created.
delete_file("integration1.dat");
delete_file("cid.dat");
let event_category = "event category";
let event_action = "event action";
let event_label = "event label";
let event_value = 999999;
let ei = get_event_info();
create_config("metricsconfig.json");
let mut metrics_controller = MetricsController::new(
ei.app_name, ei.app_version, ei.app_update_channel,
ei.app_platform, ei.locale, ei.device, ei.arch, ei.os,
ei.os_version);
metrics_controller.record_event(event_category, event_action, event_label, event_value);
let cid1 = read_client_id();
// This sleep is necessary there is no file system interactions.
thread::sleep(std::time::Duration::from_secs(3));
{
let mut metrics_controller2 = MetricsController::new(
ei.app_name, ei.app_version, ei.app_update_channel,
ei.app_platform, ei.locale, ei.device, ei.arch, ei.os,
ei.os_version);
metrics_controller2.record_event(event_category, event_action, event_label, event_value);
let cid2 = read_client_id();
// The same client id should be used for both metrics controllers on the same device.
assert_eq!(cid1, cid2);
drop(metrics_controller2);
}
// This sleep is necessary so the main thread does not exit.
thread::sleep(std::time::Duration::from_secs(20));
let expected_body = format!(
"v=1&t=event&tid=UA-77033033-1&cid={0}&ec=event%20category&ea=event%20action&el=event%20label&ev={1}\
&an={2}&av={3}&ul={4}&cd1={5}&cd2={6}&cd3={7}&cd4={8}&cd5={9}",
cid1, event_value, ei.app_name, ei.app_version, ei.locale, ei.os, ei.os_version,
ei.device, ei.arch, ei.app_platform
);
let path = Path::new("integration1.dat");
let display = path.display();
// Open the path in read-only mode.
let mut file = match File::open(&path) {
Err(why) => panic!("couldn't open {}: {}",
display, Error::description(&why)),
Ok(file) => file
};
// Read the file contents into a string, returns `io::Result<usize>`
let mut s = String::new();
match file.read_to_string(&mut s) {
Err(why) => panic!("couldn't read {}: {}",
display, Error::description(&why)),
Ok(_) => (),
}
let s_slice: &str = &s[..];
let e_slice: &str = &expected_body[..];
println!("s_slice: {}", s_slice);
println!("e_slice: {}", e_slice);
assert_eq!(s_slice.find(e_slice), Some(0));
// Clean up any side effects of the test.
delete_file("integration1.dat");
delete_file("cid.dat");
}
// If this test fails, make sure to run integration tests in serial,
// run |RUST_TEST_THREADS=1 cargo test --features integration|
#[cfg(feature = "integration")]
#[test]
fn test_max_body_size() {
// make sure we are starting with no files created.
delete_file("integration1.dat");
delete_file("cid.dat");
let event_category = "event category";
let event_action = "event action";
let event_label = "event label";
let event_value = 999999;
let ei = get_event_info();
create_config("metricsconfig.json");
let mut metrics_controller = MetricsController::new(
ei.app_name, ei.app_version, ei.app_update_channel,
ei.app_platform, ei.locale, ei.device, ei.arch, ei.os,
ei.os_version);
for _ in 0 .. 20 {
metrics_controller.record_event(event_category, event_action, event_label, event_value);
}
// This sleep is necessary so the main thread does not exit.
thread::sleep(std::time::Duration::from_secs(20));
let path = Path::new("integration1.dat");
let display = path.display();
// Open the path in read-only mode.
let mut file = match File::open(&path) {
Err(why) => panic!("couldn't open {}: {}", display,
Error::description(&why)),
Ok(file) => file
};
// Read the file contents into a string, returns `io::Result<usize>`
let mut s = String::new();
match file.read_to_string(&mut s) {
Err(why) => panic!("couldn't read {}: {}", display,
Error::description(&why)),
Ok(_) => (),
}
let s_slice: &str = &s[..];
let v: Vec<&str> = s_slice.split("UA-77033033-1").collect();
// 21 chunks since split on the property id.
assert_eq!(v.len(), 21);
// Clean up any side effects of the test.
delete_file("integration1.dat");
delete_file("cid.dat");
}
// This test is being ignored as it requires you to setup an environment variable
// called GOOGLE_ACCESS_TOKEN. The token can be obtained from the Metrics Explorer.
#[ignore]
#[cfg(feature = "integration")]
#[test]
fn test_google_analytics_received() {
let event_category = "test";
let event_action = "integration";
let event_label = &Uuid::new_v4().to_simple_string().to_string();
let event_value = 5;
let ei = get_event_info();
create_config("metricsconfig.json");
let mut metrics_controller = MetricsController::new(
ei.app_name, ei.app_version, ei.app_update_channel,
ei.app_platform, ei.locale, ei.device, ei.arch, ei.os,
ei.os_version);
// Test with the max payload number of events (20 hits can go in one POST request).
for _ in 0 .. 20 {
metrics_controller.record_event(event_category, event_action, event_label, event_value);
}
// This sleep is necessary so the main thread does not exit.
thread::sleep(std::time::Duration::from_secs(30));
// Read the environment variable for the Google Access Token... Obtain
// this from Query Explorer. It is good for an hour.
let access_token:String;
let key = "GOOGLE_ACCESS_TOKEN";
match env::var_os(key) {
Some(val) => {
access_token = val.to_str().unwrap().to_string();
println!("{}", access_token);
},
None => panic!("GOOGLE_ACCESS_TOKEN is not defined in the environment. \
Retrieve this value from the query explorer")
}
// Get the time so we can filter by it.
let ts = time::now();
let filter_time = format!("{0:4}-{1:02}-{2:02}", ts.tm_year + 1900, ts.tm_mon + 1, ts.tm_mday);
let report_url = format!("https://www.googleapis.com/analytics/v3/data/ga?ids=ga%3A121095747&\
start-date={0}&end-date={1}&metrics=ga%3AeventValue&\
dimensions=ga%3AeventCategory%2Cga%3AeventAction%2Cga%3AeventLabel&\
filters=ga%3AeventLabel%3D%3D{2}&access_token={3}",
filter_time, filter_time, event_label, access_token);
println!("REPORT URL: {}", report_url);
// This is set to success only when the eventValue matches what we sent above.
let mut success: bool = false;
// Loop 10 times to give the data time to be queryable by the reporting API.
// As an observation it seems to take about 2.5 minutes for the data to arrive.
for _ in 0 .. 10 {
thread::sleep(std::time::Duration::from_secs(30));
let client = Client::new();
let mut res = client.get(&report_url.to_string()).send().unwrap();
if hyper::status::StatusCode::Unauthorized == res.status {
println!("Access Token missing or expired. Set environment /
variable GOOGLE_ACCESS_TOKEN to access token");
}
// Read the Response Code.
assert_eq!(res.status, hyper::Ok);
let mut s = String::new();
res.read_to_string(&mut s).unwrap();
let data: Value = serde_json::from_str(&s).unwrap();
let obj = data.as_object().unwrap();
let val = obj.get(&"totalsForAllResults".to_string()).unwrap().clone();
match val {
Value::Object(v) => {
let event_val = v.get(&"ga:eventValue".to_string()).unwrap().clone();
match event_val {
Value::String(v) => {
println!("String is: {}", v);
// When the eventValue is 100, that means all 20 events
// have been processed by GA with a value of 5 (5*20=100).
// It make take a couple of iterations for this to reach 100.
if v == "100".to_string() {
println!("success");
success = true;
break;
}
},
_ => panic!("Sth else"),
}
},
_ => panic!("Expected an object"),
}
println!("RESPONSE: {}", s);
}
// Clean up any side effects of the test.
delete_file("integration1.dat");
delete_file("cid.dat");
assert_eq!(success, true);
}
#[cfg(feature = "integration")]
fn read_client_id() -> String {
let mut cid = String::new();
let mut cfg = Config::new();
if cfg.init("cid.dat") {
let val: Option<Value> = cfg.get(KEY_CID);
match val {
Some(_) => cid.push_str(&cfg.get_string(KEY_CID).to_string()),
None => panic!("Error: no cid written")
}
} else {
panic!("Failed. no cid created.");
}
cid
}
#[cfg(feature = "integration")]
fn delete_file(file_name: &str) {
match fs::remove_file(file_name) {
Err(why) => println!("couldn't delete: {}", Error::description(&why)),
Ok(_) => println!("deleted"),
}
}
#[cfg(feature = "integration")]
fn create_config(file_name: &str) {
let json = "{\"sendInterval\": 10, \"saveInterval\": 2, \"analytics\": \"UA-77033033-1\"}";
let mut cfg = Config::new();
cfg.create_and_write_json(file_name, json);
} |
#[cfg(feature = "integration")]
struct MockEventInfo<'a> { | random_line_split |
hex.rs | #![allow(deprecated)]
use std::hash::{Hasher, Hash, SipHasher};
use rustc_serialize::hex::ToHex;
pub fn to_hex(num: u64) -> String { | (num >> 32) as u8,
(num >> 40) as u8,
(num >> 48) as u8,
(num >> 56) as u8,
].to_hex()
}
pub fn hash_u64<H: Hash>(hashable: &H) -> u64 {
let mut hasher = SipHasher::new_with_keys(0, 0);
hashable.hash(&mut hasher);
hasher.finish()
}
pub fn short_hash<H: Hash>(hashable: &H) -> String {
to_hex(hash_u64(hashable))
} | [
(num >> 0) as u8,
(num >> 8) as u8,
(num >> 16) as u8,
(num >> 24) as u8, | random_line_split |
hex.rs | #![allow(deprecated)]
use std::hash::{Hasher, Hash, SipHasher};
use rustc_serialize::hex::ToHex;
pub fn to_hex(num: u64) -> String {
[
(num >> 0) as u8,
(num >> 8) as u8,
(num >> 16) as u8,
(num >> 24) as u8,
(num >> 32) as u8,
(num >> 40) as u8,
(num >> 48) as u8,
(num >> 56) as u8,
].to_hex()
}
pub fn hash_u64<H: Hash>(hashable: &H) -> u64 {
let mut hasher = SipHasher::new_with_keys(0, 0);
hashable.hash(&mut hasher);
hasher.finish()
}
pub fn | <H: Hash>(hashable: &H) -> String {
to_hex(hash_u64(hashable))
}
| short_hash | identifier_name |
hex.rs | #![allow(deprecated)]
use std::hash::{Hasher, Hash, SipHasher};
use rustc_serialize::hex::ToHex;
pub fn to_hex(num: u64) -> String |
pub fn hash_u64<H: Hash>(hashable: &H) -> u64 {
let mut hasher = SipHasher::new_with_keys(0, 0);
hashable.hash(&mut hasher);
hasher.finish()
}
pub fn short_hash<H: Hash>(hashable: &H) -> String {
to_hex(hash_u64(hashable))
}
| {
[
(num >> 0) as u8,
(num >> 8) as u8,
(num >> 16) as u8,
(num >> 24) as u8,
(num >> 32) as u8,
(num >> 40) as u8,
(num >> 48) as u8,
(num >> 56) as u8,
].to_hex()
} | identifier_body |
stackvec.rs | use minimal_lexical::bigint;
#[cfg(feature = "alloc")]
pub use minimal_lexical::heapvec::HeapVec as VecType;
#[cfg(not(feature = "alloc"))]
pub use minimal_lexical::stackvec::StackVec as VecType;
pub fn vec_from_u32(x: &[u32]) -> VecType {
let mut vec = VecType::new();
#[cfg(not(all(target_pointer_width = "64", not(target_arch = "sparc"))))]
{
for &xi in x {
vec.try_push(xi as bigint::Limb).unwrap();
}
}
#[cfg(all(target_pointer_width = "64", not(target_arch = "sparc")))] | {
for xi in x.chunks(2) {
match xi.len() {
1 => vec.try_push(xi[0] as bigint::Limb).unwrap(),
2 => {
let xi0 = xi[0] as bigint::Limb;
let xi1 = xi[1] as bigint::Limb;
vec.try_push((xi1 << 32) | xi0).unwrap()
},
_ => unreachable!(),
}
}
}
vec
} | random_line_split |
|
stackvec.rs | use minimal_lexical::bigint;
#[cfg(feature = "alloc")]
pub use minimal_lexical::heapvec::HeapVec as VecType;
#[cfg(not(feature = "alloc"))]
pub use minimal_lexical::stackvec::StackVec as VecType;
pub fn vec_from_u32(x: &[u32]) -> VecType | }
}
}
vec
}
| {
let mut vec = VecType::new();
#[cfg(not(all(target_pointer_width = "64", not(target_arch = "sparc"))))]
{
for &xi in x {
vec.try_push(xi as bigint::Limb).unwrap();
}
}
#[cfg(all(target_pointer_width = "64", not(target_arch = "sparc")))]
{
for xi in x.chunks(2) {
match xi.len() {
1 => vec.try_push(xi[0] as bigint::Limb).unwrap(),
2 => {
let xi0 = xi[0] as bigint::Limb;
let xi1 = xi[1] as bigint::Limb;
vec.try_push((xi1 << 32) | xi0).unwrap()
},
_ => unreachable!(), | identifier_body |
stackvec.rs | use minimal_lexical::bigint;
#[cfg(feature = "alloc")]
pub use minimal_lexical::heapvec::HeapVec as VecType;
#[cfg(not(feature = "alloc"))]
pub use minimal_lexical::stackvec::StackVec as VecType;
pub fn | (x: &[u32]) -> VecType {
let mut vec = VecType::new();
#[cfg(not(all(target_pointer_width = "64", not(target_arch = "sparc"))))]
{
for &xi in x {
vec.try_push(xi as bigint::Limb).unwrap();
}
}
#[cfg(all(target_pointer_width = "64", not(target_arch = "sparc")))]
{
for xi in x.chunks(2) {
match xi.len() {
1 => vec.try_push(xi[0] as bigint::Limb).unwrap(),
2 => {
let xi0 = xi[0] as bigint::Limb;
let xi1 = xi[1] as bigint::Limb;
vec.try_push((xi1 << 32) | xi0).unwrap()
},
_ => unreachable!(),
}
}
}
vec
}
| vec_from_u32 | identifier_name |
build.rs | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// This build script is used to generate the rust source files that
// we need for XDS GRPC communication.
fn main() -> Result<(), Box<dyn std::error::Error>> {
let proto_files = vec![
"proto/data-plane-api/envoy/config/accesslog/v3/accesslog.proto",
"proto/data-plane-api/envoy/config/cluster/v3/cluster.proto",
"proto/data-plane-api/envoy/config/listener/v3/listener.proto",
"proto/data-plane-api/envoy/config/route/v3/route.proto",
"proto/data-plane-api/envoy/service/cluster/v3/cds.proto",
"proto/data-plane-api/envoy/service/discovery/v3/ads.proto",
"proto/data-plane-api/envoy/service/discovery/v3/discovery.proto",
"proto/data-plane-api/envoy/type/metadata/v3/metadata.proto",
"proto/data-plane-api/envoy/type/tracing/v3/custom_tag.proto",
"proto/quilkin/filters/capture/v1alpha1/capture.proto",
"proto/quilkin/filters/compress/v1alpha1/compress.proto",
"proto/quilkin/filters/concatenate_bytes/v1alpha1/concatenate_bytes.proto",
"proto/quilkin/filters/debug/v1alpha1/debug.proto",
"proto/quilkin/filters/drop/v1alpha1/drop.proto",
"proto/quilkin/filters/firewall/v1alpha1/firewall.proto",
"proto/quilkin/filters/load_balancer/v1alpha1/load_balancer.proto",
"proto/quilkin/filters/local_rate_limit/v1alpha1/local_rate_limit.proto",
"proto/quilkin/filters/match/v1alpha1/match.proto",
"proto/quilkin/filters/pass/v1alpha1/pass.proto",
"proto/quilkin/filters/token_router/v1alpha1/token_router.proto",
"proto/udpa/xds/core/v3/resource_name.proto",
]
.iter()
.map(|name| std::env::current_dir().unwrap().join(name))
.collect::<Vec<_>>();
let include_dirs = vec![
"proto/data-plane-api",
"proto/udpa",
"proto/googleapis",
"proto/protoc-gen-validate",
"proto/quilkin",
]
.iter()
.map(|i| std::env::current_dir().unwrap().join(i))
.collect::<Vec<_>>();
let config = {
let mut c = prost_build::Config::new();
c.disable_comments(Some("."));
c
}; | config,
&proto_files
.iter()
.map(|path| path.to_str().unwrap())
.collect::<Vec<_>>(),
&include_dirs
.iter()
.map(|p| p.to_str().unwrap())
.collect::<Vec<_>>(),
)?;
// This tells cargo to re-run this build script only when the proto files
// we're interested in change or any of the proto directories were updated.
for path in vec![proto_files, include_dirs].concat() {
println!("cargo:rerun-if-changed={}", path.to_str().unwrap());
}
Ok(())
} | tonic_build::configure()
.build_server(true)
.compile_with_config( | random_line_split |
build.rs | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// This build script is used to generate the rust source files that
// we need for XDS GRPC communication.
fn main() -> Result<(), Box<dyn std::error::Error>> | "proto/quilkin/filters/pass/v1alpha1/pass.proto",
"proto/quilkin/filters/token_router/v1alpha1/token_router.proto",
"proto/udpa/xds/core/v3/resource_name.proto",
]
.iter()
.map(|name| std::env::current_dir().unwrap().join(name))
.collect::<Vec<_>>();
let include_dirs = vec![
"proto/data-plane-api",
"proto/udpa",
"proto/googleapis",
"proto/protoc-gen-validate",
"proto/quilkin",
]
.iter()
.map(|i| std::env::current_dir().unwrap().join(i))
.collect::<Vec<_>>();
let config = {
let mut c = prost_build::Config::new();
c.disable_comments(Some("."));
c
};
tonic_build::configure()
.build_server(true)
.compile_with_config(
config,
&proto_files
.iter()
.map(|path| path.to_str().unwrap())
.collect::<Vec<_>>(),
&include_dirs
.iter()
.map(|p| p.to_str().unwrap())
.collect::<Vec<_>>(),
)?;
// This tells cargo to re-run this build script only when the proto files
// we're interested in change or any of the proto directories were updated.
for path in vec![proto_files, include_dirs].concat() {
println!("cargo:rerun-if-changed={}", path.to_str().unwrap());
}
Ok(())
}
| {
let proto_files = vec![
"proto/data-plane-api/envoy/config/accesslog/v3/accesslog.proto",
"proto/data-plane-api/envoy/config/cluster/v3/cluster.proto",
"proto/data-plane-api/envoy/config/listener/v3/listener.proto",
"proto/data-plane-api/envoy/config/route/v3/route.proto",
"proto/data-plane-api/envoy/service/cluster/v3/cds.proto",
"proto/data-plane-api/envoy/service/discovery/v3/ads.proto",
"proto/data-plane-api/envoy/service/discovery/v3/discovery.proto",
"proto/data-plane-api/envoy/type/metadata/v3/metadata.proto",
"proto/data-plane-api/envoy/type/tracing/v3/custom_tag.proto",
"proto/quilkin/filters/capture/v1alpha1/capture.proto",
"proto/quilkin/filters/compress/v1alpha1/compress.proto",
"proto/quilkin/filters/concatenate_bytes/v1alpha1/concatenate_bytes.proto",
"proto/quilkin/filters/debug/v1alpha1/debug.proto",
"proto/quilkin/filters/drop/v1alpha1/drop.proto",
"proto/quilkin/filters/firewall/v1alpha1/firewall.proto",
"proto/quilkin/filters/load_balancer/v1alpha1/load_balancer.proto",
"proto/quilkin/filters/local_rate_limit/v1alpha1/local_rate_limit.proto",
"proto/quilkin/filters/match/v1alpha1/match.proto", | identifier_body |
build.rs | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// This build script is used to generate the rust source files that
// we need for XDS GRPC communication.
fn | () -> Result<(), Box<dyn std::error::Error>> {
let proto_files = vec![
"proto/data-plane-api/envoy/config/accesslog/v3/accesslog.proto",
"proto/data-plane-api/envoy/config/cluster/v3/cluster.proto",
"proto/data-plane-api/envoy/config/listener/v3/listener.proto",
"proto/data-plane-api/envoy/config/route/v3/route.proto",
"proto/data-plane-api/envoy/service/cluster/v3/cds.proto",
"proto/data-plane-api/envoy/service/discovery/v3/ads.proto",
"proto/data-plane-api/envoy/service/discovery/v3/discovery.proto",
"proto/data-plane-api/envoy/type/metadata/v3/metadata.proto",
"proto/data-plane-api/envoy/type/tracing/v3/custom_tag.proto",
"proto/quilkin/filters/capture/v1alpha1/capture.proto",
"proto/quilkin/filters/compress/v1alpha1/compress.proto",
"proto/quilkin/filters/concatenate_bytes/v1alpha1/concatenate_bytes.proto",
"proto/quilkin/filters/debug/v1alpha1/debug.proto",
"proto/quilkin/filters/drop/v1alpha1/drop.proto",
"proto/quilkin/filters/firewall/v1alpha1/firewall.proto",
"proto/quilkin/filters/load_balancer/v1alpha1/load_balancer.proto",
"proto/quilkin/filters/local_rate_limit/v1alpha1/local_rate_limit.proto",
"proto/quilkin/filters/match/v1alpha1/match.proto",
"proto/quilkin/filters/pass/v1alpha1/pass.proto",
"proto/quilkin/filters/token_router/v1alpha1/token_router.proto",
"proto/udpa/xds/core/v3/resource_name.proto",
]
.iter()
.map(|name| std::env::current_dir().unwrap().join(name))
.collect::<Vec<_>>();
let include_dirs = vec![
"proto/data-plane-api",
"proto/udpa",
"proto/googleapis",
"proto/protoc-gen-validate",
"proto/quilkin",
]
.iter()
.map(|i| std::env::current_dir().unwrap().join(i))
.collect::<Vec<_>>();
let config = {
let mut c = prost_build::Config::new();
c.disable_comments(Some("."));
c
};
tonic_build::configure()
.build_server(true)
.compile_with_config(
config,
&proto_files
.iter()
.map(|path| path.to_str().unwrap())
.collect::<Vec<_>>(),
&include_dirs
.iter()
.map(|p| p.to_str().unwrap())
.collect::<Vec<_>>(),
)?;
// This tells cargo to re-run this build script only when the proto files
// we're interested in change or any of the proto directories were updated.
for path in vec![proto_files, include_dirs].concat() {
println!("cargo:rerun-if-changed={}", path.to_str().unwrap());
}
Ok(())
}
| main | identifier_name |
x86_64.rs | pub type c_long = i64;
pub type c_ulong = u64;
pub type time_t = i64;
pub type suseconds_t = i64;
s! {
pub struct stat {
pub st_dev: ::dev_t,
pub st_ino: ::ino_t,
pub st_mode: ::mode_t,
pub st_nlink: ::nlink_t,
pub st_uid: ::uid_t,
pub st_gid: ::gid_t,
pub st_rdev: ::dev_t,
pub st_atime: ::time_t,
pub st_atime_nsec: ::c_long,
pub st_mtime: ::time_t,
pub st_mtime_nsec: ::c_long,
pub st_ctime: ::time_t, | pub st_blocks: ::blkcnt_t,
pub st_blksize: ::blksize_t,
pub st_flags: ::fflags_t,
pub st_gen: ::uint32_t,
pub st_lspare: ::int32_t,
pub st_birthtime: ::time_t,
pub st_birthtime_nsec: ::c_long,
}
} | pub st_ctime_nsec: ::c_long,
pub st_size: ::off_t, | random_line_split |
unsized2.rs | // run-pass
#![allow(unconditional_recursion)]
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(unused_imports)]
#![feature(box_syntax)]
// Test sized-ness checking in substitution.
use std::marker;
// Unbounded.
fn f1<X:?Sized>(x: &X) {
f1::<X>(x);
}
fn f2<X>(x: &X) {
f1::<X>(x);
f2::<X>(x);
}
// Bounded.
trait T { fn dummy(&self) { } }
fn f3<X: T+?Sized>(x: &X) {
f3::<X>(x);
}
fn f4<X: T>(x: &X) {
f3::<X>(x);
f4::<X>(x);
}
// Self type.
trait T2 {
fn f() -> Box<Self>;
}
struct S;
impl T2 for S {
fn f() -> Box<S> {
box S
}
}
fn f5<X:?Sized+T2>(x: &X) {
let _: Box<X> = T2::f();
}
fn f6<X: T2>(x: &X) {
let _: Box<X> = T2::f();
}
trait T3 {
fn f() -> Box<Self>;
}
impl T3 for S {
fn f() -> Box<S> {
box S
}
}
fn f7<X:?Sized+T3>(x: &X) {
// This is valid, but the unsized bound on X is irrelevant because any type
// which implements T3 must have statically known size. | trait T4<X> {
fn dummy(&self) { }
fn m1(&self, x: &dyn T4<X>, y: X);
fn m2(&self, x: &dyn T5<X>, y: X);
}
trait T5<X:?Sized> {
fn dummy(&self) { }
// not an error (for now)
fn m1(&self, x: &dyn T4<X>);
fn m2(&self, x: &dyn T5<X>);
}
trait T6<X: T> {
fn dummy(&self) { }
fn m1(&self, x: &dyn T4<X>);
fn m2(&self, x: &dyn T5<X>);
}
trait T7<X:?Sized+T> {
fn dummy(&self) { }
// not an error (for now)
fn m1(&self, x: &dyn T4<X>);
fn m2(&self, x: &dyn T5<X>);
}
// The last field in a struct may be unsized
struct S2<X:?Sized> {
f: X,
}
struct S3<X:?Sized> {
f1: isize,
f2: X,
}
pub fn main() {
} | let _: Box<X> = T3::f();
}
| random_line_split |
unsized2.rs | // run-pass
#![allow(unconditional_recursion)]
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(unused_imports)]
#![feature(box_syntax)]
// Test sized-ness checking in substitution.
use std::marker;
// Unbounded.
fn f1<X:?Sized>(x: &X) {
f1::<X>(x);
}
fn f2<X>(x: &X) {
f1::<X>(x);
f2::<X>(x);
}
// Bounded.
trait T { fn dummy(&self) { } }
fn f3<X: T+?Sized>(x: &X) {
f3::<X>(x);
}
fn f4<X: T>(x: &X) {
f3::<X>(x);
f4::<X>(x);
}
// Self type.
trait T2 {
fn f() -> Box<Self>;
}
struct S;
impl T2 for S {
fn f() -> Box<S> {
box S
}
}
fn f5<X:?Sized+T2>(x: &X) {
let _: Box<X> = T2::f();
}
fn f6<X: T2>(x: &X) {
let _: Box<X> = T2::f();
}
trait T3 {
fn f() -> Box<Self>;
}
impl T3 for S {
fn f() -> Box<S> {
box S
}
}
fn f7<X:?Sized+T3>(x: &X) {
// This is valid, but the unsized bound on X is irrelevant because any type
// which implements T3 must have statically known size.
let _: Box<X> = T3::f();
}
trait T4<X> {
fn dummy(&self) { }
fn m1(&self, x: &dyn T4<X>, y: X);
fn m2(&self, x: &dyn T5<X>, y: X);
}
trait T5<X:?Sized> {
fn dummy(&self) { }
// not an error (for now)
fn m1(&self, x: &dyn T4<X>);
fn m2(&self, x: &dyn T5<X>);
}
trait T6<X: T> {
fn dummy(&self) { }
fn m1(&self, x: &dyn T4<X>);
fn m2(&self, x: &dyn T5<X>);
}
trait T7<X:?Sized+T> {
fn dummy(&self) { }
// not an error (for now)
fn m1(&self, x: &dyn T4<X>);
fn m2(&self, x: &dyn T5<X>);
}
// The last field in a struct may be unsized
struct S2<X:?Sized> {
f: X,
}
struct S3<X:?Sized> {
f1: isize,
f2: X,
}
pub fn main() | {
} | identifier_body |
|
unsized2.rs | // run-pass
#![allow(unconditional_recursion)]
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(unused_imports)]
#![feature(box_syntax)]
// Test sized-ness checking in substitution.
use std::marker;
// Unbounded.
fn f1<X:?Sized>(x: &X) {
f1::<X>(x);
}
fn f2<X>(x: &X) {
f1::<X>(x);
f2::<X>(x);
}
// Bounded.
trait T { fn dummy(&self) { } }
fn f3<X: T+?Sized>(x: &X) {
f3::<X>(x);
}
fn | <X: T>(x: &X) {
f3::<X>(x);
f4::<X>(x);
}
// Self type.
trait T2 {
fn f() -> Box<Self>;
}
struct S;
impl T2 for S {
fn f() -> Box<S> {
box S
}
}
fn f5<X:?Sized+T2>(x: &X) {
let _: Box<X> = T2::f();
}
fn f6<X: T2>(x: &X) {
let _: Box<X> = T2::f();
}
trait T3 {
fn f() -> Box<Self>;
}
impl T3 for S {
fn f() -> Box<S> {
box S
}
}
fn f7<X:?Sized+T3>(x: &X) {
// This is valid, but the unsized bound on X is irrelevant because any type
// which implements T3 must have statically known size.
let _: Box<X> = T3::f();
}
trait T4<X> {
fn dummy(&self) { }
fn m1(&self, x: &dyn T4<X>, y: X);
fn m2(&self, x: &dyn T5<X>, y: X);
}
trait T5<X:?Sized> {
fn dummy(&self) { }
// not an error (for now)
fn m1(&self, x: &dyn T4<X>);
fn m2(&self, x: &dyn T5<X>);
}
trait T6<X: T> {
fn dummy(&self) { }
fn m1(&self, x: &dyn T4<X>);
fn m2(&self, x: &dyn T5<X>);
}
trait T7<X:?Sized+T> {
fn dummy(&self) { }
// not an error (for now)
fn m1(&self, x: &dyn T4<X>);
fn m2(&self, x: &dyn T5<X>);
}
// The last field in a struct may be unsized
struct S2<X:?Sized> {
f: X,
}
struct S3<X:?Sized> {
f1: isize,
f2: X,
}
pub fn main() {
}
| f4 | identifier_name |
ord.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast;
use ast::{MetaItem, Item, Expr};
use codemap::Span;
use ext::base::ExtCtxt;
use ext::build::AstBuilder;
use ext::deriving::generic::*;
use ext::deriving::generic::ty::*;
use parse::token::InternedString;
use ptr::P;
pub fn expand_deriving_ord(cx: &mut ExtCtxt,
span: Span,
mitem: &MetaItem,
item: &Item,
push: |P<Item>|) {
macro_rules! md (
($name:expr, $op:expr, $equal:expr) => { {
let inline = cx.meta_word(span, InternedString::new("inline"));
let attrs = vec!(cx.attribute(span, inline));
MethodDef {
name: $name,
generics: LifetimeBounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec!(borrowed_self()),
ret_ty: Literal(Path::new(vec!("bool"))),
attributes: attrs,
combine_substructure: combine_substructure(|cx, span, substr| {
cs_op($op, $equal, cx, span, substr)
})
}
} }
);
let ordering_ty = Literal(Path::new(vec!["std", "cmp", "Ordering"]));
let ret_ty = Literal(Path::new_(vec!["std", "option", "Option"],
None,
vec![box ordering_ty],
true));
let inline = cx.meta_word(span, InternedString::new("inline"));
let attrs = vec!(cx.attribute(span, inline));
let partial_cmp_def = MethodDef {
name: "partial_cmp",
generics: LifetimeBounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec![borrowed_self()],
ret_ty: ret_ty,
attributes: attrs,
combine_substructure: combine_substructure(|cx, span, substr| {
cs_partial_cmp(cx, span, substr)
})
};
let trait_def = TraitDef {
span: span,
attributes: vec![],
path: Path::new(vec!["std", "cmp", "PartialOrd"]),
additional_bounds: vec![],
generics: LifetimeBounds::empty(),
methods: vec![
partial_cmp_def,
md!("lt", true, false),
md!("le", true, true),
md!("gt", false, false),
md!("ge", false, true)
]
};
trait_def.expand(cx, mitem, item, push)
}
pub enum OrderingOp {
PartialCmpOp, LtOp, LeOp, GtOp, GeOp,
}
pub fn some_ordering_collapsed(cx: &mut ExtCtxt,
span: Span,
op: OrderingOp,
self_arg_tags: &[ast::Ident]) -> P<ast::Expr> {
let lft = cx.expr_ident(span, self_arg_tags[0]);
let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1]));
let op_str = match op {
PartialCmpOp => "partial_cmp",
LtOp => "lt", LeOp => "le",
GtOp => "gt", GeOp => "ge",
};
cx.expr_method_call(span, lft, cx.ident_of(op_str), vec![rgt])
}
pub fn cs_partial_cmp(cx: &mut ExtCtxt, span: Span,
substr: &Substructure) -> P<Expr> {
let test_id = cx.ident_of("__test");
let ordering = cx.path_global(span,
vec!(cx.ident_of("std"),
cx.ident_of("cmp"),
cx.ident_of("Equal")));
let ordering = cx.expr_path(ordering);
let equals_expr = cx.expr_some(span, ordering);
let partial_cmp_path = vec![
cx.ident_of("std"),
cx.ident_of("cmp"),
cx.ident_of("PartialOrd"),
cx.ident_of("partial_cmp"),
];
/*
Builds:
let __test = ::std::cmp::PartialOrd::partial_cmp(&self_field1, &other_field1);
if __test == ::std::option::Some(::std::cmp::Equal) {
let __test = ::std::cmp::PartialOrd::partial_cmp(&self_field2, &other_field2);
if __test == ::std::option::Some(::std::cmp::Equal) {
...
} else {
__test
}
} else {
__test
}
FIXME #6449: These `if`s could/should be `match`es.
*/
cs_fold(
// foldr nests the if-elses correctly, leaving the first field
// as the outermost one, and the last as the innermost.
false,
|cx, span, old, self_f, other_fs| {
// let __test = new;
// if __test == Some(::std::cmp::Equal) {
// old
// } else {
// __test
// }
let new = {
let other_f = match other_fs {
[ref o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`"),
};
let args = vec![
cx.expr_addr_of(span, self_f),
cx.expr_addr_of(span, other_f.clone()),
];
cx.expr_call_global(span, partial_cmp_path.clone(), args)
};
let assign = cx.stmt_let(span, false, test_id, new);
let cond = cx.expr_binary(span, ast::BiEq,
cx.expr_ident(span, test_id),
equals_expr.clone());
let if_ = cx.expr_if(span,
cond,
old, Some(cx.expr_ident(span, test_id)));
cx.expr_block(cx.block(span, vec!(assign), Some(if_)))
},
equals_expr.clone(),
|cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 {
cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
} else {
some_ordering_collapsed(cx, span, PartialCmpOp, tag_tuple)
}
},
cx, span, substr)
}
/// Strict inequality.
fn cs_op(less: bool, equal: bool, cx: &mut ExtCtxt,
span: Span, substr: &Substructure) -> P<Expr> | layers of pointers, if the type includes pointers.
*/
let other_f = match other_fs {
[ref o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
};
let cmp = cx.expr_binary(span, op, self_f.clone(), other_f.clone());
let not_cmp = cx.expr_unary(span, ast::UnNot,
cx.expr_binary(span, op, other_f.clone(), self_f));
let and = cx.expr_binary(span, ast::BiAnd, not_cmp, subexpr);
cx.expr_binary(span, ast::BiOr, cmp, and)
},
cx.expr_bool(span, equal),
|cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 {
cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
} else {
let op = match (less, equal) {
(true, true) => LeOp, (true, false) => LtOp,
(false, true) => GeOp, (false, false) => GtOp,
};
some_ordering_collapsed(cx, span, op, tag_tuple)
}
},
cx, span, substr)
}
| {
let op = if less {ast::BiLt} else {ast::BiGt};
cs_fold(
false, // need foldr,
|cx, span, subexpr, self_f, other_fs| {
/*
build up a series of chain ||'s and &&'s from the inside
out (hence foldr) to get lexical ordering, i.e. for op ==
`ast::lt`
```
self.f1 < other.f1 || (!(other.f1 < self.f1) &&
(self.f2 < other.f2 || (!(other.f2 < self.f2) &&
(false)
))
)
```
The optimiser should remove the redundancy. We explicitly
get use the binops to avoid auto-deref dereferencing too many | identifier_body |
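// A hand-written sketch of the comparison shape that `cs_op` above generates
// for a two-field struct; the struct and field names are illustrative
// assumptions, not part of the libsyntax source.
struct Pair { a: u32, b: u32 }

fn pair_lt(x: &Pair, y: &Pair) -> bool {
    // Compare the first field, and only fall through to the second when
    // neither side is strictly smaller, exactly as the comment describes.
    x.a < y.a || (!(y.a < x.a) && (x.b < y.b || (!(y.b < x.b) && false)))
}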
ord.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast;
use ast::{MetaItem, Item, Expr};
use codemap::Span;
use ext::base::ExtCtxt;
use ext::build::AstBuilder;
use ext::deriving::generic::*;
use ext::deriving::generic::ty::*;
use parse::token::InternedString;
use ptr::P;
pub fn expand_deriving_ord(cx: &mut ExtCtxt,
span: Span,
mitem: &MetaItem,
item: &Item,
push: |P<Item>|) {
macro_rules! md (
($name:expr, $op:expr, $equal:expr) => { {
let inline = cx.meta_word(span, InternedString::new("inline"));
let attrs = vec!(cx.attribute(span, inline));
MethodDef {
name: $name,
generics: LifetimeBounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec!(borrowed_self()),
ret_ty: Literal(Path::new(vec!("bool"))),
attributes: attrs,
combine_substructure: combine_substructure(|cx, span, substr| {
cs_op($op, $equal, cx, span, substr)
})
}
} }
);
let ordering_ty = Literal(Path::new(vec!["std", "cmp", "Ordering"]));
let ret_ty = Literal(Path::new_(vec!["std", "option", "Option"],
None,
vec![box ordering_ty],
true));
let inline = cx.meta_word(span, InternedString::new("inline"));
let attrs = vec!(cx.attribute(span, inline));
let partial_cmp_def = MethodDef {
name: "partial_cmp",
generics: LifetimeBounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec![borrowed_self()],
ret_ty: ret_ty,
attributes: attrs,
combine_substructure: combine_substructure(|cx, span, substr| {
cs_partial_cmp(cx, span, substr)
})
};
let trait_def = TraitDef {
span: span,
attributes: vec![],
path: Path::new(vec!["std", "cmp", "PartialOrd"]),
additional_bounds: vec![],
generics: LifetimeBounds::empty(),
methods: vec![
partial_cmp_def,
md!("lt", true, false),
md!("le", true, true),
md!("gt", false, false),
md!("ge", false, true)
]
};
trait_def.expand(cx, mitem, item, push)
}
pub enum OrderingOp {
PartialCmpOp, LtOp, LeOp, GtOp, GeOp,
} |
pub fn some_ordering_collapsed(cx: &mut ExtCtxt,
span: Span,
op: OrderingOp,
self_arg_tags: &[ast::Ident]) -> P<ast::Expr> {
let lft = cx.expr_ident(span, self_arg_tags[0]);
let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1]));
let op_str = match op {
PartialCmpOp => "partial_cmp",
LtOp => "lt", LeOp => "le",
GtOp => "gt", GeOp => "ge",
};
cx.expr_method_call(span, lft, cx.ident_of(op_str), vec![rgt])
}
pub fn cs_partial_cmp(cx: &mut ExtCtxt, span: Span,
substr: &Substructure) -> P<Expr> {
let test_id = cx.ident_of("__test");
let ordering = cx.path_global(span,
vec!(cx.ident_of("std"),
cx.ident_of("cmp"),
cx.ident_of("Equal")));
let ordering = cx.expr_path(ordering);
let equals_expr = cx.expr_some(span, ordering);
let partial_cmp_path = vec![
cx.ident_of("std"),
cx.ident_of("cmp"),
cx.ident_of("PartialOrd"),
cx.ident_of("partial_cmp"),
];
/*
Builds:
let __test = ::std::cmp::PartialOrd::partial_cmp(&self_field1, &other_field1);
if __test == ::std::option::Some(::std::cmp::Equal) {
let __test = ::std::cmp::PartialOrd::partial_cmp(&self_field2, &other_field2);
if __test == ::std::option::Some(::std::cmp::Equal) {
...
} else {
__test
}
} else {
__test
}
FIXME #6449: These `if`s could/should be `match`es.
*/
cs_fold(
// foldr nests the if-elses correctly, leaving the first field
// as the outermost one, and the last as the innermost.
false,
|cx, span, old, self_f, other_fs| {
// let __test = new;
// if __test == Some(::std::cmp::Equal) {
// old
// } else {
// __test
// }
let new = {
let other_f = match other_fs {
[ref o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`"),
};
let args = vec![
cx.expr_addr_of(span, self_f),
cx.expr_addr_of(span, other_f.clone()),
];
cx.expr_call_global(span, partial_cmp_path.clone(), args)
};
let assign = cx.stmt_let(span, false, test_id, new);
let cond = cx.expr_binary(span, ast::BiEq,
cx.expr_ident(span, test_id),
equals_expr.clone());
let if_ = cx.expr_if(span,
cond,
old, Some(cx.expr_ident(span, test_id)));
cx.expr_block(cx.block(span, vec!(assign), Some(if_)))
},
equals_expr.clone(),
|cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 {
cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
} else {
some_ordering_collapsed(cx, span, PartialCmpOp, tag_tuple)
}
},
cx, span, substr)
}
/// Strict inequality.
fn cs_op(less: bool, equal: bool, cx: &mut ExtCtxt,
span: Span, substr: &Substructure) -> P<Expr> {
let op = if less {ast::BiLt} else {ast::BiGt};
cs_fold(
false, // need foldr,
|cx, span, subexpr, self_f, other_fs| {
/*
build up a series of chain ||'s and &&'s from the inside
out (hence foldr) to get lexical ordering, i.e. for op ==
`ast::lt`
```
self.f1 < other.f1 || (!(other.f1 < self.f1) &&
(self.f2 < other.f2 || (!(other.f2 < self.f2) &&
(false)
))
)
```
The optimiser should remove the redundancy. We explicitly
use the binops to avoid auto-deref dereferencing too many
layers of pointers, if the type includes pointers.
*/
let other_f = match other_fs {
[ref o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
};
let cmp = cx.expr_binary(span, op, self_f.clone(), other_f.clone());
let not_cmp = cx.expr_unary(span, ast::UnNot,
cx.expr_binary(span, op, other_f.clone(), self_f));
let and = cx.expr_binary(span, ast::BiAnd, not_cmp, subexpr);
cx.expr_binary(span, ast::BiOr, cmp, and)
},
cx.expr_bool(span, equal),
|cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 {
cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
} else {
let op = match (less, equal) {
(true, true) => LeOp, (true, false) => LtOp,
(false, true) => GeOp, (false, false) => GtOp,
};
some_ordering_collapsed(cx, span, op, tag_tuple)
}
},
cx, span, substr)
} | random_line_split |
|
ord.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast;
use ast::{MetaItem, Item, Expr};
use codemap::Span;
use ext::base::ExtCtxt;
use ext::build::AstBuilder;
use ext::deriving::generic::*;
use ext::deriving::generic::ty::*;
use parse::token::InternedString;
use ptr::P;
pub fn expand_deriving_ord(cx: &mut ExtCtxt,
span: Span,
mitem: &MetaItem,
item: &Item,
push: |P<Item>|) {
macro_rules! md (
($name:expr, $op:expr, $equal:expr) => { {
let inline = cx.meta_word(span, InternedString::new("inline"));
let attrs = vec!(cx.attribute(span, inline));
MethodDef {
name: $name,
generics: LifetimeBounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec!(borrowed_self()),
ret_ty: Literal(Path::new(vec!("bool"))),
attributes: attrs,
combine_substructure: combine_substructure(|cx, span, substr| {
cs_op($op, $equal, cx, span, substr)
})
}
} }
);
let ordering_ty = Literal(Path::new(vec!["std", "cmp", "Ordering"]));
let ret_ty = Literal(Path::new_(vec!["std", "option", "Option"],
None,
vec![box ordering_ty],
true));
let inline = cx.meta_word(span, InternedString::new("inline"));
let attrs = vec!(cx.attribute(span, inline));
let partial_cmp_def = MethodDef {
name: "partial_cmp",
generics: LifetimeBounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec![borrowed_self()],
ret_ty: ret_ty,
attributes: attrs,
combine_substructure: combine_substructure(|cx, span, substr| {
cs_partial_cmp(cx, span, substr)
})
};
let trait_def = TraitDef {
span: span,
attributes: vec![],
path: Path::new(vec!["std", "cmp", "PartialOrd"]),
additional_bounds: vec![],
generics: LifetimeBounds::empty(),
methods: vec![
partial_cmp_def,
md!("lt", true, false),
md!("le", true, true),
md!("gt", false, false),
md!("ge", false, true)
]
};
trait_def.expand(cx, mitem, item, push)
}
pub enum OrderingOp {
PartialCmpOp, LtOp, LeOp, GtOp, GeOp,
}
pub fn some_ordering_collapsed(cx: &mut ExtCtxt,
span: Span,
op: OrderingOp,
self_arg_tags: &[ast::Ident]) -> P<ast::Expr> {
let lft = cx.expr_ident(span, self_arg_tags[0]);
let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1]));
let op_str = match op {
PartialCmpOp => "partial_cmp",
LtOp => "lt", LeOp => "le",
GtOp => "gt", GeOp => "ge",
};
cx.expr_method_call(span, lft, cx.ident_of(op_str), vec![rgt])
}
pub fn cs_partial_cmp(cx: &mut ExtCtxt, span: Span,
substr: &Substructure) -> P<Expr> {
let test_id = cx.ident_of("__test");
let ordering = cx.path_global(span,
vec!(cx.ident_of("std"),
cx.ident_of("cmp"),
cx.ident_of("Equal")));
let ordering = cx.expr_path(ordering);
let equals_expr = cx.expr_some(span, ordering);
let partial_cmp_path = vec![
cx.ident_of("std"),
cx.ident_of("cmp"),
cx.ident_of("PartialOrd"),
cx.ident_of("partial_cmp"),
];
/*
Builds:
let __test = ::std::cmp::PartialOrd::partial_cmp(&self_field1, &other_field1);
if __test == ::std::option::Some(::std::cmp::Equal) {
let __test = ::std::cmp::PartialOrd::partial_cmp(&self_field2, &other_field2);
if __test == ::std::option::Some(::std::cmp::Equal) {
...
} else {
__test
}
} else {
__test
}
FIXME #6449: These `if`s could/should be `match`es.
*/
cs_fold(
// foldr nests the if-elses correctly, leaving the first field
// as the outermost one, and the last as the innermost.
false,
|cx, span, old, self_f, other_fs| {
// let __test = new;
// if __test == Some(::std::cmp::Equal) {
// old
// } else {
// __test
// }
let new = {
let other_f = match other_fs {
[ref o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`"),
};
let args = vec![
cx.expr_addr_of(span, self_f),
cx.expr_addr_of(span, other_f.clone()),
];
cx.expr_call_global(span, partial_cmp_path.clone(), args)
};
let assign = cx.stmt_let(span, false, test_id, new);
let cond = cx.expr_binary(span, ast::BiEq,
cx.expr_ident(span, test_id),
equals_expr.clone());
let if_ = cx.expr_if(span,
cond,
old, Some(cx.expr_ident(span, test_id)));
cx.expr_block(cx.block(span, vec!(assign), Some(if_)))
},
equals_expr.clone(),
|cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 | else {
some_ordering_collapsed(cx, span, PartialCmpOp, tag_tuple)
}
},
cx, span, substr)
}
/// Strict inequality.
fn cs_op(less: bool, equal: bool, cx: &mut ExtCtxt,
span: Span, substr: &Substructure) -> P<Expr> {
let op = if less {ast::BiLt} else {ast::BiGt};
cs_fold(
false, // need foldr,
|cx, span, subexpr, self_f, other_fs| {
/*
build up a series of chain ||'s and &&'s from the inside
out (hence foldr) to get lexical ordering, i.e. for op ==
`ast::lt`
```
self.f1 < other.f1 || (!(other.f1 < self.f1) &&
(self.f2 < other.f2 || (!(other.f2 < self.f2) &&
(false)
))
)
```
The optimiser should remove the redundancy. We explicitly
use the binops to avoid auto-deref dereferencing too many
layers of pointers, if the type includes pointers.
*/
let other_f = match other_fs {
[ref o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
};
let cmp = cx.expr_binary(span, op, self_f.clone(), other_f.clone());
let not_cmp = cx.expr_unary(span, ast::UnNot,
cx.expr_binary(span, op, other_f.clone(), self_f));
let and = cx.expr_binary(span, ast::BiAnd, not_cmp, subexpr);
cx.expr_binary(span, ast::BiOr, cmp, and)
},
cx.expr_bool(span, equal),
|cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 {
cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
} else {
let op = match (less, equal) {
(true, true) => LeOp, (true, false) => LtOp,
(false, true) => GeOp, (false, false) => GtOp,
};
some_ordering_collapsed(cx, span, op, tag_tuple)
}
},
cx, span, substr)
}
| {
cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
} | conditional_block |
ord.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast;
use ast::{MetaItem, Item, Expr};
use codemap::Span;
use ext::base::ExtCtxt;
use ext::build::AstBuilder;
use ext::deriving::generic::*;
use ext::deriving::generic::ty::*;
use parse::token::InternedString;
use ptr::P;
pub fn expand_deriving_ord(cx: &mut ExtCtxt,
span: Span,
mitem: &MetaItem,
item: &Item,
push: |P<Item>|) {
macro_rules! md (
($name:expr, $op:expr, $equal:expr) => { {
let inline = cx.meta_word(span, InternedString::new("inline"));
let attrs = vec!(cx.attribute(span, inline));
MethodDef {
name: $name,
generics: LifetimeBounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec!(borrowed_self()),
ret_ty: Literal(Path::new(vec!("bool"))),
attributes: attrs,
combine_substructure: combine_substructure(|cx, span, substr| {
cs_op($op, $equal, cx, span, substr)
})
}
} }
);
let ordering_ty = Literal(Path::new(vec!["std", "cmp", "Ordering"]));
let ret_ty = Literal(Path::new_(vec!["std", "option", "Option"],
None,
vec![box ordering_ty],
true));
let inline = cx.meta_word(span, InternedString::new("inline"));
let attrs = vec!(cx.attribute(span, inline));
let partial_cmp_def = MethodDef {
name: "partial_cmp",
generics: LifetimeBounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec![borrowed_self()],
ret_ty: ret_ty,
attributes: attrs,
combine_substructure: combine_substructure(|cx, span, substr| {
cs_partial_cmp(cx, span, substr)
})
};
let trait_def = TraitDef {
span: span,
attributes: vec![],
path: Path::new(vec!["std", "cmp", "PartialOrd"]),
additional_bounds: vec![],
generics: LifetimeBounds::empty(),
methods: vec![
partial_cmp_def,
md!("lt", true, false),
md!("le", true, true),
md!("gt", false, false),
md!("ge", false, true)
]
};
trait_def.expand(cx, mitem, item, push)
}
pub enum | {
PartialCmpOp, LtOp, LeOp, GtOp, GeOp,
}
pub fn some_ordering_collapsed(cx: &mut ExtCtxt,
span: Span,
op: OrderingOp,
self_arg_tags: &[ast::Ident]) -> P<ast::Expr> {
let lft = cx.expr_ident(span, self_arg_tags[0]);
let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1]));
let op_str = match op {
PartialCmpOp => "partial_cmp",
LtOp => "lt", LeOp => "le",
GtOp => "gt", GeOp => "ge",
};
cx.expr_method_call(span, lft, cx.ident_of(op_str), vec![rgt])
}
pub fn cs_partial_cmp(cx: &mut ExtCtxt, span: Span,
substr: &Substructure) -> P<Expr> {
let test_id = cx.ident_of("__test");
let ordering = cx.path_global(span,
vec!(cx.ident_of("std"),
cx.ident_of("cmp"),
cx.ident_of("Equal")));
let ordering = cx.expr_path(ordering);
let equals_expr = cx.expr_some(span, ordering);
let partial_cmp_path = vec![
cx.ident_of("std"),
cx.ident_of("cmp"),
cx.ident_of("PartialOrd"),
cx.ident_of("partial_cmp"),
];
/*
Builds:
let __test = ::std::cmp::PartialOrd::partial_cmp(&self_field1, &other_field1);
if __test == ::std::option::Some(::std::cmp::Equal) {
let __test = ::std::cmp::PartialOrd::partial_cmp(&self_field2, &other_field2);
if __test == ::std::option::Some(::std::cmp::Equal) {
...
} else {
__test
}
} else {
__test
}
FIXME #6449: These `if`s could/should be `match`es.
*/
cs_fold(
// foldr nests the if-elses correctly, leaving the first field
// as the outermost one, and the last as the innermost.
false,
|cx, span, old, self_f, other_fs| {
// let __test = new;
// if __test == Some(::std::cmp::Equal) {
// old
// } else {
// __test
// }
let new = {
let other_f = match other_fs {
[ref o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`"),
};
let args = vec![
cx.expr_addr_of(span, self_f),
cx.expr_addr_of(span, other_f.clone()),
];
cx.expr_call_global(span, partial_cmp_path.clone(), args)
};
let assign = cx.stmt_let(span, false, test_id, new);
let cond = cx.expr_binary(span, ast::BiEq,
cx.expr_ident(span, test_id),
equals_expr.clone());
let if_ = cx.expr_if(span,
cond,
old, Some(cx.expr_ident(span, test_id)));
cx.expr_block(cx.block(span, vec!(assign), Some(if_)))
},
equals_expr.clone(),
|cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 {
cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
} else {
some_ordering_collapsed(cx, span, PartialCmpOp, tag_tuple)
}
},
cx, span, substr)
}
/// Strict inequality.
fn cs_op(less: bool, equal: bool, cx: &mut ExtCtxt,
span: Span, substr: &Substructure) -> P<Expr> {
let op = if less {ast::BiLt} else {ast::BiGt};
cs_fold(
false, // need foldr,
|cx, span, subexpr, self_f, other_fs| {
/*
build up a series of chain ||'s and &&'s from the inside
out (hence foldr) to get lexical ordering, i.e. for op ==
`ast::lt`
```
self.f1 < other.f1 || (!(other.f1 < self.f1) &&
(self.f2 < other.f2 || (!(other.f2 < self.f2) &&
(false)
))
)
```
The optimiser should remove the redundancy. We explicitly
use the binops to avoid auto-deref dereferencing too many
layers of pointers, if the type includes pointers.
*/
let other_f = match other_fs {
[ref o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
};
let cmp = cx.expr_binary(span, op, self_f.clone(), other_f.clone());
let not_cmp = cx.expr_unary(span, ast::UnNot,
cx.expr_binary(span, op, other_f.clone(), self_f));
let and = cx.expr_binary(span, ast::BiAnd, not_cmp, subexpr);
cx.expr_binary(span, ast::BiOr, cmp, and)
},
cx.expr_bool(span, equal),
|cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 {
cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
} else {
let op = match (less, equal) {
(true, true) => LeOp, (true, false) => LtOp,
(false, true) => GeOp, (false, false) => GtOp,
};
some_ordering_collapsed(cx, span, op, tag_tuple)
}
},
cx, span, substr)
}
| OrderingOp | identifier_name |
mod.rs | use std::fmt::Debug;
use provider::Output;
use provider::error::Error;
use provider::error::HandleFuncNotDefined;
pub trait InlineProvider: Debug {
fn is_installed(&self, &str, Option<&str>) -> Result<Output, Error> |
fn version(&self, &str, Option<&str>) -> Result<Output, Error> {
let e = HandleFuncNotDefined {
provider: format!("{:?}", self),
func: "version".to_string(),
};
Err(e.into())
}
fn remove(&self, &str, Option<&str>) -> Result<Output, Error> {
let e = HandleFuncNotDefined {
provider: format!("{:?}", self),
func: "remove".to_string(),
};
Err(e.into())
}
fn install(&self, &str, Option<&str>) -> Result<Output, Error> {
let e = HandleFuncNotDefined {
provider: format!("{:?}", self),
func: "install".to_string(),
};
Err(e.into())
}
fn box_clone(&self) -> Box<InlineProvider>;
}
impl Clone for Box<InlineProvider> {
fn clone(&self) -> Box<InlineProvider> {
self.box_clone()
}
}
pub mod null;
| {
let e = HandleFuncNotDefined {
provider: format!("{:?}", self),
func: "is_installed".to_string(),
};
Err(e.into())
} | identifier_body |
mod.rs | use std::fmt::Debug;
use provider::Output;
use provider::error::Error;
use provider::error::HandleFuncNotDefined;
pub trait InlineProvider: Debug {
fn | (&self, &str, Option<&str>) -> Result<Output, Error> {
let e = HandleFuncNotDefined {
provider: format!("{:?}", self),
func: "is_installed".to_string(),
};
Err(e.into())
}
fn version(&self, &str, Option<&str>) -> Result<Output, Error> {
let e = HandleFuncNotDefined {
provider: format!("{:?}", self),
func: "version".to_string(),
};
Err(e.into())
}
fn remove(&self, &str, Option<&str>) -> Result<Output, Error> {
let e = HandleFuncNotDefined {
provider: format!("{:?}", self),
func: "remove".to_string(),
};
Err(e.into())
}
fn install(&self, &str, Option<&str>) -> Result<Output, Error> {
let e = HandleFuncNotDefined {
provider: format!("{:?}", self),
func: "install".to_string(),
};
Err(e.into())
}
fn box_clone(&self) -> Box<InlineProvider>;
}
impl Clone for Box<InlineProvider> {
fn clone(&self) -> Box<InlineProvider> {
self.box_clone()
}
}
pub mod null;
| is_installed | identifier_name |
mod.rs | use std::fmt::Debug;
use provider::Output; | fn is_installed(&self, &str, Option<&str>) -> Result<Output, Error> {
let e = HandleFuncNotDefined {
provider: format!("{:?}", self),
func: "is_installed".to_string(),
};
Err(e.into())
}
fn version(&self, &str, Option<&str>) -> Result<Output, Error> {
let e = HandleFuncNotDefined {
provider: format!("{:?}", self),
func: "version".to_string(),
};
Err(e.into())
}
fn remove(&self, &str, Option<&str>) -> Result<Output, Error> {
let e = HandleFuncNotDefined {
provider: format!("{:?}", self),
func: "remove".to_string(),
};
Err(e.into())
}
fn install(&self, &str, Option<&str>) -> Result<Output, Error> {
let e = HandleFuncNotDefined {
provider: format!("{:?}", self),
func: "install".to_string(),
};
Err(e.into())
}
fn box_clone(&self) -> Box<InlineProvider>;
}
impl Clone for Box<InlineProvider> {
fn clone(&self) -> Box<InlineProvider> {
self.box_clone()
}
}
pub mod null; | use provider::error::Error;
use provider::error::HandleFuncNotDefined;
pub trait InlineProvider: Debug { | random_line_split |
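// A self-contained sketch of the `box_clone` idiom used by `InlineProvider`
// above to make a boxed trait object cloneable; the trait and type names here
// are illustrative assumptions, not part of the provider crate.
trait Greeter: std::fmt::Debug {
    fn greet(&self) -> String;
    fn box_clone(&self) -> Box<dyn Greeter>;
}

#[derive(Clone, Debug)]
struct English;

impl Greeter for English {
    fn greet(&self) -> String {
        "hello".to_string()
    }
    fn box_clone(&self) -> Box<dyn Greeter> {
        Box::new(self.clone())
    }
}

// Delegating `clone` to `box_clone` mirrors `impl Clone for Box<InlineProvider>`.
impl Clone for Box<dyn Greeter> {
    fn clone(&self) -> Box<dyn Greeter> {
        self.box_clone()
    }
}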
objects.rs | // Copyright 2019 Dmitry Tantsur <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Stored objects.
use std::collections::HashMap;
use std::io::Read;
use std::rc::Rc;
use chrono::{DateTime, TimeZone};
use fallible_iterator::{FallibleIterator, IntoFallibleIterator};
use osauth::services::OBJECT_STORAGE;
use reqwest::Url;
use super::super::common::{
ContainerRef, IntoVerified, ObjectRef, Refresh, ResourceIterator, ResourceQuery,
};
use super::super::session::Session;
use super::super::utils::Query;
use super::super::{Error, Result};
use super::{api, protocol};
/// A query to objects.
#[derive(Clone, Debug)]
pub struct ObjectQuery {
session: Rc<Session>,
c_name: String,
query: Query,
can_paginate: bool,
}
/// A request to create an object.
#[derive(Debug)]
pub struct NewObject<R> {
session: Rc<Session>,
c_name: ContainerRef,
name: String,
body: R,
headers: ObjectHeaders,
}
/// Optional headers for an object.
#[derive(Debug, Default)]
pub struct ObjectHeaders {
pub delete_after: Option<u32>,
pub delete_at: Option<i64>,
pub metadata: HashMap<String, String>,
}
/// Structure representing an object.
#[derive(Clone, Debug)]
pub struct Object {
session: Rc<Session>,
inner: protocol::Object,
c_name: String,
}
impl Object {
/// Create a new Object object.
pub(crate) fn new(session: Rc<Session>, inner: protocol::Object, c_name: String) -> Object {
Object {
session,
inner,
c_name,
}
}
/// Create an object.
pub(crate) fn create<C, Id, R>(
session: Rc<Session>,
container: C,
name: Id,
body: R,
) -> Result<Object>
where
C: Into<ContainerRef>,
Id: AsRef<str>,
R: Read + Sync + Send + 'static,
{
let new_object = NewObject::new(
session,
container.into(),
// TODO(dtantsur): get rid of to_string here.
name.as_ref().to_string(),
body,
);
new_object.create()
}
/// Load an Object.
pub(crate) fn load<C, Id>(session: Rc<Session>, container: C, name: Id) -> Result<Object>
where
C: Into<ContainerRef>,
Id: AsRef<str>,
{
let c_ref = container.into();
let c_name = c_ref.to_string();
let inner = api::get_object(&session, c_ref, name)?;
Ok(Object::new(session, inner, c_name))
}
/// Delete the object.
#[inline]
pub fn delete(self) -> Result<()> {
api::delete_object(&self.session, &self.c_name, self.inner.name)
}
/// Download the object.
///
/// The object can be read from the resulting reader.
#[inline]
pub fn download(&self) -> Result<impl Read + '_> {
api::download_object(&self.session, &self.c_name, &self.inner.name)
}
transparent_property! {
#[doc = "Total size of the object."]
bytes: u64
}
/// Container name.
#[inline]
pub fn container_name(&self) -> &String {
&self.c_name
}
transparent_property! {
#[doc = "Object content type (if set)."]
content_type: ref Option<String>
}
transparent_property! {
#[doc = "Object hash or ETag, which is a content's md5 hash"]
hash: ref Option<String>
}
transparent_property! {
#[doc = "Object name."]
name: ref String
}
/// Object url.
#[inline]
pub fn url(&self) -> Result<Url> {
self.session
.get_endpoint(OBJECT_STORAGE, &[self.container_name(), self.name()])
}
}
impl Refresh for Object {
/// Refresh the object.
fn refresh(&mut self) -> Result<()> {
self.inner = api::get_object(&self.session, &self.c_name, &self.inner.name)?;
Ok(())
}
}
impl ObjectQuery {
pub(crate) fn new<C: Into<ContainerRef>>(session: Rc<Session>, container: C) -> ObjectQuery {
ObjectQuery {
session,
c_name: container.into().into(),
query: Query::new(),
can_paginate: true,
}
}
/// Add marker to the request.
///
/// Using this disables automatic pagination.
pub fn with_marker<T: Into<String>>(mut self, marker: T) -> Self {
self.can_paginate = false;
self.query.push_str("marker", marker);
self
}
/// Add limit to the request.
///
/// Using this disables automatic pagination.
pub fn with_limit(mut self, limit: usize) -> Self {
self.can_paginate = false;
self.query.push("limit", limit);
self
}
/// Convert this query into an iterator executing the request.
///
/// Returns a `FallibleIterator`, which is an iterator with each `next`
/// call returning a `Result`.
///
/// Note that no requests are done until you start iterating.
pub fn into_iter(self) -> ResourceIterator<ObjectQuery> {
debug!(
"Fetching objects in container {} with {:?}",
self.c_name, self.query
);
ResourceIterator::new(self)
}
/// Execute this request and return all results.
///
/// A convenience shortcut for `self.into_iter().collect()`.
pub fn all(self) -> Result<Vec<Object>> {
self.into_iter().collect()
}
/// Return one and exactly one result.
///
/// Fails with `ResourceNotFound` if the query produces no results and
/// with `TooManyItems` if the query produces more than one result.
pub fn one(mut self) -> Result<Object> {
debug!(
"Fetching one object in container {} with {:?}",
self.c_name, self.query
);
if self.can_paginate {
// We need only one result. We fetch maximum two to be able
// to check if the query yielded more than one result.
self.query.push("limit", 2);
}
self.into_iter().one()
}
}
impl ResourceQuery for ObjectQuery {
type Item = Object;
const DEFAULT_LIMIT: usize = 100;
fn can_paginate(&self) -> Result<bool> {
Ok(self.can_paginate)
}
fn extract_marker(&self, resource: &Self::Item) -> String {
resource.name().clone()
}
fn fetch_chunk(&self, limit: Option<usize>, marker: Option<String>) -> Result<Vec<Self::Item>> {
let query = self.query.with_marker_and_limit(limit, marker);
Ok(api::list_objects(&self.session, &self.c_name, query)?
.into_iter()
.map(|item| Object {
session: self.session.clone(),
inner: item,
c_name: self.c_name.clone(),
})
.collect())
}
}
impl IntoFallibleIterator for ObjectQuery {
type Item = Object;
type Error = Error;
type IntoFallibleIter = ResourceIterator<ObjectQuery>;
fn into_fallible_iter(self) -> Self::IntoFallibleIter {
self.into_iter()
}
}
impl<R: Read + Sync + Send + 'static> NewObject<R> {
/// Start creating an object.
pub(crate) fn new(
session: Rc<Session>,
c_name: ContainerRef,
name: String,
body: R,
) -> NewObject<R> {
NewObject {
session,
c_name,
name,
body,
headers: ObjectHeaders::default(),
}
}
/// Request creation of the object.
pub fn create(self) -> Result<Object> {
let c_name = self.c_name.clone();
let inner = api::create_object(
&self.session,
self.c_name,
self.name,
self.body,
self.headers,
)?;
Ok(Object::new(self.session, inner, c_name.into()))
}
/// Metadata to set on the object.
#[inline]
pub fn metadata(&mut self) -> &mut HashMap<String, String> {
&mut self.headers.metadata
}
/// Set TTL in seconds for the object.
#[inline]
pub fn with_delete_after(mut self, ttl: u32) -> NewObject<R> {
self.headers.delete_after = Some(ttl);
self
}
/// Set the date and time when the object must be deleted.
#[inline]
pub fn with_delete_at<T: TimeZone>(mut self, datetime: DateTime<T>) -> NewObject<R> {
self.headers.delete_at = Some(datetime.timestamp());
self
}
/// Insert a new metadata item.
#[inline]
pub fn with_metadata<K, V>(mut self, key: K, item: V) -> NewObject<R>
where
K: Into<String>,
V: Into<String>,
{
let _ = self.headers.metadata.insert(key.into(), item.into());
self
}
}
impl From<Object> for ObjectRef {
fn from(value: Object) -> ObjectRef |
}
#[cfg(feature = "object-storage")]
impl IntoVerified for ObjectRef {}
| {
ObjectRef::new_verified(value.inner.name)
} | identifier_body |
objects.rs | // Copyright 2019 Dmitry Tantsur <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Stored objects.
use std::collections::HashMap;
use std::io::Read;
use std::rc::Rc;
use chrono::{DateTime, TimeZone};
use fallible_iterator::{FallibleIterator, IntoFallibleIterator};
use osauth::services::OBJECT_STORAGE;
use reqwest::Url;
use super::super::common::{
ContainerRef, IntoVerified, ObjectRef, Refresh, ResourceIterator, ResourceQuery,
};
use super::super::session::Session;
use super::super::utils::Query;
use super::super::{Error, Result};
use super::{api, protocol};
/// A query to objects.
#[derive(Clone, Debug)]
pub struct ObjectQuery {
session: Rc<Session>,
c_name: String,
query: Query,
can_paginate: bool,
}
/// A request to create an object.
#[derive(Debug)]
pub struct NewObject<R> {
session: Rc<Session>,
c_name: ContainerRef,
name: String,
body: R,
headers: ObjectHeaders,
}
/// Optional headers for an object.
#[derive(Debug, Default)]
pub struct ObjectHeaders {
pub delete_after: Option<u32>,
pub delete_at: Option<i64>,
pub metadata: HashMap<String, String>,
}
/// Structure representing an object.
#[derive(Clone, Debug)]
pub struct Object {
session: Rc<Session>,
inner: protocol::Object,
c_name: String,
}
impl Object {
/// Create a new Object object.
pub(crate) fn new(session: Rc<Session>, inner: protocol::Object, c_name: String) -> Object {
Object {
session,
inner,
c_name,
}
}
/// Create an object.
pub(crate) fn create<C, Id, R>(
session: Rc<Session>,
container: C,
name: Id,
body: R,
) -> Result<Object>
where
C: Into<ContainerRef>,
Id: AsRef<str>,
R: Read + Sync + Send + 'static,
{
let new_object = NewObject::new(
session,
container.into(),
// TODO(dtantsur): get rid of to_string here.
name.as_ref().to_string(),
body,
);
new_object.create()
}
/// Load an Object.
pub(crate) fn load<C, Id>(session: Rc<Session>, container: C, name: Id) -> Result<Object>
where
C: Into<ContainerRef>,
Id: AsRef<str>,
{
let c_ref = container.into();
let c_name = c_ref.to_string();
let inner = api::get_object(&session, c_ref, name)?;
Ok(Object::new(session, inner, c_name))
}
/// Delete the object.
#[inline]
pub fn delete(self) -> Result<()> {
api::delete_object(&self.session, &self.c_name, self.inner.name)
}
/// Download the object.
///
/// The object can be read from the resulting reader.
#[inline]
pub fn download(&self) -> Result<impl Read + '_> {
api::download_object(&self.session, &self.c_name, &self.inner.name)
}
transparent_property! {
#[doc = "Total size of the object."]
bytes: u64
}
/// Container name.
#[inline]
pub fn container_name(&self) -> &String {
&self.c_name
}
transparent_property! {
#[doc = "Object content type (if set)."]
content_type: ref Option<String>
}
transparent_property! {
#[doc = "Object hash or ETag, which is a content's md5 hash"]
hash: ref Option<String>
}
transparent_property! {
#[doc = "Object name."]
name: ref String
}
/// Object url.
#[inline]
pub fn url(&self) -> Result<Url> { | self.session
.get_endpoint(OBJECT_STORAGE, &[self.container_name(), self.name()])
}
}
impl Refresh for Object {
/// Refresh the object.
fn refresh(&mut self) -> Result<()> {
self.inner = api::get_object(&self.session, &self.c_name, &self.inner.name)?;
Ok(())
}
}
impl ObjectQuery {
pub(crate) fn new<C: Into<ContainerRef>>(session: Rc<Session>, container: C) -> ObjectQuery {
ObjectQuery {
session,
c_name: container.into().into(),
query: Query::new(),
can_paginate: true,
}
}
/// Add marker to the request.
///
/// Using this disables automatic pagination.
pub fn with_marker<T: Into<String>>(mut self, marker: T) -> Self {
self.can_paginate = false;
self.query.push_str("marker", marker);
self
}
/// Add limit to the request.
///
/// Using this disables automatic pagination.
pub fn with_limit(mut self, limit: usize) -> Self {
self.can_paginate = false;
self.query.push("limit", limit);
self
}
/// Convert this query into an iterator executing the request.
///
/// Returns a `FallibleIterator`, which is an iterator with each `next`
/// call returning a `Result`.
///
/// Note that no requests are done until you start iterating.
pub fn into_iter(self) -> ResourceIterator<ObjectQuery> {
debug!(
"Fetching objects in container {} with {:?}",
self.c_name, self.query
);
ResourceIterator::new(self)
}
/// Execute this request and return all results.
///
/// A convenience shortcut for `self.into_iter().collect()`.
pub fn all(self) -> Result<Vec<Object>> {
self.into_iter().collect()
}
/// Return one and exactly one result.
///
/// Fails with `ResourceNotFound` if the query produces no results and
/// with `TooManyItems` if the query produces more than one result.
pub fn one(mut self) -> Result<Object> {
debug!(
"Fetching one object in container {} with {:?}",
self.c_name, self.query
);
if self.can_paginate {
// We need only one result. We fetch maximum two to be able
// to check if the query yielded more than one result.
self.query.push("limit", 2);
}
self.into_iter().one()
}
}
impl ResourceQuery for ObjectQuery {
type Item = Object;
const DEFAULT_LIMIT: usize = 100;
fn can_paginate(&self) -> Result<bool> {
Ok(self.can_paginate)
}
fn extract_marker(&self, resource: &Self::Item) -> String {
resource.name().clone()
}
fn fetch_chunk(&self, limit: Option<usize>, marker: Option<String>) -> Result<Vec<Self::Item>> {
let query = self.query.with_marker_and_limit(limit, marker);
Ok(api::list_objects(&self.session, &self.c_name, query)?
.into_iter()
.map(|item| Object {
session: self.session.clone(),
inner: item,
c_name: self.c_name.clone(),
})
.collect())
}
}
impl IntoFallibleIterator for ObjectQuery {
type Item = Object;
type Error = Error;
type IntoFallibleIter = ResourceIterator<ObjectQuery>;
fn into_fallible_iter(self) -> Self::IntoFallibleIter {
self.into_iter()
}
}
impl<R: Read + Sync + Send + 'static> NewObject<R> {
/// Start creating an object.
pub(crate) fn new(
session: Rc<Session>,
c_name: ContainerRef,
name: String,
body: R,
) -> NewObject<R> {
NewObject {
session,
c_name,
name,
body,
headers: ObjectHeaders::default(),
}
}
/// Request creation of the object.
pub fn create(self) -> Result<Object> {
let c_name = self.c_name.clone();
let inner = api::create_object(
&self.session,
self.c_name,
self.name,
self.body,
self.headers,
)?;
Ok(Object::new(self.session, inner, c_name.into()))
}
/// Metadata to set on the object.
#[inline]
pub fn metadata(&mut self) -> &mut HashMap<String, String> {
&mut self.headers.metadata
}
/// Set TTL in seconds for the object.
#[inline]
pub fn with_delete_after(mut self, ttl: u32) -> NewObject<R> {
self.headers.delete_after = Some(ttl);
self
}
/// Set the date and time when the object must be deleted.
#[inline]
pub fn with_delete_at<T: TimeZone>(mut self, datetime: DateTime<T>) -> NewObject<R> {
self.headers.delete_at = Some(datetime.timestamp());
self
}
/// Insert a new metadata item.
#[inline]
pub fn with_metadata<K, V>(mut self, key: K, item: V) -> NewObject<R>
where
K: Into<String>,
V: Into<String>,
{
let _ = self.headers.metadata.insert(key.into(), item.into());
self
}
}
impl From<Object> for ObjectRef {
fn from(value: Object) -> ObjectRef {
ObjectRef::new_verified(value.inner.name)
}
}
#[cfg(feature = "object-storage")]
impl IntoVerified for ObjectRef {} | random_line_split |
|
objects.rs | // Copyright 2019 Dmitry Tantsur <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Stored objects.
use std::collections::HashMap;
use std::io::Read;
use std::rc::Rc;
use chrono::{DateTime, TimeZone};
use fallible_iterator::{FallibleIterator, IntoFallibleIterator};
use osauth::services::OBJECT_STORAGE;
use reqwest::Url;
use super::super::common::{
ContainerRef, IntoVerified, ObjectRef, Refresh, ResourceIterator, ResourceQuery,
};
use super::super::session::Session;
use super::super::utils::Query;
use super::super::{Error, Result};
use super::{api, protocol};
/// A query to objects.
#[derive(Clone, Debug)]
pub struct ObjectQuery {
session: Rc<Session>,
c_name: String,
query: Query,
can_paginate: bool,
}
/// A request to create an object.
#[derive(Debug)]
pub struct NewObject<R> {
session: Rc<Session>,
c_name: ContainerRef,
name: String,
body: R,
headers: ObjectHeaders,
}
/// Optional headers for an object.
#[derive(Debug, Default)]
pub struct ObjectHeaders {
pub delete_after: Option<u32>,
pub delete_at: Option<i64>,
pub metadata: HashMap<String, String>,
}
/// Structure representing an object.
#[derive(Clone, Debug)]
pub struct Object {
session: Rc<Session>,
inner: protocol::Object,
c_name: String,
}
impl Object {
/// Create a new Object object.
pub(crate) fn new(session: Rc<Session>, inner: protocol::Object, c_name: String) -> Object {
Object {
session,
inner,
c_name,
}
}
/// Create an object.
pub(crate) fn create<C, Id, R>(
session: Rc<Session>,
container: C,
name: Id,
body: R,
) -> Result<Object>
where
C: Into<ContainerRef>,
Id: AsRef<str>,
R: Read + Sync + Send + 'static,
{
let new_object = NewObject::new(
session,
container.into(),
// TODO(dtantsur): get rid of to_string here.
name.as_ref().to_string(),
body,
);
new_object.create()
}
/// Load an Object.
pub(crate) fn load<C, Id>(session: Rc<Session>, container: C, name: Id) -> Result<Object>
where
C: Into<ContainerRef>,
Id: AsRef<str>,
{
let c_ref = container.into();
let c_name = c_ref.to_string();
let inner = api::get_object(&session, c_ref, name)?;
Ok(Object::new(session, inner, c_name))
}
/// Delete the object.
#[inline]
pub fn delete(self) -> Result<()> {
api::delete_object(&self.session, &self.c_name, self.inner.name)
}
/// Download the object.
///
/// The object can be read from the resulting reader.
#[inline]
pub fn download(&self) -> Result<impl Read + '_> {
api::download_object(&self.session, &self.c_name, &self.inner.name)
}
transparent_property! {
#[doc = "Total size of the object."]
bytes: u64
}
/// Container name.
#[inline]
pub fn container_name(&self) -> &String {
&self.c_name
}
transparent_property! {
#[doc = "Object content type (if set)."]
content_type: ref Option<String>
}
transparent_property! {
#[doc = "Object hash or ETag, which is a content's md5 hash"]
hash: ref Option<String>
}
transparent_property! {
#[doc = "Object name."]
name: ref String
}
/// Object url.
#[inline]
pub fn url(&self) -> Result<Url> {
self.session
.get_endpoint(OBJECT_STORAGE, &[self.container_name(), self.name()])
}
}
impl Refresh for Object {
/// Refresh the object.
fn refresh(&mut self) -> Result<()> {
self.inner = api::get_object(&self.session, &self.c_name, &self.inner.name)?;
Ok(())
}
}
impl ObjectQuery {
pub(crate) fn new<C: Into<ContainerRef>>(session: Rc<Session>, container: C) -> ObjectQuery {
ObjectQuery {
session,
c_name: container.into().into(),
query: Query::new(),
can_paginate: true,
}
}
/// Add marker to the request.
///
/// Using this disables automatic pagination.
pub fn with_marker<T: Into<String>>(mut self, marker: T) -> Self {
self.can_paginate = false;
self.query.push_str("marker", marker);
self
}
/// Add limit to the request.
///
/// Using this disables automatic pagination.
pub fn | (mut self, limit: usize) -> Self {
self.can_paginate = false;
self.query.push("limit", limit);
self
}
/// Convert this query into an iterator executing the request.
///
/// Returns a `FallibleIterator`, which is an iterator with each `next`
/// call returning a `Result`.
///
/// Note that no requests are done until you start iterating.
pub fn into_iter(self) -> ResourceIterator<ObjectQuery> {
debug!(
"Fetching objects in container {} with {:?}",
self.c_name, self.query
);
ResourceIterator::new(self)
}
/// Execute this request and return all results.
///
/// A convenience shortcut for `self.into_iter().collect()`.
pub fn all(self) -> Result<Vec<Object>> {
self.into_iter().collect()
}
/// Return one and exactly one result.
///
/// Fails with `ResourceNotFound` if the query produces no results and
/// with `TooManyItems` if the query produces more than one result.
pub fn one(mut self) -> Result<Object> {
debug!(
"Fetching one object in container {} with {:?}",
self.c_name, self.query
);
if self.can_paginate {
// We need only one result. We fetch at most two to be able
// to check whether the query yielded more than one result.
self.query.push("limit", 2);
}
self.into_iter().one()
}
}
impl ResourceQuery for ObjectQuery {
type Item = Object;
const DEFAULT_LIMIT: usize = 100;
fn can_paginate(&self) -> Result<bool> {
Ok(self.can_paginate)
}
fn extract_marker(&self, resource: &Self::Item) -> String {
resource.name().clone()
}
fn fetch_chunk(&self, limit: Option<usize>, marker: Option<String>) -> Result<Vec<Self::Item>> {
let query = self.query.with_marker_and_limit(limit, marker);
Ok(api::list_objects(&self.session, &self.c_name, query)?
.into_iter()
.map(|item| Object {
session: self.session.clone(),
inner: item,
c_name: self.c_name.clone(),
})
.collect())
}
}
impl IntoFallibleIterator for ObjectQuery {
type Item = Object;
type Error = Error;
type IntoFallibleIter = ResourceIterator<ObjectQuery>;
fn into_fallible_iter(self) -> Self::IntoFallibleIter {
self.into_iter()
}
}
impl<R: Read + Sync + Send + 'static> NewObject<R> {
/// Start creating an object.
pub(crate) fn new(
session: Rc<Session>,
c_name: ContainerRef,
name: String,
body: R,
) -> NewObject<R> {
NewObject {
session,
c_name,
name,
body,
headers: ObjectHeaders::default(),
}
}
/// Request creation of the object.
pub fn create(self) -> Result<Object> {
let c_name = self.c_name.clone();
let inner = api::create_object(
&self.session,
self.c_name,
self.name,
self.body,
self.headers,
)?;
Ok(Object::new(self.session, inner, c_name.into()))
}
/// Metadata to set on the object.
#[inline]
pub fn metadata(&mut self) -> &mut HashMap<String, String> {
&mut self.headers.metadata
}
/// Set TTL in seconds for the object.
#[inline]
pub fn with_delete_after(mut self, ttl: u32) -> NewObject<R> {
self.headers.delete_after = Some(ttl);
self
}
/// Set the date and time when the object must be deleted.
#[inline]
pub fn with_delete_at<T: TimeZone>(mut self, datetime: DateTime<T>) -> NewObject<R> {
self.headers.delete_at = Some(datetime.timestamp());
self
}
/// Insert a new metadata item.
#[inline]
pub fn with_metadata<K, V>(mut self, key: K, item: V) -> NewObject<R>
where
K: Into<String>,
V: Into<String>,
{
let _ = self.headers.metadata.insert(key.into(), item.into());
self
}
}
impl From<Object> for ObjectRef {
fn from(value: Object) -> ObjectRef {
ObjectRef::new_verified(value.inner.name)
}
}
#[cfg(feature = "object-storage")]
impl IntoVerified for ObjectRef {}
| with_limit | identifier_name |
objects.rs | // Copyright 2019 Dmitry Tantsur <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Stored objects.
use std::collections::HashMap;
use std::io::Read;
use std::rc::Rc;
use chrono::{DateTime, TimeZone};
use fallible_iterator::{FallibleIterator, IntoFallibleIterator};
use osauth::services::OBJECT_STORAGE;
use reqwest::Url;
use super::super::common::{
ContainerRef, IntoVerified, ObjectRef, Refresh, ResourceIterator, ResourceQuery,
};
use super::super::session::Session;
use super::super::utils::Query;
use super::super::{Error, Result};
use super::{api, protocol};
/// A query to objects.
#[derive(Clone, Debug)]
pub struct ObjectQuery {
session: Rc<Session>,
c_name: String,
query: Query,
can_paginate: bool,
}
/// A request to create an object.
#[derive(Debug)]
pub struct NewObject<R> {
session: Rc<Session>,
c_name: ContainerRef,
name: String,
body: R,
headers: ObjectHeaders,
}
/// Optional headers for an object.
#[derive(Debug, Default)]
pub struct ObjectHeaders {
pub delete_after: Option<u32>,
pub delete_at: Option<i64>,
pub metadata: HashMap<String, String>,
}
/// Structure representing an object.
#[derive(Clone, Debug)]
pub struct Object {
session: Rc<Session>,
inner: protocol::Object,
c_name: String,
}
impl Object {
/// Create a new Object object.
pub(crate) fn new(session: Rc<Session>, inner: protocol::Object, c_name: String) -> Object {
Object {
session,
inner,
c_name,
}
}
/// Create an object.
pub(crate) fn create<C, Id, R>(
session: Rc<Session>,
container: C,
name: Id,
body: R,
) -> Result<Object>
where
C: Into<ContainerRef>,
Id: AsRef<str>,
R: Read + Sync + Send + 'static,
{
let new_object = NewObject::new(
session,
container.into(),
// TODO(dtantsur): get rid of to_string here.
name.as_ref().to_string(),
body,
);
new_object.create()
}
/// Load an Object.
pub(crate) fn load<C, Id>(session: Rc<Session>, container: C, name: Id) -> Result<Object>
where
C: Into<ContainerRef>,
Id: AsRef<str>,
{
let c_ref = container.into();
let c_name = c_ref.to_string();
let inner = api::get_object(&session, c_ref, name)?;
Ok(Object::new(session, inner, c_name))
}
/// Delete the object.
#[inline]
pub fn delete(self) -> Result<()> {
api::delete_object(&self.session, &self.c_name, self.inner.name)
}
/// Download the object.
///
/// The object can be read from the resulting reader.
#[inline]
pub fn download(&self) -> Result<impl Read + '_> {
api::download_object(&self.session, &self.c_name, &self.inner.name)
}
transparent_property! {
#[doc = "Total size of the object."]
bytes: u64
}
/// Container name.
#[inline]
pub fn container_name(&self) -> &String {
&self.c_name
}
transparent_property! {
#[doc = "Object content type (if set)."]
content_type: ref Option<String>
}
transparent_property! {
#[doc = "Object hash or ETag, which is a content's md5 hash"]
hash: ref Option<String>
}
transparent_property! {
#[doc = "Object name."]
name: ref String
}
/// Object url.
#[inline]
pub fn url(&self) -> Result<Url> {
self.session
.get_endpoint(OBJECT_STORAGE, &[self.container_name(), self.name()])
}
}
impl Refresh for Object {
/// Refresh the object.
fn refresh(&mut self) -> Result<()> {
self.inner = api::get_object(&self.session, &self.c_name, &self.inner.name)?;
Ok(())
}
}
impl ObjectQuery {
pub(crate) fn new<C: Into<ContainerRef>>(session: Rc<Session>, container: C) -> ObjectQuery {
ObjectQuery {
session,
c_name: container.into().into(),
query: Query::new(),
can_paginate: true,
}
}
/// Add marker to the request.
///
/// Using this disables automatic pagination.
pub fn with_marker<T: Into<String>>(mut self, marker: T) -> Self {
self.can_paginate = false;
self.query.push_str("marker", marker);
self
}
/// Add limit to the request.
///
/// Using this disables automatic pagination.
pub fn with_limit(mut self, limit: usize) -> Self {
self.can_paginate = false;
self.query.push("limit", limit);
self
}
/// Convert this query into an iterator executing the request.
///
/// Returns a `FallibleIterator`, which is an iterator with each `next`
/// call returning a `Result`.
///
/// Note that no requests are done until you start iterating.
pub fn into_iter(self) -> ResourceIterator<ObjectQuery> {
debug!(
"Fetching objects in container {} with {:?}",
self.c_name, self.query
);
ResourceIterator::new(self)
}
/// Execute this request and return all results.
///
/// A convenience shortcut for `self.into_iter().collect()`.
pub fn all(self) -> Result<Vec<Object>> {
self.into_iter().collect()
}
/// Return one and exactly one result.
///
/// Fails with `ResourceNotFound` if the query produces no results and
/// with `TooManyItems` if the query produces more than one result.
pub fn one(mut self) -> Result<Object> {
debug!(
"Fetching one object in container {} with {:?}",
self.c_name, self.query
);
if self.can_paginate |
self.into_iter().one()
}
}
impl ResourceQuery for ObjectQuery {
type Item = Object;
const DEFAULT_LIMIT: usize = 100;
fn can_paginate(&self) -> Result<bool> {
Ok(self.can_paginate)
}
fn extract_marker(&self, resource: &Self::Item) -> String {
resource.name().clone()
}
fn fetch_chunk(&self, limit: Option<usize>, marker: Option<String>) -> Result<Vec<Self::Item>> {
let query = self.query.with_marker_and_limit(limit, marker);
Ok(api::list_objects(&self.session, &self.c_name, query)?
.into_iter()
.map(|item| Object {
session: self.session.clone(),
inner: item,
c_name: self.c_name.clone(),
})
.collect())
}
}
impl IntoFallibleIterator for ObjectQuery {
type Item = Object;
type Error = Error;
type IntoFallibleIter = ResourceIterator<ObjectQuery>;
fn into_fallible_iter(self) -> Self::IntoFallibleIter {
self.into_iter()
}
}
impl<R: Read + Sync + Send + 'static> NewObject<R> {
/// Start creating an object.
pub(crate) fn new(
session: Rc<Session>,
c_name: ContainerRef,
name: String,
body: R,
) -> NewObject<R> {
NewObject {
session,
c_name,
name,
body,
headers: ObjectHeaders::default(),
}
}
/// Request creation of the object.
pub fn create(self) -> Result<Object> {
let c_name = self.c_name.clone();
let inner = api::create_object(
&self.session,
self.c_name,
self.name,
self.body,
self.headers,
)?;
Ok(Object::new(self.session, inner, c_name.into()))
}
/// Metadata to set on the object.
#[inline]
pub fn metadata(&mut self) -> &mut HashMap<String, String> {
&mut self.headers.metadata
}
/// Set TTL in seconds for the object.
#[inline]
pub fn with_delete_after(mut self, ttl: u32) -> NewObject<R> {
self.headers.delete_after = Some(ttl);
self
}
/// Set the date and time when the object must be deleted.
#[inline]
pub fn with_delete_at<T: TimeZone>(mut self, datetime: DateTime<T>) -> NewObject<R> {
self.headers.delete_at = Some(datetime.timestamp());
self
}
/// Insert a new metadata item.
#[inline]
pub fn with_metadata<K, V>(mut self, key: K, item: V) -> NewObject<R>
where
K: Into<String>,
V: Into<String>,
{
let _ = self.headers.metadata.insert(key.into(), item.into());
self
}
}
impl From<Object> for ObjectRef {
fn from(value: Object) -> ObjectRef {
ObjectRef::new_verified(value.inner.name)
}
}
#[cfg(feature = "object-storage")]
impl IntoVerified for ObjectRef {}
| {
// We need only one result. We fetch at most two to be able
// to check whether the query yielded more than one result.
self.query.push("limit", 2);
} | conditional_block |
type_alias.rs | /* The `type` statement can be used to give a new name to an existing type.
Types must have CamelCase names, or the compiler will raise a warning.
The exceptions to this rule are the primitive types: usize, f32, etc. */
// `NanoSecond` is a new name for `u64`.
type NanoSecond = u64;
type Inch = u64;
// Use an attribute to silence the warning.
//#[allow(non_camel_case_types)]
type u64_t = u64;
// Try it out ^ Try removing the attribute
fn main() {
// `NanoSecond` = `Inch` = `u64_t` = `u64`.
let nanoseconds: NanoSecond = 5 as u64_t;
let inches: Inch = 2 as u64_t;
// Note that type aliases provide *no* extra type safety, because aliases are *not* new types
| tln!(
"{} nanoseconds + {} inches = {} unit?",
nanoseconds,
inches,
nanoseconds + inches
);
}
| prin | identifier_name |
type_alias.rs | /* The `type` statement can be used to give a new name to an existing type.
Types must have CamelCase names, or the compiler will raise a warning.
The exceptions to this rule are the primitive types: usize, f32, etc. */
// `NanoSecond` is a new name for `u64`.
type NanoSecond = u64;
type Inch = u64;
// Use an attribute to silence the warning.
//#[allow(non_camel_case_types)]
type u64_t = u64;
// Try it out ^ Try removing the attribute
fn main() {
// `NanoSecond` = `Inch` = `u64_t` = `u64`.
let nanoseconds: NanoSecond = 5 as u64_t; |
// Note that type aliases provide *no* extra type safety, because aliases are *not* new types
println!(
"{} nanoseconds + {} inches = {} unit?",
nanoseconds,
inches,
nanoseconds + inches
);
} | let inches: Inch = 2 as u64_t; | random_line_split |
type_alias.rs | /* The `type` statement can be used to give a new name to an existing type.
Types must have CamelCase names, or the compiler will raise a warning.
The exceptions to this rule are the primitive types: usize, f32, etc. */
// `NanoSecond` is a new name for `u64`.
type NanoSecond = u64;
type Inch = u64;
// Use an attribute to silence the warning.
//#[allow(non_camel_case_types)]
type u64_t = u64;
// Try it out ^ Try removing the attribute
fn main() {
// `NanoSecond` = `Inch` = `u64_t` = `u64`.
let nanoseconds: NanoSecond = 5 as u64_t;
let inches: Inch = 2 as u64_t;
// Note that type aliases provide *no* extra type safety, because aliases are *not* new types
println | !(
"{} nanoseconds + {} inches = {} unit?",
nanoseconds,
inches,
nanoseconds + inches
);
}
| identifier_body |
|
mod.rs | #![cfg(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "openbsd"))]
#![allow(unused_variables, dead_code)]
use libc;
use api::osmesa::{OsMesaContext};
use Api;
use ContextError;
use CreationError;
use Event;
use GlAttributes;
use GlContext;
use PixelFormat;
use PixelFormatRequirements;
use CursorState;
use MouseCursor;
use WindowAttributes;
use std::collections::VecDeque;
use std::path::Path;
use std::ptr;
mod ffi;
pub struct Window {
libcaca: ffi::LibCaca,
display: *mut ffi::caca_display_t,
opengl: OsMesaContext,
dither: *mut ffi::caca_dither_t,
}
#[derive(Clone)]
pub struct WindowProxy;
impl WindowProxy {
#[inline]
pub fn wakeup_event_loop(&self) {
unimplemented!()
}
}
pub struct MonitorId;
#[inline]
pub fn get_available_monitors() -> VecDeque<MonitorId> {
VecDeque::new()
}
#[inline]
pub fn get_primary_monitor() -> MonitorId {
MonitorId
}
impl MonitorId {
#[inline]
pub fn get_name(&self) -> Option<String> {
unimplemented!();
}
#[inline]
pub fn get_native_identifier(&self) -> ::winit::NativeMonitorId {
::winit::NativeMonitorId::Unavailable
}
#[inline]
pub fn get_dimensions(&self) -> (u32, u32) {
unimplemented!();
}
}
pub struct PollEventsIterator<'a> {
window: &'a Window,
}
impl<'a> Iterator for PollEventsIterator<'a> {
type Item = Event;
#[inline]
fn next(&mut self) -> Option<Event> {
None
}
}
pub struct WaitEventsIterator<'a> {
window: &'a Window,
}
impl<'a> Iterator for WaitEventsIterator<'a> {
type Item = Event;
#[inline]
fn next(&mut self) -> Option<Event> {
loop {}
}
}
impl Window {
pub fn new(window: &WindowAttributes, pf_reqs: &PixelFormatRequirements,
opengl: &GlAttributes<&Window>) -> Result<Window, CreationError>
{
let opengl = opengl.clone().map_sharing(|w| &w.opengl);
let opengl = OsMesaContext::new(window.dimensions.unwrap_or((800, 600)), pf_reqs,
&opengl)?;
let opengl_dimensions = opengl.get_dimensions();
let libcaca = match ffi::LibCaca::open(&Path::new("libcaca.so.0")) {
Err(_) => return Err(CreationError::NotSupported),
Ok(l) => l
};
let display = unsafe { (libcaca.caca_create_display)(ptr::null_mut()) };
if display.is_null() {
return Err(CreationError::OsError("caca_create_display failed".to_string()));
}
let dither = unsafe {
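// Per-channel bit masks (R, G, B, A) describing the 32-bit RGBA framebuffer layout,
// swapped on big-endian hosts, as expected by caca_create_dither below.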
#[cfg(target_endian = "little")]
fn get_masks() -> (u32, u32, u32, u32) { (0xff, 0xff00, 0xff0000, 0xff000000) }
#[cfg(target_endian = "big")]
fn get_masks() -> (u32, u32, u32, u32) { (0xff000000, 0xff0000, 0xff00, 0xff) }
let masks = get_masks();
(libcaca.caca_create_dither)(32, opengl_dimensions.0 as libc::c_int,
opengl_dimensions.1 as libc::c_int,
opengl_dimensions.0 as libc::c_int * 4,
masks.0, masks.1, masks.2, masks.3)
};
if dither.is_null() {
unsafe { (libcaca.caca_free_display)(display) };
return Err(CreationError::OsError("caca_create_dither failed".to_string()));
}
Ok(Window {
libcaca: libcaca,
display: display,
opengl: opengl,
dither: dither,
})
}
#[inline]
pub fn set_title(&self, title: &str) {
}
#[inline]
pub fn show(&self) {
}
#[inline]
pub fn hide(&self) {
}
#[inline]
pub fn get_position(&self) -> Option<(i32, i32)> {
unimplemented!()
}
#[inline]
pub fn set_position(&self, x: i32, y: i32) {
}
#[inline]
pub fn get_inner_size(&self) -> Option<(u32, u32)> {
Some(self.opengl.get_dimensions())
}
#[inline]
pub fn get_outer_size(&self) -> Option<(u32, u32)> {
self.get_inner_size()
}
#[inline]
pub fn set_inner_size(&self, _x: u32, _y: u32) {
unimplemented!()
}
#[inline]
pub fn create_window_proxy(&self) -> WindowProxy {
unimplemented!()
}
#[inline]
pub fn poll_events(&self) -> PollEventsIterator {
PollEventsIterator {
window: self
}
}
#[inline]
pub fn wait_events(&self) -> WaitEventsIterator {
WaitEventsIterator {
window: self
}
}
#[inline]
pub fn platform_display(&self) -> *mut libc::c_void {
unimplemented!()
}
#[inline]
pub fn platform_window(&self) -> *mut libc::c_void {
unimplemented!()
}
#[inline]
pub fn get_pixel_format(&self) -> PixelFormat {
unimplemented!();
}
#[inline]
pub fn set_window_resize_callback(&mut self, _: Option<fn(u32, u32)>) {
}
#[inline]
pub fn set_cursor(&self, cursor: MouseCursor) {
}
#[inline]
pub fn set_cursor_state(&self, state: CursorState) -> Result<(), String> {
Ok(())
}
#[inline]
pub fn hidpi_factor(&self) -> f32 {
1.0
}
#[inline]
pub fn set_cursor_position(&self, x: i32, y: i32) -> Result<(), ()> {
Ok(())
}
}
impl GlContext for Window {
#[inline]
unsafe fn make_current(&self) -> Result<(), ContextError> {
self.opengl.make_current()
}
#[inline]
fn is_current(&self) -> bool {
self.opengl.is_current()
}
#[inline]
fn get_proc_address(&self, addr: &str) -> *const () {
self.opengl.get_proc_address(addr)
}
#[inline]
fn swap_buffers(&self) -> Result<(), ContextError> {
unsafe {
let canvas = (self.libcaca.caca_get_canvas)(self.display);
let width = (self.libcaca.caca_get_canvas_width)(canvas);
let height = (self.libcaca.caca_get_canvas_height)(canvas);
let buffer = self.opengl.get_framebuffer().chunks(self.opengl.get_dimensions().0 as usize)
.flat_map(|i| i.iter().cloned()).rev().collect::<Vec<u32>>();
(self.libcaca.caca_dither_bitmap)(canvas, 0, 0, width as libc::c_int,
height as libc::c_int, self.dither,
buffer.as_ptr() as *const _);
(self.libcaca.caca_refresh_display)(self.display);
};
Ok(())
}
#[inline]
fn get_api(&self) -> Api {
self.opengl.get_api()
} |
#[inline]
fn get_pixel_format(&self) -> PixelFormat {
self.opengl.get_pixel_format()
}
}
impl Drop for Window {
#[inline]
fn drop(&mut self) {
unsafe {
(self.libcaca.caca_free_dither)(self.dither);
(self.libcaca.caca_free_display)(self.display);
}
}
} | random_line_split |
|
mod.rs | #![cfg(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "openbsd"))]
#![allow(unused_variables, dead_code)]
use libc;
use api::osmesa::{OsMesaContext};
use Api;
use ContextError;
use CreationError;
use Event;
use GlAttributes;
use GlContext;
use PixelFormat;
use PixelFormatRequirements;
use CursorState;
use MouseCursor;
use WindowAttributes;
use std::collections::VecDeque;
use std::path::Path;
use std::ptr;
mod ffi;
pub struct Window {
libcaca: ffi::LibCaca,
display: *mut ffi::caca_display_t,
opengl: OsMesaContext,
dither: *mut ffi::caca_dither_t,
}
#[derive(Clone)]
pub struct WindowProxy;
impl WindowProxy {
#[inline]
pub fn wakeup_event_loop(&self) {
unimplemented!()
}
}
pub struct MonitorId;
#[inline]
pub fn get_available_monitors() -> VecDeque<MonitorId> {
VecDeque::new()
}
#[inline]
pub fn get_primary_monitor() -> MonitorId {
MonitorId
}
impl MonitorId {
#[inline]
pub fn get_name(&self) -> Option<String> {
unimplemented!();
}
#[inline]
pub fn get_native_identifier(&self) -> ::winit::NativeMonitorId {
::winit::NativeMonitorId::Unavailable
}
#[inline]
pub fn get_dimensions(&self) -> (u32, u32) {
unimplemented!();
}
}
pub struct PollEventsIterator<'a> {
window: &'a Window,
}
impl<'a> Iterator for PollEventsIterator<'a> {
type Item = Event;
#[inline]
fn next(&mut self) -> Option<Event> {
None
}
}
pub struct WaitEventsIterator<'a> {
window: &'a Window,
}
impl<'a> Iterator for WaitEventsIterator<'a> {
type Item = Event;
#[inline]
fn next(&mut self) -> Option<Event> {
loop {}
}
}
impl Window {
pub fn new(window: &WindowAttributes, pf_reqs: &PixelFormatRequirements,
opengl: &GlAttributes<&Window>) -> Result<Window, CreationError>
{
let opengl = opengl.clone().map_sharing(|w| &w.opengl);
let opengl = OsMesaContext::new(window.dimensions.unwrap_or((800, 600)), pf_reqs,
&opengl)?;
let opengl_dimensions = opengl.get_dimensions();
let libcaca = match ffi::LibCaca::open(&Path::new("libcaca.so.0")) {
Err(_) => return Err(CreationError::NotSupported),
Ok(l) => l
};
let display = unsafe { (libcaca.caca_create_display)(ptr::null_mut()) };
if display.is_null() {
return Err(CreationError::OsError("caca_create_display failed".to_string()));
}
let dither = unsafe {
#[cfg(target_endian = "little")]
fn get_masks() -> (u32, u32, u32, u32) { (0xff, 0xff00, 0xff0000, 0xff000000) }
#[cfg(target_endian = "big")]
fn get_masks() -> (u32, u32, u32, u32) { (0xff000000, 0xff0000, 0xff00, 0xff) }
let masks = get_masks();
(libcaca.caca_create_dither)(32, opengl_dimensions.0 as libc::c_int,
opengl_dimensions.1 as libc::c_int,
opengl_dimensions.0 as libc::c_int * 4,
masks.0, masks.1, masks.2, masks.3)
};
if dither.is_null() {
unsafe { (libcaca.caca_free_display)(display) };
return Err(CreationError::OsError("caca_create_dither failed".to_string()));
}
Ok(Window {
libcaca: libcaca,
display: display,
opengl: opengl,
dither: dither,
})
}
#[inline]
pub fn set_title(&self, title: &str) |
#[inline]
pub fn show(&self) {
}
#[inline]
pub fn hide(&self) {
}
#[inline]
pub fn get_position(&self) -> Option<(i32, i32)> {
unimplemented!()
}
#[inline]
pub fn set_position(&self, x: i32, y: i32) {
}
#[inline]
pub fn get_inner_size(&self) -> Option<(u32, u32)> {
Some(self.opengl.get_dimensions())
}
#[inline]
pub fn get_outer_size(&self) -> Option<(u32, u32)> {
self.get_inner_size()
}
#[inline]
pub fn set_inner_size(&self, _x: u32, _y: u32) {
unimplemented!()
}
#[inline]
pub fn create_window_proxy(&self) -> WindowProxy {
unimplemented!()
}
#[inline]
pub fn poll_events(&self) -> PollEventsIterator {
PollEventsIterator {
window: self
}
}
#[inline]
pub fn wait_events(&self) -> WaitEventsIterator {
WaitEventsIterator {
window: self
}
}
#[inline]
pub fn platform_display(&self) -> *mut libc::c_void {
unimplemented!()
}
#[inline]
pub fn platform_window(&self) -> *mut libc::c_void {
unimplemented!()
}
#[inline]
pub fn get_pixel_format(&self) -> PixelFormat {
unimplemented!();
}
#[inline]
pub fn set_window_resize_callback(&mut self, _: Option<fn(u32, u32)>) {
}
#[inline]
pub fn set_cursor(&self, cursor: MouseCursor) {
}
#[inline]
pub fn set_cursor_state(&self, state: CursorState) -> Result<(), String> {
Ok(())
}
#[inline]
pub fn hidpi_factor(&self) -> f32 {
1.0
}
#[inline]
pub fn set_cursor_position(&self, x: i32, y: i32) -> Result<(), ()> {
Ok(())
}
}
impl GlContext for Window {
#[inline]
unsafe fn make_current(&self) -> Result<(), ContextError> {
self.opengl.make_current()
}
#[inline]
fn is_current(&self) -> bool {
self.opengl.is_current()
}
#[inline]
fn get_proc_address(&self, addr: &str) -> *const () {
self.opengl.get_proc_address(addr)
}
#[inline]
fn swap_buffers(&self) -> Result<(), ContextError> {
unsafe {
let canvas = (self.libcaca.caca_get_canvas)(self.display);
let width = (self.libcaca.caca_get_canvas_width)(canvas);
let height = (self.libcaca.caca_get_canvas_height)(canvas);
let buffer = self.opengl.get_framebuffer().chunks(self.opengl.get_dimensions().0 as usize)
.flat_map(|i| i.iter().cloned()).rev().collect::<Vec<u32>>();
(self.libcaca.caca_dither_bitmap)(canvas, 0, 0, width as libc::c_int,
height as libc::c_int, self.dither,
buffer.as_ptr() as *const _);
(self.libcaca.caca_refresh_display)(self.display);
};
Ok(())
}
#[inline]
fn get_api(&self) -> Api {
self.opengl.get_api()
}
#[inline]
fn get_pixel_format(&self) -> PixelFormat {
self.opengl.get_pixel_format()
}
}
impl Drop for Window {
#[inline]
fn drop(&mut self) {
unsafe {
(self.libcaca.caca_free_dither)(self.dither);
(self.libcaca.caca_free_display)(self.display);
}
}
}
| {
} | identifier_body |
mod.rs | #![cfg(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "openbsd"))]
#![allow(unused_variables, dead_code)]
use libc;
use api::osmesa::{OsMesaContext};
use Api;
use ContextError;
use CreationError;
use Event;
use GlAttributes;
use GlContext;
use PixelFormat;
use PixelFormatRequirements;
use CursorState;
use MouseCursor;
use WindowAttributes;
use std::collections::VecDeque;
use std::path::Path;
use std::ptr;
mod ffi;
pub struct Window {
libcaca: ffi::LibCaca,
display: *mut ffi::caca_display_t,
opengl: OsMesaContext,
dither: *mut ffi::caca_dither_t,
}
#[derive(Clone)]
pub struct | ;
impl WindowProxy {
#[inline]
pub fn wakeup_event_loop(&self) {
unimplemented!()
}
}
pub struct MonitorId;
#[inline]
pub fn get_available_monitors() -> VecDeque<MonitorId> {
VecDeque::new()
}
#[inline]
pub fn get_primary_monitor() -> MonitorId {
MonitorId
}
impl MonitorId {
#[inline]
pub fn get_name(&self) -> Option<String> {
unimplemented!();
}
#[inline]
pub fn get_native_identifier(&self) -> ::winit::NativeMonitorId {
::winit::NativeMonitorId::Unavailable
}
#[inline]
pub fn get_dimensions(&self) -> (u32, u32) {
unimplemented!();
}
}
pub struct PollEventsIterator<'a> {
window: &'a Window,
}
impl<'a> Iterator for PollEventsIterator<'a> {
type Item = Event;
#[inline]
fn next(&mut self) -> Option<Event> {
None
}
}
pub struct WaitEventsIterator<'a> {
window: &'a Window,
}
impl<'a> Iterator for WaitEventsIterator<'a> {
type Item = Event;
#[inline]
fn next(&mut self) -> Option<Event> {
loop {}
}
}
impl Window {
pub fn new(window: &WindowAttributes, pf_reqs: &PixelFormatRequirements,
opengl: &GlAttributes<&Window>) -> Result<Window, CreationError>
{
let opengl = opengl.clone().map_sharing(|w| &w.opengl);
let opengl = OsMesaContext::new(window.dimensions.unwrap_or((800, 600)), pf_reqs,
&opengl)?;
let opengl_dimensions = opengl.get_dimensions();
let libcaca = match ffi::LibCaca::open(&Path::new("libcaca.so.0")) {
Err(_) => return Err(CreationError::NotSupported),
Ok(l) => l
};
let display = unsafe { (libcaca.caca_create_display)(ptr::null_mut()) };
if display.is_null() {
return Err(CreationError::OsError("caca_create_display failed".to_string()));
}
let dither = unsafe {
#[cfg(target_endian = "little")]
fn get_masks() -> (u32, u32, u32, u32) { (0xff, 0xff00, 0xff0000, 0xff000000) }
#[cfg(target_endian = "big")]
fn get_masks() -> (u32, u32, u32, u32) { (0xff000000, 0xff0000, 0xff00, 0xff) }
let masks = get_masks();
(libcaca.caca_create_dither)(32, opengl_dimensions.0 as libc::c_int,
opengl_dimensions.1 as libc::c_int,
opengl_dimensions.0 as libc::c_int * 4,
masks.0, masks.1, masks.2, masks.3)
};
if dither.is_null() {
unsafe { (libcaca.caca_free_display)(display) };
return Err(CreationError::OsError("caca_create_dither failed".to_string()));
}
Ok(Window {
libcaca: libcaca,
display: display,
opengl: opengl,
dither: dither,
})
}
#[inline]
pub fn set_title(&self, title: &str) {
}
#[inline]
pub fn show(&self) {
}
#[inline]
pub fn hide(&self) {
}
#[inline]
pub fn get_position(&self) -> Option<(i32, i32)> {
unimplemented!()
}
#[inline]
pub fn set_position(&self, x: i32, y: i32) {
}
#[inline]
pub fn get_inner_size(&self) -> Option<(u32, u32)> {
Some(self.opengl.get_dimensions())
}
#[inline]
pub fn get_outer_size(&self) -> Option<(u32, u32)> {
self.get_inner_size()
}
#[inline]
pub fn set_inner_size(&self, _x: u32, _y: u32) {
unimplemented!()
}
#[inline]
pub fn create_window_proxy(&self) -> WindowProxy {
unimplemented!()
}
#[inline]
pub fn poll_events(&self) -> PollEventsIterator {
PollEventsIterator {
window: self
}
}
#[inline]
pub fn wait_events(&self) -> WaitEventsIterator {
WaitEventsIterator {
window: self
}
}
#[inline]
pub fn platform_display(&self) -> *mut libc::c_void {
unimplemented!()
}
#[inline]
pub fn platform_window(&self) -> *mut libc::c_void {
unimplemented!()
}
#[inline]
pub fn get_pixel_format(&self) -> PixelFormat {
unimplemented!();
}
#[inline]
pub fn set_window_resize_callback(&mut self, _: Option<fn(u32, u32)>) {
}
#[inline]
pub fn set_cursor(&self, cursor: MouseCursor) {
}
#[inline]
pub fn set_cursor_state(&self, state: CursorState) -> Result<(), String> {
Ok(())
}
#[inline]
pub fn hidpi_factor(&self) -> f32 {
1.0
}
#[inline]
pub fn set_cursor_position(&self, x: i32, y: i32) -> Result<(), ()> {
Ok(())
}
}
impl GlContext for Window {
#[inline]
unsafe fn make_current(&self) -> Result<(), ContextError> {
self.opengl.make_current()
}
#[inline]
fn is_current(&self) -> bool {
self.opengl.is_current()
}
#[inline]
fn get_proc_address(&self, addr: &str) -> *const () {
self.opengl.get_proc_address(addr)
}
#[inline]
fn swap_buffers(&self) -> Result<(), ContextError> {
unsafe {
let canvas = (self.libcaca.caca_get_canvas)(self.display);
let width = (self.libcaca.caca_get_canvas_width)(canvas);
let height = (self.libcaca.caca_get_canvas_height)(canvas);
let buffer = self.opengl.get_framebuffer().chunks(self.opengl.get_dimensions().0 as usize)
.flat_map(|i| i.iter().cloned()).rev().collect::<Vec<u32>>();
(self.libcaca.caca_dither_bitmap)(canvas, 0, 0, width as libc::c_int,
height as libc::c_int, self.dither,
buffer.as_ptr() as *const _);
(self.libcaca.caca_refresh_display)(self.display);
};
Ok(())
}
#[inline]
fn get_api(&self) -> Api {
self.opengl.get_api()
}
#[inline]
fn get_pixel_format(&self) -> PixelFormat {
self.opengl.get_pixel_format()
}
}
impl Drop for Window {
#[inline]
fn drop(&mut self) {
unsafe {
(self.libcaca.caca_free_dither)(self.dither);
(self.libcaca.caca_free_display)(self.display);
}
}
}
| WindowProxy | identifier_name |
htmltablerowelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::RGBA;
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::HTMLTableRowElementBinding;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, HTMLTableRowElementDerived};
use dom::bindings::js::Root;
use dom::document::Document;
use dom::element::{AttributeMutation, ElementTypeId};
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use dom::virtualmethods::VirtualMethods;
use std::cell::Cell;
use util::str::{self, DOMString};
#[dom_struct]
pub struct HTMLTableRowElement {
htmlelement: HTMLElement,
background_color: Cell<Option<RGBA>>,
}
impl HTMLTableRowElementDerived for EventTarget {
fn | (&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTableRowElement)))
}
}
impl HTMLTableRowElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document)
-> HTMLTableRowElement {
HTMLTableRowElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLTableRowElement,
localName,
prefix,
document),
background_color: Cell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, prefix: Option<DOMString>, document: &Document)
-> Root<HTMLTableRowElement> {
Node::reflect_node(box HTMLTableRowElement::new_inherited(localName, prefix, document),
document,
HTMLTableRowElementBinding::Wrap)
}
pub fn get_background_color(&self) -> Option<RGBA> {
self.background_color.get()
}
}
impl VirtualMethods for HTMLTableRowElement {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let htmlelement: &HTMLElement = HTMLElementCast::from_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match attr.local_name() {
&atom!(bgcolor) => {
self.background_color.set(mutation.new_value(attr).and_then(|value| {
str::parse_legacy_color(&value).ok()
}));
},
_ => {},
}
}
}
| is_htmltablerowelement | identifier_name |
htmltablerowelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::RGBA;
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::HTMLTableRowElementBinding;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, HTMLTableRowElementDerived};
use dom::bindings::js::Root;
use dom::document::Document;
use dom::element::{AttributeMutation, ElementTypeId};
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use dom::virtualmethods::VirtualMethods;
use std::cell::Cell;
use util::str::{self, DOMString};
#[dom_struct]
pub struct HTMLTableRowElement {
htmlelement: HTMLElement,
background_color: Cell<Option<RGBA>>,
}
impl HTMLTableRowElementDerived for EventTarget {
fn is_htmltablerowelement(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTableRowElement)))
}
}
impl HTMLTableRowElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document)
-> HTMLTableRowElement {
HTMLTableRowElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLTableRowElement,
localName,
prefix,
document),
background_color: Cell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, prefix: Option<DOMString>, document: &Document)
-> Root<HTMLTableRowElement> {
Node::reflect_node(box HTMLTableRowElement::new_inherited(localName, prefix, document),
document,
HTMLTableRowElementBinding::Wrap)
}
pub fn get_background_color(&self) -> Option<RGBA> {
self.background_color.get()
}
}
impl VirtualMethods for HTMLTableRowElement {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let htmlelement: &HTMLElement = HTMLElementCast::from_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match attr.local_name() {
&atom!(bgcolor) => {
self.background_color.set(mutation.new_value(attr).and_then(|value| {
str::parse_legacy_color(&value).ok()
}));
},
_ => | ,
}
}
}
| {} | conditional_block |
htmltablerowelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::RGBA;
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::HTMLTableRowElementBinding;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, HTMLTableRowElementDerived};
use dom::bindings::js::Root;
use dom::document::Document;
use dom::element::{AttributeMutation, ElementTypeId};
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use dom::virtualmethods::VirtualMethods;
use std::cell::Cell;
use util::str::{self, DOMString};
#[dom_struct]
pub struct HTMLTableRowElement {
htmlelement: HTMLElement,
background_color: Cell<Option<RGBA>>,
}
impl HTMLTableRowElementDerived for EventTarget {
fn is_htmltablerowelement(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTableRowElement)))
}
}
impl HTMLTableRowElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document)
-> HTMLTableRowElement {
HTMLTableRowElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLTableRowElement,
localName,
prefix,
document),
background_color: Cell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, prefix: Option<DOMString>, document: &Document)
-> Root<HTMLTableRowElement> {
Node::reflect_node(box HTMLTableRowElement::new_inherited(localName, prefix, document),
document,
HTMLTableRowElementBinding::Wrap)
}
pub fn get_background_color(&self) -> Option<RGBA> {
self.background_color.get()
}
}
impl VirtualMethods for HTMLTableRowElement {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> |
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match attr.local_name() {
&atom!(bgcolor) => {
self.background_color.set(mutation.new_value(attr).and_then(|value| {
str::parse_legacy_color(&value).ok()
}));
},
_ => {},
}
}
}
| {
let htmlelement: &HTMLElement = HTMLElementCast::from_ref(self);
Some(htmlelement as &VirtualMethods)
} | identifier_body |
htmltablerowelement.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::RGBA;
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::HTMLTableRowElementBinding;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, HTMLTableRowElementDerived};
use dom::bindings::js::Root;
use dom::document::Document;
use dom::element::{AttributeMutation, ElementTypeId};
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use dom::virtualmethods::VirtualMethods;
use std::cell::Cell;
use util::str::{self, DOMString};
#[dom_struct]
pub struct HTMLTableRowElement {
htmlelement: HTMLElement,
background_color: Cell<Option<RGBA>>, | *self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTableRowElement)))
}
}
impl HTMLTableRowElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: &Document)
-> HTMLTableRowElement {
HTMLTableRowElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLTableRowElement,
localName,
prefix,
document),
background_color: Cell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, prefix: Option<DOMString>, document: &Document)
-> Root<HTMLTableRowElement> {
Node::reflect_node(box HTMLTableRowElement::new_inherited(localName, prefix, document),
document,
HTMLTableRowElementBinding::Wrap)
}
pub fn get_background_color(&self) -> Option<RGBA> {
self.background_color.get()
}
}
impl VirtualMethods for HTMLTableRowElement {
fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
let htmlelement: &HTMLElement = HTMLElementCast::from_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match attr.local_name() {
&atom!(bgcolor) => {
self.background_color.set(mutation.new_value(attr).and_then(|value| {
str::parse_legacy_color(&value).ok()
}));
},
_ => {},
}
}
} | }
impl HTMLTableRowElementDerived for EventTarget {
fn is_htmltablerowelement(&self) -> bool { | random_line_split |
lib.rs | extern crate audsp;
extern crate aunorm;
use aunorm::{Normalizer, NormalizerProvider};
use std::collections::HashMap;
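/// Core DSP interface: exposes the parameter table, reacts to parameter and
/// sample-rate changes, and renders stereo input buffers into stereo output buffers.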
pub trait DspProcessor<'a, TSample: audsp::Numeric> {
fn get_properties(& self) -> & PropStorage<'a, TSample, Self>;
fn get_mut_properties(&mut self) -> &mut PropStorage<'a, TSample, Self>;
fn param_changed(&mut self, i32);
fn sample_rate_changed(&mut self, f32);
fn get_plugin_name(&self) -> String;
fn process_stereo_stereo(&mut self, [& [TSample]; 2], [&mut [TSample]; 2]);
}
#[allow(dead_code)]
pub struct | <'a, TSample: audsp::Numeric, TProcessor: DspProcessor<'a, TSample>> where TSample: 'a {
id: u32,
min: TSample,
max: TSample,
default: TSample,
value: TSample,
norm_value: TSample,
caption: &'a str,
measure: &'a str,
norm: Box<Normalizer<TSample> + 'a>,
callback : Box<Fn(&mut TProcessor) + 'a>,
}
#[derive(Default)]
pub struct PropStorage<'a, TSample: audsp::Numeric, TProcessor: DspProcessor<'a, TSample>> where TSample: 'a {
properties: HashMap<u32, PropInfo<'a, TSample, TProcessor>>
}
impl<'a, TSample: audsp::Numeric, TProcessor: DspProcessor<'a, TSample>> PropInfo<'a, TSample, TProcessor> where TSample: 'a {
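/// Set this parameter from a normalized (0.0..1.0) value, updating the
/// denormalized value and invoking the parameter's change callback on the processor.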
pub fn set_from_norm(&mut self, norm: TSample, processor: &mut TProcessor) {
self.norm_value = norm;
self.value = self.norm.from_normal(norm);
let callback = &self.callback;
callback(processor);
}
pub fn get_value(&self) -> TSample {
return self.value
}
pub fn get_norm_value(&self) -> TSample {
return self.norm_value
}
pub fn get_caption(&self) -> String {
self.caption.to_string()
}
pub fn get_measure(&self) -> String {
self.measure.to_string()
}
}
impl<'a, TSample: audsp::Numeric, TProcessor> PropStorage<'a, TSample, TProcessor> where TSample: 'a, TProcessor: DspProcessor<'a, TSample> {
pub fn new() -> Self {
PropStorage::<TSample, TProcessor>{properties: HashMap::new()}
}
pub fn len(&self) -> usize {
self.properties.len()
}
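/// Register a parameter with its id, range, default, caption/measure strings and
/// change callback; `TProv` supplies the normalizer that maps the range onto 0.0..1.0.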
pub fn add_prop<TProv>(&mut self, index: u32, min: TSample, max: TSample, default: TSample, caption: &'a str, measure: &'a str, callback: Box<Fn(&mut TProcessor) + 'a>) where TProv: NormalizerProvider<'a, TSample> {
let norm = TProv::boxed(min, max);
self.properties.insert(index, PropInfo::<TSample, TProcessor>{
id: index,
min: min,
max: max,
default: default,
value: default,
norm_value: norm.to_normal(default),
caption: caption,
measure: measure,
norm: norm,
callback: callback,
});
}
pub fn get_value(&self, i: u32) -> TSample {
return self.properties.get(&i).map(|x| x.value).unwrap_or(TSample::zero())
}
pub fn get_propinfo(&self, index: i32) -> Option<&PropInfo<'a, TSample, TProcessor>> {
let i:u32 = index as u32;
self.properties.get(&i)
}
pub fn get_mut_propinfo(&mut self, index: i32) -> Option<&mut PropInfo<'a, TSample, TProcessor>> {
let i:u32 = index as u32;
self.properties.get_mut(&i)
}
}
| PropInfo | identifier_name |
lib.rs | extern crate audsp;
extern crate aunorm;
use aunorm::{Normalizer, NormalizerProvider};
use std::collections::HashMap;
pub trait DspProcessor<'a, TSample: audsp::Numeric> {
fn get_properties(& self) -> & PropStorage<'a, TSample, Self>;
fn get_mut_properties(&mut self) -> &mut PropStorage<'a, TSample, Self>;
fn param_changed(&mut self, i32);
fn sample_rate_changed(&mut self, f32);
fn get_plugin_name(&self) -> String;
fn process_stereo_stereo(&mut self, [& [TSample]; 2], [&mut [TSample]; 2]);
}
#[allow(dead_code)]
pub struct PropInfo<'a, TSample: audsp::Numeric, TProcessor: DspProcessor<'a, TSample>> where TSample: 'a {
id: u32,
min: TSample,
max: TSample,
default: TSample,
value: TSample,
norm_value: TSample,
caption: &'a str,
measure: &'a str,
norm: Box<Normalizer<TSample> + 'a>,
callback : Box<Fn(&mut TProcessor) + 'a>,
} | properties: HashMap<u32, PropInfo<'a, TSample, TProcessor>>
}
impl<'a, TSample: audsp::Numeric, TProcessor: DspProcessor<'a, TSample>> PropInfo<'a, TSample, TProcessor> where TSample: 'a {
pub fn set_from_norm(&mut self, norm: TSample, processor: &mut TProcessor) {
self.norm_value = norm;
self.value = self.norm.from_normal(norm);
let callback = &self.callback;
callback(processor);
}
pub fn get_value(&self) -> TSample {
return self.value
}
pub fn get_norm_value(&self) -> TSample {
return self.norm_value
}
pub fn get_caption(&self) -> String {
self.caption.to_string()
}
pub fn get_measure(&self) -> String {
self.measure.to_string()
}
}
impl<'a, TSample: audsp::Numeric, TProcessor> PropStorage<'a, TSample, TProcessor> where TSample: 'a, TProcessor: DspProcessor<'a, TSample> {
pub fn new() -> Self {
PropStorage::<TSample, TProcessor>{properties: HashMap::new()}
}
pub fn len(&self) -> usize {
self.properties.len()
}
pub fn add_prop<TProv>(&mut self, index: u32, min: TSample, max: TSample, default: TSample, caption: &'a str, measure: &'a str, callback: Box<Fn(&mut TProcessor) + 'a>) where TProv: NormalizerProvider<'a, TSample> {
let norm = TProv::boxed(min, max);
self.properties.insert(index, PropInfo::<TSample, TProcessor>{
id: index,
min: min,
max: max,
default: default,
value: default,
norm_value: norm.to_normal(default),
caption: caption,
measure: measure,
norm: norm,
callback: callback,
});
}
pub fn get_value(&self, i: u32) -> TSample {
return self.properties.get(&i).map(|x| x.value).unwrap_or(TSample::zero())
}
pub fn get_propinfo(&self, index: i32) -> Option<&PropInfo<'a, TSample, TProcessor>> {
let i:u32 = index as u32;
self.properties.get(&i)
}
pub fn get_mut_propinfo(&mut self, index: i32) -> Option<&mut PropInfo<'a, TSample, TProcessor>> {
let i:u32 = index as u32;
self.properties.get_mut(&i)
}
} |
#[derive(Default)]
pub struct PropStorage<'a, TSample: audsp::Numeric, TProcessor: DspProcessor<'a, TSample>> where TSample: 'a { | random_line_split |
unique-vec-res.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
#[derive(Debug)]
struct r<'a> {
i: &'a Cell<isize>,
}
impl<'a> Drop for r<'a> {
fn drop(&mut self) {
unsafe {
self.i.set(self.i.get() + 1);
}
}
}
fn f<T>(__isize: Vec<T>, _j: Vec<T> ) {
}
fn clone<T: Clone>(t: &T) -> T { t.clone() }
fn | () {
let i1 = &Cell::new(0);
let i2 = &Cell::new(1);
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
let r1 = vec!(Box::new(r { i: i1 }));
let r2 = vec!(Box::new(r { i: i2 }));
f(clone(&r1), clone(&r2));
//~^ ERROR the trait `core::clone::Clone` is not implemented for the type
//~^^ ERROR the trait `core::clone::Clone` is not implemented for the type
println!("{:?}", (r2, i1.get()));
println!("{:?}", (r1, i2.get()));
}
| main | identifier_name |
unique-vec-res.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
#[derive(Debug)]
struct r<'a> {
i: &'a Cell<isize>,
}
impl<'a> Drop for r<'a> {
fn drop(&mut self) {
unsafe {
self.i.set(self.i.get() + 1);
}
}
}
fn f<T>(__isize: Vec<T>, _j: Vec<T> ) {
}
fn clone<T: Clone>(t: &T) -> T |
fn main() {
let i1 = &Cell::new(0);
let i2 = &Cell::new(1);
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
let r1 = vec!(Box::new(r { i: i1 }));
let r2 = vec!(Box::new(r { i: i2 }));
f(clone(&r1), clone(&r2));
//~^ ERROR the trait `core::clone::Clone` is not implemented for the type
//~^^ ERROR the trait `core::clone::Clone` is not implemented for the type
println!("{:?}", (r2, i1.get()));
println!("{:?}", (r1, i2.get()));
}
| { t.clone() } | identifier_body |
unique-vec-res.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
#[derive(Debug)]
struct r<'a> {
i: &'a Cell<isize>,
}
impl<'a> Drop for r<'a> {
fn drop(&mut self) {
unsafe {
self.i.set(self.i.get() + 1);
}
}
} | }
fn clone<T: Clone>(t: &T) -> T { t.clone() }
fn main() {
let i1 = &Cell::new(0);
let i2 = &Cell::new(1);
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
let r1 = vec!(Box::new(r { i: i1 }));
let r2 = vec!(Box::new(r { i: i2 }));
f(clone(&r1), clone(&r2));
//~^ ERROR the trait `core::clone::Clone` is not implemented for the type
//~^^ ERROR the trait `core::clone::Clone` is not implemented for the type
println!("{:?}", (r2, i1.get()));
println!("{:?}", (r1, i2.get()));
} |
fn f<T>(__isize: Vec<T> , _j: Vec<T> ) { | random_line_split |
http_body.rs | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use proxy_wasm::traits::*;
use proxy_wasm::types::*;
#[no_mangle]
pub fn | () {
proxy_wasm::set_log_level(LogLevel::Trace);
proxy_wasm::set_root_context(|_| -> Box<dyn RootContext> { Box::new(HttpBodyRoot) });
}
struct HttpBodyRoot;
impl Context for HttpBodyRoot {}
impl RootContext for HttpBodyRoot {
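// Advertise that this root context creates HTTP contexts, so the host calls
// create_http_context for each HTTP stream.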
fn get_type(&self) -> Option<ContextType> {
Some(ContextType::HttpContext)
}
fn create_http_context(&self, _: u32) -> Option<Box<dyn HttpContext>> {
Some(Box::new(HttpBody))
}
}
struct HttpBody;
impl Context for HttpBody {}
impl HttpContext for HttpBody {
fn on_http_response_headers(&mut self, _: usize, _: bool) -> Action {
// If there is a Content-Length header and we change the length of
// the body later, then clients will break. So remove it.
// We must do this here, because once we exit this function we
// can no longer modify the response headers.
self.set_http_response_header("content-length", None);
Action::Continue
}
fn on_http_response_body(&mut self, body_size: usize, end_of_stream: bool) -> Action {
if !end_of_stream {
// Wait -- we'll be called again when the complete body is buffered
// at the host side.
return Action::Pause;
}
// Replace the message body if it contains the text "secret".
// Since we returned "Pause" previously, this will return the whole body.
if let Some(body_bytes) = self.get_http_response_body(0, body_size) {
let body_str = String::from_utf8(body_bytes).unwrap();
if body_str.contains("secret") {
let new_body = format!("Original message body ({} bytes) redacted.", body_size);
self.set_http_response_body(0, body_size, &new_body.into_bytes());
}
}
Action::Continue
}
}
| _start | identifier_name |
http_body.rs | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use proxy_wasm::traits::*;
use proxy_wasm::types::*;
#[no_mangle]
pub fn _start() {
proxy_wasm::set_log_level(LogLevel::Trace);
proxy_wasm::set_root_context(|_| -> Box<dyn RootContext> { Box::new(HttpBodyRoot) });
}
struct HttpBodyRoot;
impl Context for HttpBodyRoot {}
impl RootContext for HttpBodyRoot {
fn get_type(&self) -> Option<ContextType> {
Some(ContextType::HttpContext)
}
fn create_http_context(&self, _: u32) -> Option<Box<dyn HttpContext>> {
Some(Box::new(HttpBody))
}
}
struct HttpBody;
impl Context for HttpBody {}
impl HttpContext for HttpBody {
fn on_http_response_headers(&mut self, _: usize, _: bool) -> Action {
// If there is a Content-Length header and we change the length of
// the body later, then clients will break. So remove it.
// We must do this here, because once we exit this function we
// can no longer modify the response headers.
self.set_http_response_header("content-length", None);
Action::Continue
}
fn on_http_response_body(&mut self, body_size: usize, end_of_stream: bool) -> Action {
if !end_of_stream {
// Wait -- we'll be called again when the complete body is buffered
// at the host side.
return Action::Pause;
}
// Replace the message body if it contains the text "secret".
// Since we returned "Pause" previously, this will return the whole body.
if let Some(body_bytes) = self.get_http_response_body(0, body_size) {
let body_str = String::from_utf8(body_bytes).unwrap();
if body_str.contains("secret") {
let new_body = format!("Original message body ({} bytes) redacted.", body_size);
self.set_http_response_body(0, body_size, &new_body.into_bytes());
}
} | Action::Continue
}
} | random_line_split |
|
http_body.rs | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use proxy_wasm::traits::*;
use proxy_wasm::types::*;
#[no_mangle]
pub fn _start() {
proxy_wasm::set_log_level(LogLevel::Trace);
proxy_wasm::set_root_context(|_| -> Box<dyn RootContext> { Box::new(HttpBodyRoot) });
}
struct HttpBodyRoot;
impl Context for HttpBodyRoot {}
impl RootContext for HttpBodyRoot {
fn get_type(&self) -> Option<ContextType> {
Some(ContextType::HttpContext)
}
fn create_http_context(&self, _: u32) -> Option<Box<dyn HttpContext>> {
Some(Box::new(HttpBody))
}
}
struct HttpBody;
impl Context for HttpBody {}
impl HttpContext for HttpBody {
fn on_http_response_headers(&mut self, _: usize, _: bool) -> Action {
// If there is a Content-Length header and we change the length of
// the body later, then clients will break. So remove it.
// We must do this here, because once we exit this function we
// can no longer modify the response headers.
self.set_http_response_header("content-length", None);
Action::Continue
}
fn on_http_response_body(&mut self, body_size: usize, end_of_stream: bool) -> Action {
if !end_of_stream {
// Wait -- we'll be called again when the complete body is buffered
// at the host side.
return Action::Pause;
}
// Replace the message body if it contains the text "secret".
// Since we returned "Pause" previously, this will return the whole body.
if let Some(body_bytes) = self.get_http_response_body(0, body_size) |
Action::Continue
}
}
| {
let body_str = String::from_utf8(body_bytes).unwrap();
if body_str.contains("secret") {
let new_body = format!("Original message body ({} bytes) redacted.", body_size);
self.set_http_response_body(0, body_size, &new_body.into_bytes());
}
} | conditional_block |
graphviz.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module provides linkage between rustc::middle::graph and
//! libgraphviz traits, specialized to attaching borrowck analysis
//! data to rendered labels.
pub use self::Variant::*;
pub use rustc::middle::cfg::graphviz::{Node, Edge};
use rustc::middle::cfg::graphviz as cfg_dot;
use borrowck;
use borrowck::{BorrowckCtxt, LoanPath};
use dot;
use rustc::middle::cfg::{CFGIndex};
use rustc::middle::dataflow::{DataFlowOperator, DataFlowContext, EntryOrExit};
use rustc::middle::dataflow;
use std::rc::Rc;
use std::borrow::IntoCow;
#[derive(Debug, Copy)]
pub enum Variant {
Loans,
Moves,
Assigns,
}
impl Variant {
pub fn short_name(&self) -> &'static str {
match *self {
Loans => "loans",
Moves => "moves",
Assigns => "assigns",
}
}
}
pub struct DataflowLabeller<'a, 'tcx: 'a> {
pub inner: cfg_dot::LabelledCFG<'a, 'tcx>,
pub variants: Vec<Variant>,
pub borrowck_ctxt: &'a BorrowckCtxt<'a, 'tcx>,
pub analysis_data: &'a borrowck::AnalysisData<'a, 'tcx>,
}
impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
fn dataflow_for(&self, e: EntryOrExit, n: &Node<'a>) -> String |
fn dataflow_for_variant(&self, e: EntryOrExit, n: &Node, v: Variant) -> String {
let cfgidx = n.0;
match v {
Loans => self.dataflow_loans_for(e, cfgidx),
Moves => self.dataflow_moves_for(e, cfgidx),
Assigns => self.dataflow_assigns_for(e, cfgidx),
}
}
fn build_set<O:DataFlowOperator, F>(&self,
e: EntryOrExit,
cfgidx: CFGIndex,
dfcx: &DataFlowContext<'a, 'tcx, O>,
mut to_lp: F) -> String where
F: FnMut(uint) -> Rc<LoanPath<'tcx>>,
{
let mut saw_some = false;
let mut set = "{".to_string();
dfcx.each_bit_for_node(e, cfgidx, |index| {
let lp = to_lp(index);
if saw_some {
set.push_str(", ");
}
let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp);
set.push_str(&loan_str[..]);
saw_some = true;
true
});
set.push_str("}");
set
}
fn dataflow_loans_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
let dfcx = &self.analysis_data.loans;
let loan_index_to_path = |loan_index| {
let all_loans = &self.analysis_data.all_loans;
let l: &borrowck::Loan = &all_loans[loan_index];
l.loan_path()
};
self.build_set(e, cfgidx, dfcx, loan_index_to_path)
}
fn dataflow_moves_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
let dfcx = &self.analysis_data.move_data.dfcx_moves;
let move_index_to_path = |move_index| {
let move_data = &self.analysis_data.move_data.move_data;
let moves = move_data.moves.borrow();
let the_move: &borrowck::move_data::Move = &(*moves)[move_index];
move_data.path_loan_path(the_move.path)
};
self.build_set(e, cfgidx, dfcx, move_index_to_path)
}
fn dataflow_assigns_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
let dfcx = &self.analysis_data.move_data.dfcx_assign;
let assign_index_to_path = |assign_index| {
let move_data = &self.analysis_data.move_data.move_data;
let assignments = move_data.var_assignments.borrow();
let assignment: &borrowck::move_data::Assignment = &(*assignments)[assign_index];
move_data.path_loan_path(assignment.path)
};
self.build_set(e, cfgidx, dfcx, assign_index_to_path)
}
}
impl<'a, 'tcx> dot::Labeller<'a, Node<'a>, Edge<'a>> for DataflowLabeller<'a, 'tcx> {
fn graph_id(&'a self) -> dot::Id<'a> { self.inner.graph_id() }
fn node_id(&'a self, n: &Node<'a>) -> dot::Id<'a> { self.inner.node_id(n) }
fn node_label(&'a self, n: &Node<'a>) -> dot::LabelText<'a> {
let prefix = self.dataflow_for(dataflow::Entry, n);
let suffix = self.dataflow_for(dataflow::Exit, n);
let inner_label = self.inner.node_label(n);
inner_label
.prefix_line(dot::LabelText::LabelStr(prefix.into_cow()))
.suffix_line(dot::LabelText::LabelStr(suffix.into_cow()))
}
fn edge_label(&'a self, e: &Edge<'a>) -> dot::LabelText<'a> { self.inner.edge_label(e) }
}
impl<'a, 'tcx> dot::GraphWalk<'a, Node<'a>, Edge<'a>> for DataflowLabeller<'a, 'tcx> {
fn nodes(&'a self) -> dot::Nodes<'a, Node<'a>> { self.inner.nodes() }
fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> { self.inner.edges() }
fn source(&'a self, edge: &Edge<'a>) -> Node<'a> { self.inner.source(edge) }
fn target(&'a self, edge: &Edge<'a>) -> Node<'a> { self.inner.target(edge) }
}
| {
let id = n.1.data.id();
debug!("dataflow_for({:?}, id={}) {:?}", e, id, self.variants);
let mut sets = "".to_string();
let mut seen_one = false;
for &variant in &self.variants {
if seen_one { sets.push_str(" "); } else { seen_one = true; }
sets.push_str(variant.short_name());
sets.push_str(": ");
sets.push_str(&self.dataflow_for_variant(e, n, variant));
}
sets
} | identifier_body |
graphviz.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module provides linkage between rustc::middle::graph and
//! libgraphviz traits, specialized to attaching borrowck analysis
//! data to rendered labels.
pub use self::Variant::*;
pub use rustc::middle::cfg::graphviz::{Node, Edge};
use rustc::middle::cfg::graphviz as cfg_dot;
use borrowck;
use borrowck::{BorrowckCtxt, LoanPath};
use dot;
use rustc::middle::cfg::{CFGIndex};
use rustc::middle::dataflow::{DataFlowOperator, DataFlowContext, EntryOrExit};
use rustc::middle::dataflow;
use std::rc::Rc;
use std::borrow::IntoCow;
#[derive(Debug, Copy)]
pub enum Variant {
Loans,
Moves,
Assigns,
}
impl Variant {
pub fn short_name(&self) -> &'static str {
match *self {
Loans => "loans",
Moves => "moves",
Assigns => "assigns",
}
}
}
pub struct DataflowLabeller<'a, 'tcx: 'a> {
pub inner: cfg_dot::LabelledCFG<'a, 'tcx>,
pub variants: Vec<Variant>,
pub borrowck_ctxt: &'a BorrowckCtxt<'a, 'tcx>,
pub analysis_data: &'a borrowck::AnalysisData<'a, 'tcx>,
}
impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
fn | (&self, e: EntryOrExit, n: &Node<'a>) -> String {
let id = n.1.data.id();
debug!("dataflow_for({:?}, id={}) {:?}", e, id, self.variants);
let mut sets = "".to_string();
let mut seen_one = false;
for &variant in &self.variants {
if seen_one { sets.push_str(" "); } else { seen_one = true; }
sets.push_str(variant.short_name());
sets.push_str(": ");
sets.push_str(&self.dataflow_for_variant(e, n, variant));
}
sets
}
fn dataflow_for_variant(&self, e: EntryOrExit, n: &Node, v: Variant) -> String {
let cfgidx = n.0;
match v {
Loans => self.dataflow_loans_for(e, cfgidx),
Moves => self.dataflow_moves_for(e, cfgidx),
Assigns => self.dataflow_assigns_for(e, cfgidx),
}
}
fn build_set<O:DataFlowOperator, F>(&self,
e: EntryOrExit,
cfgidx: CFGIndex,
dfcx: &DataFlowContext<'a, 'tcx, O>,
mut to_lp: F) -> String where
F: FnMut(uint) -> Rc<LoanPath<'tcx>>,
{
let mut saw_some = false;
let mut set = "{".to_string();
dfcx.each_bit_for_node(e, cfgidx, |index| {
let lp = to_lp(index);
if saw_some {
set.push_str(", ");
}
let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp);
set.push_str(&loan_str[..]);
saw_some = true;
true
});
set.push_str("}");
set
}
fn dataflow_loans_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
let dfcx = &self.analysis_data.loans;
let loan_index_to_path = |loan_index| {
let all_loans = &self.analysis_data.all_loans;
let l: &borrowck::Loan = &all_loans[loan_index];
l.loan_path()
};
self.build_set(e, cfgidx, dfcx, loan_index_to_path)
}
fn dataflow_moves_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
let dfcx = &self.analysis_data.move_data.dfcx_moves;
let move_index_to_path = |move_index| {
let move_data = &self.analysis_data.move_data.move_data;
let moves = move_data.moves.borrow();
let the_move: &borrowck::move_data::Move = &(*moves)[move_index];
move_data.path_loan_path(the_move.path)
};
self.build_set(e, cfgidx, dfcx, move_index_to_path)
}
fn dataflow_assigns_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
let dfcx = &self.analysis_data.move_data.dfcx_assign;
let assign_index_to_path = |assign_index| {
let move_data = &self.analysis_data.move_data.move_data;
let assignments = move_data.var_assignments.borrow();
let assignment: &borrowck::move_data::Assignment = &(*assignments)[assign_index];
move_data.path_loan_path(assignment.path)
};
self.build_set(e, cfgidx, dfcx, assign_index_to_path)
}
}
impl<'a, 'tcx> dot::Labeller<'a, Node<'a>, Edge<'a>> for DataflowLabeller<'a, 'tcx> {
fn graph_id(&'a self) -> dot::Id<'a> { self.inner.graph_id() }
fn node_id(&'a self, n: &Node<'a>) -> dot::Id<'a> { self.inner.node_id(n) }
fn node_label(&'a self, n: &Node<'a>) -> dot::LabelText<'a> {
let prefix = self.dataflow_for(dataflow::Entry, n);
let suffix = self.dataflow_for(dataflow::Exit, n);
let inner_label = self.inner.node_label(n);
inner_label
.prefix_line(dot::LabelText::LabelStr(prefix.into_cow()))
.suffix_line(dot::LabelText::LabelStr(suffix.into_cow()))
}
fn edge_label(&'a self, e: &Edge<'a>) -> dot::LabelText<'a> { self.inner.edge_label(e) }
}
impl<'a, 'tcx> dot::GraphWalk<'a, Node<'a>, Edge<'a>> for DataflowLabeller<'a, 'tcx> {
fn nodes(&'a self) -> dot::Nodes<'a, Node<'a>> { self.inner.nodes() }
fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> { self.inner.edges() }
fn source(&'a self, edge: &Edge<'a>) -> Node<'a> { self.inner.source(edge) }
fn target(&'a self, edge: &Edge<'a>) -> Node<'a> { self.inner.target(edge) }
}
| dataflow_for | identifier_name |
graphviz.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module provides linkage between rustc::middle::graph and
//! libgraphviz traits, specialized to attaching borrowck analysis
//! data to rendered labels.
pub use self::Variant::*;
pub use rustc::middle::cfg::graphviz::{Node, Edge};
use rustc::middle::cfg::graphviz as cfg_dot;
use borrowck;
use borrowck::{BorrowckCtxt, LoanPath};
use dot;
use rustc::middle::cfg::{CFGIndex};
use rustc::middle::dataflow::{DataFlowOperator, DataFlowContext, EntryOrExit};
use rustc::middle::dataflow;
use std::rc::Rc;
use std::borrow::IntoCow;
#[derive(Debug, Copy)] | pub enum Variant {
Loans,
Moves,
Assigns,
}
impl Variant {
pub fn short_name(&self) -> &'static str {
match *self {
Loans => "loans",
Moves => "moves",
Assigns => "assigns",
}
}
}
pub struct DataflowLabeller<'a, 'tcx: 'a> {
pub inner: cfg_dot::LabelledCFG<'a, 'tcx>,
pub variants: Vec<Variant>,
pub borrowck_ctxt: &'a BorrowckCtxt<'a, 'tcx>,
pub analysis_data: &'a borrowck::AnalysisData<'a, 'tcx>,
}
impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
fn dataflow_for(&self, e: EntryOrExit, n: &Node<'a>) -> String {
let id = n.1.data.id();
debug!("dataflow_for({:?}, id={}) {:?}", e, id, self.variants);
let mut sets = "".to_string();
let mut seen_one = false;
for &variant in &self.variants {
if seen_one { sets.push_str(" "); } else { seen_one = true; }
sets.push_str(variant.short_name());
sets.push_str(": ");
sets.push_str(&self.dataflow_for_variant(e, n, variant));
}
sets
}
fn dataflow_for_variant(&self, e: EntryOrExit, n: &Node, v: Variant) -> String {
let cfgidx = n.0;
match v {
Loans => self.dataflow_loans_for(e, cfgidx),
Moves => self.dataflow_moves_for(e, cfgidx),
Assigns => self.dataflow_assigns_for(e, cfgidx),
}
}
fn build_set<O:DataFlowOperator, F>(&self,
e: EntryOrExit,
cfgidx: CFGIndex,
dfcx: &DataFlowContext<'a, 'tcx, O>,
mut to_lp: F) -> String where
F: FnMut(uint) -> Rc<LoanPath<'tcx>>,
{
let mut saw_some = false;
let mut set = "{".to_string();
dfcx.each_bit_for_node(e, cfgidx, |index| {
let lp = to_lp(index);
if saw_some {
set.push_str(", ");
}
let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp);
set.push_str(&loan_str[..]);
saw_some = true;
true
});
set.push_str("}");
set
}
fn dataflow_loans_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
let dfcx = &self.analysis_data.loans;
let loan_index_to_path = |loan_index| {
let all_loans = &self.analysis_data.all_loans;
let l: &borrowck::Loan = &all_loans[loan_index];
l.loan_path()
};
self.build_set(e, cfgidx, dfcx, loan_index_to_path)
}
fn dataflow_moves_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
let dfcx = &self.analysis_data.move_data.dfcx_moves;
let move_index_to_path = |move_index| {
let move_data = &self.analysis_data.move_data.move_data;
let moves = move_data.moves.borrow();
let the_move: &borrowck::move_data::Move = &(*moves)[move_index];
move_data.path_loan_path(the_move.path)
};
self.build_set(e, cfgidx, dfcx, move_index_to_path)
}
fn dataflow_assigns_for(&self, e: EntryOrExit, cfgidx: CFGIndex) -> String {
let dfcx = &self.analysis_data.move_data.dfcx_assign;
let assign_index_to_path = |assign_index| {
let move_data = &self.analysis_data.move_data.move_data;
let assignments = move_data.var_assignments.borrow();
let assignment: &borrowck::move_data::Assignment = &(*assignments)[assign_index];
move_data.path_loan_path(assignment.path)
};
self.build_set(e, cfgidx, dfcx, assign_index_to_path)
}
}
impl<'a, 'tcx> dot::Labeller<'a, Node<'a>, Edge<'a>> for DataflowLabeller<'a, 'tcx> {
fn graph_id(&'a self) -> dot::Id<'a> { self.inner.graph_id() }
fn node_id(&'a self, n: &Node<'a>) -> dot::Id<'a> { self.inner.node_id(n) }
fn node_label(&'a self, n: &Node<'a>) -> dot::LabelText<'a> {
let prefix = self.dataflow_for(dataflow::Entry, n);
let suffix = self.dataflow_for(dataflow::Exit, n);
let inner_label = self.inner.node_label(n);
inner_label
.prefix_line(dot::LabelText::LabelStr(prefix.into_cow()))
.suffix_line(dot::LabelText::LabelStr(suffix.into_cow()))
}
fn edge_label(&'a self, e: &Edge<'a>) -> dot::LabelText<'a> { self.inner.edge_label(e) }
}
impl<'a, 'tcx> dot::GraphWalk<'a, Node<'a>, Edge<'a>> for DataflowLabeller<'a, 'tcx> {
fn nodes(&'a self) -> dot::Nodes<'a, Node<'a>> { self.inner.nodes() }
fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> { self.inner.edges() }
fn source(&'a self, edge: &Edge<'a>) -> Node<'a> { self.inner.source(edge) }
fn target(&'a self, edge: &Edge<'a>) -> Node<'a> { self.inner.target(edge) }
} | random_line_split |
|
ifmt.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// no-pretty-expanded unnecessary unsafe block generated
// ignore-lexer-test FIXME #15679
| use std::fmt;
use std::usize;
struct A;
struct B;
struct C;
impl fmt::LowerHex for A {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("aloha")
}
}
impl fmt::UpperHex for B {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("adios")
}
}
impl fmt::Display for C {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.pad_integral(true, "☃", "123")
}
}
macro_rules! t {
($a:expr, $b:expr) => { assert_eq!($a, $b) }
}
pub fn main() {
// Various edge cases without formats
t!(format!(""), "");
t!(format!("hello"), "hello");
t!(format!("hello {{"), "hello {");
// default formatters should work
t!(format!("{}", 1.0f32), "1");
t!(format!("{}", 1.0f64), "1");
t!(format!("{}", "a"), "a");
t!(format!("{}", "a".to_string()), "a");
t!(format!("{}", false), "false");
t!(format!("{}", 'a'), "a");
// At least exercise all the formats
t!(format!("{}", true), "true");
t!(format!("{}", '☃'), "☃");
t!(format!("{}", 10), "10");
t!(format!("{}", 10_usize), "10");
t!(format!("{:?}", '☃'), "'\\u{2603}'");
t!(format!("{:?}", 10), "10");
t!(format!("{:?}", 10_usize), "10");
t!(format!("{:?}", "true"), "\"true\"");
t!(format!("{:?}", "foo\nbar"), "\"foo\\nbar\"");
t!(format!("{:o}", 10_usize), "12");
t!(format!("{:x}", 10_usize), "a");
t!(format!("{:X}", 10_usize), "A");
t!(format!("{}", "foo"), "foo");
t!(format!("{}", "foo".to_string()), "foo");
if cfg!(target_pointer_width = "32") {
t!(format!("{:#p}", 0x1234 as *const isize), "0x00001234");
t!(format!("{:#p}", 0x1234 as *mut isize), "0x00001234");
} else {
t!(format!("{:#p}", 0x1234 as *const isize), "0x0000000000001234");
t!(format!("{:#p}", 0x1234 as *mut isize), "0x0000000000001234");
}
t!(format!("{:p}", 0x1234 as *const isize), "0x1234");
t!(format!("{:p}", 0x1234 as *mut isize), "0x1234");
t!(format!("{:x}", A), "aloha");
t!(format!("{:X}", B), "adios");
t!(format!("foo {} ☃☃☃☃☃☃", "bar"), "foo bar ☃☃☃☃☃☃");
t!(format!("{1} {0}", 0, 1), "1 0");
t!(format!("{foo} {bar}", foo=0, bar=1), "0 1");
t!(format!("{foo} {1} {bar} {0}", 0, 1, foo=2, bar=3), "2 1 3 0");
t!(format!("{} {0}", "a"), "a a");
t!(format!("{foo_bar}", foo_bar=1), "1");
t!(format!("{}", 5 + 5), "10");
t!(format!("{:#4}", C), "☃123");
let a: &fmt::Debug = &1;
t!(format!("{:?}", a), "1");
// Formatting strings and their arguments
t!(format!("{}", "a"), "a");
t!(format!("{:4}", "a"), "a ");
t!(format!("{:4}", "☃"), "☃ ");
t!(format!("{:>4}", "a"), " a");
t!(format!("{:<4}", "a"), "a ");
t!(format!("{:^5}", "a"), " a ");
t!(format!("{:^5}", "aa"), " aa ");
t!(format!("{:^4}", "a"), " a ");
t!(format!("{:^4}", "aa"), " aa ");
t!(format!("{:.4}", "a"), "a");
t!(format!("{:4.4}", "a"), "a ");
t!(format!("{:4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:<4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:>4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:^4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:>10.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:2.4}", "aaaaa"), "aaaa");
t!(format!("{:2.4}", "aaaa"), "aaaa");
t!(format!("{:2.4}", "aaa"), "aaa");
t!(format!("{:2.4}", "aa"), "aa");
t!(format!("{:2.4}", "a"), "a ");
t!(format!("{:0>2}", "a"), "0a");
t!(format!("{:.*}", 4, "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:.1$}", "aaaaaaaaaaaaaaaaaa", 4), "aaaa");
t!(format!("{:.a$}", "aaaaaaaaaaaaaaaaaa", a=4), "aaaa");
t!(format!("{:1$}", "a", 4), "a ");
t!(format!("{1:0$}", 4, "a"), "a ");
t!(format!("{:a$}", "a", a=4), "a ");
t!(format!("{:-#}", "a"), "a");
t!(format!("{:+#}", "a"), "a");
// Some float stuff
t!(format!("{:}", 1.0f32), "1");
t!(format!("{:}", 1.0f64), "1");
t!(format!("{:.3}", 1.0f64), "1.000");
t!(format!("{:10.3}", 1.0f64), " 1.000");
t!(format!("{:+10.3}", 1.0f64), " +1.000");
t!(format!("{:+10.3}", -1.0f64), " -1.000");
t!(format!("{:e}", 1.2345e6f32), "1.2345e6");
t!(format!("{:e}", 1.2345e6f64), "1.2345e6");
t!(format!("{:E}", 1.2345e6f64), "1.2345E6");
t!(format!("{:.3e}", 1.2345e6f64), "1.234e6");
t!(format!("{:10.3e}", 1.2345e6f64), " 1.234e6");
t!(format!("{:+10.3e}", 1.2345e6f64), " +1.234e6");
t!(format!("{:+10.3e}", -1.2345e6f64), " -1.234e6");
// Test that pointers don't get truncated.
{
let val = usize::MAX;
let exp = format!("{:#x}", val);
t!(format!("{:p}", val as *const isize), exp);
}
// Escaping
t!(format!("{{"), "{");
t!(format!("}}"), "}");
test_write();
test_print();
test_order();
// make sure that format! doesn't move out of local variables
let a: Box<_> = box 3;
format!("{}", a);
format!("{}", a);
// make sure that format! doesn't cause spurious unused-unsafe warnings when
// it's inside of an outer unsafe block
unsafe {
let a: isize = ::std::mem::transmute(3_usize);
format!("{}", a);
}
test_format_args();
// test that trailing commas are acceptable
format!("{}", "test",);
format!("{foo}", foo="test",);
}
// Basic test to make sure that we can invoke the `write!` macro with an
// fmt::Write instance.
fn test_write() {
use std::fmt::Write;
let mut buf = String::new();
write!(&mut buf, "{}", 3);
{
let w = &mut buf;
write!(w, "{foo}", foo=4);
write!(w, "{}", "hello");
writeln!(w, "{}", "line");
writeln!(w, "{foo}", foo="bar");
}
t!(buf, "34helloline\nbar\n");
}
// Just make sure that the macros are defined, there's not really a lot that we
// can do with them just yet (to test the output)
fn test_print() {
print!("hi");
print!("{:?}", vec!(0u8));
println!("hello");
println!("this is a {}", "test");
println!("{foo}", foo="bar");
}
// Just make sure that the macros are defined, there's not really a lot that we
// can do with them just yet (to test the output)
fn test_format_args() {
use std::fmt::Write;
let mut buf = String::new();
{
let w = &mut buf;
write!(w, "{}", format_args!("{}", 1));
write!(w, "{}", format_args!("test"));
write!(w, "{}", format_args!("{test}", test=3));
}
let s = buf;
t!(s, "1test3");
let s = fmt::format(format_args!("hello {}", "world"));
t!(s, "hello world");
let s = format!("{}: {}", "args were", format_args!("hello {}", "world"));
t!(s, "args were: hello world");
}
fn test_order() {
// Make sure format!() arguments are always evaluated in a left-to-right
// ordering
fn foo() -> isize {
static mut FOO: isize = 0;
unsafe {
FOO += 1;
FOO
}
}
assert_eq!(format!("{} {} {a} {b} {} {c}",
foo(), foo(), foo(), a=foo(), b=foo(), c=foo()),
"1 2 4 5 3 6".to_string());
} | #![deny(warnings)]
#![allow(unused_must_use)]
#![allow(unknown_features)]
#![feature(box_syntax)]
| random_line_split |
ifmt.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// no-pretty-expanded unnecessary unsafe block generated
// ignore-lexer-test FIXME #15679
#![deny(warnings)]
#![allow(unused_must_use)]
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::fmt;
use std::usize;
struct A;
struct B;
struct C;
impl fmt::LowerHex for A {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("aloha")
}
}
impl fmt::UpperHex for B {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("adios")
}
}
impl fmt::Display for C {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.pad_integral(true, "☃", "123")
}
}
macro_rules! t {
($a:expr, $b:expr) => { assert_eq!($a, $b) }
}
pub fn main() {
| t!(format!("{:?}", 10_usize), "10");
t!(format!("{:?}", "true"), "\"true\"");
t!(format!("{:?}", "foo\nbar"), "\"foo\\nbar\"");
t!(format!("{:o}", 10_usize), "12");
t!(format!("{:x}", 10_usize), "a");
t!(format!("{:X}", 10_usize), "A");
t!(format!("{}", "foo"), "foo");
t!(format!("{}", "foo".to_string()), "foo");
if cfg!(target_pointer_width = "32") {
t!(format!("{:#p}", 0x1234 as *const isize), "0x00001234");
t!(format!("{:#p}", 0x1234 as *mut isize), "0x00001234");
} else {
t!(format!("{:#p}", 0x1234 as *const isize), "0x0000000000001234");
t!(format!("{:#p}", 0x1234 as *mut isize), "0x0000000000001234");
}
t!(format!("{:p}", 0x1234 as *const isize), "0x1234");
t!(format!("{:p}", 0x1234 as *mut isize), "0x1234");
t!(format!("{:x}", A), "aloha");
t!(format!("{:X}", B), "adios");
t!(format!("foo {} ☃☃☃☃☃☃", "bar"), "foo bar ☃☃☃☃☃☃");
t!(format!("{1} {0}", 0, 1), "1 0");
t!(format!("{foo} {bar}", foo=0, bar=1), "0 1");
t!(format!("{foo} {1} {bar} {0}", 0, 1, foo=2, bar=3), "2 1 3 0");
t!(format!("{} {0}", "a"), "a a");
t!(format!("{foo_bar}", foo_bar=1), "1");
t!(format!("{}", 5 + 5), "10");
t!(format!("{:#4}", C), "☃123");
let a: &fmt::Debug = &1;
t!(format!("{:?}", a), "1");
// Formatting strings and their arguments
t!(format!("{}", "a"), "a");
t!(format!("{:4}", "a"), "a ");
t!(format!("{:4}", "☃"), "☃ ");
t!(format!("{:>4}", "a"), " a");
t!(format!("{:<4}", "a"), "a ");
t!(format!("{:^5}", "a"), " a ");
t!(format!("{:^5}", "aa"), " aa ");
t!(format!("{:^4}", "a"), " a ");
t!(format!("{:^4}", "aa"), " aa ");
t!(format!("{:.4}", "a"), "a");
t!(format!("{:4.4}", "a"), "a ");
t!(format!("{:4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:<4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:>4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:^4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:>10.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:2.4}", "aaaaa"), "aaaa");
t!(format!("{:2.4}", "aaaa"), "aaaa");
t!(format!("{:2.4}", "aaa"), "aaa");
t!(format!("{:2.4}", "aa"), "aa");
t!(format!("{:2.4}", "a"), "a ");
t!(format!("{:0>2}", "a"), "0a");
t!(format!("{:.*}", 4, "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:.1$}", "aaaaaaaaaaaaaaaaaa", 4), "aaaa");
t!(format!("{:.a$}", "aaaaaaaaaaaaaaaaaa", a=4), "aaaa");
t!(format!("{:1$}", "a", 4), "a ");
t!(format!("{1:0$}", 4, "a"), "a ");
t!(format!("{:a$}", "a", a=4), "a ");
t!(format!("{:-#}", "a"), "a");
t!(format!("{:+#}", "a"), "a");
// Some float stuff
t!(format!("{:}", 1.0f32), "1");
t!(format!("{:}", 1.0f64), "1");
t!(format!("{:.3}", 1.0f64), "1.000");
t!(format!("{:10.3}", 1.0f64), " 1.000");
t!(format!("{:+10.3}", 1.0f64), " +1.000");
t!(format!("{:+10.3}", -1.0f64), " -1.000");
t!(format!("{:e}", 1.2345e6f32), "1.2345e6");
t!(format!("{:e}", 1.2345e6f64), "1.2345e6");
t!(format!("{:E}", 1.2345e6f64), "1.2345E6");
t!(format!("{:.3e}", 1.2345e6f64), "1.234e6");
t!(format!("{:10.3e}", 1.2345e6f64), " 1.234e6");
t!(format!("{:+10.3e}", 1.2345e6f64), " +1.234e6");
t!(format!("{:+10.3e}", -1.2345e6f64), " -1.234e6");
// Test that pointers don't get truncated.
{
let val = usize::MAX;
let exp = format!("{:#x}", val);
t!(format!("{:p}", val as *const isize), exp);
}
// Escaping
t!(format!("{{"), "{");
t!(format!("}}"), "}");
test_write();
test_print();
test_order();
// make sure that format! doesn't move out of local variables
let a: Box<_> = box 3;
format!("{}", a);
format!("{}", a);
// make sure that format! doesn't cause spurious unused-unsafe warnings when
// it's inside of an outer unsafe block
unsafe {
let a: isize = ::std::mem::transmute(3_usize);
format!("{}", a);
}
test_format_args();
// test that trailing commas are acceptable
format!("{}", "test",);
format!("{foo}", foo="test",);
}
// Basic test to make sure that we can invoke the `write!` macro with an
// fmt::Write instance.
fn test_write() {
use std::fmt::Write;
let mut buf = String::new();
write!(&mut buf, "{}", 3);
{
let w = &mut buf;
write!(w, "{foo}", foo=4);
write!(w, "{}", "hello");
writeln!(w, "{}", "line");
writeln!(w, "{foo}", foo="bar");
}
t!(buf, "34helloline\nbar\n");
}
// Just make sure that the macros are defined, there's not really a lot that we
// can do with them just yet (to test the output)
fn test_print() {
print!("hi");
print!("{:?}", vec!(0u8));
println!("hello");
println!("this is a {}", "test");
println!("{foo}", foo="bar");
}
// Just make sure that the macros are defined, there's not really a lot that we
// can do with them just yet (to test the output)
fn test_format_args() {
use std::fmt::Write;
let mut buf = String::new();
{
let w = &mut buf;
write!(w, "{}", format_args!("{}", 1));
write!(w, "{}", format_args!("test"));
write!(w, "{}", format_args!("{test}", test=3));
}
let s = buf;
t!(s, "1test3");
let s = fmt::format(format_args!("hello {}", "world"));
t!(s, "hello world");
let s = format!("{}: {}", "args were", format_args!("hello {}", "world"));
t!(s, "args were: hello world");
}
fn test_order() {
// Make sure format!() arguments are always evaluated in a left-to-right
// ordering
fn foo() -> isize {
static mut FOO: isize = 0;
unsafe {
FOO += 1;
FOO
}
}
assert_eq!(format!("{} {} {a} {b} {} {c}",
foo(), foo(), foo(), a=foo(), b=foo(), c=foo()),
"1 2 4 5 3 6".to_string());
}
| // Various edge cases without formats
t!(format!(""), "");
t!(format!("hello"), "hello");
t!(format!("hello {{"), "hello {");
// default formatters should work
t!(format!("{}", 1.0f32), "1");
t!(format!("{}", 1.0f64), "1");
t!(format!("{}", "a"), "a");
t!(format!("{}", "a".to_string()), "a");
t!(format!("{}", false), "false");
t!(format!("{}", 'a'), "a");
// At least exercise all the formats
t!(format!("{}", true), "true");
t!(format!("{}", '☃'), "☃");
t!(format!("{}", 10), "10");
t!(format!("{}", 10_usize), "10");
t!(format!("{:?}", '☃'), "'\\u{2603}'");
t!(format!("{:?}", 10), "10"); | identifier_body |
ifmt.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// no-pretty-expanded unnecessary unsafe block generated
// ignore-lexer-test FIXME #15679
#![deny(warnings)]
#![allow(unused_must_use)]
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::fmt;
use std::usize;
struct A;
struct B;
struct C;
impl fmt::LowerHex for A {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("aloha")
}
}
impl fmt::UpperHex for B {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("adios")
}
}
impl fmt::Display for C {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.pad_integral(true, "☃", "123")
}
}
macro_rules! t {
($a:expr, $b:expr) => { assert_eq!($a, $b) }
}
pub fn main() {
// Various edge cases without formats
t!(format!(""), "");
t!(format!("hello"), "hello");
t!(format!("hello {{"), "hello {");
// default formatters should work
t!(format!("{}", 1.0f32), "1");
t!(format!("{}", 1.0f64), "1");
t!(format!("{}", "a"), "a");
t!(format!("{}", "a".to_string()), "a");
t!(format!("{}", false), "false");
t!(format!("{}", 'a'), "a");
// At least exercise all the formats
t!(format!("{}", true), "true");
t!(format!("{}", '☃'), "☃");
t!(format!("{}", 10), "10");
t!(format!("{}", 10_usize), "10");
t!(format!("{:?}", '☃'), "'\\u{2603}'");
t!(format!("{:?}", 10), "10");
t!(format!("{:?}", 10_usize), "10");
t!(format!("{:?}", "true"), "\"true\"");
t!(format!("{:?}", "foo\nbar"), "\"foo\\nbar\"");
t!(format!("{:o}", 10_usize), "12");
t!(format!("{:x}", 10_usize), "a");
t!(format!("{:X}", 10_usize), "A");
t!(format!("{}", "foo"), "foo");
t!(format!("{}", "foo".to_string()), "foo");
if cfg!(target_pointer_width = "32") {
t!(format!("{:#p}", 0x1234 as *const isize), "0x00001234");
t!(format!("{:#p}", 0x1234 as *mut isize), "0x00001234");
} else {
| format!("{:p}", 0x1234 as *const isize), "0x1234");
t!(format!("{:p}", 0x1234 as *mut isize), "0x1234");
t!(format!("{:x}", A), "aloha");
t!(format!("{:X}", B), "adios");
t!(format!("foo {} ☃☃☃☃☃☃", "bar"), "foo bar ☃☃☃☃☃☃");
t!(format!("{1} {0}", 0, 1), "1 0");
t!(format!("{foo} {bar}", foo=0, bar=1), "0 1");
t!(format!("{foo} {1} {bar} {0}", 0, 1, foo=2, bar=3), "2 1 3 0");
t!(format!("{} {0}", "a"), "a a");
t!(format!("{foo_bar}", foo_bar=1), "1");
t!(format!("{}", 5 + 5), "10");
t!(format!("{:#4}", C), "☃123");
let a: &fmt::Debug = &1;
t!(format!("{:?}", a), "1");
// Formatting strings and their arguments
t!(format!("{}", "a"), "a");
t!(format!("{:4}", "a"), "a ");
t!(format!("{:4}", "☃"), "☃ ");
t!(format!("{:>4}", "a"), " a");
t!(format!("{:<4}", "a"), "a ");
t!(format!("{:^5}", "a"), " a ");
t!(format!("{:^5}", "aa"), " aa ");
t!(format!("{:^4}", "a"), " a ");
t!(format!("{:^4}", "aa"), " aa ");
t!(format!("{:.4}", "a"), "a");
t!(format!("{:4.4}", "a"), "a ");
t!(format!("{:4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:<4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:>4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:^4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:>10.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:2.4}", "aaaaa"), "aaaa");
t!(format!("{:2.4}", "aaaa"), "aaaa");
t!(format!("{:2.4}", "aaa"), "aaa");
t!(format!("{:2.4}", "aa"), "aa");
t!(format!("{:2.4}", "a"), "a ");
t!(format!("{:0>2}", "a"), "0a");
t!(format!("{:.*}", 4, "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:.1$}", "aaaaaaaaaaaaaaaaaa", 4), "aaaa");
t!(format!("{:.a$}", "aaaaaaaaaaaaaaaaaa", a=4), "aaaa");
t!(format!("{:1$}", "a", 4), "a ");
t!(format!("{1:0$}", 4, "a"), "a ");
t!(format!("{:a$}", "a", a=4), "a ");
t!(format!("{:-#}", "a"), "a");
t!(format!("{:+#}", "a"), "a");
// Some float stuff
t!(format!("{:}", 1.0f32), "1");
t!(format!("{:}", 1.0f64), "1");
t!(format!("{:.3}", 1.0f64), "1.000");
t!(format!("{:10.3}", 1.0f64), " 1.000");
t!(format!("{:+10.3}", 1.0f64), " +1.000");
t!(format!("{:+10.3}", -1.0f64), " -1.000");
t!(format!("{:e}", 1.2345e6f32), "1.2345e6");
t!(format!("{:e}", 1.2345e6f64), "1.2345e6");
t!(format!("{:E}", 1.2345e6f64), "1.2345E6");
t!(format!("{:.3e}", 1.2345e6f64), "1.234e6");
t!(format!("{:10.3e}", 1.2345e6f64), " 1.234e6");
t!(format!("{:+10.3e}", 1.2345e6f64), " +1.234e6");
t!(format!("{:+10.3e}", -1.2345e6f64), " -1.234e6");
// Test that pointers don't get truncated.
{
let val = usize::MAX;
let exp = format!("{:#x}", val);
t!(format!("{:p}", val as *const isize), exp);
}
// Escaping
t!(format!("{{"), "{");
t!(format!("}}"), "}");
test_write();
test_print();
test_order();
// make sure that format! doesn't move out of local variables
let a: Box<_> = box 3;
format!("{}", a);
format!("{}", a);
// make sure that format! doesn't cause spurious unused-unsafe warnings when
// it's inside of an outer unsafe block
unsafe {
let a: isize = ::std::mem::transmute(3_usize);
format!("{}", a);
}
test_format_args();
// test that trailing commas are acceptable
format!("{}", "test",);
format!("{foo}", foo="test",);
}
// Basic test to make sure that we can invoke the `write!` macro with an
// fmt::Write instance.
fn test_write() {
use std::fmt::Write;
let mut buf = String::new();
write!(&mut buf, "{}", 3);
{
let w = &mut buf;
write!(w, "{foo}", foo=4);
write!(w, "{}", "hello");
writeln!(w, "{}", "line");
writeln!(w, "{foo}", foo="bar");
}
t!(buf, "34helloline\nbar\n");
}
// Just make sure that the macros are defined, there's not really a lot that we
// can do with them just yet (to test the output)
fn test_print() {
print!("hi");
print!("{:?}", vec!(0u8));
println!("hello");
println!("this is a {}", "test");
println!("{foo}", foo="bar");
}
// Just make sure that the macros are defined, there's not really a lot that we
// can do with them just yet (to test the output)
fn test_format_args() {
use std::fmt::Write;
let mut buf = String::new();
{
let w = &mut buf;
write!(w, "{}", format_args!("{}", 1));
write!(w, "{}", format_args!("test"));
write!(w, "{}", format_args!("{test}", test=3));
}
let s = buf;
t!(s, "1test3");
let s = fmt::format(format_args!("hello {}", "world"));
t!(s, "hello world");
let s = format!("{}: {}", "args were", format_args!("hello {}", "world"));
t!(s, "args were: hello world");
}
fn test_order() {
// Make sure format!() arguments are always evaluated in a left-to-right
// ordering
fn foo() -> isize {
static mut FOO: isize = 0;
unsafe {
FOO += 1;
FOO
}
}
assert_eq!(format!("{} {} {a} {b} {} {c}",
foo(), foo(), foo(), a=foo(), b=foo(), c=foo()),
"1 2 4 5 3 6".to_string());
}
| t!(format!("{:#p}", 0x1234 as *const isize), "0x0000000000001234");
t!(format!("{:#p}", 0x1234 as *mut isize), "0x0000000000001234");
}
t!( | conditional_block |
ifmt.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// no-pretty-expanded unnecessary unsafe block generated
// ignore-lexer-test FIXME #15679
#![deny(warnings)]
#![allow(unused_must_use)]
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::fmt;
use std::usize;
struct A;
struct B;
struct C;
impl fmt::LowerHex for A {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("aloha")
}
}
impl fmt::UpperHex for B {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("adios")
}
}
impl fmt::Display for C {
fn | (&self, f: &mut fmt::Formatter) -> fmt::Result {
f.pad_integral(true, "☃", "123")
}
}
macro_rules! t {
($a:expr, $b:expr) => { assert_eq!($a, $b) }
}
pub fn main() {
// Various edge cases without formats
t!(format!(""), "");
t!(format!("hello"), "hello");
t!(format!("hello {{"), "hello {");
// default formatters should work
t!(format!("{}", 1.0f32), "1");
t!(format!("{}", 1.0f64), "1");
t!(format!("{}", "a"), "a");
t!(format!("{}", "a".to_string()), "a");
t!(format!("{}", false), "false");
t!(format!("{}", 'a'), "a");
// At least exercise all the formats
t!(format!("{}", true), "true");
t!(format!("{}", '☃'), "☃");
t!(format!("{}", 10), "10");
t!(format!("{}", 10_usize), "10");
t!(format!("{:?}", '☃'), "'\\u{2603}'");
t!(format!("{:?}", 10), "10");
t!(format!("{:?}", 10_usize), "10");
t!(format!("{:?}", "true"), "\"true\"");
t!(format!("{:?}", "foo\nbar"), "\"foo\\nbar\"");
t!(format!("{:o}", 10_usize), "12");
t!(format!("{:x}", 10_usize), "a");
t!(format!("{:X}", 10_usize), "A");
t!(format!("{}", "foo"), "foo");
t!(format!("{}", "foo".to_string()), "foo");
if cfg!(target_pointer_width = "32") {
t!(format!("{:#p}", 0x1234 as *const isize), "0x00001234");
t!(format!("{:#p}", 0x1234 as *mut isize), "0x00001234");
} else {
t!(format!("{:#p}", 0x1234 as *const isize), "0x0000000000001234");
t!(format!("{:#p}", 0x1234 as *mut isize), "0x0000000000001234");
}
t!(format!("{:p}", 0x1234 as *const isize), "0x1234");
t!(format!("{:p}", 0x1234 as *mut isize), "0x1234");
t!(format!("{:x}", A), "aloha");
t!(format!("{:X}", B), "adios");
t!(format!("foo {} ☃☃☃☃☃☃", "bar"), "foo bar ☃☃☃☃☃☃");
t!(format!("{1} {0}", 0, 1), "1 0");
t!(format!("{foo} {bar}", foo=0, bar=1), "0 1");
t!(format!("{foo} {1} {bar} {0}", 0, 1, foo=2, bar=3), "2 1 3 0");
t!(format!("{} {0}", "a"), "a a");
t!(format!("{foo_bar}", foo_bar=1), "1");
t!(format!("{}", 5 + 5), "10");
t!(format!("{:#4}", C), "☃123");
let a: &fmt::Debug = &1;
t!(format!("{:?}", a), "1");
// Formatting strings and their arguments
t!(format!("{}", "a"), "a");
t!(format!("{:4}", "a"), "a ");
t!(format!("{:4}", "☃"), "☃ ");
t!(format!("{:>4}", "a"), " a");
t!(format!("{:<4}", "a"), "a ");
t!(format!("{:^5}", "a"), " a ");
t!(format!("{:^5}", "aa"), " aa ");
t!(format!("{:^4}", "a"), " a ");
t!(format!("{:^4}", "aa"), " aa ");
t!(format!("{:.4}", "a"), "a");
t!(format!("{:4.4}", "a"), "a ");
t!(format!("{:4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:<4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:>4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:^4.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:>10.4}", "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:2.4}", "aaaaa"), "aaaa");
t!(format!("{:2.4}", "aaaa"), "aaaa");
t!(format!("{:2.4}", "aaa"), "aaa");
t!(format!("{:2.4}", "aa"), "aa");
t!(format!("{:2.4}", "a"), "a ");
t!(format!("{:0>2}", "a"), "0a");
t!(format!("{:.*}", 4, "aaaaaaaaaaaaaaaaaa"), "aaaa");
t!(format!("{:.1$}", "aaaaaaaaaaaaaaaaaa", 4), "aaaa");
t!(format!("{:.a$}", "aaaaaaaaaaaaaaaaaa", a=4), "aaaa");
t!(format!("{:1$}", "a", 4), "a ");
t!(format!("{1:0$}", 4, "a"), "a ");
t!(format!("{:a$}", "a", a=4), "a ");
t!(format!("{:-#}", "a"), "a");
t!(format!("{:+#}", "a"), "a");
// Some float stuff
t!(format!("{:}", 1.0f32), "1");
t!(format!("{:}", 1.0f64), "1");
t!(format!("{:.3}", 1.0f64), "1.000");
t!(format!("{:10.3}", 1.0f64), " 1.000");
t!(format!("{:+10.3}", 1.0f64), " +1.000");
t!(format!("{:+10.3}", -1.0f64), " -1.000");
t!(format!("{:e}", 1.2345e6f32), "1.2345e6");
t!(format!("{:e}", 1.2345e6f64), "1.2345e6");
t!(format!("{:E}", 1.2345e6f64), "1.2345E6");
t!(format!("{:.3e}", 1.2345e6f64), "1.234e6");
t!(format!("{:10.3e}", 1.2345e6f64), " 1.234e6");
t!(format!("{:+10.3e}", 1.2345e6f64), " +1.234e6");
t!(format!("{:+10.3e}", -1.2345e6f64), " -1.234e6");
// Test that pointers don't get truncated.
{
let val = usize::MAX;
let exp = format!("{:#x}", val);
t!(format!("{:p}", val as *const isize), exp);
}
// Escaping
t!(format!("{{"), "{");
t!(format!("}}"), "}");
test_write();
test_print();
test_order();
// make sure that format! doesn't move out of local variables
let a: Box<_> = box 3;
format!("{}", a);
format!("{}", a);
// make sure that format! doesn't cause spurious unused-unsafe warnings when
// it's inside of an outer unsafe block
unsafe {
let a: isize = ::std::mem::transmute(3_usize);
format!("{}", a);
}
test_format_args();
// test that trailing commas are acceptable
format!("{}", "test",);
format!("{foo}", foo="test",);
}
// Basic test to make sure that we can invoke the `write!` macro with an
// fmt::Write instance.
fn test_write() {
use std::fmt::Write;
let mut buf = String::new();
write!(&mut buf, "{}", 3);
{
let w = &mut buf;
write!(w, "{foo}", foo=4);
write!(w, "{}", "hello");
writeln!(w, "{}", "line");
writeln!(w, "{foo}", foo="bar");
}
t!(buf, "34helloline\nbar\n");
}
// Just make sure that the macros are defined, there's not really a lot that we
// can do with them just yet (to test the output)
fn test_print() {
print!("hi");
print!("{:?}", vec!(0u8));
println!("hello");
println!("this is a {}", "test");
println!("{foo}", foo="bar");
}
// Just make sure that the macros are defined, there's not really a lot that we
// can do with them just yet (to test the output)
fn test_format_args() {
use std::fmt::Write;
let mut buf = String::new();
{
let w = &mut buf;
write!(w, "{}", format_args!("{}", 1));
write!(w, "{}", format_args!("test"));
write!(w, "{}", format_args!("{test}", test=3));
}
let s = buf;
t!(s, "1test3");
let s = fmt::format(format_args!("hello {}", "world"));
t!(s, "hello world");
let s = format!("{}: {}", "args were", format_args!("hello {}", "world"));
t!(s, "args were: hello world");
}
fn test_order() {
// Make sure format!() arguments are always evaluated in a left-to-right
// ordering
fn foo() -> isize {
static mut FOO: isize = 0;
unsafe {
FOO += 1;
FOO
}
}
assert_eq!(format!("{} {} {a} {b} {} {c}",
foo(), foo(), foo(), a=foo(), b=foo(), c=foo()),
"1 2 4 5 3 6".to_string());
}
| fmt | identifier_name |
generate.rs | // Copyright (c) 2016 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
|
use common::ui::UI;
use hcore::crypto::SymKey;
use error::Result;
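/// Generates a new ring key pair for `ring` and writes it to the key cache at `cache`.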
pub fn start(ui: &mut UI, ring: &str, cache: &Path) -> Result<()> {
ui.begin(format!("Generating ring key for {}", &ring))?;
let pair = SymKey::generate_pair_for_ring(ring)?;
pair.to_pair_files(cache)?;
ui.end(format!(
"Generated ring key pair {}.",
&pair.name_with_rev()
))?;
Ok(())
} | use std::path::Path; | random_line_split |
generate.rs | // Copyright (c) 2016 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::path::Path;
use common::ui::UI;
use hcore::crypto::SymKey;
use error::Result;
pub fn start(ui: &mut UI, ring: &str, cache: &Path) -> Result<()> | {
ui.begin(format!("Generating ring key for {}", &ring))?;
let pair = SymKey::generate_pair_for_ring(ring)?;
pair.to_pair_files(cache)?;
ui.end(format!(
"Generated ring key pair {}.",
&pair.name_with_rev()
))?;
Ok(())
} | identifier_body |
|
generate.rs | // Copyright (c) 2016 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::path::Path;
use common::ui::UI;
use hcore::crypto::SymKey;
use error::Result;
pub fn | (ui: &mut UI, ring: &str, cache: &Path) -> Result<()> {
ui.begin(format!("Generating ring key for {}", &ring))?;
let pair = SymKey::generate_pair_for_ring(ring)?;
pair.to_pair_files(cache)?;
ui.end(format!(
"Generated ring key pair {}.",
&pair.name_with_rev()
))?;
Ok(())
}
| start | identifier_name |
script_runtime.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The script runtime contains common traits and structs commonly used by the
//! script thread, the dom, and the worker threads.
use dom::bindings::codegen::Bindings::PromiseBinding::PromiseJobCallback;
use dom::bindings::refcounted::{LiveDOMReferences, trace_refcounted_objects};
use dom::bindings::root::trace_roots;
use dom::bindings::settings_stack;
use dom::bindings::trace::{JSTraceable, trace_traceables};
use dom::bindings::utils::DOM_CALLBACKS;
use dom::globalscope::GlobalScope;
use js::glue::CollectServoSizes;
use js::jsapi::{DisableIncrementalGC, GCDescription, GCProgress, HandleObject};
use js::jsapi::{JSContext, JS_GetRuntime, JSRuntime, JSTracer, SetDOMCallbacks, SetGCSliceCallback};
use js::jsapi::{JSGCInvocationKind, JSGCStatus, JS_AddExtraGCRootsTracer, JS_SetGCCallback};
use js::jsapi::{JSGCMode, JSGCParamKey, JS_SetGCParameter, JS_SetGlobalJitCompilerOption};
use js::jsapi::{JSJitCompilerOption, JS_SetOffthreadIonCompilationEnabled, JS_SetParallelParsingEnabled};
use js::jsapi::{JSObject, RuntimeOptionsRef, SetPreserveWrapperCallback, SetEnqueuePromiseJobCallback};
use js::panic::wrap_panic;
use js::rust::Runtime as RustRuntime;
use microtask::{EnqueuedPromiseCallback, Microtask};
use profile_traits::mem::{Report, ReportKind, ReportsChan};
use script_thread::trace_thread;
use servo_config::opts;
use servo_config::prefs::PREFS;
use std::cell::Cell;
use std::fmt;
use std::io::{Write, stdout};
use std::ops::Deref;
use std::os;
use std::os::raw::c_void;
use std::panic::AssertUnwindSafe;
use std::ptr;
use style::thread_state;
use task::TaskBox;
use time::{Tm, now};
/// Common messages used to control the event loops in both the script and the worker
pub enum CommonScriptMsg {
/// Requests that the script thread measure its memory usage. The results are sent back via the
/// supplied channel.
CollectReports(ReportsChan),
/// Generic message that encapsulates event handling.
Task(ScriptThreadEventCategory, Box<TaskBox>),
}
impl fmt::Debug for CommonScriptMsg {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
CommonScriptMsg::CollectReports(_) => write!(f, "CollectReports(...)"),
CommonScriptMsg::Task(ref category, ref task) => {
f.debug_tuple("Task").field(category).field(task).finish()
},
}
}
}
/// A cloneable interface for communicating with an event loop.
pub trait ScriptChan: JSTraceable {
/// Send a message to the associated event loop.
fn send(&self, msg: CommonScriptMsg) -> Result<(), ()>;
/// Clone this handle.
fn clone(&self) -> Box<ScriptChan + Send>;
}
#[derive(Clone, Copy, Debug, Eq, Hash, JSTraceable, PartialEq)]
pub enum ScriptThreadEventCategory {
AttachLayout,
ConstellationMsg,
DevtoolsMsg,
DocumentEvent,
DomEvent,
FileRead,
FormPlannedNavigation,
ImageCacheMsg,
InputEvent,
NetworkEvent,
Resize,
ScriptEvent,
SetScrollState,
SetViewport,
StylesheetLoad,
TimerEvent,
UpdateReplacedElement,
WebSocketEvent,
WorkerEvent,
WorkletEvent,
ServiceWorkerEvent,
EnterFullscreen,
ExitFullscreen,
WebVREvent,
PerformanceTimelineTask,
}
/// An interface for receiving ScriptMsg values in an event loop. Used for synchronous DOM
/// APIs that need to abstract over multiple kinds of event loops (worker/main thread) with
/// different Receiver interfaces.
pub trait ScriptPort {
fn recv(&self) -> Result<CommonScriptMsg, ()>;
}
/// SM callback for promise job resolution. Adds a promise callback to the current
/// global's microtask queue.
#[allow(unsafe_code)]
unsafe extern "C" fn enqueue_job(cx: *mut JSContext,
job: HandleObject,
_allocation_site: HandleObject,
_data: *mut c_void) -> bool {
wrap_panic(AssertUnwindSafe(|| {
//XXXjdm - use a different global now?
let global = GlobalScope::from_object(job.get());
let pipeline = global.pipeline_id();
global.enqueue_microtask(Microtask::Promise(EnqueuedPromiseCallback {
callback: PromiseJobCallback::new(cx, job.get()),
pipeline: pipeline,
}));
true
}), false)
}
#[derive(JSTraceable)]
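/// Owns the SpiderMonkey runtime for this thread; dropping it clears the thread-active flag.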
pub struct Runtime(RustRuntime);
impl Drop for Runtime {
fn drop(&mut self) {
THREAD_ACTIVE.with(|t| t.set(false));
}
}
impl Deref for Runtime {
type Target = RustRuntime;
fn deref(&self) -> &RustRuntime {
&self.0
}
}
#[allow(unsafe_code)]
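/// Creates and configures a new JS runtime: registers GC root tracing and callbacks, DOM hooks,
/// the promise-job queue, and applies JIT and GC tuning options from the active preferences.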
pub unsafe fn new_rt_and_cx() -> Runtime {
LiveDOMReferences::initialize();
let runtime = RustRuntime::new().unwrap();
JS_AddExtraGCRootsTracer(runtime.rt(), Some(trace_rust_roots), ptr::null_mut());
// Needed for debug assertions about whether GC is running.
if cfg!(debug_assertions) {
JS_SetGCCallback(runtime.rt(), Some(debug_gc_callback), ptr::null_mut());
}
if opts::get().gc_profile {
SetGCSliceCallback(runtime.rt(), Some(gc_slice_callback));
}
unsafe extern "C" fn empty_wrapper_callback(_: *mut JSContext, _: *mut JSObject) -> bool { true }
SetDOMCallbacks(runtime.rt(), &DOM_CALLBACKS);
SetPreserveWrapperCallback(runtime.rt(), Some(empty_wrapper_callback));
// Pre barriers aren't working correctly at the moment
DisableIncrementalGC(runtime.rt());
SetEnqueuePromiseJobCallback(runtime.rt(), Some(enqueue_job), ptr::null_mut());
set_gc_zeal_options(runtime.rt());
// Enable or disable the JITs.
let rt_opts = &mut *RuntimeOptionsRef(runtime.rt());
if let Some(val) = PREFS.get("js.baseline.enabled").as_boolean() {
rt_opts.set_baseline_(val);
}
if let Some(val) = PREFS.get("js.ion.enabled").as_boolean() {
rt_opts.set_ion_(val);
}
if let Some(val) = PREFS.get("js.asmjs.enabled").as_boolean() {
rt_opts.set_asmJS_(val);
}
if let Some(val) = PREFS.get("js.strict.enabled").as_boolean() {
rt_opts.set_extraWarnings_(val);
}
// TODO: handle js.strict.debug.enabled
// TODO: handle js.throw_on_asmjs_validation_failure (needs new Spidermonkey)
if let Some(val) = PREFS.get("js.native_regexp.enabled").as_boolean() {
rt_opts.set_nativeRegExp_(val);
}
if let Some(val) = PREFS.get("js.parallel_parsing.enabled").as_boolean() {
JS_SetParallelParsingEnabled(runtime.rt(), val);
}
if let Some(val) = PREFS.get("js.offthread_compilation_enabled").as_boolean() {
JS_SetOffthreadIonCompilationEnabled(runtime.rt(), val);
}
if let Some(val) = PREFS.get("js.baseline.unsafe_eager_compilation.enabled").as_boolean() {
let trigger: i32 = if val {
0
} else {
-1
};
JS_SetGlobalJitCompilerOption(runtime.rt(),
JSJitCompilerOption::JSJITCOMPILER_BASELINE_WARMUP_TRIGGER,
trigger as u32);
}
if let Some(val) = PREFS.get("js.ion.unsafe_eager_compilation.enabled").as_boolean() {
let trigger: i64 = if val {
0
} else {
-1
};
JS_SetGlobalJitCompilerOption(runtime.rt(),
JSJitCompilerOption::JSJITCOMPILER_ION_WARMUP_TRIGGER,
trigger as u32);
}
// TODO: handle js.discard_system_source.enabled
// TODO: handle js.asyncstack.enabled (needs new Spidermonkey)
// TODO: handle js.throw_on_debugee_would_run (needs new Spidermonkey)
// TODO: handle js.dump_stack_on_debugee_would_run (needs new Spidermonkey)
if let Some(val) = PREFS.get("js.werror.enabled").as_boolean() {
rt_opts.set_werror_(val);
}
// TODO: handle js.shared_memory.enabled
if let Some(val) = PREFS.get("js.mem.high_water_mark").as_i64() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MAX_MALLOC_BYTES, val as u32 * 1024 * 1024);
}
if let Some(val) = PREFS.get("js.mem.max").as_i64() {
let max = if val <= 0 || val >= 0x1000 {
-1
} else {
val * 1024 * 1024
};
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MAX_BYTES, max as u32);
}
// NOTE: This is disabled above, so enabling it here will do nothing for now.
if let Some(val) = PREFS.get("js.mem.gc.incremental.enabled").as_boolean() {
let compartment = if let Some(val) = PREFS.get("js.mem.gc.per_compartment.enabled").as_boolean() {
val
} else {
false
};
let mode = if val {
JSGCMode::JSGC_MODE_INCREMENTAL
} else if compartment {
JSGCMode::JSGC_MODE_COMPARTMENT
} else {
JSGCMode::JSGC_MODE_GLOBAL
};
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MODE, mode as u32);
}
if let Some(val) = PREFS.get("js.mem.gc.incremental.slice_ms").as_i64() {
if val >= 0 && val < 100000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_SLICE_TIME_BUDGET, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.compacting.enabled").as_boolean() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_COMPACTING_ENABLED, val as u32);
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_time_limit_ms").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_TIME_LIMIT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.dynamic_mark_slice.enabled").as_boolean() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_DYNAMIC_MARK_SLICE, val as u32);
}
// TODO: handle js.mem.gc.refresh_frame_slices.enabled
if let Some(val) = PREFS.get("js.mem.gc.dynamic_heap_growth.enabled").as_boolean() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_DYNAMIC_HEAP_GROWTH, val as u32);
}
if let Some(val) = PREFS.get("js.mem.gc.low_frequency_heap_growth").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_LOW_FREQUENCY_HEAP_GROWTH, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_heap_growth_min").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_heap_growth_max").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_low_limit_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_LOW_LIMIT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_high_limit_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_HIGH_LIMIT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.allocation_threshold_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_ALLOCATION_THRESHOLD, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.decommit_threshold_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_DECOMMIT_THRESHOLD, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.empty_chunk_count_min").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MIN_EMPTY_CHUNK_COUNT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.empty_chunk_count_max").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MAX_EMPTY_CHUNK_COUNT, val as u32);
}
}
Runtime(runtime)
}
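// Editor's sketch, not part of the original source: a thread that owns the JS
// engine calls the unsafe constructor above exactly once and keeps the returned
// Runtime alive for as long as any object created from it; dropping it flips
// THREAD_ACTIVE so the custom root tracer later in this file becomes a no-op.
#[allow(unsafe_code, dead_code)]
fn example_create_runtime() -> Runtime {
    unsafe { new_rt_and_cx() }
}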
#[allow(unsafe_code)]
pub fn get_reports(cx: *mut JSContext, path_seg: String) -> Vec<Report> {
let mut reports = vec![];
unsafe {
let rt = JS_GetRuntime(cx);
let mut stats = ::std::mem::zeroed();
if CollectServoSizes(rt, &mut stats) {
let mut report = |mut path_suffix, kind, size| {
let mut path = path![path_seg, "js"];
path.append(&mut path_suffix);
reports.push(Report {
path: path,
kind: kind,
size: size as usize,
})
};
// A note about possibly confusing terminology: the JS GC "heap" is allocated via
// mmap/VirtualAlloc, which means it's not on the malloc "heap", so we use
// `ExplicitNonHeapSize` as its kind.
report(path!["gc-heap", "used"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapUsed);
report(path!["gc-heap", "unused"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapUnused);
report(path!["gc-heap", "admin"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapAdmin);
report(path!["gc-heap", "decommitted"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapDecommitted);
// SpiderMonkey uses the system heap, not jemalloc.
report(path!["malloc-heap"],
ReportKind::ExplicitSystemHeapSize,
stats.mallocHeap);
report(path!["non-heap"],
ReportKind::ExplicitNonHeapSize,
stats.nonHeap);
}
}
reports
}
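// Editor's sketch of servicing the CollectReports message with the function
// above. That ReportsChan exposes a `send` taking the Vec<Report> is an
// assumption here; its definition lives in profile_traits and is not shown.
#[allow(dead_code)]
fn example_collect_reports(cx: *mut JSContext, msg: CommonScriptMsg) {
    if let CommonScriptMsg::CollectReports(reports_chan) = msg {
        let reports = get_reports(cx, "example-page".to_string());
        reports_chan.send(reports);
    }
}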
thread_local!(static GC_CYCLE_START: Cell<Option<Tm>> = Cell::new(None));
thread_local!(static GC_SLICE_START: Cell<Option<Tm>> = Cell::new(None));
#[allow(unsafe_code)]
unsafe extern "C" fn gc_slice_callback(_rt: *mut JSRuntime, progress: GCProgress, desc: *const GCDescription) {
match progress {
GCProgress::GC_CYCLE_BEGIN => {
GC_CYCLE_START.with(|start| {
start.set(Some(now()));
println!("GC cycle began");
})
},
GCProgress::GC_SLICE_BEGIN => {
GC_SLICE_START.with(|start| {
start.set(Some(now()));
println!("GC slice began");
})
},
GCProgress::GC_SLICE_END => {
GC_SLICE_START.with(|start| {
let dur = now() - start.get().unwrap();
start.set(None);
println!("GC slice ended: duration={}", dur);
})
},
GCProgress::GC_CYCLE_END => {
GC_CYCLE_START.with(|start| {
let dur = now() - start.get().unwrap();
start.set(None);
println!("GC cycle ended: duration={}", dur);
})
},
};
if !desc.is_null() {
let desc: &GCDescription = &*desc;
let invocation_kind = match desc.invocationKind_ {
JSGCInvocationKind::GC_NORMAL => "GC_NORMAL",
JSGCInvocationKind::GC_SHRINK => "GC_SHRINK",
};
println!(" isCompartment={}, invocation_kind={}", desc.isCompartment_, invocation_kind);
}
let _ = stdout().flush();
}
#[allow(unsafe_code)]
unsafe extern "C" fn debug_gc_callback(_rt: *mut JSRuntime, status: JSGCStatus, _data: *mut os::raw::c_void) {
match status {
JSGCStatus::JSGC_BEGIN => thread_state::enter(thread_state::IN_GC),
JSGCStatus::JSGC_END => thread_state::exit(thread_state::IN_GC),
}
}
thread_local!(
static THREAD_ACTIVE: Cell<bool> = Cell::new(true);
);
#[allow(unsafe_code)]
unsafe extern fn trace_rust_roots(tr: *mut JSTracer, _data: *mut os::raw::c_void) {
if !THREAD_ACTIVE.with(|t| t.get()) {
return;
}
debug!("starting custom root handler");
trace_thread(tr);
trace_traceables(tr);
trace_roots(tr);
trace_refcounted_objects(tr);
settings_stack::trace(tr);
debug!("done custom root handler");
}
#[allow(unsafe_code)]
#[cfg(feature = "debugmozjs")]
unsafe fn set_gc_zeal_options(rt: *mut JSRuntime) |
#[allow(unsafe_code)]
#[cfg(not(feature = "debugmozjs"))]
unsafe fn set_gc_zeal_options(_: *mut JSRuntime) {}
| {
use js::jsapi::{JS_DEFAULT_ZEAL_FREQ, JS_SetGCZeal};
let level = match PREFS.get("js.mem.gc.zeal.level").as_i64() {
Some(level @ 0...14) => level as u8,
_ => return,
};
let frequency = match PREFS.get("js.mem.gc.zeal.frequency").as_i64() {
Some(frequency) if frequency >= 0 => frequency as u32,
_ => JS_DEFAULT_ZEAL_FREQ,
};
JS_SetGCZeal(rt, level, frequency);
} | identifier_body |
script_runtime.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The script runtime contains common traits and structs commonly used by the
//! script thread, the dom, and the worker threads.
use dom::bindings::codegen::Bindings::PromiseBinding::PromiseJobCallback;
use dom::bindings::refcounted::{LiveDOMReferences, trace_refcounted_objects};
use dom::bindings::root::trace_roots;
use dom::bindings::settings_stack;
use dom::bindings::trace::{JSTraceable, trace_traceables};
use dom::bindings::utils::DOM_CALLBACKS;
use dom::globalscope::GlobalScope;
use js::glue::CollectServoSizes;
use js::jsapi::{DisableIncrementalGC, GCDescription, GCProgress, HandleObject};
use js::jsapi::{JSContext, JS_GetRuntime, JSRuntime, JSTracer, SetDOMCallbacks, SetGCSliceCallback};
use js::jsapi::{JSGCInvocationKind, JSGCStatus, JS_AddExtraGCRootsTracer, JS_SetGCCallback};
use js::jsapi::{JSGCMode, JSGCParamKey, JS_SetGCParameter, JS_SetGlobalJitCompilerOption};
use js::jsapi::{JSJitCompilerOption, JS_SetOffthreadIonCompilationEnabled, JS_SetParallelParsingEnabled};
use js::jsapi::{JSObject, RuntimeOptionsRef, SetPreserveWrapperCallback, SetEnqueuePromiseJobCallback};
use js::panic::wrap_panic;
use js::rust::Runtime as RustRuntime;
use microtask::{EnqueuedPromiseCallback, Microtask};
use profile_traits::mem::{Report, ReportKind, ReportsChan};
use script_thread::trace_thread;
use servo_config::opts;
use servo_config::prefs::PREFS;
use std::cell::Cell;
use std::fmt;
use std::io::{Write, stdout};
use std::ops::Deref;
use std::os;
use std::os::raw::c_void;
use std::panic::AssertUnwindSafe;
use std::ptr;
use style::thread_state;
use task::TaskBox;
use time::{Tm, now};
/// Common messages used to control the event loops in both the script and the worker
pub enum CommonScriptMsg {
/// Requests that the script thread measure its memory usage. The results are sent back via the
/// supplied channel.
CollectReports(ReportsChan),
/// Generic message that encapsulates event handling.
Task(ScriptThreadEventCategory, Box<TaskBox>),
}
impl fmt::Debug for CommonScriptMsg {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
CommonScriptMsg::CollectReports(_) => write!(f, "CollectReports(...)"),
CommonScriptMsg::Task(ref category, ref task) => {
f.debug_tuple("Task").field(category).field(task).finish()
},
}
}
}
/// A cloneable interface for communicating with an event loop.
pub trait ScriptChan: JSTraceable {
/// Send a message to the associated event loop.
fn send(&self, msg: CommonScriptMsg) -> Result<(), ()>;
/// Clone this handle.
fn clone(&self) -> Box<ScriptChan + Send>;
}
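// Editor's sketch (not in the original file): callers hold the channel as a
// boxed trait object, clone a handle for another component, and post messages
// through it. The ReportsChan argument is assumed to come from the memory
// profiler; an Err result just means the receiving event loop has shut down.
#[allow(dead_code)]
fn example_post_message(chan: &ScriptChan, reports_chan: ReportsChan) {
    let handle: Box<ScriptChan + Send> = chan.clone();
    let _ = handle.send(CommonScriptMsg::CollectReports(reports_chan));
}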
#[derive(Clone, Copy, Debug, Eq, Hash, JSTraceable, PartialEq)]
pub enum ScriptThreadEventCategory {
AttachLayout,
ConstellationMsg,
DevtoolsMsg,
DocumentEvent,
DomEvent,
FileRead,
FormPlannedNavigation,
ImageCacheMsg,
InputEvent,
NetworkEvent,
Resize,
ScriptEvent,
SetScrollState,
SetViewport,
StylesheetLoad,
TimerEvent,
UpdateReplacedElement,
WebSocketEvent,
WorkerEvent,
WorkletEvent,
ServiceWorkerEvent,
EnterFullscreen,
ExitFullscreen,
WebVREvent,
PerformanceTimelineTask,
}
/// An interface for receiving ScriptMsg values in an event loop. Used for synchronous DOM
/// APIs that need to abstract over multiple kinds of event loops (worker/main thread) with
/// different Receiver interfaces.
pub trait ScriptPort {
fn recv(&self) -> Result<CommonScriptMsg, ()>;
}
/// SM callback for promise job resolution. Adds a promise callback to the current
/// global's microtask queue.
#[allow(unsafe_code)]
unsafe extern "C" fn enqueue_job(cx: *mut JSContext,
job: HandleObject,
_allocation_site: HandleObject,
_data: *mut c_void) -> bool {
wrap_panic(AssertUnwindSafe(|| {
//XXXjdm - use a different global now?
let global = GlobalScope::from_object(job.get());
let pipeline = global.pipeline_id();
global.enqueue_microtask(Microtask::Promise(EnqueuedPromiseCallback {
callback: PromiseJobCallback::new(cx, job.get()),
pipeline: pipeline,
}));
true
}), false)
}
#[derive(JSTraceable)]
pub struct Runtime(RustRuntime);
impl Drop for Runtime {
fn drop(&mut self) {
THREAD_ACTIVE.with(|t| t.set(false));
}
}
impl Deref for Runtime { | fn deref(&self) -> &RustRuntime {
&self.0
}
}
#[allow(unsafe_code)]
pub unsafe fn new_rt_and_cx() -> Runtime {
LiveDOMReferences::initialize();
let runtime = RustRuntime::new().unwrap();
JS_AddExtraGCRootsTracer(runtime.rt(), Some(trace_rust_roots), ptr::null_mut());
// Needed for debug assertions about whether GC is running.
if cfg!(debug_assertions) {
JS_SetGCCallback(runtime.rt(), Some(debug_gc_callback), ptr::null_mut());
}
if opts::get().gc_profile {
SetGCSliceCallback(runtime.rt(), Some(gc_slice_callback));
}
unsafe extern "C" fn empty_wrapper_callback(_: *mut JSContext, _: *mut JSObject) -> bool { true }
SetDOMCallbacks(runtime.rt(), &DOM_CALLBACKS);
SetPreserveWrapperCallback(runtime.rt(), Some(empty_wrapper_callback));
// Pre barriers aren't working correctly at the moment
DisableIncrementalGC(runtime.rt());
SetEnqueuePromiseJobCallback(runtime.rt(), Some(enqueue_job), ptr::null_mut());
set_gc_zeal_options(runtime.rt());
// Enable or disable the JITs.
let rt_opts = &mut *RuntimeOptionsRef(runtime.rt());
if let Some(val) = PREFS.get("js.baseline.enabled").as_boolean() {
rt_opts.set_baseline_(val);
}
if let Some(val) = PREFS.get("js.ion.enabled").as_boolean() {
rt_opts.set_ion_(val);
}
if let Some(val) = PREFS.get("js.asmjs.enabled").as_boolean() {
rt_opts.set_asmJS_(val);
}
if let Some(val) = PREFS.get("js.strict.enabled").as_boolean() {
rt_opts.set_extraWarnings_(val);
}
// TODO: handle js.strict.debug.enabled
// TODO: handle js.throw_on_asmjs_validation_failure (needs new Spidermonkey)
if let Some(val) = PREFS.get("js.native_regexp.enabled").as_boolean() {
rt_opts.set_nativeRegExp_(val);
}
if let Some(val) = PREFS.get("js.parallel_parsing.enabled").as_boolean() {
JS_SetParallelParsingEnabled(runtime.rt(), val);
}
if let Some(val) = PREFS.get("js.offthread_compilation_enabled").as_boolean() {
JS_SetOffthreadIonCompilationEnabled(runtime.rt(), val);
}
if let Some(val) = PREFS.get("js.baseline.unsafe_eager_compilation.enabled").as_boolean() {
let trigger: i32 = if val {
0
} else {
-1
};
JS_SetGlobalJitCompilerOption(runtime.rt(),
JSJitCompilerOption::JSJITCOMPILER_BASELINE_WARMUP_TRIGGER,
trigger as u32);
}
if let Some(val) = PREFS.get("js.ion.unsafe_eager_compilation.enabled").as_boolean() {
let trigger: i64 = if val {
0
} else {
-1
};
JS_SetGlobalJitCompilerOption(runtime.rt(),
JSJitCompilerOption::JSJITCOMPILER_ION_WARMUP_TRIGGER,
trigger as u32);
}
// TODO: handle js.discard_system_source.enabled
// TODO: handle js.asyncstack.enabled (needs new Spidermonkey)
// TODO: handle js.throw_on_debugee_would_run (needs new Spidermonkey)
// TODO: handle js.dump_stack_on_debugee_would_run (needs new Spidermonkey)
if let Some(val) = PREFS.get("js.werror.enabled").as_boolean() {
rt_opts.set_werror_(val);
}
// TODO: handle js.shared_memory.enabled
if let Some(val) = PREFS.get("js.mem.high_water_mark").as_i64() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MAX_MALLOC_BYTES, val as u32 * 1024 * 1024);
}
if let Some(val) = PREFS.get("js.mem.max").as_i64() {
let max = if val <= 0 || val >= 0x1000 {
-1
} else {
val * 1024 * 1024
};
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MAX_BYTES, max as u32);
}
// NOTE: This is disabled above, so enabling it here will do nothing for now.
if let Some(val) = PREFS.get("js.mem.gc.incremental.enabled").as_boolean() {
let compartment = if let Some(val) = PREFS.get("js.mem.gc.per_compartment.enabled").as_boolean() {
val
} else {
false
};
let mode = if val {
JSGCMode::JSGC_MODE_INCREMENTAL
} else if compartment {
JSGCMode::JSGC_MODE_COMPARTMENT
} else {
JSGCMode::JSGC_MODE_GLOBAL
};
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MODE, mode as u32);
}
if let Some(val) = PREFS.get("js.mem.gc.incremental.slice_ms").as_i64() {
if val >= 0 && val < 100000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_SLICE_TIME_BUDGET, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.compacting.enabled").as_boolean() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_COMPACTING_ENABLED, val as u32);
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_time_limit_ms").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_TIME_LIMIT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.dynamic_mark_slice.enabled").as_boolean() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_DYNAMIC_MARK_SLICE, val as u32);
}
// TODO: handle js.mem.gc.refresh_frame_slices.enabled
if let Some(val) = PREFS.get("js.mem.gc.dynamic_heap_growth.enabled").as_boolean() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_DYNAMIC_HEAP_GROWTH, val as u32);
}
if let Some(val) = PREFS.get("js.mem.gc.low_frequency_heap_growth").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_LOW_FREQUENCY_HEAP_GROWTH, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_heap_growth_min").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_heap_growth_max").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_low_limit_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_LOW_LIMIT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_high_limit_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_HIGH_LIMIT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.allocation_threshold_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_ALLOCATION_THRESHOLD, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.decommit_threshold_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_DECOMMIT_THRESHOLD, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.empty_chunk_count_min").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MIN_EMPTY_CHUNK_COUNT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.empty_chunk_count_max").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MAX_EMPTY_CHUNK_COUNT, val as u32);
}
}
Runtime(runtime)
}
#[allow(unsafe_code)]
pub fn get_reports(cx: *mut JSContext, path_seg: String) -> Vec<Report> {
let mut reports = vec![];
unsafe {
let rt = JS_GetRuntime(cx);
let mut stats = ::std::mem::zeroed();
if CollectServoSizes(rt, &mut stats) {
let mut report = |mut path_suffix, kind, size| {
let mut path = path![path_seg, "js"];
path.append(&mut path_suffix);
reports.push(Report {
path: path,
kind: kind,
size: size as usize,
})
};
// A note about possibly confusing terminology: the JS GC "heap" is allocated via
// mmap/VirtualAlloc, which means it's not on the malloc "heap", so we use
// `ExplicitNonHeapSize` as its kind.
report(path!["gc-heap", "used"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapUsed);
report(path!["gc-heap", "unused"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapUnused);
report(path!["gc-heap", "admin"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapAdmin);
report(path!["gc-heap", "decommitted"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapDecommitted);
// SpiderMonkey uses the system heap, not jemalloc.
report(path!["malloc-heap"],
ReportKind::ExplicitSystemHeapSize,
stats.mallocHeap);
report(path!["non-heap"],
ReportKind::ExplicitNonHeapSize,
stats.nonHeap);
}
}
reports
}
thread_local!(static GC_CYCLE_START: Cell<Option<Tm>> = Cell::new(None));
thread_local!(static GC_SLICE_START: Cell<Option<Tm>> = Cell::new(None));
#[allow(unsafe_code)]
unsafe extern "C" fn gc_slice_callback(_rt: *mut JSRuntime, progress: GCProgress, desc: *const GCDescription) {
match progress {
GCProgress::GC_CYCLE_BEGIN => {
GC_CYCLE_START.with(|start| {
start.set(Some(now()));
println!("GC cycle began");
})
},
GCProgress::GC_SLICE_BEGIN => {
GC_SLICE_START.with(|start| {
start.set(Some(now()));
println!("GC slice began");
})
},
GCProgress::GC_SLICE_END => {
GC_SLICE_START.with(|start| {
let dur = now() - start.get().unwrap();
start.set(None);
println!("GC slice ended: duration={}", dur);
})
},
GCProgress::GC_CYCLE_END => {
GC_CYCLE_START.with(|start| {
let dur = now() - start.get().unwrap();
start.set(None);
println!("GC cycle ended: duration={}", dur);
})
},
};
if !desc.is_null() {
let desc: &GCDescription = &*desc;
let invocation_kind = match desc.invocationKind_ {
JSGCInvocationKind::GC_NORMAL => "GC_NORMAL",
JSGCInvocationKind::GC_SHRINK => "GC_SHRINK",
};
println!(" isCompartment={}, invocation_kind={}", desc.isCompartment_, invocation_kind);
}
let _ = stdout().flush();
}
#[allow(unsafe_code)]
unsafe extern "C" fn debug_gc_callback(_rt: *mut JSRuntime, status: JSGCStatus, _data: *mut os::raw::c_void) {
match status {
JSGCStatus::JSGC_BEGIN => thread_state::enter(thread_state::IN_GC),
JSGCStatus::JSGC_END => thread_state::exit(thread_state::IN_GC),
}
}
thread_local!(
static THREAD_ACTIVE: Cell<bool> = Cell::new(true);
);
#[allow(unsafe_code)]
unsafe extern fn trace_rust_roots(tr: *mut JSTracer, _data: *mut os::raw::c_void) {
if !THREAD_ACTIVE.with(|t| t.get()) {
return;
}
debug!("starting custom root handler");
trace_thread(tr);
trace_traceables(tr);
trace_roots(tr);
trace_refcounted_objects(tr);
settings_stack::trace(tr);
debug!("done custom root handler");
}
#[allow(unsafe_code)]
#[cfg(feature = "debugmozjs")]
unsafe fn set_gc_zeal_options(rt: *mut JSRuntime) {
use js::jsapi::{JS_DEFAULT_ZEAL_FREQ, JS_SetGCZeal};
let level = match PREFS.get("js.mem.gc.zeal.level").as_i64() {
Some(level @ 0...14) => level as u8,
_ => return,
};
let frequency = match PREFS.get("js.mem.gc.zeal.frequency").as_i64() {
Some(frequency) if frequency >= 0 => frequency as u32,
_ => JS_DEFAULT_ZEAL_FREQ,
};
JS_SetGCZeal(rt, level, frequency);
}
#[allow(unsafe_code)]
#[cfg(not(feature = "debugmozjs"))]
unsafe fn set_gc_zeal_options(_: *mut JSRuntime) {} | type Target = RustRuntime; | random_line_split |
script_runtime.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The script runtime contains common traits and structs commonly used by the
//! script thread, the dom, and the worker threads.
use dom::bindings::codegen::Bindings::PromiseBinding::PromiseJobCallback;
use dom::bindings::refcounted::{LiveDOMReferences, trace_refcounted_objects};
use dom::bindings::root::trace_roots;
use dom::bindings::settings_stack;
use dom::bindings::trace::{JSTraceable, trace_traceables};
use dom::bindings::utils::DOM_CALLBACKS;
use dom::globalscope::GlobalScope;
use js::glue::CollectServoSizes;
use js::jsapi::{DisableIncrementalGC, GCDescription, GCProgress, HandleObject};
use js::jsapi::{JSContext, JS_GetRuntime, JSRuntime, JSTracer, SetDOMCallbacks, SetGCSliceCallback};
use js::jsapi::{JSGCInvocationKind, JSGCStatus, JS_AddExtraGCRootsTracer, JS_SetGCCallback};
use js::jsapi::{JSGCMode, JSGCParamKey, JS_SetGCParameter, JS_SetGlobalJitCompilerOption};
use js::jsapi::{JSJitCompilerOption, JS_SetOffthreadIonCompilationEnabled, JS_SetParallelParsingEnabled};
use js::jsapi::{JSObject, RuntimeOptionsRef, SetPreserveWrapperCallback, SetEnqueuePromiseJobCallback};
use js::panic::wrap_panic;
use js::rust::Runtime as RustRuntime;
use microtask::{EnqueuedPromiseCallback, Microtask};
use profile_traits::mem::{Report, ReportKind, ReportsChan};
use script_thread::trace_thread;
use servo_config::opts;
use servo_config::prefs::PREFS;
use std::cell::Cell;
use std::fmt;
use std::io::{Write, stdout};
use std::ops::Deref;
use std::os;
use std::os::raw::c_void;
use std::panic::AssertUnwindSafe;
use std::ptr;
use style::thread_state;
use task::TaskBox;
use time::{Tm, now};
/// Common messages used to control the event loops in both the script and the worker
pub enum CommonScriptMsg {
/// Requests that the script thread measure its memory usage. The results are sent back via the
/// supplied channel.
CollectReports(ReportsChan),
/// Generic message that encapsulates event handling.
Task(ScriptThreadEventCategory, Box<TaskBox>),
}
impl fmt::Debug for CommonScriptMsg {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
CommonScriptMsg::CollectReports(_) => write!(f, "CollectReports(...)"),
CommonScriptMsg::Task(ref category, ref task) => {
f.debug_tuple("Task").field(category).field(task).finish()
},
}
}
}
/// A cloneable interface for communicating with an event loop.
pub trait ScriptChan: JSTraceable {
/// Send a message to the associated event loop.
fn send(&self, msg: CommonScriptMsg) -> Result<(), ()>;
/// Clone this handle.
fn clone(&self) -> Box<ScriptChan + Send>;
}
#[derive(Clone, Copy, Debug, Eq, Hash, JSTraceable, PartialEq)]
pub enum ScriptThreadEventCategory {
AttachLayout,
ConstellationMsg,
DevtoolsMsg,
DocumentEvent,
DomEvent,
FileRead,
FormPlannedNavigation,
ImageCacheMsg,
InputEvent,
NetworkEvent,
Resize,
ScriptEvent,
SetScrollState,
SetViewport,
StylesheetLoad,
TimerEvent,
UpdateReplacedElement,
WebSocketEvent,
WorkerEvent,
WorkletEvent,
ServiceWorkerEvent,
EnterFullscreen,
ExitFullscreen,
WebVREvent,
PerformanceTimelineTask,
}
/// An interface for receiving ScriptMsg values in an event loop. Used for synchronous DOM
/// APIs that need to abstract over multiple kinds of event loops (worker/main thread) with
/// different Receiver interfaces.
pub trait ScriptPort {
fn recv(&self) -> Result<CommonScriptMsg, ()>;
}
/// SM callback for promise job resolution. Adds a promise callback to the current
/// global's microtask queue.
#[allow(unsafe_code)]
unsafe extern "C" fn enqueue_job(cx: *mut JSContext,
job: HandleObject,
_allocation_site: HandleObject,
_data: *mut c_void) -> bool {
wrap_panic(AssertUnwindSafe(|| {
//XXXjdm - use a different global now?
let global = GlobalScope::from_object(job.get());
let pipeline = global.pipeline_id();
global.enqueue_microtask(Microtask::Promise(EnqueuedPromiseCallback {
callback: PromiseJobCallback::new(cx, job.get()),
pipeline: pipeline,
}));
true
}), false)
}
#[derive(JSTraceable)]
pub struct Runtime(RustRuntime);
impl Drop for Runtime {
fn drop(&mut self) {
THREAD_ACTIVE.with(|t| t.set(false));
}
}
impl Deref for Runtime {
type Target = RustRuntime;
fn deref(&self) -> &RustRuntime {
&self.0
}
}
#[allow(unsafe_code)]
pub unsafe fn new_rt_and_cx() -> Runtime {
LiveDOMReferences::initialize();
let runtime = RustRuntime::new().unwrap();
JS_AddExtraGCRootsTracer(runtime.rt(), Some(trace_rust_roots), ptr::null_mut());
// Needed for debug assertions about whether GC is running.
if cfg!(debug_assertions) {
JS_SetGCCallback(runtime.rt(), Some(debug_gc_callback), ptr::null_mut());
}
if opts::get().gc_profile {
SetGCSliceCallback(runtime.rt(), Some(gc_slice_callback));
}
unsafe extern "C" fn empty_wrapper_callback(_: *mut JSContext, _: *mut JSObject) -> bool { true }
SetDOMCallbacks(runtime.rt(), &DOM_CALLBACKS);
SetPreserveWrapperCallback(runtime.rt(), Some(empty_wrapper_callback));
// Pre barriers aren't working correctly at the moment
DisableIncrementalGC(runtime.rt());
SetEnqueuePromiseJobCallback(runtime.rt(), Some(enqueue_job), ptr::null_mut());
set_gc_zeal_options(runtime.rt());
// Enable or disable the JITs.
let rt_opts = &mut *RuntimeOptionsRef(runtime.rt());
if let Some(val) = PREFS.get("js.baseline.enabled").as_boolean() {
rt_opts.set_baseline_(val);
}
if let Some(val) = PREFS.get("js.ion.enabled").as_boolean() {
rt_opts.set_ion_(val);
}
if let Some(val) = PREFS.get("js.asmjs.enabled").as_boolean() {
rt_opts.set_asmJS_(val);
}
if let Some(val) = PREFS.get("js.strict.enabled").as_boolean() {
rt_opts.set_extraWarnings_(val);
}
// TODO: handle js.strict.debug.enabled
// TODO: handle js.throw_on_asmjs_validation_failure (needs new Spidermonkey)
if let Some(val) = PREFS.get("js.native_regexp.enabled").as_boolean() {
rt_opts.set_nativeRegExp_(val);
}
if let Some(val) = PREFS.get("js.parallel_parsing.enabled").as_boolean() {
JS_SetParallelParsingEnabled(runtime.rt(), val);
}
if let Some(val) = PREFS.get("js.offthread_compilation_enabled").as_boolean() {
JS_SetOffthreadIonCompilationEnabled(runtime.rt(), val);
}
if let Some(val) = PREFS.get("js.baseline.unsafe_eager_compilation.enabled").as_boolean() {
let trigger: i32 = if val {
0
} else {
-1
};
JS_SetGlobalJitCompilerOption(runtime.rt(),
JSJitCompilerOption::JSJITCOMPILER_BASELINE_WARMUP_TRIGGER,
trigger as u32);
}
if let Some(val) = PREFS.get("js.ion.unsafe_eager_compilation.enabled").as_boolean() {
let trigger: i64 = if val {
0
} else {
-1
};
JS_SetGlobalJitCompilerOption(runtime.rt(),
JSJitCompilerOption::JSJITCOMPILER_ION_WARMUP_TRIGGER,
trigger as u32);
}
// TODO: handle js.discard_system_source.enabled
// TODO: handle js.asyncstack.enabled (needs new Spidermonkey)
// TODO: handle js.throw_on_debugee_would_run (needs new Spidermonkey)
// TODO: handle js.dump_stack_on_debugee_would_run (needs new Spidermonkey)
if let Some(val) = PREFS.get("js.werror.enabled").as_boolean() {
rt_opts.set_werror_(val);
}
// TODO: handle js.shared_memory.enabled
if let Some(val) = PREFS.get("js.mem.high_water_mark").as_i64() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MAX_MALLOC_BYTES, val as u32 * 1024 * 1024);
}
if let Some(val) = PREFS.get("js.mem.max").as_i64() {
let max = if val <= 0 || val >= 0x1000 {
-1
} else {
val * 1024 * 1024
};
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MAX_BYTES, max as u32);
}
// NOTE: This is disabled above, so enabling it here will do nothing for now.
if let Some(val) = PREFS.get("js.mem.gc.incremental.enabled").as_boolean() {
let compartment = if let Some(val) = PREFS.get("js.mem.gc.per_compartment.enabled").as_boolean() {
val
} else {
false
};
let mode = if val {
JSGCMode::JSGC_MODE_INCREMENTAL
} else if compartment {
JSGCMode::JSGC_MODE_COMPARTMENT
} else {
JSGCMode::JSGC_MODE_GLOBAL
};
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MODE, mode as u32);
}
if let Some(val) = PREFS.get("js.mem.gc.incremental.slice_ms").as_i64() {
if val >= 0 && val < 100000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_SLICE_TIME_BUDGET, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.compacting.enabled").as_boolean() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_COMPACTING_ENABLED, val as u32);
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_time_limit_ms").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_TIME_LIMIT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.dynamic_mark_slice.enabled").as_boolean() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_DYNAMIC_MARK_SLICE, val as u32);
}
// TODO: handle js.mem.gc.refresh_frame_slices.enabled
if let Some(val) = PREFS.get("js.mem.gc.dynamic_heap_growth.enabled").as_boolean() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_DYNAMIC_HEAP_GROWTH, val as u32);
}
if let Some(val) = PREFS.get("js.mem.gc.low_frequency_heap_growth").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_LOW_FREQUENCY_HEAP_GROWTH, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_heap_growth_min").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_heap_growth_max").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_low_limit_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_LOW_LIMIT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_high_limit_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_HIGH_LIMIT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.allocation_threshold_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_ALLOCATION_THRESHOLD, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.decommit_threshold_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_DECOMMIT_THRESHOLD, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.empty_chunk_count_min").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MIN_EMPTY_CHUNK_COUNT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.empty_chunk_count_max").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MAX_EMPTY_CHUNK_COUNT, val as u32);
}
}
Runtime(runtime)
}
#[allow(unsafe_code)]
pub fn get_reports(cx: *mut JSContext, path_seg: String) -> Vec<Report> {
let mut reports = vec![];
unsafe {
let rt = JS_GetRuntime(cx);
let mut stats = ::std::mem::zeroed();
if CollectServoSizes(rt, &mut stats) {
let mut report = |mut path_suffix, kind, size| {
let mut path = path![path_seg, "js"];
path.append(&mut path_suffix);
reports.push(Report {
path: path,
kind: kind,
size: size as usize,
})
};
// A note about possibly confusing terminology: the JS GC "heap" is allocated via
// mmap/VirtualAlloc, which means it's not on the malloc "heap", so we use
// `ExplicitNonHeapSize` as its kind.
report(path!["gc-heap", "used"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapUsed);
report(path!["gc-heap", "unused"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapUnused);
report(path!["gc-heap", "admin"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapAdmin);
report(path!["gc-heap", "decommitted"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapDecommitted);
// SpiderMonkey uses the system heap, not jemalloc.
report(path!["malloc-heap"],
ReportKind::ExplicitSystemHeapSize,
stats.mallocHeap);
report(path!["non-heap"],
ReportKind::ExplicitNonHeapSize,
stats.nonHeap);
}
}
reports
}
thread_local!(static GC_CYCLE_START: Cell<Option<Tm>> = Cell::new(None));
thread_local!(static GC_SLICE_START: Cell<Option<Tm>> = Cell::new(None));
#[allow(unsafe_code)]
unsafe extern "C" fn gc_slice_callback(_rt: *mut JSRuntime, progress: GCProgress, desc: *const GCDescription) {
match progress {
GCProgress::GC_CYCLE_BEGIN => {
GC_CYCLE_START.with(|start| {
start.set(Some(now()));
println!("GC cycle began");
})
},
GCProgress::GC_SLICE_BEGIN => {
GC_SLICE_START.with(|start| {
start.set(Some(now()));
println!("GC slice began");
})
},
GCProgress::GC_SLICE_END => {
GC_SLICE_START.with(|start| {
let dur = now() - start.get().unwrap();
start.set(None);
println!("GC slice ended: duration={}", dur);
})
},
GCProgress::GC_CYCLE_END => {
GC_CYCLE_START.with(|start| {
let dur = now() - start.get().unwrap();
start.set(None);
println!("GC cycle ended: duration={}", dur);
})
},
};
if !desc.is_null() {
let desc: &GCDescription = &*desc;
let invocation_kind = match desc.invocationKind_ {
JSGCInvocationKind::GC_NORMAL => "GC_NORMAL",
JSGCInvocationKind::GC_SHRINK => "GC_SHRINK",
};
println!(" isCompartment={}, invocation_kind={}", desc.isCompartment_, invocation_kind);
}
let _ = stdout().flush();
}
#[allow(unsafe_code)]
unsafe extern "C" fn | (_rt: *mut JSRuntime, status: JSGCStatus, _data: *mut os::raw::c_void) {
match status {
JSGCStatus::JSGC_BEGIN => thread_state::enter(thread_state::IN_GC),
JSGCStatus::JSGC_END => thread_state::exit(thread_state::IN_GC),
}
}
thread_local!(
static THREAD_ACTIVE: Cell<bool> = Cell::new(true);
);
#[allow(unsafe_code)]
unsafe extern fn trace_rust_roots(tr: *mut JSTracer, _data: *mut os::raw::c_void) {
if !THREAD_ACTIVE.with(|t| t.get()) {
return;
}
debug!("starting custom root handler");
trace_thread(tr);
trace_traceables(tr);
trace_roots(tr);
trace_refcounted_objects(tr);
settings_stack::trace(tr);
debug!("done custom root handler");
}
#[allow(unsafe_code)]
#[cfg(feature = "debugmozjs")]
unsafe fn set_gc_zeal_options(rt: *mut JSRuntime) {
use js::jsapi::{JS_DEFAULT_ZEAL_FREQ, JS_SetGCZeal};
let level = match PREFS.get("js.mem.gc.zeal.level").as_i64() {
Some(level @ 0...14) => level as u8,
_ => return,
};
let frequency = match PREFS.get("js.mem.gc.zeal.frequency").as_i64() {
Some(frequency) if frequency >= 0 => frequency as u32,
_ => JS_DEFAULT_ZEAL_FREQ,
};
JS_SetGCZeal(rt, level, frequency);
}
#[allow(unsafe_code)]
#[cfg(not(feature = "debugmozjs"))]
unsafe fn set_gc_zeal_options(_: *mut JSRuntime) {}
| debug_gc_callback | identifier_name |
script_runtime.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The script runtime contains common traits and structs commonly used by the
//! script thread, the dom, and the worker threads.
use dom::bindings::codegen::Bindings::PromiseBinding::PromiseJobCallback;
use dom::bindings::refcounted::{LiveDOMReferences, trace_refcounted_objects};
use dom::bindings::root::trace_roots;
use dom::bindings::settings_stack;
use dom::bindings::trace::{JSTraceable, trace_traceables};
use dom::bindings::utils::DOM_CALLBACKS;
use dom::globalscope::GlobalScope;
use js::glue::CollectServoSizes;
use js::jsapi::{DisableIncrementalGC, GCDescription, GCProgress, HandleObject};
use js::jsapi::{JSContext, JS_GetRuntime, JSRuntime, JSTracer, SetDOMCallbacks, SetGCSliceCallback};
use js::jsapi::{JSGCInvocationKind, JSGCStatus, JS_AddExtraGCRootsTracer, JS_SetGCCallback};
use js::jsapi::{JSGCMode, JSGCParamKey, JS_SetGCParameter, JS_SetGlobalJitCompilerOption};
use js::jsapi::{JSJitCompilerOption, JS_SetOffthreadIonCompilationEnabled, JS_SetParallelParsingEnabled};
use js::jsapi::{JSObject, RuntimeOptionsRef, SetPreserveWrapperCallback, SetEnqueuePromiseJobCallback};
use js::panic::wrap_panic;
use js::rust::Runtime as RustRuntime;
use microtask::{EnqueuedPromiseCallback, Microtask};
use profile_traits::mem::{Report, ReportKind, ReportsChan};
use script_thread::trace_thread;
use servo_config::opts;
use servo_config::prefs::PREFS;
use std::cell::Cell;
use std::fmt;
use std::io::{Write, stdout};
use std::ops::Deref;
use std::os;
use std::os::raw::c_void;
use std::panic::AssertUnwindSafe;
use std::ptr;
use style::thread_state;
use task::TaskBox;
use time::{Tm, now};
/// Common messages used to control the event loops in both the script and the worker
pub enum CommonScriptMsg {
/// Requests that the script thread measure its memory usage. The results are sent back via the
/// supplied channel.
CollectReports(ReportsChan),
/// Generic message that encapsulates event handling.
Task(ScriptThreadEventCategory, Box<TaskBox>),
}
impl fmt::Debug for CommonScriptMsg {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
CommonScriptMsg::CollectReports(_) => write!(f, "CollectReports(...)"),
CommonScriptMsg::Task(ref category, ref task) => {
f.debug_tuple("Task").field(category).field(task).finish()
},
}
}
}
/// A cloneable interface for communicating with an event loop.
pub trait ScriptChan: JSTraceable {
/// Send a message to the associated event loop.
fn send(&self, msg: CommonScriptMsg) -> Result<(), ()>;
/// Clone this handle.
fn clone(&self) -> Box<ScriptChan + Send>;
}
#[derive(Clone, Copy, Debug, Eq, Hash, JSTraceable, PartialEq)]
pub enum ScriptThreadEventCategory {
AttachLayout,
ConstellationMsg,
DevtoolsMsg,
DocumentEvent,
DomEvent,
FileRead,
FormPlannedNavigation,
ImageCacheMsg,
InputEvent,
NetworkEvent,
Resize,
ScriptEvent,
SetScrollState,
SetViewport,
StylesheetLoad,
TimerEvent,
UpdateReplacedElement,
WebSocketEvent,
WorkerEvent,
WorkletEvent,
ServiceWorkerEvent,
EnterFullscreen,
ExitFullscreen,
WebVREvent,
PerformanceTimelineTask,
}
/// An interface for receiving ScriptMsg values in an event loop. Used for synchronous DOM
/// APIs that need to abstract over multiple kinds of event loops (worker/main thread) with
/// different Receiver interfaces.
pub trait ScriptPort {
fn recv(&self) -> Result<CommonScriptMsg, ()>;
}
/// SM callback for promise job resolution. Adds a promise callback to the current
/// global's microtask queue.
#[allow(unsafe_code)]
unsafe extern "C" fn enqueue_job(cx: *mut JSContext,
job: HandleObject,
_allocation_site: HandleObject,
_data: *mut c_void) -> bool {
wrap_panic(AssertUnwindSafe(|| {
//XXXjdm - use a different global now?
let global = GlobalScope::from_object(job.get());
let pipeline = global.pipeline_id();
global.enqueue_microtask(Microtask::Promise(EnqueuedPromiseCallback {
callback: PromiseJobCallback::new(cx, job.get()),
pipeline: pipeline,
}));
true
}), false)
}
#[derive(JSTraceable)]
pub struct Runtime(RustRuntime);
impl Drop for Runtime {
fn drop(&mut self) {
THREAD_ACTIVE.with(|t| t.set(false));
}
}
impl Deref for Runtime {
type Target = RustRuntime;
fn deref(&self) -> &RustRuntime {
&self.0
}
}
#[allow(unsafe_code)]
pub unsafe fn new_rt_and_cx() -> Runtime {
LiveDOMReferences::initialize();
let runtime = RustRuntime::new().unwrap();
JS_AddExtraGCRootsTracer(runtime.rt(), Some(trace_rust_roots), ptr::null_mut());
// Needed for debug assertions about whether GC is running.
if cfg!(debug_assertions) {
JS_SetGCCallback(runtime.rt(), Some(debug_gc_callback), ptr::null_mut());
}
if opts::get().gc_profile {
SetGCSliceCallback(runtime.rt(), Some(gc_slice_callback));
}
unsafe extern "C" fn empty_wrapper_callback(_: *mut JSContext, _: *mut JSObject) -> bool { true }
SetDOMCallbacks(runtime.rt(), &DOM_CALLBACKS);
SetPreserveWrapperCallback(runtime.rt(), Some(empty_wrapper_callback));
// Pre barriers aren't working correctly at the moment
DisableIncrementalGC(runtime.rt());
SetEnqueuePromiseJobCallback(runtime.rt(), Some(enqueue_job), ptr::null_mut());
set_gc_zeal_options(runtime.rt());
// Enable or disable the JITs.
let rt_opts = &mut *RuntimeOptionsRef(runtime.rt());
if let Some(val) = PREFS.get("js.baseline.enabled").as_boolean() {
rt_opts.set_baseline_(val);
}
if let Some(val) = PREFS.get("js.ion.enabled").as_boolean() {
rt_opts.set_ion_(val);
}
if let Some(val) = PREFS.get("js.asmjs.enabled").as_boolean() {
rt_opts.set_asmJS_(val);
}
if let Some(val) = PREFS.get("js.strict.enabled").as_boolean() {
rt_opts.set_extraWarnings_(val);
}
// TODO: handle js.strict.debug.enabled
// TODO: handle js.throw_on_asmjs_validation_failure (needs new Spidermonkey)
if let Some(val) = PREFS.get("js.native_regexp.enabled").as_boolean() {
rt_opts.set_nativeRegExp_(val);
}
if let Some(val) = PREFS.get("js.parallel_parsing.enabled").as_boolean() {
JS_SetParallelParsingEnabled(runtime.rt(), val);
}
if let Some(val) = PREFS.get("js.offthread_compilation_enabled").as_boolean() {
JS_SetOffthreadIonCompilationEnabled(runtime.rt(), val);
}
if let Some(val) = PREFS.get("js.baseline.unsafe_eager_compilation.enabled").as_boolean() {
let trigger: i32 = if val {
0
} else {
-1
};
JS_SetGlobalJitCompilerOption(runtime.rt(),
JSJitCompilerOption::JSJITCOMPILER_BASELINE_WARMUP_TRIGGER,
trigger as u32);
}
if let Some(val) = PREFS.get("js.ion.unsafe_eager_compilation.enabled").as_boolean() {
let trigger: i64 = if val {
0
} else {
-1
};
JS_SetGlobalJitCompilerOption(runtime.rt(),
JSJitCompilerOption::JSJITCOMPILER_ION_WARMUP_TRIGGER,
trigger as u32);
}
// TODO: handle js.discard_system_source.enabled
// TODO: handle js.asyncstack.enabled (needs new Spidermonkey)
// TODO: handle js.throw_on_debugee_would_run (needs new Spidermonkey)
// TODO: handle js.dump_stack_on_debugee_would_run (needs new Spidermonkey)
if let Some(val) = PREFS.get("js.werror.enabled").as_boolean() {
rt_opts.set_werror_(val);
}
// TODO: handle js.shared_memory.enabled
if let Some(val) = PREFS.get("js.mem.high_water_mark").as_i64() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MAX_MALLOC_BYTES, val as u32 * 1024 * 1024);
}
if let Some(val) = PREFS.get("js.mem.max").as_i64() {
let max = if val <= 0 || val >= 0x1000 {
-1
} else {
val * 1024 * 1024
};
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MAX_BYTES, max as u32);
}
// NOTE: This is disabled above, so enabling it here will do nothing for now.
if let Some(val) = PREFS.get("js.mem.gc.incremental.enabled").as_boolean() {
let compartment = if let Some(val) = PREFS.get("js.mem.gc.per_compartment.enabled").as_boolean() {
val
} else | ;
let mode = if val {
JSGCMode::JSGC_MODE_INCREMENTAL
} else if compartment {
JSGCMode::JSGC_MODE_COMPARTMENT
} else {
JSGCMode::JSGC_MODE_GLOBAL
};
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MODE, mode as u32);
}
if let Some(val) = PREFS.get("js.mem.gc.incremental.slice_ms").as_i64() {
if val >= 0 && val < 100000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_SLICE_TIME_BUDGET, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.compacting.enabled").as_boolean() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_COMPACTING_ENABLED, val as u32);
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_time_limit_ms").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_TIME_LIMIT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.dynamic_mark_slice.enabled").as_boolean() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_DYNAMIC_MARK_SLICE, val as u32);
}
// TODO: handle js.mem.gc.refresh_frame_slices.enabled
if let Some(val) = PREFS.get("js.mem.gc.dynamic_heap_growth.enabled").as_boolean() {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_DYNAMIC_HEAP_GROWTH, val as u32);
}
if let Some(val) = PREFS.get("js.mem.gc.low_frequency_heap_growth").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_LOW_FREQUENCY_HEAP_GROWTH, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_heap_growth_min").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MIN, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_heap_growth_max").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_HEAP_GROWTH_MAX, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_low_limit_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_LOW_LIMIT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.high_frequency_high_limit_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_HIGH_FREQUENCY_HIGH_LIMIT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.allocation_threshold_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_ALLOCATION_THRESHOLD, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.decommit_threshold_mb").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_DECOMMIT_THRESHOLD, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.empty_chunk_count_min").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MIN_EMPTY_CHUNK_COUNT, val as u32);
}
}
if let Some(val) = PREFS.get("js.mem.gc.empty_chunk_count_max").as_i64() {
if val >= 0 && val < 10000 {
JS_SetGCParameter(runtime.rt(), JSGCParamKey::JSGC_MAX_EMPTY_CHUNK_COUNT, val as u32);
}
}
Runtime(runtime)
}
#[allow(unsafe_code)]
pub fn get_reports(cx: *mut JSContext, path_seg: String) -> Vec<Report> {
let mut reports = vec![];
unsafe {
let rt = JS_GetRuntime(cx);
let mut stats = ::std::mem::zeroed();
if CollectServoSizes(rt, &mut stats) {
let mut report = |mut path_suffix, kind, size| {
let mut path = path![path_seg, "js"];
path.append(&mut path_suffix);
reports.push(Report {
path: path,
kind: kind,
size: size as usize,
})
};
// A note about possibly confusing terminology: the JS GC "heap" is allocated via
// mmap/VirtualAlloc, which means it's not on the malloc "heap", so we use
// `ExplicitNonHeapSize` as its kind.
report(path!["gc-heap", "used"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapUsed);
report(path!["gc-heap", "unused"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapUnused);
report(path!["gc-heap", "admin"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapAdmin);
report(path!["gc-heap", "decommitted"],
ReportKind::ExplicitNonHeapSize,
stats.gcHeapDecommitted);
// SpiderMonkey uses the system heap, not jemalloc.
report(path!["malloc-heap"],
ReportKind::ExplicitSystemHeapSize,
stats.mallocHeap);
report(path!["non-heap"],
ReportKind::ExplicitNonHeapSize,
stats.nonHeap);
}
}
reports
}
thread_local!(static GC_CYCLE_START: Cell<Option<Tm>> = Cell::new(None));
thread_local!(static GC_SLICE_START: Cell<Option<Tm>> = Cell::new(None));
#[allow(unsafe_code)]
unsafe extern "C" fn gc_slice_callback(_rt: *mut JSRuntime, progress: GCProgress, desc: *const GCDescription) {
match progress {
GCProgress::GC_CYCLE_BEGIN => {
GC_CYCLE_START.with(|start| {
start.set(Some(now()));
println!("GC cycle began");
})
},
GCProgress::GC_SLICE_BEGIN => {
GC_SLICE_START.with(|start| {
start.set(Some(now()));
println!("GC slice began");
})
},
GCProgress::GC_SLICE_END => {
GC_SLICE_START.with(|start| {
let dur = now() - start.get().unwrap();
start.set(None);
println!("GC slice ended: duration={}", dur);
})
},
GCProgress::GC_CYCLE_END => {
GC_CYCLE_START.with(|start| {
let dur = now() - start.get().unwrap();
start.set(None);
println!("GC cycle ended: duration={}", dur);
})
},
};
if !desc.is_null() {
let desc: &GCDescription = &*desc;
let invocation_kind = match desc.invocationKind_ {
JSGCInvocationKind::GC_NORMAL => "GC_NORMAL",
JSGCInvocationKind::GC_SHRINK => "GC_SHRINK",
};
println!(" isCompartment={}, invocation_kind={}", desc.isCompartment_, invocation_kind);
}
let _ = stdout().flush();
}
#[allow(unsafe_code)]
unsafe extern "C" fn debug_gc_callback(_rt: *mut JSRuntime, status: JSGCStatus, _data: *mut os::raw::c_void) {
match status {
JSGCStatus::JSGC_BEGIN => thread_state::enter(thread_state::IN_GC),
JSGCStatus::JSGC_END => thread_state::exit(thread_state::IN_GC),
}
}
thread_local!(
static THREAD_ACTIVE: Cell<bool> = Cell::new(true);
);
#[allow(unsafe_code)]
unsafe extern fn trace_rust_roots(tr: *mut JSTracer, _data: *mut os::raw::c_void) {
if !THREAD_ACTIVE.with(|t| t.get()) {
return;
}
debug!("starting custom root handler");
trace_thread(tr);
trace_traceables(tr);
trace_roots(tr);
trace_refcounted_objects(tr);
settings_stack::trace(tr);
debug!("done custom root handler");
}
#[allow(unsafe_code)]
#[cfg(feature = "debugmozjs")]
unsafe fn set_gc_zeal_options(rt: *mut JSRuntime) {
use js::jsapi::{JS_DEFAULT_ZEAL_FREQ, JS_SetGCZeal};
let level = match PREFS.get("js.mem.gc.zeal.level").as_i64() {
Some(level @ 0...14) => level as u8,
_ => return,
};
let frequency = match PREFS.get("js.mem.gc.zeal.frequency").as_i64() {
Some(frequency) if frequency >= 0 => frequency as u32,
_ => JS_DEFAULT_ZEAL_FREQ,
};
JS_SetGCZeal(rt, level, frequency);
}
#[allow(unsafe_code)]
#[cfg(not(feature = "debugmozjs"))]
unsafe fn set_gc_zeal_options(_: *mut JSRuntime) {}
| {
false
} | conditional_block |
cross-borrow-trait.rs | // Copyright 2012-2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that cross-borrowing (implicitly converting from `Box<T>` to `&T`) is
// forbidden when `T` is a trait.
#![feature(box_syntax)]
struct Foo;
trait Trait {}
impl Trait for Foo {} | let x: Box<Trait> = box Foo;
let _y: &Trait = x; //~ ERROR mismatched types: expected `&Trait`, found `Box<Trait>`
} |
pub fn main() { | random_line_split |
cross-borrow-trait.rs | // Copyright 2012-2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that cross-borrowing (implicitly converting from `Box<T>` to `&T`) is
// forbidden when `T` is a trait.
#![feature(box_syntax)]
struct Foo;
trait Trait {}
impl Trait for Foo {}
pub fn | () {
let x: Box<Trait> = box Foo;
let _y: &Trait = x; //~ ERROR mismatched types: expected `&Trait`, found `Box<Trait>`
}
| main | identifier_name |
cross-borrow-trait.rs | // Copyright 2012-2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that cross-borrowing (implicitly converting from `Box<T>` to `&T`) is
// forbidden when `T` is a trait.
#![feature(box_syntax)]
struct Foo;
trait Trait {}
impl Trait for Foo {}
pub fn main() | {
let x: Box<Trait> = box Foo;
let _y: &Trait = x; //~ ERROR mismatched types: expected `&Trait`, found `Box<Trait>`
} | identifier_body |
|
main.rs | /// serde_eg
/// Example of serializing a rust struct into various formats
///
///
extern crate bincode;
extern crate serde;
extern crate serde_cbor;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
/// struct to describe some properties of a city
#[derive(Serialize, Deserialize)]
struct City {
name: String,
population: usize,
latitude: f64,
longitude: f64,
}
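/// Editor's sketch (not in the original example): the reverse direction for one
/// of the formats used in `main` below — parsing a JSON string back into a
/// `City`. The literal mirrors the values constructed there.
#[allow(dead_code)]
fn example_roundtrip_json() {
    let raw = r#"{"name":"Calabar","population":470000,"latitude":4.95,"longitude":8.33}"#;
    let city: City = serde_json::from_str(raw).unwrap();
    assert_eq!(city.population, 470_000);
}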
fn | () {
let calabar = City {
name: String::from("Calabar"),
population: 470_000,
latitude: 4.95,
longitude: 8.33,
};
let as_json = serde_json::to_string(&calabar).unwrap();
let as_cbor = serde_cbor::to_vec(&calabar).unwrap();
let as_bincode = bincode::serialize(&calabar).unwrap();
println!("json: {}", &as_json);
println!("cbor: {:?}", &as_cbor);
println!("cbor (as UTF-8): {:?}", String::from_utf8_lossy(&as_cbor));
println!("bincode: {:?}", &as_bincode);
println!(
"bincode (as UTF-8): {:?}",
String::from_utf8_lossy(&as_bincode)
);
}
| main | identifier_name |
main.rs | /// serde_eg
/// Example of serializing a rust struct into various formats
///
///
extern crate bincode;
extern crate serde;
extern crate serde_cbor;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
/// struct to describe some properties of a city
#[derive(Serialize, Deserialize)]
struct City {
name: String,
population: usize, | }
fn main() {
let calabar = City {
name: String::from("Calabar"),
population: 470_000,
latitude: 4.95,
longitude: 8.33,
};
let as_json = serde_json::to_string(&calabar).unwrap();
let as_cbor = serde_cbor::to_vec(&calabar).unwrap();
let as_bincode = bincode::serialize(&calabar).unwrap();
println!("json: {}", &as_json);
println!("cbor: {:?}", &as_cbor);
println!("cbor (as UTF-8): {:?}", String::from_utf8_lossy(&as_cbor));
println!("bincode: {:?}", &as_bincode);
println!(
"bincode (as UTF-8): {:?}",
String::from_utf8_lossy(&as_bincode)
);
} | latitude: f64,
longitude: f64, | random_line_split |
actor.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// General actor system infrastructure.
use std::any::{Any, AnyRefExt, AnyMutRefExt};
use std::collections::HashMap;
use std::cell::{Cell, RefCell};
use std::intrinsics::TypeId;
use std::io::TcpStream;
use std::mem::{transmute, transmute_copy, replace};
use std::raw::TraitObject;
use serialize::json;
/// A common trait for all devtools actors that encompasses an immutable name
/// and the ability to process messages that are directed to particular actors.
/// TODO: ensure the name is immutable
pub trait Actor {
fn handle_message(&self,
registry: &ActorRegistry,
msg_type: &String,
msg: &json::JsonObject,
stream: &mut TcpStream) -> bool;
fn name(&self) -> String;
}
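/// Editor's sketch, not a real devtools actor: the minimum an implementor of
/// the trait above must provide. Returning false from handle_message tells the
/// registry that the message was not consumed by this actor.
#[allow(dead_code)]
struct NullActor {
    name: String,
}
impl Actor for NullActor {
    fn handle_message(&self,
                      _registry: &ActorRegistry,
                      _msg_type: &String,
                      _msg: &json::JsonObject,
                      _stream: &mut TcpStream) -> bool {
        false
    }
    fn name(&self) -> String {
        self.name.clone()
    }
}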
impl<'a> AnyMutRefExt<'a> for &'a mut Actor + 'a {
fn downcast_mut<T:'static>(self) -> Option<&'a mut T> {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
// Extract the data pointer
Some(transmute(to.data))
}
} else {
None
}
}
}
impl<'a> AnyRefExt<'a> for &'a Actor + 'a {
fn is<T:'static>(self) -> bool {
// This implementation is only needed so long as there's a Rust bug that
// prevents downcast_ref from giving realistic return values.
unsafe {
let t = TypeId::of::<T>();
let this: &Actor = transmute(self);
let boxed: TypeId = this.get_type_id();
t == boxed
}
}
fn downcast_ref<T:'static>(self) -> Option<&'a T> {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
// Extract the data pointer
Some(transmute(to.data))
}
} else {
None
}
}
}
/// A list of known, owned actors.
pub struct ActorRegistry {
actors: HashMap<String, Box<Actor+Send+Sized>>,
new_actors: RefCell<Vec<Box<Actor+Send+Sized>>>,
script_actors: RefCell<HashMap<String, String>>,
next: Cell<u32>,
}
impl ActorRegistry {
/// Create an empty registry.
pub fn new() -> ActorRegistry {
ActorRegistry {
actors: HashMap::new(),
new_actors: RefCell::new(vec!()),
script_actors: RefCell::new(HashMap::new()),
next: Cell::new(0),
}
}
pub fn register_script_actor(&self, script_id: String, actor: String) {
println!("registering {:s} ({:s})", actor.as_slice(), script_id.as_slice());
let mut script_actors = self.script_actors.borrow_mut();
script_actors.insert(script_id, actor);
}
pub fn script_to_actor(&self, script_id: String) -> String {
if script_id.as_slice() == "" {
return "".to_string();
}
self.script_actors.borrow().get(&script_id).unwrap().to_string()
}
pub fn script_actor_registered(&self, script_id: String) -> bool {
self.script_actors.borrow().contains_key(&script_id)
}
pub fn actor_to_script(&self, actor: String) -> String {
for (key, value) in self.script_actors.borrow().iter() {
println!("checking {:s}", value.as_slice());
if value.as_slice() == actor.as_slice() {
return key.to_string();
}
}
panic!("couldn't find actor named {:s}", actor)
}
/// Create a unique name based on a monotonically increasing suffix
pub fn | (&self, prefix: &str) -> String {
let suffix = self.next.get();
self.next.set(suffix + 1);
format!("{:s}{:u}", prefix, suffix)
}
/// Add an actor to the registry of known actors that can receive messages.
pub fn register(&mut self, actor: Box<Actor+Send+Sized>) {
self.actors.insert(actor.name().to_string(), actor);
}
pub fn register_later(&self, actor: Box<Actor+Send+Sized>) {
let mut actors = self.new_actors.borrow_mut();
actors.push(actor);
}
/// Find an actor by registered name
pub fn find<'a, T:'static>(&'a self, name: &str) -> &'a T {
//FIXME: Rust bug forces us to implement bogus Any for Actor since downcast_ref currently
// fails for unknown reasons.
/*let actor: &Actor+Send+Sized = *self.actors.find(&name.to_string()).unwrap();
(actor as &Any).downcast_ref::<T>().unwrap()*/
self.actors.get(&name.to_string()).unwrap().downcast_ref::<T>().unwrap()
}
/// Find an actor by registered name
pub fn find_mut<'a, T:'static>(&'a mut self, name: &str) -> &'a mut T {
//FIXME: Rust bug forces us to implement bogus Any for Actor since downcast_ref currently
// fails for unknown reasons.
/*let actor: &mut Actor+Send+Sized = *self.actors.find_mut(&name.to_string()).unwrap();
(actor as &mut Any).downcast_mut::<T>().unwrap()*/
self.actors.get_mut(&name.to_string()).unwrap().downcast_mut::<T>().unwrap()
}
/// Attempt to process a message as directed by its `to` property. If the actor is not
/// found or does not indicate that it knew how to process the message, ignore the failure.
pub fn handle_message(&mut self, msg: &json::JsonObject, stream: &mut TcpStream) {
let to = msg.get(&"to".to_string()).unwrap().as_string().unwrap();
match self.actors.get(&to.to_string()) {
None => println!("message received for unknown actor \"{:s}\"", to),
Some(actor) => {
let msg_type = msg.get(&"type".to_string()).unwrap().as_string().unwrap();
if !actor.handle_message(self, &msg_type.to_string(), msg, stream) {
println!("unexpected message type \"{:s}\" found for actor \"{:s}\"",
msg_type, to);
}
}
}
let new_actors = replace(&mut *self.new_actors.borrow_mut(), vec!());
for actor in new_actors.into_iter() {
self.actors.insert(actor.name().to_string(), actor);
}
}
}
| new_name | identifier_name |
actor.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// General actor system infrastructure.
use std::any::{Any, AnyRefExt, AnyMutRefExt};
use std::collections::HashMap;
use std::cell::{Cell, RefCell};
use std::intrinsics::TypeId;
use std::io::TcpStream;
use std::mem::{transmute, transmute_copy, replace};
use std::raw::TraitObject;
use serialize::json;
/// A common trait for all devtools actors that encompasses an immutable name
/// and the ability to process messages that are directed to particular actors.
/// TODO: ensure the name is immutable
pub trait Actor {
fn handle_message(&self,
registry: &ActorRegistry,
msg_type: &String,
msg: &json::JsonObject,
stream: &mut TcpStream) -> bool;
fn name(&self) -> String;
}
impl<'a> AnyMutRefExt<'a> for &'a mut Actor + 'a {
fn downcast_mut<T:'static>(self) -> Option<&'a mut T> {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
// Extract the data pointer
Some(transmute(to.data))
}
} else {
None
}
}
}
impl<'a> AnyRefExt<'a> for &'a Actor + 'a {
fn is<T:'static>(self) -> bool {
// This implementation is only needed so long as there's a Rust bug that
// prevents downcast_ref from giving realistic return values.
unsafe {
let t = TypeId::of::<T>();
let this: &Actor = transmute(self);
let boxed: TypeId = this.get_type_id();
t == boxed
}
}
fn downcast_ref<T:'static>(self) -> Option<&'a T> {
if self.is::<T>() {
unsafe {
// Get the raw representation of the trait object
let to: TraitObject = transmute_copy(&self);
// Extract the data pointer
Some(transmute(to.data))
}
} else {
None
}
}
}
/// A list of known, owned actors.
pub struct ActorRegistry {
actors: HashMap<String, Box<Actor+Send+Sized>>,
new_actors: RefCell<Vec<Box<Actor+Send+Sized>>>,
script_actors: RefCell<HashMap<String, String>>,
next: Cell<u32>,
}
impl ActorRegistry {
/// Create an empty registry.
pub fn new() -> ActorRegistry {
ActorRegistry {
actors: HashMap::new(),
new_actors: RefCell::new(vec!()),
script_actors: RefCell::new(HashMap::new()),
next: Cell::new(0),
}
}
pub fn register_script_actor(&self, script_id: String, actor: String) {
println!("registering {:s} ({:s})", actor.as_slice(), script_id.as_slice());
let mut script_actors = self.script_actors.borrow_mut();
script_actors.insert(script_id, actor);
}
pub fn script_to_actor(&self, script_id: String) -> String {
if script_id.as_slice() == "" {
return "".to_string();
}
self.script_actors.borrow().get(&script_id).unwrap().to_string()
}
pub fn script_actor_registered(&self, script_id: String) -> bool {
self.script_actors.borrow().contains_key(&script_id)
}
pub fn actor_to_script(&self, actor: String) -> String {
for (key, value) in self.script_actors.borrow().iter() {
println!("checking {:s}", value.as_slice());
if value.as_slice() == actor.as_slice() { |
/// Create a unique name based on a monotonically increasing suffix
pub fn new_name(&self, prefix: &str) -> String {
let suffix = self.next.get();
self.next.set(suffix + 1);
format!("{:s}{:u}", prefix, suffix)
}
/// Add an actor to the registry of known actors that can receive messages.
pub fn register(&mut self, actor: Box<Actor+Send+Sized>) {
self.actors.insert(actor.name().to_string(), actor);
}
pub fn register_later(&self, actor: Box<Actor+Send+Sized>) {
let mut actors = self.new_actors.borrow_mut();
actors.push(actor);
}
/// Find an actor by registered name
pub fn find<'a, T:'static>(&'a self, name: &str) -> &'a T {
//FIXME: Rust bug forces us to implement bogus Any for Actor since downcast_ref currently
// fails for unknown reasons.
/*let actor: &Actor+Send+Sized = *self.actors.find(&name.to_string()).unwrap();
(actor as &Any).downcast_ref::<T>().unwrap()*/
self.actors.get(&name.to_string()).unwrap().downcast_ref::<T>().unwrap()
}
/// Find an actor by registered name
pub fn find_mut<'a, T:'static>(&'a mut self, name: &str) -> &'a mut T {
//FIXME: Rust bug forces us to implement bogus Any for Actor since downcast_ref currently
// fails for unknown reasons.
/*let actor: &mut Actor+Send+Sized = *self.actors.find_mut(&name.to_string()).unwrap();
(actor as &mut Any).downcast_mut::<T>().unwrap()*/
self.actors.get_mut(&name.to_string()).unwrap().downcast_mut::<T>().unwrap()
}
/// Attempt to process a message as directed by its `to` property. If the actor is not
/// found or does not indicate that it knew how to process the message, ignore the failure.
pub fn handle_message(&mut self, msg: &json::JsonObject, stream: &mut TcpStream) {
let to = msg.get(&"to".to_string()).unwrap().as_string().unwrap();
match self.actors.get(&to.to_string()) {
None => println!("message received for unknown actor \"{:s}\"", to),
Some(actor) => {
let msg_type = msg.get(&"type".to_string()).unwrap().as_string().unwrap();
if !actor.handle_message(self, &msg_type.to_string(), msg, stream) {
println!("unexpected message type \"{:s}\" found for actor \"{:s}\"",
msg_type, to);
}
}
}
let new_actors = replace(&mut *self.new_actors.borrow_mut(), vec!());
for actor in new_actors.into_iter() {
self.actors.insert(actor.name().to_string(), actor);
}
}
} | return key.to_string();
}
}
panic!("couldn't find actor named {:s}", actor)
} | random_line_split |
instr_vextracti128.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test] | }
#[test]
fn vextracti128_2() {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTI128, operand1: Some(IndirectScaledIndexedDisplaced(EAX, EBX, Eight, 1167941305, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(YMM0)), operand3: Some(Literal8(96)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 125, 57, 132, 216, 185, 94, 157, 69, 96], OperandSize::Dword)
}
#[test]
fn vextracti128_3() {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTI128, operand1: Some(Direct(XMM5)), operand2: Some(Direct(YMM3)), operand3: Some(Literal8(70)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 125, 57, 221, 70], OperandSize::Qword)
}
#[test]
fn vextracti128_4() {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTI128, operand1: Some(IndirectDisplaced(RDX, 545176606, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(YMM0)), operand3: Some(Literal8(81)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 125, 57, 130, 30, 188, 126, 32, 81], OperandSize::Qword)
} | fn vextracti128_1() {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTI128, operand1: Some(Direct(XMM5)), operand2: Some(Direct(YMM4)), operand3: Some(Literal8(67)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 125, 57, 229, 67], OperandSize::Dword) | random_line_split |
instr_vextracti128.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn vextracti128_1() {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTI128, operand1: Some(Direct(XMM5)), operand2: Some(Direct(YMM4)), operand3: Some(Literal8(67)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 125, 57, 229, 67], OperandSize::Dword)
}
#[test]
fn vextracti128_2() {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTI128, operand1: Some(IndirectScaledIndexedDisplaced(EAX, EBX, Eight, 1167941305, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(YMM0)), operand3: Some(Literal8(96)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 125, 57, 132, 216, 185, 94, 157, 69, 96], OperandSize::Dword)
}
#[test]
fn vextracti128_3() {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTI128, operand1: Some(Direct(XMM5)), operand2: Some(Direct(YMM3)), operand3: Some(Literal8(70)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 125, 57, 221, 70], OperandSize::Qword)
}
#[test]
fn | () {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTI128, operand1: Some(IndirectDisplaced(RDX, 545176606, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(YMM0)), operand3: Some(Literal8(81)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 125, 57, 130, 30, 188, 126, 32, 81], OperandSize::Qword)
}
| vextracti128_4 | identifier_name |
instr_vextracti128.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn vextracti128_1() {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTI128, operand1: Some(Direct(XMM5)), operand2: Some(Direct(YMM4)), operand3: Some(Literal8(67)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 125, 57, 229, 67], OperandSize::Dword)
}
#[test]
fn vextracti128_2() {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTI128, operand1: Some(IndirectScaledIndexedDisplaced(EAX, EBX, Eight, 1167941305, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(YMM0)), operand3: Some(Literal8(96)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 125, 57, 132, 216, 185, 94, 157, 69, 96], OperandSize::Dword)
}
#[test]
fn vextracti128_3() {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTI128, operand1: Some(Direct(XMM5)), operand2: Some(Direct(YMM3)), operand3: Some(Literal8(70)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 125, 57, 221, 70], OperandSize::Qword)
}
#[test]
fn vextracti128_4() | {
run_test(&Instruction { mnemonic: Mnemonic::VEXTRACTI128, operand1: Some(IndirectDisplaced(RDX, 545176606, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(YMM0)), operand3: Some(Literal8(81)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 125, 57, 130, 30, 188, 126, 32, 81], OperandSize::Qword)
} | identifier_body |
|
tag.rs | // Copyright 2016 Jeremy Letang.
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use backit::{json, time};
use db::models::Tag;
use diesel::result::Error as DieselError;
use std::error::Error;
use uuid::Uuid;
//use diesel::sqlite::SqliteConnection;
use diesel::pg::PgConnection;
pub fn create(db: &mut PgConnection, mut t: Tag) -> Result<Tag, json::Error> {
use diesel::{self, ExecuteDsl};
use db::schemas::tags;
// create some mandatory fields
t.id = Uuid::new_v4().to_string();
t.created_at = Some(time::timestamp::now() as i32);
t.updated_at = Some(time::timestamp::now() as i32);
match diesel::insert(&t).into(tags::table).execute(db) {
Ok(_) => Ok(t),
Err(e) => Err(json::Error::internal_error(e.description())),
}
}
pub fn update(db: &mut PgConnection, mut t: Tag) -> Result<Tag, json::Error> {
use diesel::SaveChangesDsl;
t.updated_at = Some(time::timestamp::now() as i32);
match t.save_changes::<Tag>(db) {
Ok(_) => Ok(t),
Err(e) => Err(json::Error::internal_error(e.description())),
}
}
pub fn get(db: &mut PgConnection, get_id: &str) -> Result<Tag, DieselError> {
use diesel::{LoadDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, id};
tags.filter(id.eq(get_id)).first::<Tag>(db)
}
pub fn get_from_name_and_user_id(db: &mut PgConnection, get_name: &str, get_user_id: &str) -> Result<Tag, DieselError> {
use diesel::{LoadDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, name, user_id};
tags.filter(user_id.eq(get_user_id)).filter(name.eq(get_name)).first::<Tag>(db)
}
pub fn list(db: &mut PgConnection) -> Result<Vec<Tag>, DieselError> |
pub fn list_for_user_id(db: &mut PgConnection, list_user_id: &str) -> Result<Vec<Tag>, DieselError> {
use diesel::{LoadDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, user_id};
tags.filter(user_id.eq(list_user_id)).load::<Tag>(db)
}
pub fn delete(db: &mut PgConnection, delete_id: &str)
-> Result<usize, DieselError> {
use diesel::{self, ExecuteDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, id};
diesel::delete(tags.filter(id.eq(delete_id))).execute(db)
}
pub fn get_all(db: &mut PgConnection, ids: &[String]) -> Result<Vec<Tag>, DieselError> {
use diesel::{LoadDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, id};
use diesel::pg::expression::dsl::any;
tags.filter(id.eq(any(ids)))
.load::<Tag>(db)
}
| {
use diesel::LoadDsl;
use db::schemas::tags::dsl::tags;
tags.load::<Tag>(db)
} | identifier_body |
tag.rs | // Copyright 2016 Jeremy Letang.
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use backit::{json, time};
use db::models::Tag;
use diesel::result::Error as DieselError;
use std::error::Error;
use uuid::Uuid;
//use diesel::sqlite::SqliteConnection;
use diesel::pg::PgConnection;
pub fn create(db: &mut PgConnection, mut t: Tag) -> Result<Tag, json::Error> {
use diesel::{self, ExecuteDsl};
use db::schemas::tags;
// create some mandatory fields
t.id = Uuid::new_v4().to_string();
t.created_at = Some(time::timestamp::now() as i32);
t.updated_at = Some(time::timestamp::now() as i32);
match diesel::insert(&t).into(tags::table).execute(db) {
Ok(_) => Ok(t),
Err(e) => Err(json::Error::internal_error(e.description())),
}
}
pub fn update(db: &mut PgConnection, mut t: Tag) -> Result<Tag, json::Error> {
use diesel::SaveChangesDsl;
t.updated_at = Some(time::timestamp::now() as i32);
match t.save_changes::<Tag>(db) {
Ok(_) => Ok(t),
Err(e) => Err(json::Error::internal_error(e.description())),
}
}
pub fn | (db: &mut PgConnection, get_id: &str) -> Result<Tag, DieselError> {
use diesel::{LoadDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, id};
tags.filter(id.eq(get_id)).first::<Tag>(db)
}
pub fn get_from_name_and_user_id(db: &mut PgConnection, get_name: &str, get_user_id: &str) -> Result<Tag, DieselError> {
use diesel::{LoadDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, name, user_id};
tags.filter(user_id.eq(get_user_id)).filter(name.eq(get_name)).first::<Tag>(db)
}
pub fn list(db: &mut PgConnection) -> Result<Vec<Tag>, DieselError> {
use diesel::LoadDsl;
use db::schemas::tags::dsl::tags;
tags.load::<Tag>(db)
}
pub fn list_for_user_id(db: &mut PgConnection, list_user_id: &str) -> Result<Vec<Tag>, DieselError> {
use diesel::{LoadDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, user_id};
tags.filter(user_id.eq(list_user_id)).load::<Tag>(db)
}
pub fn delete(db: &mut PgConnection, delete_id: &str)
-> Result<usize, DieselError> {
use diesel::{self, ExecuteDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, id};
diesel::delete(tags.filter(id.eq(delete_id))).execute(db)
}
pub fn get_all(db: &mut PgConnection, ids: &[String]) -> Result<Vec<Tag>, DieselError> {
use diesel::{LoadDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, id};
use diesel::pg::expression::dsl::any;
tags.filter(id.eq(any(ids)))
.load::<Tag>(db)
}
| get | identifier_name |
tag.rs | // Copyright 2016 Jeremy Letang.
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use backit::{json, time};
use db::models::Tag;
use diesel::result::Error as DieselError;
use std::error::Error;
use uuid::Uuid;
//use diesel::sqlite::SqliteConnection;
use diesel::pg::PgConnection;
pub fn create(db: &mut PgConnection, mut t: Tag) -> Result<Tag, json::Error> {
use diesel::{self, ExecuteDsl};
use db::schemas::tags;
// create some mandatory fields
t.id = Uuid::new_v4().to_string();
t.created_at = Some(time::timestamp::now() as i32);
t.updated_at = Some(time::timestamp::now() as i32);
match diesel::insert(&t).into(tags::table).execute(db) {
Ok(_) => Ok(t),
Err(e) => Err(json::Error::internal_error(e.description())),
}
}
pub fn update(db: &mut PgConnection, mut t: Tag) -> Result<Tag, json::Error> {
use diesel::SaveChangesDsl;
t.updated_at = Some(time::timestamp::now() as i32);
match t.save_changes::<Tag>(db) {
Ok(_) => Ok(t),
Err(e) => Err(json::Error::internal_error(e.description())),
}
}
pub fn get(db: &mut PgConnection, get_id: &str) -> Result<Tag, DieselError> {
use diesel::{LoadDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, id};
tags.filter(id.eq(get_id)).first::<Tag>(db)
}
pub fn get_from_name_and_user_id(db: &mut PgConnection, get_name: &str, get_user_id: &str) -> Result<Tag, DieselError> {
use diesel::{LoadDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, name, user_id};
tags.filter(user_id.eq(get_user_id)).filter(name.eq(get_name)).first::<Tag>(db)
}
pub fn list(db: &mut PgConnection) -> Result<Vec<Tag>, DieselError> {
use diesel::LoadDsl;
use db::schemas::tags::dsl::tags;
tags.load::<Tag>(db)
}
pub fn list_for_user_id(db: &mut PgConnection, list_user_id: &str) -> Result<Vec<Tag>, DieselError> {
use diesel::{LoadDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, user_id};
tags.filter(user_id.eq(list_user_id)).load::<Tag>(db)
}
pub fn delete(db: &mut PgConnection, delete_id: &str)
-> Result<usize, DieselError> {
use diesel::{self, ExecuteDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, id};
diesel::delete(tags.filter(id.eq(delete_id))).execute(db)
} |
pub fn get_all(db: &mut PgConnection, ids: &[String]) -> Result<Vec<Tag>, DieselError> {
use diesel::{LoadDsl, FilterDsl, ExpressionMethods};
use db::schemas::tags::dsl::{tags, id};
use diesel::pg::expression::dsl::any;
tags.filter(id.eq(any(ids)))
.load::<Tag>(db)
} | random_line_split |
|
main.rs | use std::io;
use std::io::Write;
use std::f64::consts::PI; // We can use std::f32::consts::PI if using float
fn | () {
let mut side_1: String = String::new();
let mut side_2: String = String::new();
let mut angle: String = String::new();
print!("Enter first side length: ");
io::stdout().flush().unwrap();
io::stdin().read_line(&mut side_1)
.expect("Failed to read");
print!("Enter second side length: ");
io::stdout().flush().unwrap();
io::stdin().read_line(&mut side_2)
.expect("Failed to read");
print!("Enter the angle: ");
io::stdout().flush().unwrap();
io::stdin().read_line(&mut angle)
.expect("Failed to read");
let side_1: f64 = side_1.trim().parse()
.expect("Parsing error");
let side_2: f64 = side_2.trim().parse()
.expect("Parsing error");
let angle: f64 = angle.trim().parse()
.expect("Parsing error");
// I have to calculate and write the type the pi angle first before "sin" it
let pi_angle: f64 = (PI / 180.0) * angle;
// This must the same type (f64)
let area: f64 = (side_1 * side_2 * pi_angle.sin()) / 2.0;
println!("Area of Scalene triangle: {:.5}", area); // Round to 5 digits
}
| main | identifier_name |
main.rs | use std::io;
use std::io::Write;
use std::f64::consts::PI; // We can use std::f32::consts::PI if using float
fn main() {
let mut side_1: String = String::new();
let mut side_2: String = String::new();
let mut angle: String = String::new();
print!("Enter first side length: ");
io::stdout().flush().unwrap();
io::stdin().read_line(&mut side_1)
.expect("Failed to read");
print!("Enter second side length: "); |
print!("Enter the angle: ");
io::stdout().flush().unwrap();
io::stdin().read_line(&mut angle)
.expect("Failed to read");
let side_1: f64 = side_1.trim().parse()
.expect("Parsing error");
let side_2: f64 = side_2.trim().parse()
.expect("Parsing error");
let angle: f64 = angle.trim().parse()
.expect("Parsing error");
// I have to calculate and write the type the pi angle first before "sin" it
let pi_angle: f64 = (PI / 180.0) * angle;
// This must the same type (f64)
let area: f64 = (side_1 * side_2 * pi_angle.sin()) / 2.0;
println!("Area of Scalene triangle: {:.5}", area); // Round to 5 digits
} | io::stdout().flush().unwrap();
io::stdin().read_line(&mut side_2)
.expect("Failed to read"); | random_line_split |
main.rs | use std::io;
use std::io::Write;
use std::f64::consts::PI; // We can use std::f32::consts::PI if using float
fn main() | let side_1: f64 = side_1.trim().parse()
.expect("Parsing error");
let side_2: f64 = side_2.trim().parse()
.expect("Parsing error");
let angle: f64 = angle.trim().parse()
.expect("Parsing error");
// I have to calculate and write the type the pi angle first before "sin" it
let pi_angle: f64 = (PI / 180.0) * angle;
// This must the same type (f64)
let area: f64 = (side_1 * side_2 * pi_angle.sin()) / 2.0;
println!("Area of Scalene triangle: {:.5}", area); // Round to 5 digits
}
| {
let mut side_1: String = String::new();
let mut side_2: String = String::new();
let mut angle: String = String::new();
print!("Enter first side length: ");
io::stdout().flush().unwrap();
io::stdin().read_line(&mut side_1)
.expect("Failed to read");
print!("Enter second side length: ");
io::stdout().flush().unwrap();
io::stdin().read_line(&mut side_2)
.expect("Failed to read");
print!("Enter the angle: ");
io::stdout().flush().unwrap();
io::stdin().read_line(&mut angle)
.expect("Failed to read");
| identifier_body |
log.rs | // Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use util::numbers::*;
use ethcore::log_entry::LocalizedLogEntry;
use v1::types::Bytes;
#[derive(Debug, Serialize)] | pub block_hash: H256,
#[serde(rename="blockNumber")]
pub block_number: U256,
#[serde(rename="transactionHash")]
pub transaction_hash: H256,
#[serde(rename="transactionIndex")]
pub transaction_index: U256,
#[serde(rename="logIndex")]
pub log_index: U256,
}
impl From<LocalizedLogEntry> for Log {
fn from(e: LocalizedLogEntry) -> Log {
Log {
address: e.entry.address,
topics: e.entry.topics,
data: Bytes::new(e.entry.data),
block_hash: e.block_hash,
block_number: From::from(e.block_number),
transaction_hash: e.transaction_hash,
transaction_index: From::from(e.transaction_index),
log_index: From::from(e.log_index)
}
}
}
#[cfg(test)]
mod tests {
use serde_json;
use std::str::FromStr;
use util::numbers::*;
use v1::types::{Bytes, Log};
#[test]
fn log_serialization() {
let s = r#"{"address":"0x33990122638b9132ca29c723bdf037f1a891a70c","topics":["0xa6697e974e6a320f454390be03f74955e8978f1a6971ea6730542e37b66179bc","0x4861736852656700000000000000000000000000000000000000000000000000"],"data":"0x","blockHash":"0xed76641c68a1c641aee09a94b3b471f4dc0316efe5ac19cf488e2674cf8d05b5","blockNumber":"0x04510c","transactionHash":"0x0000000000000000000000000000000000000000000000000000000000000000","transactionIndex":"0x00","logIndex":"0x01"}"#;
let log = Log {
address: Address::from_str("33990122638b9132ca29c723bdf037f1a891a70c").unwrap(),
topics: vec![
H256::from_str("a6697e974e6a320f454390be03f74955e8978f1a6971ea6730542e37b66179bc").unwrap(),
H256::from_str("4861736852656700000000000000000000000000000000000000000000000000").unwrap()
],
data: Bytes::new(vec![]),
block_hash: H256::from_str("ed76641c68a1c641aee09a94b3b471f4dc0316efe5ac19cf488e2674cf8d05b5").unwrap(),
block_number: U256::from(0x4510c),
transaction_hash: H256::new(),
transaction_index: U256::zero(),
log_index: U256::one()
};
let serialized = serde_json::to_string(&log).unwrap();
assert_eq!(serialized, s);
}
} | pub struct Log {
pub address: Address,
pub topics: Vec<H256>,
pub data: Bytes,
#[serde(rename="blockHash")] | random_line_split |
log.rs | // Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use util::numbers::*;
use ethcore::log_entry::LocalizedLogEntry;
use v1::types::Bytes;
#[derive(Debug, Serialize)]
pub struct Log {
pub address: Address,
pub topics: Vec<H256>,
pub data: Bytes,
#[serde(rename="blockHash")]
pub block_hash: H256,
#[serde(rename="blockNumber")]
pub block_number: U256,
#[serde(rename="transactionHash")]
pub transaction_hash: H256,
#[serde(rename="transactionIndex")]
pub transaction_index: U256,
#[serde(rename="logIndex")]
pub log_index: U256,
}
impl From<LocalizedLogEntry> for Log {
fn | (e: LocalizedLogEntry) -> Log {
Log {
address: e.entry.address,
topics: e.entry.topics,
data: Bytes::new(e.entry.data),
block_hash: e.block_hash,
block_number: From::from(e.block_number),
transaction_hash: e.transaction_hash,
transaction_index: From::from(e.transaction_index),
log_index: From::from(e.log_index)
}
}
}
#[cfg(test)]
mod tests {
use serde_json;
use std::str::FromStr;
use util::numbers::*;
use v1::types::{Bytes, Log};
#[test]
fn log_serialization() {
let s = r#"{"address":"0x33990122638b9132ca29c723bdf037f1a891a70c","topics":["0xa6697e974e6a320f454390be03f74955e8978f1a6971ea6730542e37b66179bc","0x4861736852656700000000000000000000000000000000000000000000000000"],"data":"0x","blockHash":"0xed76641c68a1c641aee09a94b3b471f4dc0316efe5ac19cf488e2674cf8d05b5","blockNumber":"0x04510c","transactionHash":"0x0000000000000000000000000000000000000000000000000000000000000000","transactionIndex":"0x00","logIndex":"0x01"}"#;
let log = Log {
address: Address::from_str("33990122638b9132ca29c723bdf037f1a891a70c").unwrap(),
topics: vec![
H256::from_str("a6697e974e6a320f454390be03f74955e8978f1a6971ea6730542e37b66179bc").unwrap(),
H256::from_str("4861736852656700000000000000000000000000000000000000000000000000").unwrap()
],
data: Bytes::new(vec![]),
block_hash: H256::from_str("ed76641c68a1c641aee09a94b3b471f4dc0316efe5ac19cf488e2674cf8d05b5").unwrap(),
block_number: U256::from(0x4510c),
transaction_hash: H256::new(),
transaction_index: U256::zero(),
log_index: U256::one()
};
let serialized = serde_json::to_string(&log).unwrap();
assert_eq!(serialized, s);
}
}
| from | identifier_name |
element_test.rs | use rquery::Document;
fn new_document() -> Document {
Document::new_from_xml_string(r#"
<?xml version="1.0" encoding="UTF-8"?>
<main type="simple">
This is some text
</main>
"#).unwrap()
}
#[test]
fn it_knows_its_tag_name() {
let document = new_document();
let element = document.select("main").unwrap();
assert_eq!(element.tag_name(), "main");
}
#[test]
fn it_knows_its_attributes() {
let document = new_document();
let element = document.select("main").unwrap();
assert_eq!(element.attr("type").unwrap(), "simple");
}
#[test]
fn it_knows_its_inner_text_contents() { |
let element = document.select("main").unwrap();
assert_eq!(element.text().trim(), "This is some text");
}
#[test]
fn it_knows_its_node_indices() {
let document = new_document();
let element = document.select("main").unwrap();
assert_eq!(element.node_index(), 1);
} | let document = new_document(); | random_line_split |
element_test.rs | use rquery::Document;
fn new_document() -> Document {
Document::new_from_xml_string(r#"
<?xml version="1.0" encoding="UTF-8"?>
<main type="simple">
This is some text
</main>
"#).unwrap()
}
#[test]
fn it_knows_its_tag_name() {
let document = new_document();
let element = document.select("main").unwrap();
assert_eq!(element.tag_name(), "main");
}
#[test]
fn it_knows_its_attributes() {
let document = new_document();
let element = document.select("main").unwrap();
assert_eq!(element.attr("type").unwrap(), "simple");
}
#[test]
fn it_knows_its_inner_text_contents() {
let document = new_document();
let element = document.select("main").unwrap();
assert_eq!(element.text().trim(), "This is some text");
}
#[test]
fn | () {
let document = new_document();
let element = document.select("main").unwrap();
assert_eq!(element.node_index(), 1);
} | it_knows_its_node_indices | identifier_name |
element_test.rs | use rquery::Document;
fn new_document() -> Document {
Document::new_from_xml_string(r#"
<?xml version="1.0" encoding="UTF-8"?>
<main type="simple">
This is some text
</main>
"#).unwrap()
}
#[test]
fn it_knows_its_tag_name() {
let document = new_document();
let element = document.select("main").unwrap();
assert_eq!(element.tag_name(), "main");
}
#[test]
fn it_knows_its_attributes() |
#[test]
fn it_knows_its_inner_text_contents() {
let document = new_document();
let element = document.select("main").unwrap();
assert_eq!(element.text().trim(), "This is some text");
}
#[test]
fn it_knows_its_node_indices() {
let document = new_document();
let element = document.select("main").unwrap();
assert_eq!(element.node_index(), 1);
} | {
let document = new_document();
let element = document.select("main").unwrap();
assert_eq!(element.attr("type").unwrap(), "simple");
} | identifier_body |
catrank.rs | // Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use common::*;
use catrank_capnp::*;
#[derive(Clone, Copy)]
pub struct ScoredResult<'a> {
score: f64,
result: search_result::Reader<'a>
}
const URL_PREFIX: &'static str = "http://example.com";
pub struct CatRank;
impl ::TestCase for CatRank {
type Request = search_result_list::Owned;
type Response = search_result_list::Owned;
type Expectation = i32;
fn setup_request(&self, rng: &mut FastRand, request: search_result_list::Builder) -> i32 {
let count = rng.next_less_than(1000);
let mut good_count: i32 = 0;
let mut list = request.init_results(count);
for i in 0..count {
let mut result = list.reborrow().get(i);
result.set_score(1000.0 - i as f64);
let url_size = rng.next_less_than(100);
let url_prefix_length = URL_PREFIX.as_bytes().len();
{
let mut url = result.reborrow().init_url(url_size + url_prefix_length as u32);
url.push_str(URL_PREFIX);
for _ in 0..url_size {
url.push_ascii((97 + rng.next_less_than(26)) as u8);
}
}
let is_cat = rng.next_less_than(8) == 0;
let is_dog = rng.next_less_than(8) == 0;
if is_cat && !is_dog {
good_count += 1;
}
let mut snippet = " ".to_string();
let prefix = rng.next_less_than(20) as usize;
for _ in 0..prefix {
snippet.push_str(WORDS[rng.next_less_than(WORDS.len() as u32) as usize]);
}
if is_cat { snippet.push_str("cat ") }
if is_dog { snippet.push_str("dog ") }
let suffix = rng.next_less_than(20) as usize;
for _ in 0..suffix {
snippet.push_str(WORDS[rng.next_less_than(WORDS.len() as u32) as usize]);
}
result.set_snippet(&snippet);
}
good_count
}
fn handle_request(&self, request: search_result_list::Reader,
response: search_result_list::Builder) -> ::capnp::Result<()>
{
let mut scored_results: Vec<ScoredResult> = Vec::new();
let results = request.get_results()?;
for i in 0..results.len() {
let result = results.get(i);
let mut score = result.get_score();
let snippet = result.get_snippet()?;
if snippet.contains(" cat ") {
score *= 10000.0;
}
if snippet.contains(" dog ") {
score /= 10000.0;
}
scored_results.push(ScoredResult {score : score, result : result});
}
// sort in decreasing order
scored_results.sort_by(|v1, v2| { if v1.score < v2.score { ::std::cmp::Ordering::Greater }
else { ::std::cmp::Ordering::Less } });
let mut list = response.init_results(scored_results.len() as u32);
for i in 0..list.len() {
let mut item = list.reborrow().get(i);
let result = scored_results[i as usize];
item.set_score(result.score);
item.set_url(result.result.get_url()?);
item.set_snippet(result.result.get_snippet()?);
}
Ok(())
}
fn check_response(&self, response: search_result_list::Reader, expected_good_count: i32)
-> ::capnp::Result<()>
|
}
| {
let mut good_count : i32 = 0;
let results = response.get_results()?;
for result in results.iter() {
if result.get_score() > 1001.0 {
good_count += 1;
} else {
break;
}
}
if good_count == expected_good_count {
Ok(())
} else {
Err(::capnp::Error::failed(
format!("check_response() expected {} but got {}", expected_good_count, good_count)))
}
} | identifier_body |
catrank.rs | // Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use common::*;
use catrank_capnp::*;
#[derive(Clone, Copy)]
pub struct ScoredResult<'a> {
score: f64,
result: search_result::Reader<'a>
}
const URL_PREFIX: &'static str = "http://example.com";
pub struct CatRank;
impl ::TestCase for CatRank {
type Request = search_result_list::Owned;
type Response = search_result_list::Owned;
type Expectation = i32;
fn setup_request(&self, rng: &mut FastRand, request: search_result_list::Builder) -> i32 {
let count = rng.next_less_than(1000);
let mut good_count: i32 = 0;
let mut list = request.init_results(count);
for i in 0..count {
let mut result = list.reborrow().get(i);
result.set_score(1000.0 - i as f64);
let url_size = rng.next_less_than(100);
let url_prefix_length = URL_PREFIX.as_bytes().len();
{
let mut url = result.reborrow().init_url(url_size + url_prefix_length as u32);
url.push_str(URL_PREFIX);
for _ in 0..url_size {
url.push_ascii((97 + rng.next_less_than(26)) as u8);
}
}
let is_cat = rng.next_less_than(8) == 0;
let is_dog = rng.next_less_than(8) == 0;
if is_cat && !is_dog {
good_count += 1;
}
let mut snippet = " ".to_string();
let prefix = rng.next_less_than(20) as usize;
for _ in 0..prefix {
snippet.push_str(WORDS[rng.next_less_than(WORDS.len() as u32) as usize]);
}
if is_cat { snippet.push_str("cat ") }
if is_dog |
let suffix = rng.next_less_than(20) as usize;
for _ in 0..suffix {
snippet.push_str(WORDS[rng.next_less_than(WORDS.len() as u32) as usize]);
}
result.set_snippet(&snippet);
}
good_count
}
fn handle_request(&self, request: search_result_list::Reader,
response: search_result_list::Builder) -> ::capnp::Result<()>
{
let mut scored_results: Vec<ScoredResult> = Vec::new();
let results = request.get_results()?;
for i in 0..results.len() {
let result = results.get(i);
let mut score = result.get_score();
let snippet = result.get_snippet()?;
if snippet.contains(" cat ") {
score *= 10000.0;
}
if snippet.contains(" dog ") {
score /= 10000.0;
}
scored_results.push(ScoredResult {score : score, result : result});
}
// sort in decreasing order
scored_results.sort_by(|v1, v2| { if v1.score < v2.score { ::std::cmp::Ordering::Greater }
else { ::std::cmp::Ordering::Less } });
let mut list = response.init_results(scored_results.len() as u32);
for i in 0..list.len() {
let mut item = list.reborrow().get(i);
let result = scored_results[i as usize];
item.set_score(result.score);
item.set_url(result.result.get_url()?);
item.set_snippet(result.result.get_snippet()?);
}
Ok(())
}
fn check_response(&self, response: search_result_list::Reader, expected_good_count: i32)
-> ::capnp::Result<()>
{
let mut good_count : i32 = 0;
let results = response.get_results()?;
for result in results.iter() {
if result.get_score() > 1001.0 {
good_count += 1;
} else {
break;
}
}
if good_count == expected_good_count {
Ok(())
} else {
Err(::capnp::Error::failed(
format!("check_response() expected {} but got {}", expected_good_count, good_count)))
}
}
}
| { snippet.push_str("dog ") } | conditional_block |
catrank.rs | // Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use common::*;
use catrank_capnp::*;
#[derive(Clone, Copy)]
pub struct ScoredResult<'a> {
score: f64,
result: search_result::Reader<'a>
}
const URL_PREFIX: &'static str = "http://example.com";
pub struct CatRank;
impl ::TestCase for CatRank {
type Request = search_result_list::Owned;
type Response = search_result_list::Owned;
type Expectation = i32;
fn setup_request(&self, rng: &mut FastRand, request: search_result_list::Builder) -> i32 {
let count = rng.next_less_than(1000);
let mut good_count: i32 = 0;
let mut list = request.init_results(count);
for i in 0..count {
let mut result = list.reborrow().get(i);
result.set_score(1000.0 - i as f64);
let url_size = rng.next_less_than(100);
let url_prefix_length = URL_PREFIX.as_bytes().len();
{
let mut url = result.reborrow().init_url(url_size + url_prefix_length as u32);
url.push_str(URL_PREFIX);
for _ in 0..url_size {
url.push_ascii((97 + rng.next_less_than(26)) as u8);
}
}
let is_cat = rng.next_less_than(8) == 0;
let is_dog = rng.next_less_than(8) == 0;
if is_cat && !is_dog {
good_count += 1;
}
let mut snippet = " ".to_string();
let prefix = rng.next_less_than(20) as usize;
for _ in 0..prefix {
snippet.push_str(WORDS[rng.next_less_than(WORDS.len() as u32) as usize]);
}
if is_cat { snippet.push_str("cat ") }
if is_dog { snippet.push_str("dog ") }
let suffix = rng.next_less_than(20) as usize;
for _ in 0..suffix {
snippet.push_str(WORDS[rng.next_less_than(WORDS.len() as u32) as usize]);
}
result.set_snippet(&snippet);
}
good_count
}
fn | (&self, request: search_result_list::Reader,
response: search_result_list::Builder) -> ::capnp::Result<()>
{
let mut scored_results: Vec<ScoredResult> = Vec::new();
let results = request.get_results()?;
for i in 0..results.len() {
let result = results.get(i);
let mut score = result.get_score();
let snippet = result.get_snippet()?;
if snippet.contains(" cat ") {
score *= 10000.0;
}
if snippet.contains(" dog ") {
score /= 10000.0;
}
scored_results.push(ScoredResult {score : score, result : result});
}
// sort in decreasing order
scored_results.sort_by(|v1, v2| { if v1.score < v2.score { ::std::cmp::Ordering::Greater }
else { ::std::cmp::Ordering::Less } });
let mut list = response.init_results(scored_results.len() as u32);
for i in 0..list.len() {
let mut item = list.reborrow().get(i);
let result = scored_results[i as usize];
item.set_score(result.score);
item.set_url(result.result.get_url()?);
item.set_snippet(result.result.get_snippet()?);
}
Ok(())
}
fn check_response(&self, response: search_result_list::Reader, expected_good_count: i32)
-> ::capnp::Result<()>
{
let mut good_count : i32 = 0;
let results = response.get_results()?;
for result in results.iter() {
if result.get_score() > 1001.0 {
good_count += 1;
} else {
break;
}
}
if good_count == expected_good_count {
Ok(())
} else {
Err(::capnp::Error::failed(
format!("check_response() expected {} but got {}", expected_good_count, good_count)))
}
}
}
| handle_request | identifier_name |
catrank.rs | // Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use common::*;
use catrank_capnp::*;
#[derive(Clone, Copy)]
pub struct ScoredResult<'a> {
score: f64,
result: search_result::Reader<'a>
}
const URL_PREFIX: &'static str = "http://example.com";
pub struct CatRank;
impl ::TestCase for CatRank {
type Request = search_result_list::Owned;
type Response = search_result_list::Owned;
type Expectation = i32;
fn setup_request(&self, rng: &mut FastRand, request: search_result_list::Builder) -> i32 {
let count = rng.next_less_than(1000);
let mut good_count: i32 = 0;
let mut list = request.init_results(count);
for i in 0..count {
let mut result = list.reborrow().get(i);
result.set_score(1000.0 - i as f64);
let url_size = rng.next_less_than(100);
let url_prefix_length = URL_PREFIX.as_bytes().len();
{
let mut url = result.reborrow().init_url(url_size + url_prefix_length as u32);
url.push_str(URL_PREFIX);
for _ in 0..url_size {
url.push_ascii((97 + rng.next_less_than(26)) as u8);
}
}
let is_cat = rng.next_less_than(8) == 0;
let is_dog = rng.next_less_than(8) == 0;
if is_cat && !is_dog {
good_count += 1;
}
let mut snippet = " ".to_string();
let prefix = rng.next_less_than(20) as usize;
for _ in 0..prefix {
snippet.push_str(WORDS[rng.next_less_than(WORDS.len() as u32) as usize]);
}
if is_cat { snippet.push_str("cat ") }
if is_dog { snippet.push_str("dog ") }
let suffix = rng.next_less_than(20) as usize;
for _ in 0..suffix {
snippet.push_str(WORDS[rng.next_less_than(WORDS.len() as u32) as usize]);
}
result.set_snippet(&snippet);
}
good_count
}
fn handle_request(&self, request: search_result_list::Reader,
response: search_result_list::Builder) -> ::capnp::Result<()>
{
let mut scored_results: Vec<ScoredResult> = Vec::new();
let results = request.get_results()?;
for i in 0..results.len() {
let result = results.get(i);
let mut score = result.get_score();
let snippet = result.get_snippet()?;
if snippet.contains(" cat ") {
score *= 10000.0;
}
if snippet.contains(" dog ") {
score /= 10000.0;
}
scored_results.push(ScoredResult {score : score, result : result});
}
// sort in decreasing order
scored_results.sort_by(|v1, v2| { if v1.score < v2.score { ::std::cmp::Ordering::Greater }
else { ::std::cmp::Ordering::Less } });
let mut list = response.init_results(scored_results.len() as u32);
for i in 0..list.len() {
let mut item = list.reborrow().get(i);
let result = scored_results[i as usize];
item.set_score(result.score);
item.set_url(result.result.get_url()?);
item.set_snippet(result.result.get_snippet()?);
}
Ok(())
}
fn check_response(&self, response: search_result_list::Reader, expected_good_count: i32)
-> ::capnp::Result<()>
{
let mut good_count : i32 = 0;
let results = response.get_results()?;
for result in results.iter() {
if result.get_score() > 1001.0 {
good_count += 1;
} else {
break;
}
}
if good_count == expected_good_count {
Ok(())
} else {
Err(::capnp::Error::failed(
format!("check_response() expected {} but got {}", expected_good_count, good_count)))
}
}
} | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | random_line_split |
lib.rs | extern crate warc_parser;
extern crate nom;
mod tests {
use std::fs::File;
use std::io::prelude::*;
use nom::{Err, IResult, Needed};
fn | (sample_name: &str) -> Vec<u8> {
let full_path = "sample/".to_string() + sample_name;
let mut f = File::open(full_path).unwrap();
let mut s = Vec::new();
f.read_to_end(&mut s).unwrap();
s
}
use warc_parser;
#[test]
fn it_parses_a_plethora() {
let examples = read_sample_file("plethora.warc");
let parsed = warc_parser::records(&examples);
assert!(parsed.is_ok());
match parsed {
Err(_) => assert!(false),
Ok((i, records)) => {
let empty: Vec<u8> = Vec::new();
assert_eq!(empty, i);
assert_eq!(8, records.len());
}
}
}
#[test]
fn it_parses_single() {
let bbc = read_sample_file("bbc.warc");
let parsed = warc_parser::record(&bbc);
assert!(parsed.is_ok());
match parsed {
Err(_) => assert!(false),
Ok((i, record)) => {
let empty: Vec<u8> = Vec::new();
assert_eq!(empty, i);
assert_eq!(13, record.headers.len());
}
}
}
#[test]
fn it_parses_incomplete() {
let bbc = read_sample_file("bbc.warc");
let parsed = warc_parser::record(&bbc[..bbc.len() - 10]);
assert!(!parsed.is_ok());
match parsed {
Err(Err::Incomplete(needed)) => assert_eq!(Needed::Size(10), needed),
Err(_) => assert!(false),
Ok((_, _)) => assert!(false),
}
}
}
| read_sample_file | identifier_name |
lib.rs | extern crate warc_parser;
extern crate nom;
mod tests {
use std::fs::File;
use std::io::prelude::*;
use nom::{Err, IResult, Needed};
fn read_sample_file(sample_name: &str) -> Vec<u8> {
let full_path = "sample/".to_string() + sample_name;
let mut f = File::open(full_path).unwrap();
let mut s = Vec::new();
f.read_to_end(&mut s).unwrap();
s
} | use warc_parser;
#[test]
fn it_parses_a_plethora() {
let examples = read_sample_file("plethora.warc");
let parsed = warc_parser::records(&examples);
assert!(parsed.is_ok());
match parsed {
Err(_) => assert!(false),
Ok((i, records)) => {
let empty: Vec<u8> = Vec::new();
assert_eq!(empty, i);
assert_eq!(8, records.len());
}
}
}
#[test]
fn it_parses_single() {
let bbc = read_sample_file("bbc.warc");
let parsed = warc_parser::record(&bbc);
assert!(parsed.is_ok());
match parsed {
Err(_) => assert!(false),
Ok((i, record)) => {
let empty: Vec<u8> = Vec::new();
assert_eq!(empty, i);
assert_eq!(13, record.headers.len());
}
}
}
#[test]
fn it_parses_incomplete() {
let bbc = read_sample_file("bbc.warc");
let parsed = warc_parser::record(&bbc[..bbc.len() - 10]);
assert!(!parsed.is_ok());
match parsed {
Err(Err::Incomplete(needed)) => assert_eq!(Needed::Size(10), needed),
Err(_) => assert!(false),
Ok((_, _)) => assert!(false),
}
}
} | random_line_split |
|
lib.rs | extern crate warc_parser;
extern crate nom;
mod tests {
use std::fs::File;
use std::io::prelude::*;
use nom::{Err, IResult, Needed};
fn read_sample_file(sample_name: &str) -> Vec<u8> {
let full_path = "sample/".to_string() + sample_name;
let mut f = File::open(full_path).unwrap();
let mut s = Vec::new();
f.read_to_end(&mut s).unwrap();
s
}
use warc_parser;
#[test]
fn it_parses_a_plethora() {
let examples = read_sample_file("plethora.warc");
let parsed = warc_parser::records(&examples);
assert!(parsed.is_ok());
match parsed {
Err(_) => assert!(false),
Ok((i, records)) => {
let empty: Vec<u8> = Vec::new();
assert_eq!(empty, i);
assert_eq!(8, records.len());
}
}
}
#[test]
fn it_parses_single() {
let bbc = read_sample_file("bbc.warc");
let parsed = warc_parser::record(&bbc);
assert!(parsed.is_ok());
match parsed {
Err(_) => assert!(false),
Ok((i, record)) => {
let empty: Vec<u8> = Vec::new();
assert_eq!(empty, i);
assert_eq!(13, record.headers.len());
}
}
}
#[test]
fn it_parses_incomplete() |
}
| {
let bbc = read_sample_file("bbc.warc");
let parsed = warc_parser::record(&bbc[..bbc.len() - 10]);
assert!(!parsed.is_ok());
match parsed {
Err(Err::Incomplete(needed)) => assert_eq!(Needed::Size(10), needed),
Err(_) => assert!(false),
Ok((_, _)) => assert!(false),
}
} | identifier_body |