file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
shader.rs | use vecmath::Matrix4;
use gfx;
use gfx::{Device, DeviceHelper, ToSlice};
use device;
use device::draw::CommandBuffer;
use render;
static VERTEX: gfx::ShaderSource = shaders! {
GLSL_120: b"
#version 120
uniform mat4 projection, view;
attribute vec2 tex_coord;
attribute vec3 color, position;
varying vec2 v_tex_coord;
varying vec3 v_color;
void main() {
v_tex_coord = tex_coord;
v_color = color;
gl_Position = projection * view * vec4(position, 1.0);
}
"
GLSL_150: b"
#version 150 core
uniform mat4 projection, view;
in vec2 tex_coord;
in vec3 color, position;
out vec2 v_tex_coord;
out vec3 v_color;
void main() {
v_tex_coord = tex_coord;
v_color = color;
gl_Position = projection * view * vec4(position, 1.0);
}
"
};
static FRAGMENT: gfx::ShaderSource = shaders!{
GLSL_120: b"
#version 120
uniform sampler2D s_texture;
varying vec2 v_tex_coord;
varying vec3 v_color;
void main() {
vec4 tex_color = texture2D(s_texture, v_tex_coord);
if(tex_color.a == 0.0) // Discard transparent pixels.
discard;
gl_FragColor = tex_color * vec4(v_color, 1.0);
}
"
GLSL_150: b"
#version 150 core
out vec4 out_color;
uniform sampler2D s_texture;
in vec2 v_tex_coord;
in vec3 v_color;
void main() {
vec4 tex_color = texture(s_texture, v_tex_coord);
if(tex_color.a == 0.0) // Discard transparent pixels.
discard;
out_color = tex_color * vec4(v_color, 1.0);
}
"
};
#[shader_param(Program)]
pub struct ShaderParam {
pub projection: [[f32,..4],..4],
pub view: [[f32,..4],..4],
pub s_texture: gfx::shade::TextureParam,
}
#[vertex_format]
pub struct Vertex {
#[name="position"]
pub xyz: [f32,..3],
#[name="tex_coord"]
pub uv: [f32,..2],
#[name="color"]
pub rgb: [f32,..3],
}
impl Clone for Vertex {
fn clone(&self) -> Vertex {
*self
}
}
pub struct Buffer {
buf: gfx::BufferHandle<Vertex>,
batch: render::batch::RefBatch<_ShaderParamLink, ShaderParam>
}
pub struct Renderer<D: Device<C>, C: CommandBuffer> {
graphics: gfx::Graphics<D, C>,
params: ShaderParam,
frame: gfx::Frame,
cd: gfx::ClearData,
prog: device::Handle<u32, device::shade::ProgramInfo>,
drawstate: gfx::DrawState
}
impl<D: Device<C>, C: CommandBuffer> Renderer<D, C> {
pub fn new(mut device: D, frame: gfx::Frame, tex: gfx::TextureHandle) -> Renderer<D, C> {
let sampler = device.create_sampler(gfx::tex::SamplerInfo::new(gfx::tex::Scale, gfx::tex::Tile));
let mut graphics = gfx::Graphics::new(device);
let params = ShaderParam {
projection: [[0.0,..4],..4],
view: [[0.0,..4],..4],
s_texture: (tex, Some(sampler))
};
let prog = graphics.device.link_program(VERTEX.clone(), FRAGMENT.clone()).unwrap();
let mut drawstate = gfx::DrawState::new().depth(gfx::state::LessEqual, true);
drawstate.primitive.front_face = gfx::state::Clockwise;
Renderer {
graphics: graphics,
params: params,
frame: frame,
cd: gfx::ClearData {
color: [0.81, 0.8, 1.0, 1.0],
depth: 1.0,
stencil: 0,
},
prog: prog,
drawstate: drawstate,
}
}
pub fn set_projection(&mut self, proj_mat: Matrix4<f32>) {
self.params.projection = proj_mat;
}
pub fn | (&mut self, view_mat: Matrix4<f32>) {
self.params.view = view_mat;
}
pub fn clear(&mut self) {
self.graphics.clear(self.cd, gfx::COLOR | gfx::DEPTH, &self.frame);
}
pub fn create_buffer(&mut self, data: &[Vertex]) -> Buffer {
let buf = self.graphics.device.create_buffer(data.len(), gfx::UsageStatic);
self.graphics.device.update_buffer(buf, data, 0);
let mesh = gfx::Mesh::from_format(buf, data.len() as u32);
Buffer {
buf: buf,
batch: self.graphics.make_batch(&self.prog, &mesh, mesh.to_slice(gfx::TriangleList),
&self.drawstate).unwrap()
}
}
pub fn delete_buffer(&mut self, buf: Buffer) {
self.graphics.device.delete_buffer(buf.buf);
}
pub fn render(&mut self, buffer: Buffer) {
self.graphics.draw(&buffer.batch, &self.params, &self.frame);
}
pub fn end_frame(&mut self) {
self.graphics.end_frame();
}
}
| set_view | identifier_name |
reader.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A wrapper around any Reader to treat it as an RNG.
use io::Reader;
use rand::Rng;
use result::{Ok, Err};
use slice::SlicePrelude;
/// An RNG that reads random bytes straight from a `Reader`. This will
/// work best with an infinite reader, but this is not required.
///
/// # Panics
///
/// It will panic if it there is insufficient data to fulfill a request.
///
/// # Example
///
/// ```rust
/// use std::rand::{reader, Rng};
/// use std::io::MemReader;
///
/// let mut rng = reader::ReaderRng::new(MemReader::new(vec!(1,2,3,4,5,6,7,8)));
/// println!("{:x}", rng.gen::<uint>());
/// ```
pub struct ReaderRng<R> {
reader: R
}
impl<R: Reader> ReaderRng<R> {
/// Create a new `ReaderRng` from a `Reader`.
pub fn new(r: R) -> ReaderRng<R> {
ReaderRng {
reader: r
}
}
}
impl<R: Reader> Rng for ReaderRng<R> {
fn next_u32(&mut self) -> u32 {
// This is designed for speed: reading a LE integer on a LE
// platform just involves blitting the bytes into the memory
// of the u32, similarly for BE on BE; avoiding byteswapping.
if cfg!(target_endian="little") {
self.reader.read_le_u32().unwrap()
} else {
self.reader.read_be_u32().unwrap()
}
}
fn next_u64(&mut self) -> u64 {
// see above for explanation.
if cfg!(target_endian="little") {
self.reader.read_le_u64().unwrap()
} else {
self.reader.read_be_u64().unwrap()
}
}
fn fill_bytes(&mut self, v: &mut [u8]) {
if v.len() == 0 |
match self.reader.read_at_least(v.len(), v) {
Ok(_) => {}
Err(e) => panic!("ReaderRng.fill_bytes error: {}", e)
}
}
}
#[cfg(test)]
mod test {
use prelude::*;
use super::ReaderRng;
use io::MemReader;
use num::Int;
use rand::Rng;
#[test]
fn test_reader_rng_u64() {
// transmute from the target to avoid endianness concerns.
let v = vec![0u8, 0, 0, 0, 0, 0, 0, 1,
0 , 0, 0, 0, 0, 0, 0, 2,
0, 0, 0, 0, 0, 0, 0, 3];
let mut rng = ReaderRng::new(MemReader::new(v));
assert_eq!(rng.next_u64(), 1_u64.to_be());
assert_eq!(rng.next_u64(), 2_u64.to_be());
assert_eq!(rng.next_u64(), 3_u64.to_be());
}
#[test]
fn test_reader_rng_u32() {
let v = vec![0u8, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3];
let mut rng = ReaderRng::new(MemReader::new(v));
assert_eq!(rng.next_u32(), 1_u32.to_be());
assert_eq!(rng.next_u32(), 2_u32.to_be());
assert_eq!(rng.next_u32(), 3_u32.to_be());
}
#[test]
fn test_reader_rng_fill_bytes() {
let v = [1u8, 2, 3, 4, 5, 6, 7, 8];
let mut w = [0u8,.. 8];
let mut rng = ReaderRng::new(MemReader::new(v.as_slice().to_vec()));
rng.fill_bytes(&mut w);
assert!(v == w);
}
#[test]
#[should_fail]
fn test_reader_rng_insufficient_bytes() {
let mut rng = ReaderRng::new(MemReader::new(vec!()));
let mut v = [0u8,.. 3];
rng.fill_bytes(&mut v);
}
}
| { return } | conditional_block |
reader.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A wrapper around any Reader to treat it as an RNG.
use io::Reader;
use rand::Rng;
use result::{Ok, Err};
use slice::SlicePrelude;
/// An RNG that reads random bytes straight from a `Reader`. This will
/// work best with an infinite reader, but this is not required.
///
/// # Panics
///
/// It will panic if it there is insufficient data to fulfill a request.
///
/// # Example
///
/// ```rust
/// use std::rand::{reader, Rng};
/// use std::io::MemReader;
///
/// let mut rng = reader::ReaderRng::new(MemReader::new(vec!(1,2,3,4,5,6,7,8)));
/// println!("{:x}", rng.gen::<uint>());
/// ```
pub struct ReaderRng<R> {
reader: R
}
impl<R: Reader> ReaderRng<R> {
/// Create a new `ReaderRng` from a `Reader`.
pub fn new(r: R) -> ReaderRng<R> {
ReaderRng {
reader: r
}
}
}
impl<R: Reader> Rng for ReaderRng<R> {
fn next_u32(&mut self) -> u32 |
fn next_u64(&mut self) -> u64 {
// see above for explanation.
if cfg!(target_endian="little") {
self.reader.read_le_u64().unwrap()
} else {
self.reader.read_be_u64().unwrap()
}
}
fn fill_bytes(&mut self, v: &mut [u8]) {
if v.len() == 0 { return }
match self.reader.read_at_least(v.len(), v) {
Ok(_) => {}
Err(e) => panic!("ReaderRng.fill_bytes error: {}", e)
}
}
}
#[cfg(test)]
mod test {
use prelude::*;
use super::ReaderRng;
use io::MemReader;
use num::Int;
use rand::Rng;
#[test]
fn test_reader_rng_u64() {
// transmute from the target to avoid endianness concerns.
let v = vec![0u8, 0, 0, 0, 0, 0, 0, 1,
0 , 0, 0, 0, 0, 0, 0, 2,
0, 0, 0, 0, 0, 0, 0, 3];
let mut rng = ReaderRng::new(MemReader::new(v));
assert_eq!(rng.next_u64(), 1_u64.to_be());
assert_eq!(rng.next_u64(), 2_u64.to_be());
assert_eq!(rng.next_u64(), 3_u64.to_be());
}
#[test]
fn test_reader_rng_u32() {
let v = vec![0u8, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3];
let mut rng = ReaderRng::new(MemReader::new(v));
assert_eq!(rng.next_u32(), 1_u32.to_be());
assert_eq!(rng.next_u32(), 2_u32.to_be());
assert_eq!(rng.next_u32(), 3_u32.to_be());
}
#[test]
fn test_reader_rng_fill_bytes() {
let v = [1u8, 2, 3, 4, 5, 6, 7, 8];
let mut w = [0u8,.. 8];
let mut rng = ReaderRng::new(MemReader::new(v.as_slice().to_vec()));
rng.fill_bytes(&mut w);
assert!(v == w);
}
#[test]
#[should_fail]
fn test_reader_rng_insufficient_bytes() {
let mut rng = ReaderRng::new(MemReader::new(vec!()));
let mut v = [0u8,.. 3];
rng.fill_bytes(&mut v);
}
}
| {
// This is designed for speed: reading a LE integer on a LE
// platform just involves blitting the bytes into the memory
// of the u32, similarly for BE on BE; avoiding byteswapping.
if cfg!(target_endian="little") {
self.reader.read_le_u32().unwrap()
} else {
self.reader.read_be_u32().unwrap()
}
} | identifier_body |
reader.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A wrapper around any Reader to treat it as an RNG.
use io::Reader;
use rand::Rng;
use result::{Ok, Err};
use slice::SlicePrelude;
/// An RNG that reads random bytes straight from a `Reader`. This will
/// work best with an infinite reader, but this is not required.
///
/// # Panics
///
/// It will panic if it there is insufficient data to fulfill a request.
///
/// # Example
///
/// ```rust
/// use std::rand::{reader, Rng};
/// use std::io::MemReader;
///
/// let mut rng = reader::ReaderRng::new(MemReader::new(vec!(1,2,3,4,5,6,7,8)));
/// println!("{:x}", rng.gen::<uint>());
/// ```
pub struct ReaderRng<R> {
reader: R
}
impl<R: Reader> ReaderRng<R> {
/// Create a new `ReaderRng` from a `Reader`.
pub fn new(r: R) -> ReaderRng<R> {
ReaderRng {
reader: r
}
}
}
impl<R: Reader> Rng for ReaderRng<R> {
fn next_u32(&mut self) -> u32 {
// This is designed for speed: reading a LE integer on a LE
// platform just involves blitting the bytes into the memory
// of the u32, similarly for BE on BE; avoiding byteswapping.
if cfg!(target_endian="little") {
self.reader.read_le_u32().unwrap()
} else {
self.reader.read_be_u32().unwrap()
}
}
fn next_u64(&mut self) -> u64 {
// see above for explanation.
if cfg!(target_endian="little") {
self.reader.read_le_u64().unwrap()
} else {
self.reader.read_be_u64().unwrap()
}
}
fn | (&mut self, v: &mut [u8]) {
if v.len() == 0 { return }
match self.reader.read_at_least(v.len(), v) {
Ok(_) => {}
Err(e) => panic!("ReaderRng.fill_bytes error: {}", e)
}
}
}
#[cfg(test)]
mod test {
use prelude::*;
use super::ReaderRng;
use io::MemReader;
use num::Int;
use rand::Rng;
#[test]
fn test_reader_rng_u64() {
// transmute from the target to avoid endianness concerns.
let v = vec![0u8, 0, 0, 0, 0, 0, 0, 1,
0 , 0, 0, 0, 0, 0, 0, 2,
0, 0, 0, 0, 0, 0, 0, 3];
let mut rng = ReaderRng::new(MemReader::new(v));
assert_eq!(rng.next_u64(), 1_u64.to_be());
assert_eq!(rng.next_u64(), 2_u64.to_be());
assert_eq!(rng.next_u64(), 3_u64.to_be());
}
#[test]
fn test_reader_rng_u32() {
let v = vec![0u8, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3];
let mut rng = ReaderRng::new(MemReader::new(v));
assert_eq!(rng.next_u32(), 1_u32.to_be());
assert_eq!(rng.next_u32(), 2_u32.to_be());
assert_eq!(rng.next_u32(), 3_u32.to_be());
}
#[test]
fn test_reader_rng_fill_bytes() {
let v = [1u8, 2, 3, 4, 5, 6, 7, 8];
let mut w = [0u8,.. 8];
let mut rng = ReaderRng::new(MemReader::new(v.as_slice().to_vec()));
rng.fill_bytes(&mut w);
assert!(v == w);
}
#[test]
#[should_fail]
fn test_reader_rng_insufficient_bytes() {
let mut rng = ReaderRng::new(MemReader::new(vec!()));
let mut v = [0u8,.. 3];
rng.fill_bytes(&mut v);
}
}
| fill_bytes | identifier_name |
reader.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A wrapper around any Reader to treat it as an RNG.
use io::Reader;
use rand::Rng;
use result::{Ok, Err};
use slice::SlicePrelude;
/// An RNG that reads random bytes straight from a `Reader`. This will
/// work best with an infinite reader, but this is not required.
///
/// # Panics
///
/// It will panic if it there is insufficient data to fulfill a request.
///
/// # Example
///
/// ```rust
/// use std::rand::{reader, Rng};
/// use std::io::MemReader;
///
/// let mut rng = reader::ReaderRng::new(MemReader::new(vec!(1,2,3,4,5,6,7,8)));
/// println!("{:x}", rng.gen::<uint>());
/// ```
pub struct ReaderRng<R> {
reader: R
}
impl<R: Reader> ReaderRng<R> {
/// Create a new `ReaderRng` from a `Reader`.
pub fn new(r: R) -> ReaderRng<R> {
ReaderRng {
reader: r
}
}
}
impl<R: Reader> Rng for ReaderRng<R> {
fn next_u32(&mut self) -> u32 {
// This is designed for speed: reading a LE integer on a LE
// platform just involves blitting the bytes into the memory
// of the u32, similarly for BE on BE; avoiding byteswapping.
if cfg!(target_endian="little") {
self.reader.read_le_u32().unwrap()
} else {
self.reader.read_be_u32().unwrap()
}
}
fn next_u64(&mut self) -> u64 {
// see above for explanation.
if cfg!(target_endian="little") {
self.reader.read_le_u64().unwrap()
} else { | }
}
fn fill_bytes(&mut self, v: &mut [u8]) {
if v.len() == 0 { return }
match self.reader.read_at_least(v.len(), v) {
Ok(_) => {}
Err(e) => panic!("ReaderRng.fill_bytes error: {}", e)
}
}
}
#[cfg(test)]
mod test {
use prelude::*;
use super::ReaderRng;
use io::MemReader;
use num::Int;
use rand::Rng;
#[test]
fn test_reader_rng_u64() {
// transmute from the target to avoid endianness concerns.
let v = vec![0u8, 0, 0, 0, 0, 0, 0, 1,
0 , 0, 0, 0, 0, 0, 0, 2,
0, 0, 0, 0, 0, 0, 0, 3];
let mut rng = ReaderRng::new(MemReader::new(v));
assert_eq!(rng.next_u64(), 1_u64.to_be());
assert_eq!(rng.next_u64(), 2_u64.to_be());
assert_eq!(rng.next_u64(), 3_u64.to_be());
}
#[test]
fn test_reader_rng_u32() {
let v = vec![0u8, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3];
let mut rng = ReaderRng::new(MemReader::new(v));
assert_eq!(rng.next_u32(), 1_u32.to_be());
assert_eq!(rng.next_u32(), 2_u32.to_be());
assert_eq!(rng.next_u32(), 3_u32.to_be());
}
#[test]
fn test_reader_rng_fill_bytes() {
let v = [1u8, 2, 3, 4, 5, 6, 7, 8];
let mut w = [0u8,.. 8];
let mut rng = ReaderRng::new(MemReader::new(v.as_slice().to_vec()));
rng.fill_bytes(&mut w);
assert!(v == w);
}
#[test]
#[should_fail]
fn test_reader_rng_insufficient_bytes() {
let mut rng = ReaderRng::new(MemReader::new(vec!()));
let mut v = [0u8,.. 3];
rng.fill_bytes(&mut v);
}
} | self.reader.read_be_u64().unwrap() | random_line_split |
main.rs | //! # Synacor Challenge
//!
//! A Rust based runtime for the Synacor challenge architecture.
#![warn(missing_docs)]
extern crate byteorder;
extern crate termion;
#[macro_use] extern crate chan;
extern crate chan_signal;
extern crate libc;
extern crate synacor;
mod command;
mod debugger;
use debugger::Debugger;
use std::io::Read;
use std::fs::File;
use std::env::args;
fn | () {
let binary = if let Some(val) = args().nth(1) {
let mut buffer = Vec::new();
let mut in_file = File::open(val)
.expect("Failed to open challenge binary.");
in_file.read_to_end(&mut buffer)
.expect("Failed to read in binary contents.");
buffer
} else {
println!("Usage: debugger <binary> [replay] [injections]");
return;
};
let replay = if let Some(val) = args().nth(2) {
let mut buffer = String::new();
let mut replay_file = File::open(val)
.expect("Failed to open replay file");
replay_file.read_to_string(&mut buffer)
.expect("Failed to read in replay file");
let mut buffer: Vec<_> = buffer.chars().collect();
buffer.reverse();
println!("Replay buffer loaded");
buffer
} else {
Vec::new()
};
let injections = if let Some(val) = args().nth(3) {
let mut buffer = String::new();
let mut injection_file = File::open(val)
.expect("Failed to open injection file");
injection_file.read_to_string(&mut buffer)
.expect("Failed to read in injection file");
synacor::Injection::from_json(&buffer)
} else {
vec![]
};
let mut dbg = Debugger::new(binary, replay, &injections);
dbg.main_loop();
println!("Goodbye!");
}
| main | identifier_name |
main.rs | //! # Synacor Challenge
//!
//! A Rust based runtime for the Synacor challenge architecture.
#![warn(missing_docs)]
extern crate byteorder;
extern crate termion;
#[macro_use] extern crate chan;
extern crate chan_signal;
extern crate libc;
extern crate synacor;
mod command;
mod debugger;
use debugger::Debugger;
use std::io::Read;
use std::fs::File;
use std::env::args;
fn main() {
let binary = if let Some(val) = args().nth(1) {
let mut buffer = Vec::new();
let mut in_file = File::open(val)
.expect("Failed to open challenge binary.");
in_file.read_to_end(&mut buffer)
.expect("Failed to read in binary contents.");
buffer
} else {
println!("Usage: debugger <binary> [replay] [injections]");
return;
};
let replay = if let Some(val) = args().nth(2) | else {
Vec::new()
};
let injections = if let Some(val) = args().nth(3) {
let mut buffer = String::new();
let mut injection_file = File::open(val)
.expect("Failed to open injection file");
injection_file.read_to_string(&mut buffer)
.expect("Failed to read in injection file");
synacor::Injection::from_json(&buffer)
} else {
vec![]
};
let mut dbg = Debugger::new(binary, replay, &injections);
dbg.main_loop();
println!("Goodbye!");
}
| {
let mut buffer = String::new();
let mut replay_file = File::open(val)
.expect("Failed to open replay file");
replay_file.read_to_string(&mut buffer)
.expect("Failed to read in replay file");
let mut buffer: Vec<_> = buffer.chars().collect();
buffer.reverse();
println!("Replay buffer loaded");
buffer
} | conditional_block |
main.rs | //! # Synacor Challenge
//!
//! A Rust based runtime for the Synacor challenge architecture.
#![warn(missing_docs)]
extern crate byteorder;
extern crate termion;
#[macro_use] extern crate chan;
extern crate chan_signal;
extern crate libc;
extern crate synacor;
mod command;
mod debugger;
use debugger::Debugger;
use std::io::Read;
use std::fs::File;
use std::env::args;
fn main() {
let binary = if let Some(val) = args().nth(1) {
let mut buffer = Vec::new();
let mut in_file = File::open(val)
.expect("Failed to open challenge binary.");
in_file.read_to_end(&mut buffer)
.expect("Failed to read in binary contents.");
buffer
} else {
println!("Usage: debugger <binary> [replay] [injections]");
return;
};
let replay = if let Some(val) = args().nth(2) {
let mut buffer = String::new();
let mut replay_file = File::open(val)
.expect("Failed to open replay file");
replay_file.read_to_string(&mut buffer)
.expect("Failed to read in replay file");
let mut buffer: Vec<_> = buffer.chars().collect();
buffer.reverse();
println!("Replay buffer loaded"); | buffer
} else {
Vec::new()
};
let injections = if let Some(val) = args().nth(3) {
let mut buffer = String::new();
let mut injection_file = File::open(val)
.expect("Failed to open injection file");
injection_file.read_to_string(&mut buffer)
.expect("Failed to read in injection file");
synacor::Injection::from_json(&buffer)
} else {
vec![]
};
let mut dbg = Debugger::new(binary, replay, &injections);
dbg.main_loop();
println!("Goodbye!");
} | random_line_split |
|
main.rs | //! # Synacor Challenge
//!
//! A Rust based runtime for the Synacor challenge architecture.
#![warn(missing_docs)]
extern crate byteorder;
extern crate termion;
#[macro_use] extern crate chan;
extern crate chan_signal;
extern crate libc;
extern crate synacor;
mod command;
mod debugger;
use debugger::Debugger;
use std::io::Read;
use std::fs::File;
use std::env::args;
fn main() | buffer.reverse();
println!("Replay buffer loaded");
buffer
} else {
Vec::new()
};
let injections = if let Some(val) = args().nth(3) {
let mut buffer = String::new();
let mut injection_file = File::open(val)
.expect("Failed to open injection file");
injection_file.read_to_string(&mut buffer)
.expect("Failed to read in injection file");
synacor::Injection::from_json(&buffer)
} else {
vec![]
};
let mut dbg = Debugger::new(binary, replay, &injections);
dbg.main_loop();
println!("Goodbye!");
}
| {
let binary = if let Some(val) = args().nth(1) {
let mut buffer = Vec::new();
let mut in_file = File::open(val)
.expect("Failed to open challenge binary.");
in_file.read_to_end(&mut buffer)
.expect("Failed to read in binary contents.");
buffer
} else {
println!("Usage: debugger <binary> [replay] [injections]");
return;
};
let replay = if let Some(val) = args().nth(2) {
let mut buffer = String::new();
let mut replay_file = File::open(val)
.expect("Failed to open replay file");
replay_file.read_to_string(&mut buffer)
.expect("Failed to read in replay file");
let mut buffer: Vec<_> = buffer.chars().collect(); | identifier_body |
rectangle.rs | use point::Point;
#[derive(Debug, Hash, Eq, PartialEq)]
pub struct Rectangle {
pub top_left: Point<i16>,
pub bottom_right: Point<i16>,
} | top_left: top_left,
bottom_right: Point {
x: top_left.x + width as i16,
y: top_left.y + height as i16,
}
}
}
pub fn centre(&self) -> Point<i16> {
Point {
x: ((self.top_left.x + self.bottom_right.x) / 2),
y: ((self.top_left.y + self.bottom_right.y) / 2),
}
}
pub fn is_intersecting(&self, other: &Rectangle) -> bool {
self.top_left.x <= other.bottom_right.x && self.bottom_right.x >= other.top_left.x
&& self.top_left.y <= other.bottom_right.y && self.bottom_right.y >= other.top_left.y
}
pub fn clamp_to(&mut self, (left, top): (i16, i16), (right, bottom): (i16, i16)) {
if self.top_left.x < left {
let diff = left - self.top_left.x;
self.top_left.x += diff;
self.bottom_right.x += diff;
}
if self.top_left.y < top {
let diff = top - self.top_left.y;
self.top_left.y += diff;
self.bottom_right.y += diff;
}
if self.bottom_right.x > right {
let diff = right - self.bottom_right.x;
self.top_left.x += diff;
self.bottom_right.x += diff;
}
if self.bottom_right.y > bottom {
let diff = bottom - self.bottom_right.y;
self.top_left.y += diff;
self.bottom_right.y += diff;
}
}
} |
impl Rectangle {
pub fn new(top_left: Point<i16>, (width, height): (u8, u8)) -> Rectangle {
Rectangle { | random_line_split |
rectangle.rs | use point::Point;
#[derive(Debug, Hash, Eq, PartialEq)]
pub struct Rectangle {
pub top_left: Point<i16>,
pub bottom_right: Point<i16>,
}
impl Rectangle {
pub fn new(top_left: Point<i16>, (width, height): (u8, u8)) -> Rectangle {
Rectangle {
top_left: top_left,
bottom_right: Point {
x: top_left.x + width as i16,
y: top_left.y + height as i16,
}
}
}
pub fn centre(&self) -> Point<i16> {
Point {
x: ((self.top_left.x + self.bottom_right.x) / 2),
y: ((self.top_left.y + self.bottom_right.y) / 2),
}
}
pub fn is_intersecting(&self, other: &Rectangle) -> bool {
self.top_left.x <= other.bottom_right.x && self.bottom_right.x >= other.top_left.x
&& self.top_left.y <= other.bottom_right.y && self.bottom_right.y >= other.top_left.y
}
pub fn clamp_to(&mut self, (left, top): (i16, i16), (right, bottom): (i16, i16)) {
if self.top_left.x < left |
if self.top_left.y < top {
let diff = top - self.top_left.y;
self.top_left.y += diff;
self.bottom_right.y += diff;
}
if self.bottom_right.x > right {
let diff = right - self.bottom_right.x;
self.top_left.x += diff;
self.bottom_right.x += diff;
}
if self.bottom_right.y > bottom {
let diff = bottom - self.bottom_right.y;
self.top_left.y += diff;
self.bottom_right.y += diff;
}
}
} | {
let diff = left - self.top_left.x;
self.top_left.x += diff;
self.bottom_right.x += diff;
} | conditional_block |
rectangle.rs | use point::Point;
#[derive(Debug, Hash, Eq, PartialEq)]
pub struct Rectangle {
pub top_left: Point<i16>,
pub bottom_right: Point<i16>,
}
impl Rectangle {
pub fn new(top_left: Point<i16>, (width, height): (u8, u8)) -> Rectangle {
Rectangle {
top_left: top_left,
bottom_right: Point {
x: top_left.x + width as i16,
y: top_left.y + height as i16,
}
}
}
pub fn centre(&self) -> Point<i16> |
pub fn is_intersecting(&self, other: &Rectangle) -> bool {
self.top_left.x <= other.bottom_right.x && self.bottom_right.x >= other.top_left.x
&& self.top_left.y <= other.bottom_right.y && self.bottom_right.y >= other.top_left.y
}
pub fn clamp_to(&mut self, (left, top): (i16, i16), (right, bottom): (i16, i16)) {
if self.top_left.x < left {
let diff = left - self.top_left.x;
self.top_left.x += diff;
self.bottom_right.x += diff;
}
if self.top_left.y < top {
let diff = top - self.top_left.y;
self.top_left.y += diff;
self.bottom_right.y += diff;
}
if self.bottom_right.x > right {
let diff = right - self.bottom_right.x;
self.top_left.x += diff;
self.bottom_right.x += diff;
}
if self.bottom_right.y > bottom {
let diff = bottom - self.bottom_right.y;
self.top_left.y += diff;
self.bottom_right.y += diff;
}
}
} | {
Point {
x: ((self.top_left.x + self.bottom_right.x) / 2),
y: ((self.top_left.y + self.bottom_right.y) / 2),
}
} | identifier_body |
rectangle.rs | use point::Point;
#[derive(Debug, Hash, Eq, PartialEq)]
pub struct Rectangle {
pub top_left: Point<i16>,
pub bottom_right: Point<i16>,
}
impl Rectangle {
pub fn new(top_left: Point<i16>, (width, height): (u8, u8)) -> Rectangle {
Rectangle {
top_left: top_left,
bottom_right: Point {
x: top_left.x + width as i16,
y: top_left.y + height as i16,
}
}
}
pub fn centre(&self) -> Point<i16> {
Point {
x: ((self.top_left.x + self.bottom_right.x) / 2),
y: ((self.top_left.y + self.bottom_right.y) / 2),
}
}
pub fn | (&self, other: &Rectangle) -> bool {
self.top_left.x <= other.bottom_right.x && self.bottom_right.x >= other.top_left.x
&& self.top_left.y <= other.bottom_right.y && self.bottom_right.y >= other.top_left.y
}
pub fn clamp_to(&mut self, (left, top): (i16, i16), (right, bottom): (i16, i16)) {
if self.top_left.x < left {
let diff = left - self.top_left.x;
self.top_left.x += diff;
self.bottom_right.x += diff;
}
if self.top_left.y < top {
let diff = top - self.top_left.y;
self.top_left.y += diff;
self.bottom_right.y += diff;
}
if self.bottom_right.x > right {
let diff = right - self.bottom_right.x;
self.top_left.x += diff;
self.bottom_right.x += diff;
}
if self.bottom_right.y > bottom {
let diff = bottom - self.bottom_right.y;
self.top_left.y += diff;
self.bottom_right.y += diff;
}
}
} | is_intersecting | identifier_name |
dep-graph-caller-callee.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that immediate callers have to change when callee changes, but
// not callers' callers.
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#![allow(dead_code)]
fn main() { }
mod x {
#[rustc_if_this_changed]
pub fn x() { }
}
mod y {
use x;
// These dependencies SHOULD exist:
#[rustc_then_this_would_need(TypeckTables)] //~ ERROR OK
pub fn y() {
x::x();
} | }
mod z {
use y;
// These are expected to yield errors, because changes to `x`
// affect the BODY of `y`, but not its signature.
#[rustc_then_this_would_need(TypeckTables)] //~ ERROR no path
pub fn z() {
y::y();
}
} | random_line_split |
|
dep-graph-caller-callee.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that immediate callers have to change when callee changes, but
// not callers' callers.
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#![allow(dead_code)]
fn main() { }
mod x {
#[rustc_if_this_changed]
pub fn x() |
}
mod y {
use x;
// These dependencies SHOULD exist:
#[rustc_then_this_would_need(TypeckTables)] //~ ERROR OK
pub fn y() {
x::x();
}
}
mod z {
use y;
// These are expected to yield errors, because changes to `x`
// affect the BODY of `y`, but not its signature.
#[rustc_then_this_would_need(TypeckTables)] //~ ERROR no path
pub fn z() {
y::y();
}
}
| { } | identifier_body |
dep-graph-caller-callee.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that immediate callers have to change when callee changes, but
// not callers' callers.
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#![allow(dead_code)]
fn main() { }
mod x {
#[rustc_if_this_changed]
pub fn x() { }
}
mod y {
use x;
// These dependencies SHOULD exist:
#[rustc_then_this_would_need(TypeckTables)] //~ ERROR OK
pub fn y() {
x::x();
}
}
mod z {
use y;
// These are expected to yield errors, because changes to `x`
// affect the BODY of `y`, but not its signature.
#[rustc_then_this_would_need(TypeckTables)] //~ ERROR no path
pub fn | () {
y::y();
}
}
| z | identifier_name |
games.rs | use crate::*;
#[get("/newgame")]
pub fn newgame<'a>() -> ContRes<'a> {
respond_page("newgame", newgame_con())
}
pub fn newgame_con() -> Context {
let conn = lock_database();
let mut stmt = conn.prepare("SELECT id, name FROM players order by random()").unwrap();
let names: Vec<_> = stmt.query_map(NO_PARAMS, |row| {
Named {
id: row.get(0),
name: row.get(1)
}
})
.unwrap()
.map(Result::unwrap)
.collect();
let mut ballstmt = conn.prepare("SELECT id, name, img FROM balls").unwrap();
let balls: Vec<_> = ballstmt.query_map(NO_PARAMS, |row| {
Ball {
id: row.get(0),
name: row.get(1),
img: row.get(2),
}
})
.unwrap()
.map(Result::unwrap)
.collect();
let mut context = create_context("games");
context.insert("names", &names);
context.insert("balls", &balls);
context
}
#[derive(FromForm)]
pub struct NewGame {
home: i32,
away: i32,
home_score: i32,
away_score: i32,
ball: i32,
secret: String,
#[allow(dead_code)]
submit: IgnoreField
}
#[post("/newgame/submit", data = "<f>")]
pub fn submit_newgame<'a>(f: Form<NewGame>) -> Resp<'a> {
let f = f.into_inner();
if f.secret!= CONFIG.secret {
let mut context = newgame_con();
context.insert("fejl", &"Det indtastede kodeord er forkert 💩");
return Resp::cont(respond_page("newgame_fejl", context));
} | f.home == f.away || f.home_score > 10 || f.away_score > 10 {
let mut context = newgame_con();
context.insert("fejl",
&"Den indtastede kamp er ikke lovlig 😜");
return Resp::cont(respond_page("newgame_fejl", context));
}
let res = lock_database().execute("INSERT INTO games (home_id, away_id, home_score, away_score, dato, \
ball_id) VALUES (?,?,?,?, datetime('now'),?)",
&[&f.home, &f.away, &f.home_score, &f.away_score, &f.ball]);
println!("{:?}", res);
Resp::red(Redirect::to("/"))
}
#[get("/games")]
pub fn games<'a>() -> ContRes<'a> {
let conn = lock_database();
let mut stmt =
conn.prepare("SELECT (SELECT name FROM players p WHERE p.id = g.home_id) AS home, \
(SELECT name FROM players p WHERE p.id = g.away_id) AS away, home_score, \
away_score, ball_id, (SELECT img FROM balls b WHERE ball_id = b.id), \
(SELECT name FROM balls b WHERE ball_id = b.id), dato FROM games g WHERE dato > date('now','start of month') ORDER BY dato DESC")
.unwrap();
let games: Vec<_> = stmt.query_map(NO_PARAMS, |row| {
PlayedGame {
home: row.get(0),
away: row.get(1),
home_score: row.get(2),
away_score: row.get(3),
ball: row.get(5),
ball_name: row.get(6),
dato: row.get(7),
}
})
.unwrap()
.map(Result::unwrap)
.collect();
let mut context = create_context("games");
context.insert("games", &games);
respond_page("games", context)
} |
if !(f.home_score == 10 || f.away_score == 10) || f.home_score == f.away_score || | random_line_split |
games.rs | use crate::*;
#[get("/newgame")]
pub fn newgame<'a>() -> ContRes<'a> {
respond_page("newgame", newgame_con())
}
pub fn newgame_con() -> Context {
let conn = lock_database();
let mut stmt = conn.prepare("SELECT id, name FROM players order by random()").unwrap();
let names: Vec<_> = stmt.query_map(NO_PARAMS, |row| {
Named {
id: row.get(0),
name: row.get(1)
}
})
.unwrap()
.map(Result::unwrap)
.collect();
let mut ballstmt = conn.prepare("SELECT id, name, img FROM balls").unwrap();
let balls: Vec<_> = ballstmt.query_map(NO_PARAMS, |row| {
Ball {
id: row.get(0),
name: row.get(1),
img: row.get(2),
}
})
.unwrap()
.map(Result::unwrap)
.collect();
let mut context = create_context("games");
context.insert("names", &names);
context.insert("balls", &balls);
context
}
#[derive(FromForm)]
pub struct NewGame {
home: i32,
away: i32,
home_score: i32,
away_score: i32,
ball: i32,
secret: String,
#[allow(dead_code)]
submit: IgnoreField
}
#[post("/newgame/submit", data = "<f>")]
pub fn submit_newgame<'a>(f: Form<NewGame>) -> Resp<'a> {
let f = f.into_inner();
if f.secret!= CONFIG.secret | if!(f.home_score == 10 || f.away_score == 10) || f.home_score == f.away_score ||
f.home == f.away || f.home_score > 10 || f.away_score > 10 {
let mut context = newgame_con();
context.insert("fejl",
&"Den indtastede kamp er ikke lovlig 😜");
return Resp::cont(respond_page("newgame_fejl", context));
}
let res = lock_database().execute("INSERT INTO games (home_id, away_id, home_score, away_score, dato, \
ball_id) VALUES (?,?,?,?, datetime('now'),?)",
&[&f.home, &f.away, &f.home_score, &f.away_score, &f.ball]);
println!("{:?}", res);
Resp::red(Redirect::to("/"))
}
#[get("/games")]
pub fn games<'a>() -> ContRes<'a> {
let conn = lock_database();
let mut stmt =
conn.prepare("SELECT (SELECT name FROM players p WHERE p.id = g.home_id) AS home, \
(SELECT name FROM players p WHERE p.id = g.away_id) AS away, home_score, \
away_score, ball_id, (SELECT img FROM balls b WHERE ball_id = b.id), \
(SELECT name FROM balls b WHERE ball_id = b.id), dato FROM games g WHERE dato > date('now','start of month') ORDER BY dato DESC")
.unwrap();
let games: Vec<_> = stmt.query_map(NO_PARAMS, |row| {
PlayedGame {
home: row.get(0),
away: row.get(1),
home_score: row.get(2),
away_score: row.get(3),
ball: row.get(5),
ball_name: row.get(6),
dato: row.get(7),
}
})
.unwrap()
.map(Result::unwrap)
.collect();
let mut context = create_context("games");
context.insert("games", &games);
respond_page("games", context)
}
| {
let mut context = newgame_con();
context.insert("fejl", &"Det indtastede kodeord er forkert 💩");
return Resp::cont(respond_page("newgame_fejl", context));
}
| conditional_block |
games.rs | use crate::*;
#[get("/newgame")]
pub fn newgame<'a>() -> ContRes<'a> {
respond_page("newgame", newgame_con())
}
pub fn newgame_con() -> Context | })
.unwrap()
.map(Result::unwrap)
.collect();
let mut context = create_context("games");
context.insert("names", &names);
context.insert("balls", &balls);
context
}
#[derive(FromForm)]
pub struct NewGame {
home: i32,
away: i32,
home_score: i32,
away_score: i32,
ball: i32,
secret: String,
#[allow(dead_code)]
submit: IgnoreField
}
#[post("/newgame/submit", data = "<f>")]
pub fn submit_newgame<'a>(f: Form<NewGame>) -> Resp<'a> {
let f = f.into_inner();
if f.secret!= CONFIG.secret {
let mut context = newgame_con();
context.insert("fejl", &"Det indtastede kodeord er forkert 💩");
return Resp::cont(respond_page("newgame_fejl", context));
}
if!(f.home_score == 10 || f.away_score == 10) || f.home_score == f.away_score ||
f.home == f.away || f.home_score > 10 || f.away_score > 10 {
let mut context = newgame_con();
context.insert("fejl",
&"Den indtastede kamp er ikke lovlig 😜");
return Resp::cont(respond_page("newgame_fejl", context));
}
let res = lock_database().execute("INSERT INTO games (home_id, away_id, home_score, away_score, dato, \
ball_id) VALUES (?,?,?,?, datetime('now'),?)",
&[&f.home, &f.away, &f.home_score, &f.away_score, &f.ball]);
println!("{:?}", res);
Resp::red(Redirect::to("/"))
}
#[get("/games")]
pub fn games<'a>() -> ContRes<'a> {
let conn = lock_database();
let mut stmt =
conn.prepare("SELECT (SELECT name FROM players p WHERE p.id = g.home_id) AS home, \
(SELECT name FROM players p WHERE p.id = g.away_id) AS away, home_score, \
away_score, ball_id, (SELECT img FROM balls b WHERE ball_id = b.id), \
(SELECT name FROM balls b WHERE ball_id = b.id), dato FROM games g WHERE dato > date('now','start of month') ORDER BY dato DESC")
.unwrap();
let games: Vec<_> = stmt.query_map(NO_PARAMS, |row| {
PlayedGame {
home: row.get(0),
away: row.get(1),
home_score: row.get(2),
away_score: row.get(3),
ball: row.get(5),
ball_name: row.get(6),
dato: row.get(7),
}
})
.unwrap()
.map(Result::unwrap)
.collect();
let mut context = create_context("games");
context.insert("games", &games);
respond_page("games", context)
}
| {
let conn = lock_database();
let mut stmt = conn.prepare("SELECT id, name FROM players order by random()").unwrap();
let names: Vec<_> = stmt.query_map(NO_PARAMS, |row| {
Named {
id: row.get(0),
name: row.get(1)
}
})
.unwrap()
.map(Result::unwrap)
.collect();
let mut ballstmt = conn.prepare("SELECT id, name, img FROM balls").unwrap();
let balls: Vec<_> = ballstmt.query_map(NO_PARAMS, |row| {
Ball {
id: row.get(0),
name: row.get(1),
img: row.get(2),
} | identifier_body |
games.rs | use crate::*;
#[get("/newgame")]
pub fn newgame<'a>() -> ContRes<'a> {
respond_page("newgame", newgame_con())
}
pub fn | () -> Context {
let conn = lock_database();
let mut stmt = conn.prepare("SELECT id, name FROM players order by random()").unwrap();
let names: Vec<_> = stmt.query_map(NO_PARAMS, |row| {
Named {
id: row.get(0),
name: row.get(1)
}
})
.unwrap()
.map(Result::unwrap)
.collect();
let mut ballstmt = conn.prepare("SELECT id, name, img FROM balls").unwrap();
let balls: Vec<_> = ballstmt.query_map(NO_PARAMS, |row| {
Ball {
id: row.get(0),
name: row.get(1),
img: row.get(2),
}
})
.unwrap()
.map(Result::unwrap)
.collect();
let mut context = create_context("games");
context.insert("names", &names);
context.insert("balls", &balls);
context
}
#[derive(FromForm)]
pub struct NewGame {
home: i32,
away: i32,
home_score: i32,
away_score: i32,
ball: i32,
secret: String,
#[allow(dead_code)]
submit: IgnoreField
}
#[post("/newgame/submit", data = "<f>")]
pub fn submit_newgame<'a>(f: Form<NewGame>) -> Resp<'a> {
let f = f.into_inner();
if f.secret!= CONFIG.secret {
let mut context = newgame_con();
context.insert("fejl", &"Det indtastede kodeord er forkert 💩");
return Resp::cont(respond_page("newgame_fejl", context));
}
if!(f.home_score == 10 || f.away_score == 10) || f.home_score == f.away_score ||
f.home == f.away || f.home_score > 10 || f.away_score > 10 {
let mut context = newgame_con();
context.insert("fejl",
&"Den indtastede kamp er ikke lovlig 😜");
return Resp::cont(respond_page("newgame_fejl", context));
}
let res = lock_database().execute("INSERT INTO games (home_id, away_id, home_score, away_score, dato, \
ball_id) VALUES (?,?,?,?, datetime('now'),?)",
&[&f.home, &f.away, &f.home_score, &f.away_score, &f.ball]);
println!("{:?}", res);
Resp::red(Redirect::to("/"))
}
#[get("/games")]
pub fn games<'a>() -> ContRes<'a> {
let conn = lock_database();
let mut stmt =
conn.prepare("SELECT (SELECT name FROM players p WHERE p.id = g.home_id) AS home, \
(SELECT name FROM players p WHERE p.id = g.away_id) AS away, home_score, \
away_score, ball_id, (SELECT img FROM balls b WHERE ball_id = b.id), \
(SELECT name FROM balls b WHERE ball_id = b.id), dato FROM games g WHERE dato > date('now','start of month') ORDER BY dato DESC")
.unwrap();
let games: Vec<_> = stmt.query_map(NO_PARAMS, |row| {
PlayedGame {
home: row.get(0),
away: row.get(1),
home_score: row.get(2),
away_score: row.get(3),
ball: row.get(5),
ball_name: row.get(6),
dato: row.get(7),
}
})
.unwrap()
.map(Result::unwrap)
.collect();
let mut context = create_context("games");
context.insert("games", &games);
respond_page("games", context)
}
| newgame_con | identifier_name |
util.rs | #![allow(dead_code)]
use std::env;
use std::fs::{self, File};
use std::io::{Read, Write};
#[cfg(unix)]
use std::os::unix::fs::symlink as symlink_file;
#[cfg(windows)]
use std::os::windows::fs::symlink_file;
use std::path::Path;
use std::process::{Command, Stdio};
use std::str::from_utf8;
#[macro_export]
macro_rules! assert_empty_stderr(
($cond:expr) => (
if $cond.stderr.len() > 0 {
panic!(format!("stderr: {}", $cond.stderr))
}
);
);
pub struct CmdResult {
pub success: bool,
pub stdout: String,
pub stderr: String,
}
pub fn run(cmd: &mut Command) -> CmdResult {
let prog = cmd.output().unwrap();
CmdResult {
success: prog.status.success(),
stdout: from_utf8(&prog.stdout).unwrap().to_string(),
stderr: from_utf8(&prog.stderr).unwrap().to_string(),
}
}
pub fn run_piped_stdin<T: AsRef<[u8]>>(cmd: &mut Command, input: T)-> CmdResult {
let mut command = cmd
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
.unwrap();
command.stdin
.take()
.unwrap_or_else(|| panic!("Could not take child process stdin"))
.write_all(input.as_ref())
.unwrap_or_else(|e| panic!("{}", e));
let prog = command.wait_with_output().unwrap();
CmdResult {
success: prog.status.success(),
stdout: from_utf8(&prog.stdout).unwrap().to_string(),
stderr: from_utf8(&prog.stderr).unwrap().to_string(),
}
}
pub fn get_file_contents(name: &str) -> String {
let mut f = File::open(Path::new(name)).unwrap();
let mut contents = String::new();
let _ = f.read_to_string(&mut contents);
contents
}
pub fn mkdir(dir: &str) {
fs::create_dir(Path::new(dir)).unwrap();
}
pub fn make_file(name: &str) -> File {
match File::create(Path::new(name)) {
Ok(f) => f,
Err(e) => panic!("{}", e)
}
}
pub fn touch(file: &str) |
pub fn symlink(src: &str, dst: &str) {
symlink_file(src, dst).unwrap();
}
pub fn is_symlink(path: &str) -> bool {
match fs::symlink_metadata(path) {
Ok(m) => m.file_type().is_symlink(),
Err(_) => false
}
}
pub fn resolve_link(path: &str) -> String {
match fs::read_link(path) {
Ok(p) => p.to_str().unwrap().to_owned(),
Err(_) => "".to_string()
}
}
pub fn metadata(path: &str) -> fs::Metadata {
match fs::metadata(path) {
Ok(m) => m,
Err(e) => panic!("{}", e)
}
}
pub fn file_exists(path: &str) -> bool {
match fs::metadata(path) {
Ok(m) => m.is_file(),
Err(_) => false
}
}
pub fn dir_exists(path: &str) -> bool {
match fs::metadata(path) {
Ok(m) => m.is_dir(),
Err(_) => false
}
}
pub fn cleanup(path: &'static str) {
let p = Path::new(path);
match fs::metadata(p) {
Ok(m) => if m.is_file() {
fs::remove_file(&p).unwrap();
} else {
fs::remove_dir(&p).unwrap();
},
Err(_) => {}
}
}
pub fn current_directory() -> String {
env::current_dir().unwrap().into_os_string().into_string().unwrap()
}
pub fn repeat_str(s: &str, n: u32) -> String {
let mut repeated = String::new();
for _ in 0.. n {
repeated.push_str(s);
}
repeated
}
| {
File::create(Path::new(file)).unwrap();
} | identifier_body |
util.rs | #![allow(dead_code)]
use std::env;
use std::fs::{self, File};
use std::io::{Read, Write};
#[cfg(unix)]
use std::os::unix::fs::symlink as symlink_file;
#[cfg(windows)]
use std::os::windows::fs::symlink_file;
use std::path::Path;
use std::process::{Command, Stdio};
use std::str::from_utf8;
#[macro_export]
macro_rules! assert_empty_stderr(
($cond:expr) => (
if $cond.stderr.len() > 0 {
panic!(format!("stderr: {}", $cond.stderr))
}
);
);
pub struct CmdResult {
pub success: bool,
pub stdout: String,
pub stderr: String,
}
pub fn run(cmd: &mut Command) -> CmdResult {
let prog = cmd.output().unwrap();
CmdResult {
success: prog.status.success(),
stdout: from_utf8(&prog.stdout).unwrap().to_string(),
stderr: from_utf8(&prog.stderr).unwrap().to_string(),
}
}
pub fn run_piped_stdin<T: AsRef<[u8]>>(cmd: &mut Command, input: T)-> CmdResult {
let mut command = cmd
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
.unwrap();
command.stdin
.take()
.unwrap_or_else(|| panic!("Could not take child process stdin"))
.write_all(input.as_ref())
.unwrap_or_else(|e| panic!("{}", e));
let prog = command.wait_with_output().unwrap();
CmdResult {
success: prog.status.success(),
stdout: from_utf8(&prog.stdout).unwrap().to_string(),
stderr: from_utf8(&prog.stderr).unwrap().to_string(),
}
}
pub fn get_file_contents(name: &str) -> String {
let mut f = File::open(Path::new(name)).unwrap();
let mut contents = String::new();
let _ = f.read_to_string(&mut contents);
contents
}
pub fn mkdir(dir: &str) {
fs::create_dir(Path::new(dir)).unwrap();
}
pub fn make_file(name: &str) -> File {
match File::create(Path::new(name)) {
Ok(f) => f,
Err(e) => panic!("{}", e)
} | pub fn touch(file: &str) {
File::create(Path::new(file)).unwrap();
}
pub fn symlink(src: &str, dst: &str) {
symlink_file(src, dst).unwrap();
}
pub fn is_symlink(path: &str) -> bool {
match fs::symlink_metadata(path) {
Ok(m) => m.file_type().is_symlink(),
Err(_) => false
}
}
pub fn resolve_link(path: &str) -> String {
match fs::read_link(path) {
Ok(p) => p.to_str().unwrap().to_owned(),
Err(_) => "".to_string()
}
}
pub fn metadata(path: &str) -> fs::Metadata {
match fs::metadata(path) {
Ok(m) => m,
Err(e) => panic!("{}", e)
}
}
pub fn file_exists(path: &str) -> bool {
match fs::metadata(path) {
Ok(m) => m.is_file(),
Err(_) => false
}
}
pub fn dir_exists(path: &str) -> bool {
match fs::metadata(path) {
Ok(m) => m.is_dir(),
Err(_) => false
}
}
pub fn cleanup(path: &'static str) {
let p = Path::new(path);
match fs::metadata(p) {
Ok(m) => if m.is_file() {
fs::remove_file(&p).unwrap();
} else {
fs::remove_dir(&p).unwrap();
},
Err(_) => {}
}
}
pub fn current_directory() -> String {
env::current_dir().unwrap().into_os_string().into_string().unwrap()
}
pub fn repeat_str(s: &str, n: u32) -> String {
let mut repeated = String::new();
for _ in 0.. n {
repeated.push_str(s);
}
repeated
} | }
| random_line_split |
util.rs | #![allow(dead_code)]
use std::env;
use std::fs::{self, File};
use std::io::{Read, Write};
#[cfg(unix)]
use std::os::unix::fs::symlink as symlink_file;
#[cfg(windows)]
use std::os::windows::fs::symlink_file;
use std::path::Path;
use std::process::{Command, Stdio};
use std::str::from_utf8;
#[macro_export]
macro_rules! assert_empty_stderr(
($cond:expr) => (
if $cond.stderr.len() > 0 {
panic!(format!("stderr: {}", $cond.stderr))
}
);
);
pub struct CmdResult {
pub success: bool,
pub stdout: String,
pub stderr: String,
}
pub fn run(cmd: &mut Command) -> CmdResult {
let prog = cmd.output().unwrap();
CmdResult {
success: prog.status.success(),
stdout: from_utf8(&prog.stdout).unwrap().to_string(),
stderr: from_utf8(&prog.stderr).unwrap().to_string(),
}
}
pub fn run_piped_stdin<T: AsRef<[u8]>>(cmd: &mut Command, input: T)-> CmdResult {
let mut command = cmd
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
.unwrap();
command.stdin
.take()
.unwrap_or_else(|| panic!("Could not take child process stdin"))
.write_all(input.as_ref())
.unwrap_or_else(|e| panic!("{}", e));
let prog = command.wait_with_output().unwrap();
CmdResult {
success: prog.status.success(),
stdout: from_utf8(&prog.stdout).unwrap().to_string(),
stderr: from_utf8(&prog.stderr).unwrap().to_string(),
}
}
pub fn get_file_contents(name: &str) -> String {
let mut f = File::open(Path::new(name)).unwrap();
let mut contents = String::new();
let _ = f.read_to_string(&mut contents);
contents
}
pub fn mkdir(dir: &str) {
fs::create_dir(Path::new(dir)).unwrap();
}
pub fn make_file(name: &str) -> File {
match File::create(Path::new(name)) {
Ok(f) => f,
Err(e) => panic!("{}", e)
}
}
pub fn touch(file: &str) {
File::create(Path::new(file)).unwrap();
}
pub fn symlink(src: &str, dst: &str) {
symlink_file(src, dst).unwrap();
}
pub fn is_symlink(path: &str) -> bool {
match fs::symlink_metadata(path) {
Ok(m) => m.file_type().is_symlink(),
Err(_) => false
}
}
pub fn resolve_link(path: &str) -> String {
match fs::read_link(path) {
Ok(p) => p.to_str().unwrap().to_owned(),
Err(_) => "".to_string()
}
}
pub fn | (path: &str) -> fs::Metadata {
match fs::metadata(path) {
Ok(m) => m,
Err(e) => panic!("{}", e)
}
}
pub fn file_exists(path: &str) -> bool {
match fs::metadata(path) {
Ok(m) => m.is_file(),
Err(_) => false
}
}
pub fn dir_exists(path: &str) -> bool {
match fs::metadata(path) {
Ok(m) => m.is_dir(),
Err(_) => false
}
}
pub fn cleanup(path: &'static str) {
let p = Path::new(path);
match fs::metadata(p) {
Ok(m) => if m.is_file() {
fs::remove_file(&p).unwrap();
} else {
fs::remove_dir(&p).unwrap();
},
Err(_) => {}
}
}
pub fn current_directory() -> String {
env::current_dir().unwrap().into_os_string().into_string().unwrap()
}
pub fn repeat_str(s: &str, n: u32) -> String {
let mut repeated = String::new();
for _ in 0.. n {
repeated.push_str(s);
}
repeated
}
| metadata | identifier_name |
canvasgradient.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::canvas::{CanvasGradientStop, FillOrStrokeStyle, LinearGradientStyle, RadialGradientStyle};
use cssparser::{Parser, ParserInput, RGBA};
use cssparser::Color as CSSColor;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::CanvasGradientBinding;
use dom::bindings::codegen::Bindings::CanvasGradientBinding::CanvasGradientMethods;
use dom::bindings::error::{Error, ErrorResult};
use dom::bindings::js::Root;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::bindings::str::DOMString;
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
// https://html.spec.whatwg.org/multipage/#canvasgradient
#[dom_struct]
pub struct CanvasGradient {
reflector_: Reflector,
style: CanvasGradientStyle,
stops: DOMRefCell<Vec<CanvasGradientStop>>,
}
#[derive(JSTraceable, Clone, HeapSizeOf)]
pub enum CanvasGradientStyle {
Linear(LinearGradientStyle),
Radial(RadialGradientStyle),
}
impl CanvasGradient {
fn new_inherited(style: CanvasGradientStyle) -> CanvasGradient {
CanvasGradient {
reflector_: Reflector::new(),
style: style,
stops: DOMRefCell::new(Vec::new()),
}
}
pub fn new(global: &GlobalScope, style: CanvasGradientStyle) -> Root<CanvasGradient> {
reflect_dom_object(box CanvasGradient::new_inherited(style),
global,
CanvasGradientBinding::Wrap)
}
}
impl CanvasGradientMethods for CanvasGradient {
// https://html.spec.whatwg.org/multipage/#dom-canvasgradient-addcolorstop
fn AddColorStop(&self, offset: Finite<f64>, color: DOMString) -> ErrorResult {
if *offset < 0f64 || *offset > 1f64 {
return Err(Error::IndexSize); | let color = CSSColor::parse(&mut parser);
let color = if parser.is_exhausted() {
match color {
Ok(CSSColor::RGBA(rgba)) => rgba,
Ok(CSSColor::CurrentColor) => RGBA::new(0, 0, 0, 255),
_ => return Err(Error::Syntax)
}
} else {
return Err(Error::Syntax)
};
self.stops.borrow_mut().push(CanvasGradientStop {
offset: (*offset) as f64,
color: color,
});
Ok(())
}
}
pub trait ToFillOrStrokeStyle {
fn to_fill_or_stroke_style(self) -> FillOrStrokeStyle;
}
impl<'a> ToFillOrStrokeStyle for &'a CanvasGradient {
fn to_fill_or_stroke_style(self) -> FillOrStrokeStyle {
let gradient_stops = self.stops.borrow().clone();
match self.style {
CanvasGradientStyle::Linear(ref gradient) => {
FillOrStrokeStyle::LinearGradient(LinearGradientStyle::new(gradient.x0,
gradient.y0,
gradient.x1,
gradient.y1,
gradient_stops))
}
CanvasGradientStyle::Radial(ref gradient) => {
FillOrStrokeStyle::RadialGradient(RadialGradientStyle::new(gradient.x0,
gradient.y0,
gradient.r0,
gradient.x1,
gradient.y1,
gradient.r1,
gradient_stops))
}
}
}
} | }
let mut input = ParserInput::new(&color);
let mut parser = Parser::new(&mut input); | random_line_split |
canvasgradient.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::canvas::{CanvasGradientStop, FillOrStrokeStyle, LinearGradientStyle, RadialGradientStyle};
use cssparser::{Parser, ParserInput, RGBA};
use cssparser::Color as CSSColor;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::CanvasGradientBinding;
use dom::bindings::codegen::Bindings::CanvasGradientBinding::CanvasGradientMethods;
use dom::bindings::error::{Error, ErrorResult};
use dom::bindings::js::Root;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::bindings::str::DOMString;
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
// https://html.spec.whatwg.org/multipage/#canvasgradient
#[dom_struct]
pub struct CanvasGradient {
reflector_: Reflector,
style: CanvasGradientStyle,
stops: DOMRefCell<Vec<CanvasGradientStop>>,
}
#[derive(JSTraceable, Clone, HeapSizeOf)]
pub enum | {
Linear(LinearGradientStyle),
Radial(RadialGradientStyle),
}
impl CanvasGradient {
fn new_inherited(style: CanvasGradientStyle) -> CanvasGradient {
CanvasGradient {
reflector_: Reflector::new(),
style: style,
stops: DOMRefCell::new(Vec::new()),
}
}
pub fn new(global: &GlobalScope, style: CanvasGradientStyle) -> Root<CanvasGradient> {
reflect_dom_object(box CanvasGradient::new_inherited(style),
global,
CanvasGradientBinding::Wrap)
}
}
impl CanvasGradientMethods for CanvasGradient {
// https://html.spec.whatwg.org/multipage/#dom-canvasgradient-addcolorstop
fn AddColorStop(&self, offset: Finite<f64>, color: DOMString) -> ErrorResult {
if *offset < 0f64 || *offset > 1f64 {
return Err(Error::IndexSize);
}
let mut input = ParserInput::new(&color);
let mut parser = Parser::new(&mut input);
let color = CSSColor::parse(&mut parser);
let color = if parser.is_exhausted() {
match color {
Ok(CSSColor::RGBA(rgba)) => rgba,
Ok(CSSColor::CurrentColor) => RGBA::new(0, 0, 0, 255),
_ => return Err(Error::Syntax)
}
} else {
return Err(Error::Syntax)
};
self.stops.borrow_mut().push(CanvasGradientStop {
offset: (*offset) as f64,
color: color,
});
Ok(())
}
}
pub trait ToFillOrStrokeStyle {
fn to_fill_or_stroke_style(self) -> FillOrStrokeStyle;
}
impl<'a> ToFillOrStrokeStyle for &'a CanvasGradient {
fn to_fill_or_stroke_style(self) -> FillOrStrokeStyle {
let gradient_stops = self.stops.borrow().clone();
match self.style {
CanvasGradientStyle::Linear(ref gradient) => {
FillOrStrokeStyle::LinearGradient(LinearGradientStyle::new(gradient.x0,
gradient.y0,
gradient.x1,
gradient.y1,
gradient_stops))
}
CanvasGradientStyle::Radial(ref gradient) => {
FillOrStrokeStyle::RadialGradient(RadialGradientStyle::new(gradient.x0,
gradient.y0,
gradient.r0,
gradient.x1,
gradient.y1,
gradient.r1,
gradient_stops))
}
}
}
}
| CanvasGradientStyle | identifier_name |
canvasgradient.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::canvas::{CanvasGradientStop, FillOrStrokeStyle, LinearGradientStyle, RadialGradientStyle};
use cssparser::{Parser, ParserInput, RGBA};
use cssparser::Color as CSSColor;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::CanvasGradientBinding;
use dom::bindings::codegen::Bindings::CanvasGradientBinding::CanvasGradientMethods;
use dom::bindings::error::{Error, ErrorResult};
use dom::bindings::js::Root;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::bindings::str::DOMString;
use dom::globalscope::GlobalScope;
use dom_struct::dom_struct;
// https://html.spec.whatwg.org/multipage/#canvasgradient
#[dom_struct]
pub struct CanvasGradient {
reflector_: Reflector,
style: CanvasGradientStyle,
stops: DOMRefCell<Vec<CanvasGradientStop>>,
}
#[derive(JSTraceable, Clone, HeapSizeOf)]
pub enum CanvasGradientStyle {
Linear(LinearGradientStyle),
Radial(RadialGradientStyle),
}
impl CanvasGradient {
fn new_inherited(style: CanvasGradientStyle) -> CanvasGradient {
CanvasGradient {
reflector_: Reflector::new(),
style: style,
stops: DOMRefCell::new(Vec::new()),
}
}
pub fn new(global: &GlobalScope, style: CanvasGradientStyle) -> Root<CanvasGradient> {
reflect_dom_object(box CanvasGradient::new_inherited(style),
global,
CanvasGradientBinding::Wrap)
}
}
impl CanvasGradientMethods for CanvasGradient {
// https://html.spec.whatwg.org/multipage/#dom-canvasgradient-addcolorstop
fn AddColorStop(&self, offset: Finite<f64>, color: DOMString) -> ErrorResult {
if *offset < 0f64 || *offset > 1f64 {
return Err(Error::IndexSize);
}
let mut input = ParserInput::new(&color);
let mut parser = Parser::new(&mut input);
let color = CSSColor::parse(&mut parser);
let color = if parser.is_exhausted() {
match color {
Ok(CSSColor::RGBA(rgba)) => rgba,
Ok(CSSColor::CurrentColor) => RGBA::new(0, 0, 0, 255),
_ => return Err(Error::Syntax)
}
} else {
return Err(Error::Syntax)
};
self.stops.borrow_mut().push(CanvasGradientStop {
offset: (*offset) as f64,
color: color,
});
Ok(())
}
}
pub trait ToFillOrStrokeStyle {
fn to_fill_or_stroke_style(self) -> FillOrStrokeStyle;
}
impl<'a> ToFillOrStrokeStyle for &'a CanvasGradient {
fn to_fill_or_stroke_style(self) -> FillOrStrokeStyle {
let gradient_stops = self.stops.borrow().clone();
match self.style {
CanvasGradientStyle::Linear(ref gradient) => |
CanvasGradientStyle::Radial(ref gradient) => {
FillOrStrokeStyle::RadialGradient(RadialGradientStyle::new(gradient.x0,
gradient.y0,
gradient.r0,
gradient.x1,
gradient.y1,
gradient.r1,
gradient_stops))
}
}
}
}
| {
FillOrStrokeStyle::LinearGradient(LinearGradientStyle::new(gradient.x0,
gradient.y0,
gradient.x1,
gradient.y1,
gradient_stops))
} | conditional_block |
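// Quick reference for AddColorStop above (behavior summarized from the code; the
// concrete color strings are only examples):
//   offset outside [0.0, 1.0]      -> Err(Error::IndexSize), nothing is stored
//   color that fails to parse      -> Err(Error::Syntax)
//   "currentColor"                 -> stored as opaque black, RGBA::new(0, 0, 0, 255)
//   any other parsable color       -> its RGBA value is pushed onto `stops`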
lib.rs | //! A Rust crate to connect to an HD44780 LCD display
//!
//! # Example
//! ```no_run
//! use pi_lcd::*;
//!
//! // create a new lcd
//! let lcd = HD44780::new(11,10,[6,5,4,1],20,4);
//!
//! // send a String to the lcd at row 0
//! lcd.send_string("Hello World".to_string(),0);
//! ```
extern crate cupi;
extern crate regex;
use cupi::{CuPi, PinOutput, DigitalWrite};
use std::time::Duration;
use std::cell::RefCell;
use regex::Regex;
static CGRAM_ADDRESS: u8 = 0x40;
static COMMAND: bool = false;
static DATA: bool = true;
/// The display handle
pub struct HD44780 {
rs: RefCell<PinOutput>,
e: RefCell<PinOutput>,
data: Vec<RefCell<PinOutput>>,
cols: u32,
rows: u32,
lines: Vec<u8>,
}
impl HD44780 {
/// Creates a new HD44780 instance with `disp_rs` as rs pin, `disp_e` as enabled pin, `datalines` as data4 to data7
///
/// `disp_cols` are the number of columns
/// `disp_rows` are the number of rows
pub fn new(disp_rs: u32,
disp_e: u32,
datalines: [u32; 4],
disp_cols: u32,
disp_rows: u32)
-> HD44780 {
let raspi = CuPi::new().unwrap();
let rs = RefCell::new(raspi.pin(disp_rs as usize).unwrap().output());
let e = RefCell::new(raspi.pin(disp_e as usize).unwrap().output());
let mut data: Vec<RefCell<PinOutput>> = Vec::new();
for x in 0..4 {
data.push(RefCell::new(raspi.pin(datalines[x] as usize).unwrap().output()));
}
let lines: Vec<u8>;
match disp_rows {
1 => lines = vec![0x80],
2 => lines = vec![0x80, 0xC0],
3 => lines = vec![0x80, 0xC0, 0x94],
4 => lines = vec![0x80, 0xC0, 0x94, 0xD4],
_ => lines = vec![0x80],
};
let result = HD44780 {
rs: rs,
e: e,
data: data,
cols: disp_cols,
rows: disp_rows,
lines: lines,
};
result
}
/// Initializes the display and clears it
pub fn init(&self) {
self.command(0x33);
self.command(0x32);
self.command(0x28);
self.command(0x0C);
self.command(0x06);
self.clear();
}
/// Clears the display
pub fn clear(&self) {
self.command(0x01);
}
/// Sends a given byte as a command
pub fn command(&self, bits: u8) {
self.send_byte(bits, COMMAND);
}
    /// Parses a String and outputs it to the given row
pub fn send_string(&self, text: String, row: u32) {
let re_char: Regex = Regex::new(r"^\\cg:([0-7])").unwrap();
let mut message: Vec<u8> = Vec::new();
let col = self.cols;
let row = row % self.rows;
// TODO: implement check for custom characters
for i in text.chars() {
message.push(i as u8);
}
message.truncate(col as usize);
self.select_row(row);
self.write(message);
}
/// Creates a new custom character from a bitmap on the given `address`
pub fn create_char(&self, address: u8, bitmap: [u8; 8]) -> Result<u8, &'static str> {
// send new custom character to cgram address
match address {
0...7 => {
self.command(CGRAM_ADDRESS | address << 3);
for row in &bitmap {
                    self.send_byte(*row, DATA); // each `row` byte is one line of the 5x8 glyph
}
Ok(address)
},
_ => Err("address must be between 0 and 7"),
}
}
fn select_row(&self, row: u32) {
// select the row where the String should be printed at
self.send_byte(self.lines[row as usize], COMMAND);
}
fn write(&self, charlist: Vec<u8>) {
// send every single char to send_byte
for x in charlist {
self.send_byte(x, DATA);
}
}
fn send_byte(&self, bits: u8, mode: bool) {
if mode {
self.rs.borrow_mut().high().unwrap();
} else {
self.rs.borrow_mut().low().unwrap();
}
self.data[0].borrow_mut().low().unwrap();
self.data[1].borrow_mut().low().unwrap();
self.data[2].borrow_mut().low().unwrap();
self.data[3].borrow_mut().low().unwrap();
if bits & 0x10 == 0x10 {
self.data[0].borrow_mut().high().unwrap();
}
if bits & 0x20 == 0x20 {
self.data[1].borrow_mut().high().unwrap();
}
if bits & 0x40 == 0x40 {
self.data[2].borrow_mut().high().unwrap();
}
if bits & 0x80 == 0x80 {
self.data[3].borrow_mut().high().unwrap();
}
e_wait();
self.e.borrow_mut().high().unwrap();
e_wait();
self.e.borrow_mut().low().unwrap();
self.data[0].borrow_mut().low().unwrap();
self.data[1].borrow_mut().low().unwrap();
self.data[2].borrow_mut().low().unwrap();
self.data[3].borrow_mut().low().unwrap();
if bits & 0x01 == 0x01 {
self.data[0].borrow_mut().high().unwrap();
}
if bits & 0x02 == 0x02 {
self.data[1].borrow_mut().high().unwrap(); | }
if bits & 0x04 == 0x04 {
self.data[2].borrow_mut().high().unwrap();
}
if bits & 0x08 == 0x08 {
self.data[3].borrow_mut().high().unwrap();
}
e_wait();
self.e.borrow_mut().high().unwrap();
e_wait();
self.e.borrow_mut().low().unwrap();
}
}
/// Waits 50 ns to let the display recognize the enabled pin
pub fn e_wait() {
std::thread::sleep(Duration::new(0, 50));
} | random_line_split |
|
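// Usage sketch for the HD44780 driver above (not part of the crate). The pin numbers
// follow the crate-level example; the glyph bitmap is an arbitrary 5x8 pattern.
//
//     let lcd = HD44780::new(11, 10, [6, 5, 4, 1], 20, 4);
//     lcd.init();
//     let heart: [u8; 8] = [0x00, 0x0A, 0x1F, 0x1F, 0x0E, 0x04, 0x00, 0x00];
//     let slot = lcd.create_char(0, heart).unwrap(); // Ok(0): the CGRAM slot used
//     lcd.send_string("Hello World".to_string(), 0);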
lib.rs | //! A Rust crate to connect to an HD44780 LCD display
//!
//! # Example
//! ```no_run
//! use pi_lcd::*;
//!
//! // create a new lcd
//! let lcd = HD44780::new(11,10,[6,5,4,1],20,4);
//!
//! // send a String to the lcd at row 0
//! lcd.send_string("Hello World".to_string(),0);
//! ```
extern crate cupi;
extern crate regex;
use cupi::{CuPi, PinOutput, DigitalWrite};
use std::time::Duration;
use std::cell::RefCell;
use regex::Regex;
static CGRAM_ADDRESS: u8 = 0x40;
static COMMAND: bool = false;
static DATA: bool = true;
/// The display handle
pub struct HD44780 {
rs: RefCell<PinOutput>,
e: RefCell<PinOutput>,
data: Vec<RefCell<PinOutput>>,
cols: u32,
rows: u32,
lines: Vec<u8>,
}
impl HD44780 {
/// Creates a new HD44780 instance with `disp_rs` as rs pin, `disp_e` as enabled pin, `datalines` as data4 to data7
///
/// `disp_cols` are the number of columns
/// `disp_rows` are the number of rows
pub fn new(disp_rs: u32,
disp_e: u32,
datalines: [u32; 4],
disp_cols: u32,
disp_rows: u32)
-> HD44780 {
let raspi = CuPi::new().unwrap();
let rs = RefCell::new(raspi.pin(disp_rs as usize).unwrap().output());
let e = RefCell::new(raspi.pin(disp_e as usize).unwrap().output());
let mut data: Vec<RefCell<PinOutput>> = Vec::new();
for x in 0..4 {
data.push(RefCell::new(raspi.pin(datalines[x] as usize).unwrap().output()));
}
let lines: Vec<u8>;
match disp_rows {
1 => lines = vec![0x80],
2 => lines = vec![0x80, 0xC0],
3 => lines = vec![0x80, 0xC0, 0x94],
4 => lines = vec![0x80, 0xC0, 0x94, 0xD4],
_ => lines = vec![0x80],
};
let result = HD44780 {
rs: rs,
e: e,
data: data,
cols: disp_cols,
rows: disp_rows,
lines: lines,
};
result
}
/// Initializes the display and clears it
pub fn init(&self) {
self.command(0x33);
self.command(0x32);
self.command(0x28);
self.command(0x0C);
self.command(0x06);
self.clear();
}
/// Clears the display
pub fn clear(&self) {
self.command(0x01);
}
/// Sends a given byte as a command
pub fn command(&self, bits: u8) {
self.send_byte(bits, COMMAND);
}
    /// Parses a String and outputs it to the given row
pub fn send_string(&self, text: String, row: u32) {
let re_char: Regex = Regex::new(r"^\\cg:([0-7])").unwrap();
let mut message: Vec<u8> = Vec::new();
let col = self.cols;
let row = row % self.rows;
// TODO: implement check for custom characters
for i in text.chars() {
message.push(i as u8);
}
message.truncate(col as usize);
self.select_row(row);
self.write(message);
}
/// Creates a new custom character from a bitmap on the given `address`
pub fn create_char(&self, address: u8, bitmap: [u8; 8]) -> Result<u8, &'static str> {
// send new custom character to cgram address
match address {
0...7 => {
self.command(CGRAM_ADDRESS | address << 3);
for row in &bitmap {
                    self.send_byte(*row, DATA); // each `row` byte is one line of the 5x8 glyph
}
Ok(address)
},
_ => Err("address must be between 0 and 7"),
}
}
fn select_row(&self, row: u32) {
// select the row where the String should be printed at
self.send_byte(self.lines[row as usize], COMMAND);
}
fn write(&self, charlist: Vec<u8>) {
// send every single char to send_byte
for x in charlist {
self.send_byte(x, DATA);
}
}
fn send_byte(&self, bits: u8, mode: bool) {
if mode {
self.rs.borrow_mut().high().unwrap();
} else {
self.rs.borrow_mut().low().unwrap();
}
self.data[0].borrow_mut().low().unwrap();
self.data[1].borrow_mut().low().unwrap();
self.data[2].borrow_mut().low().unwrap();
self.data[3].borrow_mut().low().unwrap();
if bits & 0x10 == 0x10 {
self.data[0].borrow_mut().high().unwrap();
}
if bits & 0x20 == 0x20 {
self.data[1].borrow_mut().high().unwrap();
}
if bits & 0x40 == 0x40 {
self.data[2].borrow_mut().high().unwrap();
}
if bits & 0x80 == 0x80 {
self.data[3].borrow_mut().high().unwrap();
}
e_wait();
self.e.borrow_mut().high().unwrap();
e_wait();
self.e.borrow_mut().low().unwrap();
self.data[0].borrow_mut().low().unwrap();
self.data[1].borrow_mut().low().unwrap();
self.data[2].borrow_mut().low().unwrap();
self.data[3].borrow_mut().low().unwrap();
if bits & 0x01 == 0x01 {
self.data[0].borrow_mut().high().unwrap();
}
if bits & 0x02 == 0x02 {
self.data[1].borrow_mut().high().unwrap();
}
if bits & 0x04 == 0x04 {
self.data[2].borrow_mut().high().unwrap();
}
if bits & 0x08 == 0x08 {
self.data[3].borrow_mut().high().unwrap();
}
e_wait();
self.e.borrow_mut().high().unwrap();
e_wait();
self.e.borrow_mut().low().unwrap();
}
}
/// Waits 50 ns to let the display recognize the enabled pin
pub fn | () {
std::thread::sleep(Duration::new(0, 50));
}
| e_wait | identifier_name |
lib.rs | //! A Rust crate to connect to an HD44780 LCD display
//!
//! # Example
//! ```no_run
//! use pi_lcd::*;
//!
//! // create a new lcd
//! let lcd = HD44780::new(11,10,[6,5,4,1],20,4);
//!
//! // send a String to the lcd at row 0
//! lcd.send_string("Hello World".to_string(),0);
//! ```
extern crate cupi;
extern crate regex;
use cupi::{CuPi, PinOutput, DigitalWrite};
use std::time::Duration;
use std::cell::RefCell;
use regex::Regex;
static CGRAM_ADDRESS: u8 = 0x40;
static COMMAND: bool = false;
static DATA: bool = true;
/// The display handle
pub struct HD44780 {
rs: RefCell<PinOutput>,
e: RefCell<PinOutput>,
data: Vec<RefCell<PinOutput>>,
cols: u32,
rows: u32,
lines: Vec<u8>,
}
impl HD44780 {
/// Creates a new HD44780 instance with `disp_rs` as rs pin, `disp_e` as enabled pin, `datalines` as data4 to data7
///
/// `disp_cols` are the number of columns
/// `disp_rows` are the number of rows
pub fn new(disp_rs: u32,
disp_e: u32,
datalines: [u32; 4],
disp_cols: u32,
disp_rows: u32)
-> HD44780 {
let raspi = CuPi::new().unwrap();
let rs = RefCell::new(raspi.pin(disp_rs as usize).unwrap().output());
let e = RefCell::new(raspi.pin(disp_e as usize).unwrap().output());
let mut data: Vec<RefCell<PinOutput>> = Vec::new();
for x in 0..4 {
data.push(RefCell::new(raspi.pin(datalines[x] as usize).unwrap().output()));
}
let lines: Vec<u8>;
match disp_rows {
1 => lines = vec![0x80],
2 => lines = vec![0x80, 0xC0],
3 => lines = vec![0x80, 0xC0, 0x94],
4 => lines = vec![0x80, 0xC0, 0x94, 0xD4],
_ => lines = vec![0x80],
};
let result = HD44780 {
rs: rs,
e: e,
data: data,
cols: disp_cols,
rows: disp_rows,
lines: lines,
};
result
}
/// Initializes the display and clears it
pub fn init(&self) {
self.command(0x33);
self.command(0x32);
self.command(0x28);
self.command(0x0C);
self.command(0x06);
self.clear();
}
/// Clears the display
pub fn clear(&self) {
self.command(0x01);
}
/// Sends a given byte as a command
pub fn command(&self, bits: u8) {
self.send_byte(bits, COMMAND);
}
    /// Parses a String and outputs it to the given row
pub fn send_string(&self, text: String, row: u32) {
let re_char: Regex = Regex::new(r"^\\cg:([0-7])").unwrap();
let mut message: Vec<u8> = Vec::new();
let col = self.cols;
let row = row % self.rows;
// TODO: implement check for custom characters
for i in text.chars() {
message.push(i as u8);
}
message.truncate(col as usize);
self.select_row(row);
self.write(message);
}
/// Creates a new custom character from a bitmap on the given `address`
pub fn create_char(&self, address: u8, bitmap: [u8; 8]) -> Result<u8, &'static str> |
fn select_row(&self, row: u32) {
// select the row where the String should be printed at
self.send_byte(self.lines[row as usize], COMMAND);
}
fn write(&self, charlist: Vec<u8>) {
// send every single char to send_byte
for x in charlist {
self.send_byte(x, DATA);
}
}
fn send_byte(&self, bits: u8, mode: bool) {
if mode {
self.rs.borrow_mut().high().unwrap();
} else {
self.rs.borrow_mut().low().unwrap();
}
self.data[0].borrow_mut().low().unwrap();
self.data[1].borrow_mut().low().unwrap();
self.data[2].borrow_mut().low().unwrap();
self.data[3].borrow_mut().low().unwrap();
if bits & 0x10 == 0x10 {
self.data[0].borrow_mut().high().unwrap();
}
if bits & 0x20 == 0x20 {
self.data[1].borrow_mut().high().unwrap();
}
if bits & 0x40 == 0x40 {
self.data[2].borrow_mut().high().unwrap();
}
if bits & 0x80 == 0x80 {
self.data[3].borrow_mut().high().unwrap();
}
e_wait();
self.e.borrow_mut().high().unwrap();
e_wait();
self.e.borrow_mut().low().unwrap();
self.data[0].borrow_mut().low().unwrap();
self.data[1].borrow_mut().low().unwrap();
self.data[2].borrow_mut().low().unwrap();
self.data[3].borrow_mut().low().unwrap();
if bits & 0x01 == 0x01 {
self.data[0].borrow_mut().high().unwrap();
}
if bits & 0x02 == 0x02 {
self.data[1].borrow_mut().high().unwrap();
}
if bits & 0x04 == 0x04 {
self.data[2].borrow_mut().high().unwrap();
}
if bits & 0x08 == 0x08 {
self.data[3].borrow_mut().high().unwrap();
}
e_wait();
self.e.borrow_mut().high().unwrap();
e_wait();
self.e.borrow_mut().low().unwrap();
}
}
/// Waits 50 ns to let the display recognize the enabled pin
pub fn e_wait() {
std::thread::sleep(Duration::new(0, 50));
}
| {
// send new custom character to cgram address
match address {
0...7 => {
self.command(CGRAM_ADDRESS | address << 3);
for row in &bitmap {
                    self.send_byte(*row, DATA); // each `row` byte is one line of the 5x8 glyph
}
Ok(address)
},
_ => Err("address must be between 0 and 7"),
}
} | identifier_body |
constellation_msg.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The high-level interface from script to constellation. Using this abstract interface helps
//! reduce coupling between these two components.
use compositor_msg::Epoch;
use canvas_traits::CanvasMsg;
use euclid::rect::Rect;
use euclid::size::{Size2D, TypedSize2D};
use euclid::scale_factor::ScaleFactor;
use hyper::header::Headers;
use hyper::method::Method;
use ipc_channel::ipc::IpcSender;
use layers::geometry::DevicePixel;
use offscreen_gl_context::GLContextAttributes;
use png::Image;
use util::cursor::Cursor;
use util::geometry::{PagePx, ViewportPx};
use std::collections::HashMap;
use std::sync::mpsc::{channel, Sender, Receiver};
use style::viewport::ViewportConstraints;
use url::Url;
use webdriver_msg::{WebDriverScriptCommand, LoadStatus};
#[derive(Clone)]
pub struct ConstellationChan(pub Sender<Msg>);
impl ConstellationChan {
pub fn new() -> (Receiver<Msg>, ConstellationChan) {
let (chan, port) = channel();
(port, ConstellationChan(chan))
}
}
#[derive(PartialEq, Eq, Copy, Clone, Debug, Deserialize, Serialize)]
pub enum IFrameSandboxState {
IFrameSandboxed,
IFrameUnsandboxed
}
// We pass this info to various tasks, so it lives in a separate, cloneable struct.
#[derive(Clone, Copy, Deserialize, Serialize)]
pub struct Failure {
pub pipeline_id: PipelineId,
pub parent_info: Option<(PipelineId, SubpageId)>,
}
#[derive(Copy, Clone, Deserialize, Serialize)]
pub struct WindowSizeData {
/// The size of the initial layout viewport, before parsing an
/// http://www.w3.org/TR/css-device-adapt/#initial-viewport
pub initial_viewport: TypedSize2D<ViewportPx, f32>,
/// The "viewing area" in page px. See `PagePx` documentation for details.
pub visible_viewport: TypedSize2D<PagePx, f32>,
/// The resolution of the window in dppx, not including any "pinch zoom" factor.
pub device_pixel_ratio: ScaleFactor<ViewportPx, DevicePixel, f32>,
}
#[derive(PartialEq, Eq, Copy, Clone, Deserialize, Serialize)]
pub enum KeyState {
Pressed,
Released,
Repeated,
}
//N.B. Based on the glutin key enum
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize)]
pub enum Key {
Space,
Apostrophe,
Comma,
Minus,
Period,
Slash,
Num0,
Num1,
Num2,
Num3,
Num4,
Num5,
Num6,
Num7,
Num8,
Num9,
Semicolon,
Equal,
A,
B,
C,
D,
E,
F,
G,
H,
I,
J,
K,
L,
M,
N,
O,
P,
Q,
R,
S,
T,
U,
V,
W,
X,
Y,
Z,
LeftBracket,
Backslash,
RightBracket,
GraveAccent,
World1,
World2,
Escape,
Enter,
Tab,
Backspace,
Insert,
Delete,
Right,
Left,
Down,
Up,
PageUp,
PageDown,
Home,
End,
CapsLock,
ScrollLock,
NumLock,
PrintScreen,
Pause,
F1,
F2,
F3,
F4,
F5,
F6,
F7,
F8,
F9,
F10,
F11,
F12,
F13,
F14,
F15,
F16,
F17,
F18,
F19,
F20,
F21,
F22,
F23,
F24,
F25,
Kp0,
Kp1,
Kp2,
Kp3,
Kp4,
Kp5,
Kp6,
Kp7,
Kp8,
Kp9,
KpDecimal,
KpDivide,
KpMultiply,
KpSubtract,
KpAdd,
KpEnter,
KpEqual,
LeftShift,
LeftControl,
LeftAlt,
LeftSuper,
RightShift,
RightControl,
RightAlt,
RightSuper,
Menu,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags KeyModifiers: u8 {
const NONE = 0x00,
const SHIFT = 0x01,
const CONTROL = 0x02,
const ALT = 0x04,
const SUPER = 0x08,
}
}
/// Specifies the type of focus event that is sent to a pipeline
#[derive(Copy, Clone, PartialEq)]
pub enum FocusType {
Element, // The first focus message - focus the element itself
Parent, // Focusing a parent element (an iframe)
}
/// Messages from the compositor and script to the constellation.
#[derive(Deserialize, Serialize)]
pub enum Msg {
Exit,
Failure(Failure),
InitLoadUrl(Url),
LoadComplete(PipelineId),
FrameRect(PipelineId, SubpageId, Rect<f32>),
LoadUrl(PipelineId, LoadData),
ScriptLoadedURLInIFrame(Url, PipelineId, SubpageId, Option<SubpageId>, IFrameSandboxState),
Navigate(Option<(PipelineId, SubpageId)>, NavigationDirection),
PainterReady(PipelineId),
ResizedWindow(WindowSizeData),
KeyEvent(Key, KeyState, KeyModifiers),
/// Requests that the constellation inform the compositor of the title of the pipeline
/// immediately.
GetPipelineTitle(PipelineId),
    /// Requests that the constellation inform the compositor of a cursor change.
SetCursor(Cursor),
/// Dispatch a mozbrowser event to a given iframe. Only available in experimental mode.
MozBrowserEvent(PipelineId, SubpageId, MozBrowserEvent),
/// Indicates whether this pipeline is currently running animations.
ChangeRunningAnimationsState(PipelineId, AnimationState),
/// Requests that the constellation instruct layout to begin a new tick of the animation.
TickAnimation(PipelineId),
/// Request that the constellation send the current pipeline id for the provided frame
/// id, or for the root frame if this is None, over a provided channel
GetPipeline(Option<FrameId>, IpcSender<Option<PipelineId>>),
/// Request that the constellation send the FrameId corresponding to the document
/// with the provided parent pipeline id and subpage id
GetFrame(PipelineId, SubpageId, IpcSender<Option<FrameId>>),
/// Notifies the constellation that this frame has received focus.
Focus(PipelineId),
/// Requests that the constellation retrieve the current contents of the clipboard
GetClipboardContents(IpcSender<String>),
/// Requests that the constellation set the contents of the clipboard
SetClipboardContents(String),
/// Dispatch a webdriver command
WebDriverCommand(WebDriverCommandMsg),
/// Notifies the constellation that the viewport has been constrained in some manner
ViewportConstrained(PipelineId, ViewportConstraints),
/// Query the constellation to see if the current compositor output is stable
IsReadyToSaveImage(HashMap<PipelineId, Epoch>),
/// Notification that this iframe should be removed.
RemoveIFrame(PipelineId, SubpageId),
/// Favicon detected
NewFavicon(Url),
/// <head> tag finished parsing
HeadParsed,
/// Requests that a new 2D canvas thread be created. (This is done in the constellation because
/// 2D canvases may use the GPU and we don't want to give untrusted content access to the GPU.)
CreateCanvasPaintTask(Size2D<i32>, IpcSender<(IpcSender<CanvasMsg>, usize)>),
/// Requests that a new WebGL thread be created. (This is done in the constellation because
/// WebGL uses the GPU and we don't want to give untrusted content access to the GPU.)
CreateWebGLPaintTask(Size2D<i32>,
GLContextAttributes,
IpcSender<Result<(IpcSender<CanvasMsg>, usize), String>>),
}
#[derive(Clone, Eq, PartialEq, Deserialize, Serialize)]
pub enum AnimationState {
AnimationsPresent,
AnimationCallbacksPresent,
NoAnimationsPresent,
NoAnimationCallbacksPresent,
}
// https://developer.mozilla.org/en-US/docs/Web/API/Using_the_Browser_API#Events
#[derive(Deserialize, Serialize)]
pub enum MozBrowserEvent {
/// Sent when the scroll position within a browser <iframe> changes.
AsyncScroll,
/// Sent when window.close() is called within a browser <iframe>.
Close,
/// Sent when a browser <iframe> tries to open a context menu. This allows
/// handling <menuitem> element available within the browser <iframe>'s content.
ContextMenu,
/// Sent when an error occurred while trying to load content within a browser <iframe>.
Error,
/// Sent when the favicon of a browser <iframe> changes.
IconChange,
/// Sent when the browser <iframe> has finished loading all its assets.
LoadEnd,
/// Sent when the browser <iframe> starts to load a new page.
LoadStart,
/// Sent when a browser <iframe>'s location changes.
LocationChange(String),
/// Sent when window.open() is called within a browser <iframe>.
OpenWindow,
/// Sent when the SSL state changes within a browser <iframe>.
SecurityChange,
/// Sent when alert(), confirm(), or prompt() is called within a browser <iframe>.
ShowModalPrompt,
/// Sent when the document.title changes within a browser <iframe>.
TitleChange(String),
    /// Sent when an HTTP authentication is requested.
UsernameAndPasswordRequired,
/// Sent when a link to a search engine is found.
OpenSearch,
}
impl MozBrowserEvent {
pub fn name(&self) -> &'static str {
match *self {
MozBrowserEvent::AsyncScroll => "mozbrowserasyncscroll",
MozBrowserEvent::Close => "mozbrowserclose",
MozBrowserEvent::ContextMenu => "mozbrowsercontextmenu",
MozBrowserEvent::Error => "mozbrowsererror",
MozBrowserEvent::IconChange => "mozbrowsericonchange",
MozBrowserEvent::LoadEnd => "mozbrowserloadend",
MozBrowserEvent::LoadStart => "mozbrowserloadstart",
MozBrowserEvent::LocationChange(_) => "mozbrowserlocationchange",
MozBrowserEvent::OpenWindow => "mozbrowseropenwindow",
MozBrowserEvent::SecurityChange => "mozbrowsersecuritychange",
MozBrowserEvent::ShowModalPrompt => "mozbrowsershowmodalprompt",
MozBrowserEvent::TitleChange(_) => "mozbrowsertitlechange",
MozBrowserEvent::UsernameAndPasswordRequired => "mozbrowserusernameandpasswordrequired",
MozBrowserEvent::OpenSearch => "mozbrowseropensearch"
}
}
pub fn | (&self) -> Option<String> {
match *self {
MozBrowserEvent::AsyncScroll | MozBrowserEvent::Close | MozBrowserEvent::ContextMenu |
MozBrowserEvent::Error | MozBrowserEvent::IconChange | MozBrowserEvent::LoadEnd |
MozBrowserEvent::LoadStart | MozBrowserEvent::OpenWindow | MozBrowserEvent::SecurityChange |
MozBrowserEvent::ShowModalPrompt | MozBrowserEvent::UsernameAndPasswordRequired |
MozBrowserEvent::OpenSearch => None,
MozBrowserEvent::LocationChange(ref new_location) => Some(new_location.clone()),
MozBrowserEvent::TitleChange(ref new_title) => Some(new_title.clone()),
}
}
}
#[derive(Deserialize, Serialize)]
pub enum WebDriverCommandMsg {
LoadUrl(PipelineId, LoadData, IpcSender<LoadStatus>),
Refresh(PipelineId, IpcSender<LoadStatus>),
ScriptCommand(PipelineId, WebDriverScriptCommand),
TakeScreenshot(PipelineId, IpcSender<Option<Image>>)
}
/// Similar to net::resource_task::LoadData
/// can be passed to LoadUrl to load a page with GET/POST
/// parameters or headers
#[derive(Clone, Deserialize, Serialize)]
pub struct LoadData {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub data: Option<Vec<u8>>,
}
impl LoadData {
pub fn new(url: Url) -> LoadData {
LoadData {
url: url,
method: Method::Get,
headers: Headers::new(),
data: None,
}
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub enum NavigationDirection {
Forward,
Back,
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct FrameId(pub u32);
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct WorkerId(pub u32);
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct PipelineId(pub u32);
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct SubpageId(pub u32);
// The type of pipeline exit. During complete shutdowns, pipelines do not have to
// release resources automatically released on process termination.
#[derive(Copy, Clone, Debug, Deserialize, Serialize)]
pub enum PipelineExitType {
PipelineOnly,
Complete,
}
| detail | identifier_name |
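// Sketch (not in the original file): how name() and detail() pair up when a
// mozbrowser event is forwarded to script; the URL is an arbitrary example value.
//
//     let event = MozBrowserEvent::LocationChange("https://example.com/".to_owned());
//     assert_eq!(event.name(), "mozbrowserlocationchange");
//     assert_eq!(event.detail(), Some("https://example.com/".to_owned()));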
constellation_msg.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The high-level interface from script to constellation. Using this abstract interface helps
//! reduce coupling between these two components.
use compositor_msg::Epoch;
use canvas_traits::CanvasMsg;
use euclid::rect::Rect;
use euclid::size::{Size2D, TypedSize2D};
use euclid::scale_factor::ScaleFactor;
use hyper::header::Headers;
use hyper::method::Method;
use ipc_channel::ipc::IpcSender;
use layers::geometry::DevicePixel;
use offscreen_gl_context::GLContextAttributes;
use png::Image;
use util::cursor::Cursor;
use util::geometry::{PagePx, ViewportPx};
use std::collections::HashMap;
use std::sync::mpsc::{channel, Sender, Receiver};
use style::viewport::ViewportConstraints;
use url::Url;
use webdriver_msg::{WebDriverScriptCommand, LoadStatus};
#[derive(Clone)]
pub struct ConstellationChan(pub Sender<Msg>);
impl ConstellationChan {
pub fn new() -> (Receiver<Msg>, ConstellationChan) {
let (chan, port) = channel();
(port, ConstellationChan(chan))
}
}
#[derive(PartialEq, Eq, Copy, Clone, Debug, Deserialize, Serialize)]
pub enum IFrameSandboxState {
IFrameSandboxed,
IFrameUnsandboxed
}
// We pass this info to various tasks, so it lives in a separate, cloneable struct.
#[derive(Clone, Copy, Deserialize, Serialize)]
pub struct Failure {
pub pipeline_id: PipelineId,
pub parent_info: Option<(PipelineId, SubpageId)>,
}
#[derive(Copy, Clone, Deserialize, Serialize)]
pub struct WindowSizeData {
/// The size of the initial layout viewport, before parsing an
/// http://www.w3.org/TR/css-device-adapt/#initial-viewport
pub initial_viewport: TypedSize2D<ViewportPx, f32>,
/// The "viewing area" in page px. See `PagePx` documentation for details.
pub visible_viewport: TypedSize2D<PagePx, f32>,
/// The resolution of the window in dppx, not including any "pinch zoom" factor.
pub device_pixel_ratio: ScaleFactor<ViewportPx, DevicePixel, f32>,
}
#[derive(PartialEq, Eq, Copy, Clone, Deserialize, Serialize)]
pub enum KeyState {
Pressed,
Released,
Repeated,
}
//N.B. Based on the glutin key enum
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize)]
pub enum Key {
Space,
Apostrophe,
Comma,
Minus,
Period,
Slash,
Num0,
Num1,
Num2,
Num3,
Num4,
Num5,
Num6,
Num7,
Num8,
Num9,
Semicolon,
Equal,
A,
B,
C,
D,
E,
F,
G,
H,
I,
J,
K,
L,
M,
N,
O,
P,
Q,
R,
S,
T,
U,
V,
W,
X,
Y,
Z,
LeftBracket,
Backslash,
RightBracket,
GraveAccent,
World1,
World2,
Escape,
Enter,
Tab,
Backspace,
Insert,
Delete,
Right,
Left,
Down,
Up,
PageUp,
PageDown,
Home,
End,
CapsLock,
ScrollLock,
NumLock,
PrintScreen,
Pause,
F1,
F2,
F3,
F4,
F5,
F6,
F7,
F8,
F9,
F10,
F11,
F12,
F13,
F14,
F15,
F16,
F17,
F18,
F19,
F20,
F21,
F22,
F23,
F24,
F25,
Kp0,
Kp1,
Kp2,
Kp3,
Kp4,
Kp5,
Kp6,
Kp7,
Kp8,
Kp9,
KpDecimal,
KpDivide,
KpMultiply,
KpSubtract,
KpAdd,
KpEnter,
KpEqual,
LeftShift,
LeftControl,
LeftAlt,
LeftSuper,
RightShift,
RightControl,
RightAlt,
RightSuper,
Menu,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags KeyModifiers: u8 {
const NONE = 0x00,
const SHIFT = 0x01,
const CONTROL = 0x02,
const ALT = 0x04,
const SUPER = 0x08,
}
}
/// Specifies the type of focus event that is sent to a pipeline
#[derive(Copy, Clone, PartialEq)]
pub enum FocusType {
Element, // The first focus message - focus the element itself
Parent, // Focusing a parent element (an iframe)
}
/// Messages from the compositor and script to the constellation.
#[derive(Deserialize, Serialize)]
pub enum Msg {
Exit,
Failure(Failure),
InitLoadUrl(Url),
LoadComplete(PipelineId),
FrameRect(PipelineId, SubpageId, Rect<f32>),
LoadUrl(PipelineId, LoadData),
ScriptLoadedURLInIFrame(Url, PipelineId, SubpageId, Option<SubpageId>, IFrameSandboxState),
Navigate(Option<(PipelineId, SubpageId)>, NavigationDirection),
PainterReady(PipelineId),
ResizedWindow(WindowSizeData),
KeyEvent(Key, KeyState, KeyModifiers),
/// Requests that the constellation inform the compositor of the title of the pipeline
/// immediately.
GetPipelineTitle(PipelineId),
    /// Requests that the constellation inform the compositor of a cursor change.
SetCursor(Cursor),
/// Dispatch a mozbrowser event to a given iframe. Only available in experimental mode.
MozBrowserEvent(PipelineId, SubpageId, MozBrowserEvent),
/// Indicates whether this pipeline is currently running animations.
ChangeRunningAnimationsState(PipelineId, AnimationState),
/// Requests that the constellation instruct layout to begin a new tick of the animation.
TickAnimation(PipelineId),
/// Request that the constellation send the current pipeline id for the provided frame
/// id, or for the root frame if this is None, over a provided channel
GetPipeline(Option<FrameId>, IpcSender<Option<PipelineId>>),
/// Request that the constellation send the FrameId corresponding to the document | /// Requests that the constellation retrieve the current contents of the clipboard
GetClipboardContents(IpcSender<String>),
/// Requests that the constellation set the contents of the clipboard
SetClipboardContents(String),
/// Dispatch a webdriver command
WebDriverCommand(WebDriverCommandMsg),
/// Notifies the constellation that the viewport has been constrained in some manner
ViewportConstrained(PipelineId, ViewportConstraints),
/// Query the constellation to see if the current compositor output is stable
IsReadyToSaveImage(HashMap<PipelineId, Epoch>),
/// Notification that this iframe should be removed.
RemoveIFrame(PipelineId, SubpageId),
/// Favicon detected
NewFavicon(Url),
/// <head> tag finished parsing
HeadParsed,
/// Requests that a new 2D canvas thread be created. (This is done in the constellation because
/// 2D canvases may use the GPU and we don't want to give untrusted content access to the GPU.)
CreateCanvasPaintTask(Size2D<i32>, IpcSender<(IpcSender<CanvasMsg>, usize)>),
/// Requests that a new WebGL thread be created. (This is done in the constellation because
/// WebGL uses the GPU and we don't want to give untrusted content access to the GPU.)
CreateWebGLPaintTask(Size2D<i32>,
GLContextAttributes,
IpcSender<Result<(IpcSender<CanvasMsg>, usize), String>>),
}
#[derive(Clone, Eq, PartialEq, Deserialize, Serialize)]
pub enum AnimationState {
AnimationsPresent,
AnimationCallbacksPresent,
NoAnimationsPresent,
NoAnimationCallbacksPresent,
}
// https://developer.mozilla.org/en-US/docs/Web/API/Using_the_Browser_API#Events
#[derive(Deserialize, Serialize)]
pub enum MozBrowserEvent {
/// Sent when the scroll position within a browser <iframe> changes.
AsyncScroll,
/// Sent when window.close() is called within a browser <iframe>.
Close,
/// Sent when a browser <iframe> tries to open a context menu. This allows
/// handling <menuitem> element available within the browser <iframe>'s content.
ContextMenu,
/// Sent when an error occurred while trying to load content within a browser <iframe>.
Error,
/// Sent when the favicon of a browser <iframe> changes.
IconChange,
/// Sent when the browser <iframe> has finished loading all its assets.
LoadEnd,
/// Sent when the browser <iframe> starts to load a new page.
LoadStart,
/// Sent when a browser <iframe>'s location changes.
LocationChange(String),
/// Sent when window.open() is called within a browser <iframe>.
OpenWindow,
/// Sent when the SSL state changes within a browser <iframe>.
SecurityChange,
/// Sent when alert(), confirm(), or prompt() is called within a browser <iframe>.
ShowModalPrompt,
/// Sent when the document.title changes within a browser <iframe>.
TitleChange(String),
    /// Sent when an HTTP authentication is requested.
UsernameAndPasswordRequired,
/// Sent when a link to a search engine is found.
OpenSearch,
}
impl MozBrowserEvent {
pub fn name(&self) -> &'static str {
match *self {
MozBrowserEvent::AsyncScroll => "mozbrowserasyncscroll",
MozBrowserEvent::Close => "mozbrowserclose",
MozBrowserEvent::ContextMenu => "mozbrowsercontextmenu",
MozBrowserEvent::Error => "mozbrowsererror",
MozBrowserEvent::IconChange => "mozbrowsericonchange",
MozBrowserEvent::LoadEnd => "mozbrowserloadend",
MozBrowserEvent::LoadStart => "mozbrowserloadstart",
MozBrowserEvent::LocationChange(_) => "mozbrowserlocationchange",
MozBrowserEvent::OpenWindow => "mozbrowseropenwindow",
MozBrowserEvent::SecurityChange => "mozbrowsersecuritychange",
MozBrowserEvent::ShowModalPrompt => "mozbrowsershowmodalprompt",
MozBrowserEvent::TitleChange(_) => "mozbrowsertitlechange",
MozBrowserEvent::UsernameAndPasswordRequired => "mozbrowserusernameandpasswordrequired",
MozBrowserEvent::OpenSearch => "mozbrowseropensearch"
}
}
pub fn detail(&self) -> Option<String> {
match *self {
MozBrowserEvent::AsyncScroll | MozBrowserEvent::Close | MozBrowserEvent::ContextMenu |
MozBrowserEvent::Error | MozBrowserEvent::IconChange | MozBrowserEvent::LoadEnd |
MozBrowserEvent::LoadStart | MozBrowserEvent::OpenWindow | MozBrowserEvent::SecurityChange |
MozBrowserEvent::ShowModalPrompt | MozBrowserEvent::UsernameAndPasswordRequired |
MozBrowserEvent::OpenSearch => None,
MozBrowserEvent::LocationChange(ref new_location) => Some(new_location.clone()),
MozBrowserEvent::TitleChange(ref new_title) => Some(new_title.clone()),
}
}
}
#[derive(Deserialize, Serialize)]
pub enum WebDriverCommandMsg {
LoadUrl(PipelineId, LoadData, IpcSender<LoadStatus>),
Refresh(PipelineId, IpcSender<LoadStatus>),
ScriptCommand(PipelineId, WebDriverScriptCommand),
TakeScreenshot(PipelineId, IpcSender<Option<Image>>)
}
/// Similar to net::resource_task::LoadData
/// can be passed to LoadUrl to load a page with GET/POST
/// parameters or headers
#[derive(Clone, Deserialize, Serialize)]
pub struct LoadData {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub data: Option<Vec<u8>>,
}
impl LoadData {
pub fn new(url: Url) -> LoadData {
LoadData {
url: url,
method: Method::Get,
headers: Headers::new(),
data: None,
}
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub enum NavigationDirection {
Forward,
Back,
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct FrameId(pub u32);
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct WorkerId(pub u32);
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct PipelineId(pub u32);
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct SubpageId(pub u32);
// The type of pipeline exit. During complete shutdowns, pipelines do not have to
// release resources automatically released on process termination.
#[derive(Copy, Clone, Debug, Deserialize, Serialize)]
pub enum PipelineExitType {
PipelineOnly,
Complete,
} | /// with the provided parent pipeline id and subpage id
GetFrame(PipelineId, SubpageId, IpcSender<Option<FrameId>>),
/// Notifies the constellation that this frame has received focus.
Focus(PipelineId), | random_line_split |
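// Sketch (not in the original file): LoadData::new defaults a navigation to a GET
// with fresh headers and no body; the URL below is an arbitrary example.
//
//     let load = LoadData::new(Url::parse("https://servo.org/").unwrap());
//     assert_eq!(load.method, Method::Get);
//     assert!(load.data.is_none());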
update-downloads.rs | #![deny(warnings)]
#![feature(std_misc, core, os, io, env)]
extern crate "cargo-registry" as cargo_registry;
extern crate postgres;
extern crate semver;
extern crate time;
extern crate env_logger;
use std::env;
use std::collections::HashMap;
use std::time::Duration;
use cargo_registry::{VersionDownload, Version, Model};
static LIMIT: i64 = 10000;
#[allow(dead_code)] // dead in tests
fn main() {
env_logger::init().unwrap();
let daemon = env::args().nth(1).as_ref().map(|s| s.to_str().unwrap())
== Some("daemon");
let sleep = env::args().nth(2).map(|s| s.to_str().unwrap().parse::<i64>().unwrap());
loop {
let conn = postgres::Connection::connect(env("DATABASE_URL").as_slice(),
&postgres::SslMode::None).unwrap();
{
let tx = conn.transaction().unwrap();
update(&tx).unwrap();
tx.set_commit();
tx.finish().unwrap();
}
drop(conn);
if daemon {
std::old_io::timer::sleep(Duration::seconds(sleep.unwrap()));
} else {
break
}
}
}
fn env(s: &str) -> String {
match env::var_string(s).ok() {
Some(s) => s,
None => panic!("must have `{}` defined", s),
}
}
fn update(tx: &postgres::Transaction) -> postgres::Result<()> {
let mut max = 0;
loop {
let tx = try!(tx.transaction());
{
let stmt = try!(tx.prepare("SELECT * FROM version_downloads \
WHERE processed = FALSE AND id > $1
ORDER BY id ASC
LIMIT $2"));
let mut rows = try!(stmt.query(&[&max, &LIMIT]));
match try!(collect(&tx, &mut rows)) {
None => break,
Some(m) => max = m,
}
}
tx.set_commit();
try!(tx.finish());
}
Ok(())
}
fn collect(tx: &postgres::Transaction,
rows: &mut postgres::Rows) -> postgres::Result<Option<i32>> {
// Anything older than 24 hours ago will be frozen and will not be queried
// against again.
let cutoff = time::now_utc().to_timespec();
let cutoff = cutoff + Duration::days(-1);
let mut map = HashMap::new();
for row in rows.by_ref() {
let download: VersionDownload = Model::from_row(&row);
assert!(map.insert(download.id, download).is_none());
}
println!("updating {} versions", map.len());
if map.len() == 0 { return Ok(None) }
let mut max = 0;
let mut total = 0;
for (id, download) in map.iter() {
if *id > max { max = *id; }
if download.counted == download.downloads { continue }
let amt = download.downloads - download.counted;
let crate_id = Version::find(tx, download.version_id).unwrap().crate_id;
// Update the total number of version downloads
try!(tx.execute("UPDATE versions
SET downloads = downloads + $1
WHERE id = $2",
&[&amt, &download.version_id]));
// Update the total number of crate downloads
try!(tx.execute("UPDATE crates SET downloads = downloads + $1
WHERE id = $2", &[&amt, &crate_id]));
// Update the total number of crate downloads for today
let cnt = try!(tx.execute("UPDATE crate_downloads
SET downloads = downloads + $2
WHERE crate_id = $1 AND date = date($3)",
&[&crate_id, &amt, &download.date]));
if cnt == 0 {
try!(tx.execute("INSERT INTO crate_downloads
(crate_id, downloads, date)
VALUES ($1, $2, $3)",
&[&crate_id, &amt, &download.date]));
}
        // Flag this row as having been processed if we're past the cutoff,
// and unconditionally increment the number of counted downloads.
try!(tx.execute("UPDATE version_downloads
SET processed = $2, counted = downloads
WHERE id = $1",
&[id, &(download.date < cutoff)]));
total += amt as i64;
}
// After everything else is done, update the global counter of total
// downloads.
try!(tx.execute("UPDATE metadata SET total_downloads = total_downloads + $1",
&[&total]));
Ok(Some(max))
}
#[cfg(test)]
mod test {
use std::collections::HashMap;
use postgres;
use semver;
use cargo_registry::{Version, Crate, User};
fn conn() -> postgres::Connection {
postgres::Connection::connect(::env("TEST_DATABASE_URL").as_slice(),
&postgres::SslMode::None).unwrap()
}
fn user(conn: &postgres::Transaction) -> User{
User::find_or_insert(conn, "login", None, None, None,
"access_token", "api_token").unwrap()
}
fn crate_downloads(tx: &postgres::Transaction, id: i32, expected: usize) {
let stmt = tx.prepare("SELECT * FROM crate_downloads
WHERE crate_id = $1").unwrap();
let dl: i32 = stmt.query(&[&id]).unwrap()
.next().unwrap().get("downloads");
assert_eq!(dl, expected as i32);
}
#[test]
fn increment() {
let conn = conn();
let tx = conn.transaction().unwrap();
let user = user(&tx);
let krate = Crate::find_or_insert(&tx, "foo", user.id, &None, &None,
&None, &None, &[], &None, &None,
&None).unwrap();
let version = Version::insert(&tx, krate.id,
&semver::Version::parse("1.0.0").unwrap(),
&HashMap::new(), &[]).unwrap();
tx.execute("INSERT INTO version_downloads \
(version_id, downloads, counted, date, processed)
VALUES ($1, 1, 0, current_date, false)",
&[&version.id]).unwrap();
tx.execute("INSERT INTO version_downloads \
(version_id, downloads, counted, date, processed)
VALUES ($1, 1, 0, current_date, true)",
&[&version.id]).unwrap();
::update(&tx).unwrap();
assert_eq!(Version::find(&tx, version.id).unwrap().downloads, 1);
assert_eq!(Crate::find(&tx, krate.id).unwrap().downloads, 1);
crate_downloads(&tx, krate.id, 1);
::update(&tx).unwrap();
assert_eq!(Version::find(&tx, version.id).unwrap().downloads, 1);
}
#[test]
fn increment_a_little() {
let conn = conn();
let tx = conn.transaction().unwrap();
let user = user(&tx);
let krate = Crate::find_or_insert(&tx, "foo", user.id, &None,
&None, &None, &None, &[], &None,
&None, &None).unwrap();
let version = Version::insert(&tx, krate.id,
&semver::Version::parse("1.0.0").unwrap(),
&HashMap::new(), &[]).unwrap();
tx.execute("INSERT INTO version_downloads \
(version_id, downloads, counted, date, processed)
VALUES ($1, 2, 1, current_date, false)",
&[&version.id]).unwrap(); | tx.execute("INSERT INTO version_downloads \
(version_id, downloads, counted, date, processed)
VALUES ($1, 1, 0, current_date, false)",
&[&version.id]).unwrap();
::update(&tx).unwrap();
assert_eq!(Version::find(&tx, version.id).unwrap().downloads, 2);
assert_eq!(Crate::find(&tx, krate.id).unwrap().downloads, 2);
crate_downloads(&tx, krate.id, 2);
::update(&tx).unwrap();
assert_eq!(Version::find(&tx, version.id).unwrap().downloads, 2);
}
} | random_line_split |
|
update-downloads.rs | #![deny(warnings)]
#![feature(std_misc, core, os, io, env)]
extern crate "cargo-registry" as cargo_registry;
extern crate postgres;
extern crate semver;
extern crate time;
extern crate env_logger;
use std::env;
use std::collections::HashMap;
use std::time::Duration;
use cargo_registry::{VersionDownload, Version, Model};
static LIMIT: i64 = 10000;
#[allow(dead_code)] // dead in tests
fn main() {
env_logger::init().unwrap();
let daemon = env::args().nth(1).as_ref().map(|s| s.to_str().unwrap())
== Some("daemon");
let sleep = env::args().nth(2).map(|s| s.to_str().unwrap().parse::<i64>().unwrap());
loop {
let conn = postgres::Connection::connect(env("DATABASE_URL").as_slice(),
&postgres::SslMode::None).unwrap();
{
let tx = conn.transaction().unwrap();
update(&tx).unwrap();
tx.set_commit();
tx.finish().unwrap();
}
drop(conn);
if daemon {
std::old_io::timer::sleep(Duration::seconds(sleep.unwrap()));
} else {
break
}
}
}
fn env(s: &str) -> String {
match env::var_string(s).ok() {
Some(s) => s,
None => panic!("must have `{}` defined", s),
}
}
fn update(tx: &postgres::Transaction) -> postgres::Result<()> {
let mut max = 0;
loop {
let tx = try!(tx.transaction());
{
let stmt = try!(tx.prepare("SELECT * FROM version_downloads \
WHERE processed = FALSE AND id > $1
ORDER BY id ASC
LIMIT $2"));
let mut rows = try!(stmt.query(&[&max, &LIMIT]));
match try!(collect(&tx, &mut rows)) {
None => break,
Some(m) => max = m,
}
}
tx.set_commit();
try!(tx.finish());
}
Ok(())
}
fn collect(tx: &postgres::Transaction,
rows: &mut postgres::Rows) -> postgres::Result<Option<i32>> {
// Anything older than 24 hours ago will be frozen and will not be queried
// against again.
let cutoff = time::now_utc().to_timespec();
let cutoff = cutoff + Duration::days(-1);
let mut map = HashMap::new();
for row in rows.by_ref() {
let download: VersionDownload = Model::from_row(&row);
assert!(map.insert(download.id, download).is_none());
}
println!("updating {} versions", map.len());
if map.len() == 0 { return Ok(None) }
let mut max = 0;
let mut total = 0;
for (id, download) in map.iter() {
if *id > max { max = *id; }
if download.counted == download.downloads { continue }
let amt = download.downloads - download.counted;
let crate_id = Version::find(tx, download.version_id).unwrap().crate_id;
// Update the total number of version downloads
try!(tx.execute("UPDATE versions
SET downloads = downloads + $1
WHERE id = $2",
&[&amt, &download.version_id]));
// Update the total number of crate downloads
try!(tx.execute("UPDATE crates SET downloads = downloads + $1
WHERE id = $2", &[&amt, &crate_id]));
// Update the total number of crate downloads for today
let cnt = try!(tx.execute("UPDATE crate_downloads
SET downloads = downloads + $2
WHERE crate_id = $1 AND date = date($3)",
&[&crate_id, &amt, &download.date]));
if cnt == 0 {
try!(tx.execute("INSERT INTO crate_downloads
(crate_id, downloads, date)
VALUES ($1, $2, $3)",
&[&crate_id, &amt, &download.date]));
}
        // Flag this row as having been processed if we're past the cutoff,
// and unconditionally increment the number of counted downloads.
try!(tx.execute("UPDATE version_downloads
SET processed = $2, counted = downloads
WHERE id = $1",
&[id, &(download.date < cutoff)]));
total += amt as i64;
}
// After everything else is done, update the global counter of total
// downloads.
try!(tx.execute("UPDATE metadata SET total_downloads = total_downloads + $1",
&[&total]));
Ok(Some(max))
}
#[cfg(test)]
mod test {
use std::collections::HashMap;
use postgres;
use semver;
use cargo_registry::{Version, Crate, User};
fn conn() -> postgres::Connection {
postgres::Connection::connect(::env("TEST_DATABASE_URL").as_slice(),
&postgres::SslMode::None).unwrap()
}
fn | (conn: &postgres::Transaction) -> User{
User::find_or_insert(conn, "login", None, None, None,
"access_token", "api_token").unwrap()
}
fn crate_downloads(tx: &postgres::Transaction, id: i32, expected: usize) {
let stmt = tx.prepare("SELECT * FROM crate_downloads
WHERE crate_id = $1").unwrap();
let dl: i32 = stmt.query(&[&id]).unwrap()
.next().unwrap().get("downloads");
assert_eq!(dl, expected as i32);
}
#[test]
fn increment() {
let conn = conn();
let tx = conn.transaction().unwrap();
let user = user(&tx);
let krate = Crate::find_or_insert(&tx, "foo", user.id, &None, &None,
&None, &None, &[], &None, &None,
&None).unwrap();
let version = Version::insert(&tx, krate.id,
&semver::Version::parse("1.0.0").unwrap(),
&HashMap::new(), &[]).unwrap();
tx.execute("INSERT INTO version_downloads \
(version_id, downloads, counted, date, processed)
VALUES ($1, 1, 0, current_date, false)",
&[&version.id]).unwrap();
tx.execute("INSERT INTO version_downloads \
(version_id, downloads, counted, date, processed)
VALUES ($1, 1, 0, current_date, true)",
&[&version.id]).unwrap();
::update(&tx).unwrap();
assert_eq!(Version::find(&tx, version.id).unwrap().downloads, 1);
assert_eq!(Crate::find(&tx, krate.id).unwrap().downloads, 1);
crate_downloads(&tx, krate.id, 1);
::update(&tx).unwrap();
assert_eq!(Version::find(&tx, version.id).unwrap().downloads, 1);
}
#[test]
fn increment_a_little() {
let conn = conn();
let tx = conn.transaction().unwrap();
let user = user(&tx);
let krate = Crate::find_or_insert(&tx, "foo", user.id, &None,
&None, &None, &None, &[], &None,
&None, &None).unwrap();
let version = Version::insert(&tx, krate.id,
&semver::Version::parse("1.0.0").unwrap(),
&HashMap::new(), &[]).unwrap();
tx.execute("INSERT INTO version_downloads \
(version_id, downloads, counted, date, processed)
VALUES ($1, 2, 1, current_date, false)",
&[&version.id]).unwrap();
tx.execute("INSERT INTO version_downloads \
(version_id, downloads, counted, date, processed)
VALUES ($1, 1, 0, current_date, false)",
&[&version.id]).unwrap();
::update(&tx).unwrap();
assert_eq!(Version::find(&tx, version.id).unwrap().downloads, 2);
assert_eq!(Crate::find(&tx, krate.id).unwrap().downloads, 2);
crate_downloads(&tx, krate.id, 2);
::update(&tx).unwrap();
assert_eq!(Version::find(&tx, version.id).unwrap().downloads, 2);
}
}
| user | identifier_name |
update-downloads.rs | #![deny(warnings)]
#![feature(std_misc, core, os, io, env)]
extern crate "cargo-registry" as cargo_registry;
extern crate postgres;
extern crate semver;
extern crate time;
extern crate env_logger;
use std::env;
use std::collections::HashMap;
use std::time::Duration;
use cargo_registry::{VersionDownload, Version, Model};
static LIMIT: i64 = 10000;
#[allow(dead_code)] // dead in tests
fn main() {
env_logger::init().unwrap();
let daemon = env::args().nth(1).as_ref().map(|s| s.to_str().unwrap())
== Some("daemon");
let sleep = env::args().nth(2).map(|s| s.to_str().unwrap().parse::<i64>().unwrap());
loop {
let conn = postgres::Connection::connect(env("DATABASE_URL").as_slice(),
&postgres::SslMode::None).unwrap();
{
let tx = conn.transaction().unwrap();
update(&tx).unwrap();
tx.set_commit();
tx.finish().unwrap();
}
drop(conn);
if daemon {
std::old_io::timer::sleep(Duration::seconds(sleep.unwrap()));
} else {
break
}
}
}
fn env(s: &str) -> String {
match env::var_string(s).ok() {
Some(s) => s,
None => panic!("must have `{}` defined", s),
}
}
fn update(tx: &postgres::Transaction) -> postgres::Result<()> {
let mut max = 0;
loop {
let tx = try!(tx.transaction());
{
let stmt = try!(tx.prepare("SELECT * FROM version_downloads \
WHERE processed = FALSE AND id > $1
ORDER BY id ASC
LIMIT $2"));
let mut rows = try!(stmt.query(&[&max, &LIMIT]));
match try!(collect(&tx, &mut rows)) {
None => break,
Some(m) => max = m,
}
}
tx.set_commit();
try!(tx.finish());
}
Ok(())
}
fn collect(tx: &postgres::Transaction,
rows: &mut postgres::Rows) -> postgres::Result<Option<i32>> {
// Anything older than 24 hours ago will be frozen and will not be queried
// against again.
let cutoff = time::now_utc().to_timespec();
let cutoff = cutoff + Duration::days(-1);
let mut map = HashMap::new();
for row in rows.by_ref() {
let download: VersionDownload = Model::from_row(&row);
assert!(map.insert(download.id, download).is_none());
}
println!("updating {} versions", map.len());
if map.len() == 0 { return Ok(None) }
let mut max = 0;
let mut total = 0;
for (id, download) in map.iter() {
if *id > max { max = *id; }
if download.counted == download.downloads { continue }
let amt = download.downloads - download.counted;
let crate_id = Version::find(tx, download.version_id).unwrap().crate_id;
// Update the total number of version downloads
try!(tx.execute("UPDATE versions
SET downloads = downloads + $1
WHERE id = $2",
&[&amt, &download.version_id]));
// Update the total number of crate downloads
try!(tx.execute("UPDATE crates SET downloads = downloads + $1
WHERE id = $2", &[&amt, &crate_id]));
// Update the total number of crate downloads for today
let cnt = try!(tx.execute("UPDATE crate_downloads
SET downloads = downloads + $2
WHERE crate_id = $1 AND date = date($3)",
&[&crate_id, &amt, &download.date]));
if cnt == 0 {
try!(tx.execute("INSERT INTO crate_downloads
(crate_id, downloads, date)
VALUES ($1, $2, $3)",
&[&crate_id, &amt, &download.date]));
}
        // Flag this row as having been processed if we're past the cutoff,
// and unconditionally increment the number of counted downloads.
try!(tx.execute("UPDATE version_downloads
SET processed = $2, counted = downloads
WHERE id = $1",
&[id, &(download.date < cutoff)]));
total += amt as i64;
}
// After everything else is done, update the global counter of total
// downloads.
try!(tx.execute("UPDATE metadata SET total_downloads = total_downloads + $1",
&[&total]));
Ok(Some(max))
}
#[cfg(test)]
mod test {
use std::collections::HashMap;
use postgres;
use semver;
use cargo_registry::{Version, Crate, User};
fn conn() -> postgres::Connection {
postgres::Connection::connect(::env("TEST_DATABASE_URL").as_slice(),
&postgres::SslMode::None).unwrap()
}
fn user(conn: &postgres::Transaction) -> User{
User::find_or_insert(conn, "login", None, None, None,
"access_token", "api_token").unwrap()
}
fn crate_downloads(tx: &postgres::Transaction, id: i32, expected: usize) {
let stmt = tx.prepare("SELECT * FROM crate_downloads
WHERE crate_id = $1").unwrap();
let dl: i32 = stmt.query(&[&id]).unwrap()
.next().unwrap().get("downloads");
assert_eq!(dl, expected as i32);
}
#[test]
fn increment() {
let conn = conn();
let tx = conn.transaction().unwrap();
let user = user(&tx);
let krate = Crate::find_or_insert(&tx, "foo", user.id, &None, &None,
&None, &None, &[], &None, &None,
&None).unwrap();
let version = Version::insert(&tx, krate.id,
&semver::Version::parse("1.0.0").unwrap(),
&HashMap::new(), &[]).unwrap();
tx.execute("INSERT INTO version_downloads \
(version_id, downloads, counted, date, processed)
VALUES ($1, 1, 0, current_date, false)",
&[&version.id]).unwrap();
tx.execute("INSERT INTO version_downloads \
(version_id, downloads, counted, date, processed)
VALUES ($1, 1, 0, current_date, true)",
&[&version.id]).unwrap();
::update(&tx).unwrap();
assert_eq!(Version::find(&tx, version.id).unwrap().downloads, 1);
assert_eq!(Crate::find(&tx, krate.id).unwrap().downloads, 1);
crate_downloads(&tx, krate.id, 1);
::update(&tx).unwrap();
assert_eq!(Version::find(&tx, version.id).unwrap().downloads, 1);
}
#[test]
fn increment_a_little() | assert_eq!(Crate::find(&tx, krate.id).unwrap().downloads, 2);
crate_downloads(&tx, krate.id, 2);
::update(&tx).unwrap();
assert_eq!(Version::find(&tx, version.id).unwrap().downloads, 2);
}
}
| {
let conn = conn();
let tx = conn.transaction().unwrap();
let user = user(&tx);
let krate = Crate::find_or_insert(&tx, "foo", user.id, &None,
&None, &None, &None, &[], &None,
&None, &None).unwrap();
let version = Version::insert(&tx, krate.id,
&semver::Version::parse("1.0.0").unwrap(),
&HashMap::new(), &[]).unwrap();
tx.execute("INSERT INTO version_downloads \
(version_id, downloads, counted, date, processed)
VALUES ($1, 2, 1, current_date, false)",
&[&version.id]).unwrap();
tx.execute("INSERT INTO version_downloads \
(version_id, downloads, counted, date, processed)
VALUES ($1, 1, 0, current_date, false)",
&[&version.id]).unwrap();
::update(&tx).unwrap();
assert_eq!(Version::find(&tx, version.id).unwrap().downloads, 2); | identifier_body |
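// Worked example for collect() above (hypothetical row): a version_downloads row with
// downloads = 7 and counted = 5 contributes amt = 7 - 5 = 2. Those two downloads are
// added to versions.downloads, crates.downloads, the per-day crate_downloads row and,
// at the end of the pass, metadata.total_downloads; counted is then set equal to
// downloads, so the same row adds nothing on the next pass.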
issue-5791.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
#![feature(libc)]
extern crate libc;
extern {
#[link_name = "malloc"]
fn malloc1(len: libc::c_int) -> *const libc::c_void;
#[link_name = "malloc"]
fn malloc2(len: libc::c_int, foo: libc::c_int) -> *const libc::c_void;
}
pub fn | () {}
| main | identifier_name |
issue-5791.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
#![feature(libc)]
extern crate libc;
extern { | fn malloc2(len: libc::c_int, foo: libc::c_int) -> *const libc::c_void;
}
pub fn main () {} | #[link_name = "malloc"]
fn malloc1(len: libc::c_int) -> *const libc::c_void;
#[link_name = "malloc"] | random_line_split |
issue-5791.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
#![feature(libc)]
extern crate libc;
extern {
#[link_name = "malloc"]
fn malloc1(len: libc::c_int) -> *const libc::c_void;
#[link_name = "malloc"]
fn malloc2(len: libc::c_int, foo: libc::c_int) -> *const libc::c_void;
}
pub fn main () | {} | identifier_body |
|
lib.rs | // this module adds some functionality based on the required implementations
// here like: `LinkedList::pop_back` or `Clone for LinkedList<T>`
// You are free to use anything in it, but it's mainly for the test framework.
mod pre_implemented;
pub struct LinkedList<T>(std::marker::PhantomData<T>);
pub struct Cursor<'a, T>(std::marker::PhantomData<&'a mut T>);
pub struct Iter<'a, T>(std::marker::PhantomData<&'a T>);
impl<T> LinkedList<T> {
pub fn new() -> Self {
unimplemented!()
}
// You may be wondering why it's necessary to have is_empty()
// when it can easily be determined from len().
// It's good custom to have both because len() can be expensive for some types,
// whereas is_empty() is almost always cheap.
// (Also ask yourself whether len() is expensive for LinkedList)
pub fn is_empty(&self) -> bool {
unimplemented!()
}
pub fn len(&self) -> usize {
unimplemented!()
}
/// Return a cursor positioned on the front element
pub fn cursor_front(&mut self) -> Cursor<'_, T> {
unimplemented!()
}
/// Return a cursor positioned on the back element
pub fn cursor_back(&mut self) -> Cursor<'_, T> {
unimplemented!()
}
/// Return an iterator that moves from front to back
pub fn iter(&self) -> Iter<'_, T> |
}
// the cursor is expected to act as if it is at the position of an element
// and it also has to work with and be able to insert into an empty list.
impl<T> Cursor<'_, T> {
/// Take a mutable reference to the current element
pub fn peek_mut(&mut self) -> Option<&mut T> {
unimplemented!()
}
/// Move one position forward (towards the back) and
/// return a reference to the new position
#[allow(clippy::should_implement_trait)]
pub fn next(&mut self) -> Option<&mut T> {
unimplemented!()
}
/// Move one position backward (towards the front) and
/// return a reference to the new position
pub fn prev(&mut self) -> Option<&mut T> {
unimplemented!()
}
/// Remove and return the element at the current position and move the cursor
/// to the neighboring element that's closest to the back. This can be
/// either the next or previous position.
pub fn take(&mut self) -> Option<T> {
unimplemented!()
}
pub fn insert_after(&mut self, _element: T) {
unimplemented!()
}
pub fn insert_before(&mut self, _element: T) {
unimplemented!()
}
}
impl<'a, T> Iterator for Iter<'a, T> {
type Item = &'a T;
fn next(&mut self) -> Option<&'a T> {
unimplemented!()
}
}
| {
unimplemented!()
} | identifier_body |
lib.rs | // this module adds some functionality based on the required implementations
// here like: `LinkedList::pop_back` or `Clone for LinkedList<T>`
// You are free to use anything in it, but it's mainly for the test framework.
mod pre_implemented;
pub struct LinkedList<T>(std::marker::PhantomData<T>);
pub struct Cursor<'a, T>(std::marker::PhantomData<&'a mut T>);
pub struct Iter<'a, T>(std::marker::PhantomData<&'a T>);
impl<T> LinkedList<T> {
pub fn new() -> Self {
unimplemented!()
}
// You may be wondering why it's necessary to have is_empty()
// when it can easily be determined from len().
// It's good custom to have both because len() can be expensive for some types,
// whereas is_empty() is almost always cheap.
// (Also ask yourself whether len() is expensive for LinkedList)
pub fn is_empty(&self) -> bool {
unimplemented!()
}
pub fn len(&self) -> usize {
unimplemented!()
}
/// Return a cursor positioned on the front element
pub fn cursor_front(&mut self) -> Cursor<'_, T> {
unimplemented!()
}
/// Return a cursor positioned on the back element
pub fn cursor_back(&mut self) -> Cursor<'_, T> {
unimplemented!()
}
/// Return an iterator that moves from front to back
pub fn iter(&self) -> Iter<'_, T> {
unimplemented!()
}
}
// the cursor is expected to act as if it is at the position of an element
// and it also has to work with and be able to insert into an empty list.
impl<T> Cursor<'_, T> {
/// Take a mutable reference to the current element
pub fn peek_mut(&mut self) -> Option<&mut T> {
unimplemented!()
}
/// Move one position forward (towards the back) and
/// return a reference to the new position
#[allow(clippy::should_implement_trait)]
pub fn next(&mut self) -> Option<&mut T> {
unimplemented!()
}
| /// return a reference to the new position
pub fn prev(&mut self) -> Option<&mut T> {
unimplemented!()
}
/// Remove and return the element at the current position and move the cursor
/// to the neighboring element that's closest to the back. This can be
/// either the next or previous position.
pub fn take(&mut self) -> Option<T> {
unimplemented!()
}
pub fn insert_after(&mut self, _element: T) {
unimplemented!()
}
pub fn insert_before(&mut self, _element: T) {
unimplemented!()
}
}
impl<'a, T> Iterator for Iter<'a, T> {
type Item = &'a T;
fn next(&mut self) -> Option<&'a T> {
unimplemented!()
}
} | /// Move one position backward (towards the front) and | random_line_split |
lib.rs | // this module adds some functionality based on the required implementations
// here like: `LinkedList::pop_back` or `Clone for LinkedList<T>`
// You are free to use anything in it, but it's mainly for the test framework.
mod pre_implemented;
pub struct LinkedList<T>(std::marker::PhantomData<T>);
pub struct Cursor<'a, T>(std::marker::PhantomData<&'a mut T>);
pub struct Iter<'a, T>(std::marker::PhantomData<&'a T>);
impl<T> LinkedList<T> {
pub fn new() -> Self {
unimplemented!()
}
// You may be wondering why it's necessary to have is_empty()
// when it can easily be determined from len().
// It's good custom to have both because len() can be expensive for some types,
// whereas is_empty() is almost always cheap.
// (Also ask yourself whether len() is expensive for LinkedList)
pub fn is_empty(&self) -> bool {
unimplemented!()
}
pub fn len(&self) -> usize {
unimplemented!()
}
/// Return a cursor positioned on the front element
pub fn | (&mut self) -> Cursor<'_, T> {
unimplemented!()
}
/// Return a cursor positioned on the back element
pub fn cursor_back(&mut self) -> Cursor<'_, T> {
unimplemented!()
}
/// Return an iterator that moves from front to back
pub fn iter(&self) -> Iter<'_, T> {
unimplemented!()
}
}
// the cursor is expected to act as if it is at the position of an element
// and it also has to work with and be able to insert into an empty list.
impl<T> Cursor<'_, T> {
/// Take a mutable reference to the current element
pub fn peek_mut(&mut self) -> Option<&mut T> {
unimplemented!()
}
/// Move one position forward (towards the back) and
/// return a reference to the new position
#[allow(clippy::should_implement_trait)]
pub fn next(&mut self) -> Option<&mut T> {
unimplemented!()
}
/// Move one position backward (towards the front) and
/// return a reference to the new position
pub fn prev(&mut self) -> Option<&mut T> {
unimplemented!()
}
/// Remove and return the element at the current position and move the cursor
/// to the neighboring element that's closest to the back. This can be
/// either the next or previous position.
pub fn take(&mut self) -> Option<T> {
unimplemented!()
}
pub fn insert_after(&mut self, _element: T) {
unimplemented!()
}
pub fn insert_before(&mut self, _element: T) {
unimplemented!()
}
}
impl<'a, T> Iterator for Iter<'a, T> {
type Item = &'a T;
fn next(&mut self) -> Option<&'a T> {
unimplemented!()
}
}
| cursor_front | identifier_name |
temporary_page.rs | //! Temporarily map a page
//! From [Phil Opp's Blog](http://os.phil-opp.com/remap-the-kernel.html)
use memory::Frame;
use super::{ActivePageTable, Page, VirtualAddress};
use super::entry::EntryFlags;
use super::table::{Table, Level1};
pub struct TemporaryPage {
page: Page,
}
impl TemporaryPage {
pub fn | (page: Page) -> TemporaryPage {
TemporaryPage {
page: page,
}
}
pub fn start_address (&self) -> VirtualAddress {
self.page.start_address()
}
/// Maps the temporary page to the given frame in the active table.
/// Returns the start address of the temporary page.
pub fn map(&mut self, frame: Frame, flags: EntryFlags, active_table: &mut ActivePageTable) -> VirtualAddress {
assert!(active_table.translate_page(self.page).is_none(), "temporary page is already mapped");
active_table.map_to(self.page, frame, flags);
self.page.start_address()
}
/// Maps the temporary page to the given page table frame in the active
/// table. Returns a reference to the now mapped table.
pub fn map_table_frame(&mut self, frame: Frame, flags: EntryFlags, active_table: &mut ActivePageTable) -> &mut Table<Level1> {
unsafe { &mut *(self.map(frame, flags, active_table).get() as *mut Table<Level1>) }
}
/// Unmaps the temporary page in the active table.
pub fn unmap(&mut self, active_table: &mut ActivePageTable) {
active_table.unmap(self.page)
}
}
| new | identifier_name |
temporary_page.rs | //! Temporarily map a page
//! From [Phil Opp's Blog](http://os.phil-opp.com/remap-the-kernel.html)
use memory::Frame;
use super::{ActivePageTable, Page, VirtualAddress};
use super::entry::EntryFlags;
use super::table::{Table, Level1};
pub struct TemporaryPage {
page: Page,
}
impl TemporaryPage {
pub fn new(page: Page) -> TemporaryPage {
TemporaryPage {
page: page,
}
}
pub fn start_address (&self) -> VirtualAddress |
/// Maps the temporary page to the given frame in the active table.
/// Returns the start address of the temporary page.
pub fn map(&mut self, frame: Frame, flags: EntryFlags, active_table: &mut ActivePageTable) -> VirtualAddress {
assert!(active_table.translate_page(self.page).is_none(), "temporary page is already mapped");
active_table.map_to(self.page, frame, flags);
self.page.start_address()
}
/// Maps the temporary page to the given page table frame in the active
/// table. Returns a reference to the now mapped table.
pub fn map_table_frame(&mut self, frame: Frame, flags: EntryFlags, active_table: &mut ActivePageTable) -> &mut Table<Level1> {
unsafe { &mut *(self.map(frame, flags, active_table).get() as *mut Table<Level1>) }
}
/// Unmaps the temporary page in the active table.
pub fn unmap(&mut self, active_table: &mut ActivePageTable) {
active_table.unmap(self.page)
}
}
| {
self.page.start_address()
} | identifier_body |
temporary_page.rs | //! Temporarily map a page
//! From [Phil Opp's Blog](http://os.phil-opp.com/remap-the-kernel.html)
use memory::Frame;
use super::{ActivePageTable, Page, VirtualAddress};
use super::entry::EntryFlags;
use super::table::{Table, Level1};
pub struct TemporaryPage {
page: Page,
}
impl TemporaryPage {
pub fn new(page: Page) -> TemporaryPage {
TemporaryPage {
page: page,
}
}
pub fn start_address (&self) -> VirtualAddress {
self.page.start_address()
}
/// Maps the temporary page to the given frame in the active table.
/// Returns the start address of the temporary page.
pub fn map(&mut self, frame: Frame, flags: EntryFlags, active_table: &mut ActivePageTable) -> VirtualAddress {
assert!(active_table.translate_page(self.page).is_none(), "temporary page is already mapped");
active_table.map_to(self.page, frame, flags);
self.page.start_address()
}
/// Maps the temporary page to the given page table frame in the active
/// table. Returns a reference to the now mapped table.
pub fn map_table_frame(&mut self, frame: Frame, flags: EntryFlags, active_table: &mut ActivePageTable) -> &mut Table<Level1> { | }
/// Unmaps the temporary page in the active table.
pub fn unmap(&mut self, active_table: &mut ActivePageTable) {
active_table.unmap(self.page)
}
} | unsafe { &mut *(self.map(frame, flags, active_table).get() as *mut Table<Level1>) } | random_line_split |
borrow-tuple-fields.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unknown_features)]
#![feature(box_syntax)]
struct Foo(Box<isize>, isize);
struct | (isize, isize);
fn main() {
let x: (Box<_>, _) = (box 1, 2);
let r = &x.0;
let y = x; //~ ERROR cannot move out of `x` because it is borrowed
let mut x = (1, 2);
let a = &x.0;
let b = &mut x.0; //~ ERROR cannot borrow `x.0` as mutable because it is also borrowed as
let mut x = (1, 2);
let a = &mut x.0;
let b = &mut x.0; //~ ERROR cannot borrow `x.0` as mutable more than once at a time
let x = Foo(box 1, 2);
let r = &x.0;
let y = x; //~ ERROR cannot move out of `x` because it is borrowed
let mut x = Bar(1, 2);
let a = &x.0;
let b = &mut x.0; //~ ERROR cannot borrow `x.0` as mutable because it is also borrowed as
let mut x = Bar(1, 2);
let a = &mut x.0;
let b = &mut x.0; //~ ERROR cannot borrow `x.0` as mutable more than once at a time
}
| Bar | identifier_name |
borrow-tuple-fields.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unknown_features)] |
struct Foo(Box<isize>, isize);
struct Bar(isize, isize);
fn main() {
let x: (Box<_>, _) = (box 1, 2);
let r = &x.0;
let y = x; //~ ERROR cannot move out of `x` because it is borrowed
let mut x = (1, 2);
let a = &x.0;
let b = &mut x.0; //~ ERROR cannot borrow `x.0` as mutable because it is also borrowed as
let mut x = (1, 2);
let a = &mut x.0;
let b = &mut x.0; //~ ERROR cannot borrow `x.0` as mutable more than once at a time
let x = Foo(box 1, 2);
let r = &x.0;
let y = x; //~ ERROR cannot move out of `x` because it is borrowed
let mut x = Bar(1, 2);
let a = &x.0;
let b = &mut x.0; //~ ERROR cannot borrow `x.0` as mutable because it is also borrowed as
let mut x = Bar(1, 2);
let a = &mut x.0;
let b = &mut x.0; //~ ERROR cannot borrow `x.0` as mutable more than once at a time
} | #![feature(box_syntax)] | random_line_split |
os.rs | #![deny(unsafe_op_in_unsafe_fn)]
use crate::any::Any;
use crate::error::Error as StdError;
use crate::ffi::{CStr, CString, OsStr, OsString};
use crate::fmt;
use crate::io;
use crate::marker::PhantomData;
use crate::os::wasi::prelude::*;
use crate::path::{self, PathBuf};
use crate::str;
use crate::sys::memchr;
use crate::sys::unsupported;
use crate::vec;
// Add a few symbols not in upstream `libc` just yet.
mod libc {
pub use libc::*;
extern "C" {
pub fn getcwd(buf: *mut c_char, size: size_t) -> *mut c_char;
pub fn chdir(dir: *const c_char) -> c_int;
}
}
#[cfg(not(target_feature = "atomics"))]
pub unsafe fn env_lock() -> impl Any {
// No need for a lock if we're single-threaded, but this function will need
// to get implemented for multi-threaded scenarios
}
pub fn errno() -> i32 {
extern "C" {
#[thread_local]
static errno: libc::c_int;
}
unsafe { errno as i32 }
}
pub fn error_string(errno: i32) -> String {
let mut buf = [0 as libc::c_char; 1024];
let p = buf.as_mut_ptr();
unsafe {
if libc::strerror_r(errno as libc::c_int, p, buf.len()) < 0 {
panic!("strerror_r failure");
}
str::from_utf8(CStr::from_ptr(p).to_bytes()).unwrap().to_owned()
}
}
pub fn getcwd() -> io::Result<PathBuf> {
let mut buf = Vec::with_capacity(512);
loop {
unsafe {
let ptr = buf.as_mut_ptr() as *mut libc::c_char;
            if !libc::getcwd(ptr, buf.capacity()).is_null() {
let len = CStr::from_ptr(buf.as_ptr() as *const libc::c_char).to_bytes().len();
buf.set_len(len);
buf.shrink_to_fit();
return Ok(PathBuf::from(OsString::from_vec(buf)));
} else {
let error = io::Error::last_os_error();
                if error.raw_os_error() != Some(libc::ERANGE) {
return Err(error);
}
}
// Trigger the internal buffer resizing logic of `Vec` by requiring
// more space than the current capacity.
let cap = buf.capacity();
buf.set_len(cap);
buf.reserve(1);
}
}
}
pub fn chdir(p: &path::Path) -> io::Result<()> {
let p: &OsStr = p.as_ref();
let p = CString::new(p.as_bytes())?;
unsafe {
match libc::chdir(p.as_ptr()) == (0 as libc::c_int) {
true => Ok(()),
false => Err(io::Error::last_os_error()),
}
}
}
pub struct SplitPaths<'a>(!, PhantomData<&'a ()>);
pub fn split_paths(_unparsed: &OsStr) -> SplitPaths<'_> {
panic!("unsupported")
}
impl<'a> Iterator for SplitPaths<'a> {
type Item = PathBuf;
fn next(&mut self) -> Option<PathBuf> {
self.0
}
}
#[derive(Debug)]
pub struct JoinPathsError;
pub fn join_paths<I, T>(_paths: I) -> Result<OsString, JoinPathsError>
where
I: Iterator<Item = T>,
T: AsRef<OsStr>,
{
Err(JoinPathsError)
}
impl fmt::Display for JoinPathsError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
"not supported on wasm yet".fmt(f)
}
}
impl StdError for JoinPathsError {
#[allow(deprecated)]
fn description(&self) -> &str {
"not supported on wasm yet"
}
}
pub fn current_exe() -> io::Result<PathBuf> {
unsupported()
}
pub struct Env {
iter: vec::IntoIter<(OsString, OsString)>,
}
impl !Send for Env {}
impl !Sync for Env {}
impl Iterator for Env {
type Item = (OsString, OsString);
fn next(&mut self) -> Option<(OsString, OsString)> {
self.iter.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
pub fn env() -> Env {
unsafe {
let _guard = env_lock();
let mut environ = libc::environ;
let mut result = Vec::new();
        if !environ.is_null() {
            while !(*environ).is_null() {
if let Some(key_value) = parse(CStr::from_ptr(*environ).to_bytes()) {
result.push(key_value);
}
environ = environ.add(1);
}
}
return Env { iter: result.into_iter() };
}
// See src/libstd/sys/unix/os.rs, same as that
fn parse(input: &[u8]) -> Option<(OsString, OsString)> {
if input.is_empty() {
return None;
}
let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1);
pos.map(|p| {
(
OsStringExt::from_vec(input[..p].to_vec()),
OsStringExt::from_vec(input[p + 1..].to_vec()),
)
})
}
}
pub fn getenv(k: &OsStr) -> Option<OsString> {
let k = CString::new(k.as_bytes()).ok()?;
unsafe {
let _guard = env_lock();
let s = libc::getenv(k.as_ptr()) as *const libc::c_char;
if s.is_null() {
None
} else {
Some(OsStringExt::from_vec(CStr::from_ptr(s).to_bytes().to_vec()))
}
}
}
pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> |
pub fn unsetenv(n: &OsStr) -> io::Result<()> {
let nbuf = CString::new(n.as_bytes())?;
unsafe {
let _guard = env_lock();
cvt(libc::unsetenv(nbuf.as_ptr())).map(drop)
}
}
pub fn temp_dir() -> PathBuf {
panic!("no filesystem on wasm")
}
pub fn home_dir() -> Option<PathBuf> {
None
}
pub fn exit(code: i32) -> ! {
unsafe { libc::exit(code) }
}
pub fn getpid() -> u32 {
panic!("unsupported");
}
#[doc(hidden)]
pub trait IsMinusOne {
fn is_minus_one(&self) -> bool;
}
macro_rules! impl_is_minus_one {
($($t:ident)*) => ($(impl IsMinusOne for $t {
fn is_minus_one(&self) -> bool {
*self == -1
}
})*)
}
impl_is_minus_one! { i8 i16 i32 i64 isize }
fn cvt<T: IsMinusOne>(t: T) -> io::Result<T> {
if t.is_minus_one() { Err(io::Error::last_os_error()) } else { Ok(t) }
}
| {
let k = CString::new(k.as_bytes())?;
let v = CString::new(v.as_bytes())?;
unsafe {
let _guard = env_lock();
cvt(libc::setenv(k.as_ptr(), v.as_ptr(), 1)).map(drop)
}
} | identifier_body |
os.rs | #![deny(unsafe_op_in_unsafe_fn)]
use crate::any::Any;
use crate::error::Error as StdError;
use crate::ffi::{CStr, CString, OsStr, OsString};
use crate::fmt;
use crate::io;
use crate::marker::PhantomData;
use crate::os::wasi::prelude::*;
use crate::path::{self, PathBuf};
use crate::str;
use crate::sys::memchr;
use crate::sys::unsupported;
use crate::vec;
// Add a few symbols not in upstream `libc` just yet.
mod libc {
pub use libc::*;
extern "C" {
pub fn getcwd(buf: *mut c_char, size: size_t) -> *mut c_char;
pub fn chdir(dir: *const c_char) -> c_int;
}
}
#[cfg(not(target_feature = "atomics"))]
pub unsafe fn env_lock() -> impl Any {
// No need for a lock if we're single-threaded, but this function will need
// to get implemented for multi-threaded scenarios
}
pub fn errno() -> i32 {
extern "C" {
#[thread_local]
static errno: libc::c_int;
}
unsafe { errno as i32 }
}
pub fn error_string(errno: i32) -> String {
let mut buf = [0 as libc::c_char; 1024];
let p = buf.as_mut_ptr();
unsafe {
if libc::strerror_r(errno as libc::c_int, p, buf.len()) < 0 {
panic!("strerror_r failure");
}
str::from_utf8(CStr::from_ptr(p).to_bytes()).unwrap().to_owned()
}
}
pub fn getcwd() -> io::Result<PathBuf> {
let mut buf = Vec::with_capacity(512);
loop {
unsafe {
let ptr = buf.as_mut_ptr() as *mut libc::c_char;
            if !libc::getcwd(ptr, buf.capacity()).is_null() | else {
let error = io::Error::last_os_error();
                if error.raw_os_error() != Some(libc::ERANGE) {
return Err(error);
}
}
// Trigger the internal buffer resizing logic of `Vec` by requiring
// more space than the current capacity.
let cap = buf.capacity();
buf.set_len(cap);
buf.reserve(1);
}
}
}
pub fn chdir(p: &path::Path) -> io::Result<()> {
let p: &OsStr = p.as_ref();
let p = CString::new(p.as_bytes())?;
unsafe {
match libc::chdir(p.as_ptr()) == (0 as libc::c_int) {
true => Ok(()),
false => Err(io::Error::last_os_error()),
}
}
}
pub struct SplitPaths<'a>(!, PhantomData<&'a ()>);
pub fn split_paths(_unparsed: &OsStr) -> SplitPaths<'_> {
panic!("unsupported")
}
impl<'a> Iterator for SplitPaths<'a> {
type Item = PathBuf;
fn next(&mut self) -> Option<PathBuf> {
self.0
}
}
#[derive(Debug)]
pub struct JoinPathsError;
pub fn join_paths<I, T>(_paths: I) -> Result<OsString, JoinPathsError>
where
I: Iterator<Item = T>,
T: AsRef<OsStr>,
{
Err(JoinPathsError)
}
impl fmt::Display for JoinPathsError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
"not supported on wasm yet".fmt(f)
}
}
impl StdError for JoinPathsError {
#[allow(deprecated)]
fn description(&self) -> &str {
"not supported on wasm yet"
}
}
pub fn current_exe() -> io::Result<PathBuf> {
unsupported()
}
pub struct Env {
iter: vec::IntoIter<(OsString, OsString)>,
}
impl !Send for Env {}
impl !Sync for Env {}
impl Iterator for Env {
type Item = (OsString, OsString);
fn next(&mut self) -> Option<(OsString, OsString)> {
self.iter.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
pub fn env() -> Env {
unsafe {
let _guard = env_lock();
let mut environ = libc::environ;
let mut result = Vec::new();
        if !environ.is_null() {
            while !(*environ).is_null() {
if let Some(key_value) = parse(CStr::from_ptr(*environ).to_bytes()) {
result.push(key_value);
}
environ = environ.add(1);
}
}
return Env { iter: result.into_iter() };
}
// See src/libstd/sys/unix/os.rs, same as that
fn parse(input: &[u8]) -> Option<(OsString, OsString)> {
if input.is_empty() {
return None;
}
let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1);
pos.map(|p| {
(
OsStringExt::from_vec(input[..p].to_vec()),
OsStringExt::from_vec(input[p + 1..].to_vec()),
)
})
}
}
pub fn getenv(k: &OsStr) -> Option<OsString> {
let k = CString::new(k.as_bytes()).ok()?;
unsafe {
let _guard = env_lock();
let s = libc::getenv(k.as_ptr()) as *const libc::c_char;
if s.is_null() {
None
} else {
Some(OsStringExt::from_vec(CStr::from_ptr(s).to_bytes().to_vec()))
}
}
}
pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> {
let k = CString::new(k.as_bytes())?;
let v = CString::new(v.as_bytes())?;
unsafe {
let _guard = env_lock();
cvt(libc::setenv(k.as_ptr(), v.as_ptr(), 1)).map(drop)
}
}
pub fn unsetenv(n: &OsStr) -> io::Result<()> {
let nbuf = CString::new(n.as_bytes())?;
unsafe {
let _guard = env_lock();
cvt(libc::unsetenv(nbuf.as_ptr())).map(drop)
}
}
pub fn temp_dir() -> PathBuf {
panic!("no filesystem on wasm")
}
pub fn home_dir() -> Option<PathBuf> {
None
}
pub fn exit(code: i32) -> ! {
unsafe { libc::exit(code) }
}
pub fn getpid() -> u32 {
panic!("unsupported");
}
#[doc(hidden)]
pub trait IsMinusOne {
fn is_minus_one(&self) -> bool;
}
macro_rules! impl_is_minus_one {
($($t:ident)*) => ($(impl IsMinusOne for $t {
fn is_minus_one(&self) -> bool {
*self == -1
}
})*)
}
impl_is_minus_one! { i8 i16 i32 i64 isize }
fn cvt<T: IsMinusOne>(t: T) -> io::Result<T> {
if t.is_minus_one() { Err(io::Error::last_os_error()) } else { Ok(t) }
}
| {
let len = CStr::from_ptr(buf.as_ptr() as *const libc::c_char).to_bytes().len();
buf.set_len(len);
buf.shrink_to_fit();
return Ok(PathBuf::from(OsString::from_vec(buf)));
} | conditional_block |
os.rs | #![deny(unsafe_op_in_unsafe_fn)]
use crate::any::Any;
use crate::error::Error as StdError;
use crate::ffi::{CStr, CString, OsStr, OsString};
use crate::fmt;
use crate::io;
use crate::marker::PhantomData;
use crate::os::wasi::prelude::*;
use crate::path::{self, PathBuf};
use crate::str;
use crate::sys::memchr;
use crate::sys::unsupported;
use crate::vec;
// Add a few symbols not in upstream `libc` just yet.
mod libc {
pub use libc::*;
extern "C" {
pub fn getcwd(buf: *mut c_char, size: size_t) -> *mut c_char;
pub fn chdir(dir: *const c_char) -> c_int;
}
}
#[cfg(not(target_feature = "atomics"))]
pub unsafe fn env_lock() -> impl Any {
// No need for a lock if we're single-threaded, but this function will need
// to get implemented for multi-threaded scenarios
}
pub fn errno() -> i32 {
extern "C" {
#[thread_local]
static errno: libc::c_int;
}
unsafe { errno as i32 }
}
pub fn error_string(errno: i32) -> String {
let mut buf = [0 as libc::c_char; 1024];
let p = buf.as_mut_ptr();
unsafe {
if libc::strerror_r(errno as libc::c_int, p, buf.len()) < 0 {
panic!("strerror_r failure");
}
str::from_utf8(CStr::from_ptr(p).to_bytes()).unwrap().to_owned()
}
}
pub fn getcwd() -> io::Result<PathBuf> {
let mut buf = Vec::with_capacity(512);
loop {
unsafe {
let ptr = buf.as_mut_ptr() as *mut libc::c_char;
            if !libc::getcwd(ptr, buf.capacity()).is_null() {
let len = CStr::from_ptr(buf.as_ptr() as *const libc::c_char).to_bytes().len();
buf.set_len(len);
buf.shrink_to_fit();
return Ok(PathBuf::from(OsString::from_vec(buf)));
} else {
let error = io::Error::last_os_error();
                if error.raw_os_error() != Some(libc::ERANGE) {
return Err(error);
}
}
// Trigger the internal buffer resizing logic of `Vec` by requiring
// more space than the current capacity.
let cap = buf.capacity();
buf.set_len(cap);
buf.reserve(1);
}
}
}
pub fn chdir(p: &path::Path) -> io::Result<()> {
let p: &OsStr = p.as_ref();
let p = CString::new(p.as_bytes())?;
unsafe {
match libc::chdir(p.as_ptr()) == (0 as libc::c_int) {
true => Ok(()),
false => Err(io::Error::last_os_error()),
}
}
}
pub struct SplitPaths<'a>(!, PhantomData<&'a ()>);
pub fn split_paths(_unparsed: &OsStr) -> SplitPaths<'_> {
panic!("unsupported")
}
impl<'a> Iterator for SplitPaths<'a> {
type Item = PathBuf;
fn next(&mut self) -> Option<PathBuf> {
self.0
}
}
#[derive(Debug)]
pub struct JoinPathsError;
pub fn join_paths<I, T>(_paths: I) -> Result<OsString, JoinPathsError>
where
I: Iterator<Item = T>,
T: AsRef<OsStr>,
{
Err(JoinPathsError)
}
impl fmt::Display for JoinPathsError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
"not supported on wasm yet".fmt(f)
}
}
impl StdError for JoinPathsError {
#[allow(deprecated)]
fn description(&self) -> &str {
"not supported on wasm yet"
}
}
pub fn current_exe() -> io::Result<PathBuf> {
unsupported()
}
pub struct Env {
iter: vec::IntoIter<(OsString, OsString)>,
}
impl !Send for Env {}
impl !Sync for Env {}
impl Iterator for Env {
type Item = (OsString, OsString);
fn next(&mut self) -> Option<(OsString, OsString)> {
self.iter.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
pub fn env() -> Env {
unsafe {
let _guard = env_lock();
let mut environ = libc::environ;
let mut result = Vec::new();
        if !environ.is_null() {
            while !(*environ).is_null() {
if let Some(key_value) = parse(CStr::from_ptr(*environ).to_bytes()) {
result.push(key_value);
}
environ = environ.add(1);
}
}
return Env { iter: result.into_iter() };
}
// See src/libstd/sys/unix/os.rs, same as that
fn parse(input: &[u8]) -> Option<(OsString, OsString)> {
if input.is_empty() {
return None;
}
let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1);
pos.map(|p| {
(
OsStringExt::from_vec(input[..p].to_vec()),
OsStringExt::from_vec(input[p + 1..].to_vec()),
)
})
}
}
pub fn getenv(k: &OsStr) -> Option<OsString> {
let k = CString::new(k.as_bytes()).ok()?;
unsafe {
let _guard = env_lock();
let s = libc::getenv(k.as_ptr()) as *const libc::c_char; | }
}
pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> {
let k = CString::new(k.as_bytes())?;
let v = CString::new(v.as_bytes())?;
unsafe {
let _guard = env_lock();
cvt(libc::setenv(k.as_ptr(), v.as_ptr(), 1)).map(drop)
}
}
pub fn unsetenv(n: &OsStr) -> io::Result<()> {
let nbuf = CString::new(n.as_bytes())?;
unsafe {
let _guard = env_lock();
cvt(libc::unsetenv(nbuf.as_ptr())).map(drop)
}
}
pub fn temp_dir() -> PathBuf {
panic!("no filesystem on wasm")
}
pub fn home_dir() -> Option<PathBuf> {
None
}
pub fn exit(code: i32) -> ! {
unsafe { libc::exit(code) }
}
pub fn getpid() -> u32 {
panic!("unsupported");
}
#[doc(hidden)]
pub trait IsMinusOne {
fn is_minus_one(&self) -> bool;
}
macro_rules! impl_is_minus_one {
($($t:ident)*) => ($(impl IsMinusOne for $t {
fn is_minus_one(&self) -> bool {
*self == -1
}
})*)
}
impl_is_minus_one! { i8 i16 i32 i64 isize }
fn cvt<T: IsMinusOne>(t: T) -> io::Result<T> {
if t.is_minus_one() { Err(io::Error::last_os_error()) } else { Ok(t) }
} | if s.is_null() {
None
} else {
Some(OsStringExt::from_vec(CStr::from_ptr(s).to_bytes().to_vec()))
} | random_line_split |
os.rs | #![deny(unsafe_op_in_unsafe_fn)]
use crate::any::Any;
use crate::error::Error as StdError;
use crate::ffi::{CStr, CString, OsStr, OsString};
use crate::fmt;
use crate::io;
use crate::marker::PhantomData;
use crate::os::wasi::prelude::*;
use crate::path::{self, PathBuf};
use crate::str;
use crate::sys::memchr;
use crate::sys::unsupported;
use crate::vec;
// Add a few symbols not in upstream `libc` just yet.
mod libc {
pub use libc::*;
extern "C" {
pub fn getcwd(buf: *mut c_char, size: size_t) -> *mut c_char;
pub fn chdir(dir: *const c_char) -> c_int;
}
}
#[cfg(not(target_feature = "atomics"))]
pub unsafe fn env_lock() -> impl Any {
// No need for a lock if we're single-threaded, but this function will need
// to get implemented for multi-threaded scenarios
}
pub fn errno() -> i32 {
extern "C" {
#[thread_local]
static errno: libc::c_int;
}
unsafe { errno as i32 }
}
pub fn error_string(errno: i32) -> String {
let mut buf = [0 as libc::c_char; 1024];
let p = buf.as_mut_ptr();
unsafe {
if libc::strerror_r(errno as libc::c_int, p, buf.len()) < 0 {
panic!("strerror_r failure");
}
str::from_utf8(CStr::from_ptr(p).to_bytes()).unwrap().to_owned()
}
}
pub fn getcwd() -> io::Result<PathBuf> {
let mut buf = Vec::with_capacity(512);
loop {
unsafe {
let ptr = buf.as_mut_ptr() as *mut libc::c_char;
            if !libc::getcwd(ptr, buf.capacity()).is_null() {
let len = CStr::from_ptr(buf.as_ptr() as *const libc::c_char).to_bytes().len();
buf.set_len(len);
buf.shrink_to_fit();
return Ok(PathBuf::from(OsString::from_vec(buf)));
} else {
let error = io::Error::last_os_error();
                if error.raw_os_error() != Some(libc::ERANGE) {
return Err(error);
}
}
// Trigger the internal buffer resizing logic of `Vec` by requiring
// more space than the current capacity.
let cap = buf.capacity();
buf.set_len(cap);
buf.reserve(1);
}
}
}
pub fn chdir(p: &path::Path) -> io::Result<()> {
let p: &OsStr = p.as_ref();
let p = CString::new(p.as_bytes())?;
unsafe {
match libc::chdir(p.as_ptr()) == (0 as libc::c_int) {
true => Ok(()),
false => Err(io::Error::last_os_error()),
}
}
}
pub struct SplitPaths<'a>(!, PhantomData<&'a ()>);
pub fn split_paths(_unparsed: &OsStr) -> SplitPaths<'_> {
panic!("unsupported")
}
impl<'a> Iterator for SplitPaths<'a> {
type Item = PathBuf;
fn next(&mut self) -> Option<PathBuf> {
self.0
}
}
#[derive(Debug)]
pub struct JoinPathsError;
pub fn join_paths<I, T>(_paths: I) -> Result<OsString, JoinPathsError>
where
I: Iterator<Item = T>,
T: AsRef<OsStr>,
{
Err(JoinPathsError)
}
impl fmt::Display for JoinPathsError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
"not supported on wasm yet".fmt(f)
}
}
impl StdError for JoinPathsError {
#[allow(deprecated)]
fn description(&self) -> &str {
"not supported on wasm yet"
}
}
pub fn | () -> io::Result<PathBuf> {
unsupported()
}
pub struct Env {
iter: vec::IntoIter<(OsString, OsString)>,
}
impl !Send for Env {}
impl !Sync for Env {}
impl Iterator for Env {
type Item = (OsString, OsString);
fn next(&mut self) -> Option<(OsString, OsString)> {
self.iter.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
pub fn env() -> Env {
unsafe {
let _guard = env_lock();
let mut environ = libc::environ;
let mut result = Vec::new();
        if !environ.is_null() {
            while !(*environ).is_null() {
if let Some(key_value) = parse(CStr::from_ptr(*environ).to_bytes()) {
result.push(key_value);
}
environ = environ.add(1);
}
}
return Env { iter: result.into_iter() };
}
// See src/libstd/sys/unix/os.rs, same as that
fn parse(input: &[u8]) -> Option<(OsString, OsString)> {
if input.is_empty() {
return None;
}
let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1);
pos.map(|p| {
(
OsStringExt::from_vec(input[..p].to_vec()),
OsStringExt::from_vec(input[p + 1..].to_vec()),
)
})
}
}
pub fn getenv(k: &OsStr) -> Option<OsString> {
let k = CString::new(k.as_bytes()).ok()?;
unsafe {
let _guard = env_lock();
let s = libc::getenv(k.as_ptr()) as *const libc::c_char;
if s.is_null() {
None
} else {
Some(OsStringExt::from_vec(CStr::from_ptr(s).to_bytes().to_vec()))
}
}
}
pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> {
let k = CString::new(k.as_bytes())?;
let v = CString::new(v.as_bytes())?;
unsafe {
let _guard = env_lock();
cvt(libc::setenv(k.as_ptr(), v.as_ptr(), 1)).map(drop)
}
}
pub fn unsetenv(n: &OsStr) -> io::Result<()> {
let nbuf = CString::new(n.as_bytes())?;
unsafe {
let _guard = env_lock();
cvt(libc::unsetenv(nbuf.as_ptr())).map(drop)
}
}
pub fn temp_dir() -> PathBuf {
panic!("no filesystem on wasm")
}
pub fn home_dir() -> Option<PathBuf> {
None
}
pub fn exit(code: i32) -> ! {
unsafe { libc::exit(code) }
}
pub fn getpid() -> u32 {
panic!("unsupported");
}
#[doc(hidden)]
pub trait IsMinusOne {
fn is_minus_one(&self) -> bool;
}
macro_rules! impl_is_minus_one {
($($t:ident)*) => ($(impl IsMinusOne for $t {
fn is_minus_one(&self) -> bool {
*self == -1
}
})*)
}
impl_is_minus_one! { i8 i16 i32 i64 isize }
fn cvt<T: IsMinusOne>(t: T) -> io::Result<T> {
if t.is_minus_one() { Err(io::Error::last_os_error()) } else { Ok(t) }
}
| current_exe | identifier_name |
atom.rs | use joker::word::{Atom, Name};
use tristate::TriState;
pub trait AtomExt {
fn is_strict_reserved(&self) -> TriState;
fn is_illegal_strict_binding(&self) -> bool;
}
| match self {
&Name::Atom(ref atom) => atom.is_strict_reserved(),
_ => TriState::No
}
}
fn is_illegal_strict_binding(&self) -> bool {
match *self {
Name::Atom(ref atom) => atom.is_illegal_strict_binding(),
_ => false
}
}
}
impl AtomExt for Atom {
fn is_strict_reserved(&self) -> TriState {
match *self {
// 11.6.2.2
Atom::Await => TriState::Unknown,
// 12.1.1
Atom::Implements
| Atom::Interface
| Atom::Let
| Atom::Package
| Atom::Private
| Atom::Protected
| Atom::Public
| Atom::Static
| Atom::Yield => TriState::Yes,
_ => TriState::No
}
}
// 12.1.1
fn is_illegal_strict_binding(&self) -> bool {
match *self {
Atom::Arguments
| Atom::Eval => true,
_ => false
}
}
} | impl AtomExt for Name {
fn is_strict_reserved(&self) -> TriState { | random_line_split |
atom.rs | use joker::word::{Atom, Name};
use tristate::TriState;
pub trait AtomExt {
fn is_strict_reserved(&self) -> TriState;
fn is_illegal_strict_binding(&self) -> bool;
}
impl AtomExt for Name {
fn is_strict_reserved(&self) -> TriState {
match self {
&Name::Atom(ref atom) => atom.is_strict_reserved(),
_ => TriState::No
}
}
fn | (&self) -> bool {
match *self {
Name::Atom(ref atom) => atom.is_illegal_strict_binding(),
_ => false
}
}
}
impl AtomExt for Atom {
fn is_strict_reserved(&self) -> TriState {
match *self {
// 11.6.2.2
Atom::Await => TriState::Unknown,
// 12.1.1
Atom::Implements
| Atom::Interface
| Atom::Let
| Atom::Package
| Atom::Private
| Atom::Protected
| Atom::Public
| Atom::Static
| Atom::Yield => TriState::Yes,
_ => TriState::No
}
}
// 12.1.1
fn is_illegal_strict_binding(&self) -> bool {
match *self {
Atom::Arguments
| Atom::Eval => true,
_ => false
}
}
}
| is_illegal_strict_binding | identifier_name |
atom.rs | use joker::word::{Atom, Name};
use tristate::TriState;
pub trait AtomExt {
fn is_strict_reserved(&self) -> TriState;
fn is_illegal_strict_binding(&self) -> bool;
}
impl AtomExt for Name {
fn is_strict_reserved(&self) -> TriState {
match self {
&Name::Atom(ref atom) => atom.is_strict_reserved(),
_ => TriState::No
}
}
fn is_illegal_strict_binding(&self) -> bool {
match *self {
Name::Atom(ref atom) => atom.is_illegal_strict_binding(),
_ => false
}
}
}
impl AtomExt for Atom {
fn is_strict_reserved(&self) -> TriState |
// 12.1.1
fn is_illegal_strict_binding(&self) -> bool {
match *self {
Atom::Arguments
| Atom::Eval => true,
_ => false
}
}
}
| {
match *self {
// 11.6.2.2
Atom::Await => TriState::Unknown,
// 12.1.1
Atom::Implements
| Atom::Interface
| Atom::Let
| Atom::Package
| Atom::Private
| Atom::Protected
| Atom::Public
| Atom::Static
| Atom::Yield => TriState::Yes,
_ => TriState::No
}
} | identifier_body |
main.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
// | // option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// aux-build:crate_with_invalid_spans.rs
// pretty-expanded FIXME #23616
extern crate crate_with_invalid_spans;
fn main() {
// The AST of `exported_generic` stored in crate_with_invalid_spans's
// metadata should contain an invalid span where span.lo() > span.hi().
// Let's make sure the compiler doesn't crash when encountering this.
let _ = crate_with_invalid_spans::exported_generic(32u32, 7u32);
} | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your | random_line_split |
main.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// aux-build:crate_with_invalid_spans.rs
// pretty-expanded FIXME #23616
extern crate crate_with_invalid_spans;
fn | () {
// The AST of `exported_generic` stored in crate_with_invalid_spans's
// metadata should contain an invalid span where span.lo() > span.hi().
// Let's make sure the compiler doesn't crash when encountering this.
let _ = crate_with_invalid_spans::exported_generic(32u32, 7u32);
}
| main | identifier_name |
main.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// aux-build:crate_with_invalid_spans.rs
// pretty-expanded FIXME #23616
extern crate crate_with_invalid_spans;
fn main() | {
// The AST of `exported_generic` stored in crate_with_invalid_spans's
// metadata should contain an invalid span where span.lo() > span.hi().
// Let's make sure the compiler doesn't crash when encountering this.
let _ = crate_with_invalid_spans::exported_generic(32u32, 7u32);
} | identifier_body |
|
vga.rs | use extra::prelude::*;
use cpu;
use cpu::io;
#[packed]
pub struct | {
char: u8,
attr: u8,
}
static SCREEN_ROWS: uint = 25;
static SCREEN_COLS: uint = 80;
static SCREEN_SIZE: uint = SCREEN_ROWS*SCREEN_COLS;
type screen_buf = [character,..SCREEN_SIZE];
static screen: *mut screen_buf = 0xB8000 as *mut screen_buf;
static mut cur_pos: uint = 0;
pub fn init() {
unsafe {
cur_pos = cursor_pos();
}
}
pub fn puts(string: &str, attr: term::color::Color) {
stdio::puts(string, attr, putc, new_line);
}
pub fn putc(c: char, attr: term::color::Color) {
unsafe {
put_char(cur_pos, character{char: c as u8, attr: attr as u8});
cursor_move(1);
}
}
pub fn new_line() {
unsafe {
cursor_move(SCREEN_COLS - cur_pos % SCREEN_COLS);
}
}
#[inline]
unsafe fn cursor_move(delta: uint) {
cur_pos += delta;
if cur_pos >= SCREEN_SIZE {
cpu::memmove(mem_ptr_of(0, 0), mem_ptr_of(1, 0),
(SCREEN_SIZE - SCREEN_COLS) * mem::size_of::<character>());
let mut i = SCREEN_SIZE - SCREEN_COLS;
while i < SCREEN_SIZE {
            put_char(i, character{char: ' ' as u8, attr: term::color::BLACK as u8});
i += 1;
};
cur_pos -= SCREEN_COLS;
}
cursor_to(cur_pos);
}
#[inline]
unsafe fn put_char(pos: uint, c: character) {
(*screen)[pos] = c;
}
#[inline]
unsafe fn cursor_pos() -> uint {
let mut pos: uint;
io::outb(0x3D4, 14);
pos = (io::inb(0x3D5) as uint) << 8;
io::outb(0x3D4, 15);
pos |= io::inb(0x3D5) as uint;
pos
}
#[inline]
unsafe fn cursor_to(pos: uint) {
io::outb(0x3D4, 14);
io::outb(0x3D5, (pos >> 8) as u8);
io::outb(0x3D4, 15);
io::outb(0x3D5, pos as u8);
}
#[inline]
unsafe fn mem_ptr_of(row: uint, col: uint) -> uint {
screen as uint +
row * SCREEN_COLS * mem::size_of::<character>() +
col * mem::size_of::<character>()
}
| character | identifier_name |
vga.rs | use extra::prelude::*;
use cpu;
use cpu::io;
#[packed]
pub struct character {
char: u8,
attr: u8,
}
static SCREEN_ROWS: uint = 25;
static SCREEN_COLS: uint = 80;
static SCREEN_SIZE: uint = SCREEN_ROWS*SCREEN_COLS;
type screen_buf = [character,..SCREEN_SIZE];
static screen: *mut screen_buf = 0xB8000 as *mut screen_buf;
static mut cur_pos: uint = 0;
pub fn init() {
unsafe {
cur_pos = cursor_pos();
}
}
pub fn puts(string: &str, attr: term::color::Color) {
stdio::puts(string, attr, putc, new_line);
}
pub fn putc(c: char, attr: term::color::Color) {
unsafe {
put_char(cur_pos, character{char: c as u8, attr: attr as u8});
cursor_move(1);
}
}
pub fn new_line() {
unsafe {
cursor_move(SCREEN_COLS - cur_pos % SCREEN_COLS);
}
}
#[inline]
unsafe fn cursor_move(delta: uint) {
cur_pos += delta;
if cur_pos >= SCREEN_SIZE {
cpu::memmove(mem_ptr_of(0, 0), mem_ptr_of(1, 0),
(SCREEN_SIZE - SCREEN_COLS) * mem::size_of::<character>());
let mut i = SCREEN_SIZE - SCREEN_COLS;
while i < SCREEN_SIZE {
            put_char(i, character{char: ' ' as u8, attr: term::color::BLACK as u8});
i += 1;
};
cur_pos -= SCREEN_COLS;
}
cursor_to(cur_pos);
}
#[inline]
unsafe fn put_char(pos: uint, c: character) {
(*screen)[pos] = c;
}
#[inline]
unsafe fn cursor_pos() -> uint {
let mut pos: uint;
io::outb(0x3D4, 14);
pos = (io::inb(0x3D5) as uint) << 8;
io::outb(0x3D4, 15);
pos |= io::inb(0x3D5) as uint;
pos
}
#[inline] | }
#[inline]
unsafe fn mem_ptr_of(row: uint, col: uint) -> uint {
screen as uint +
row * SCREEN_COLS * mem::size_of::<character>() +
col * mem::size_of::<character>()
} | unsafe fn cursor_to(pos: uint) {
io::outb(0x3D4, 14);
io::outb(0x3D5, (pos >> 8) as u8);
io::outb(0x3D4, 15);
io::outb(0x3D5, pos as u8); | random_line_split |
vga.rs | use extra::prelude::*;
use cpu;
use cpu::io;
#[packed]
pub struct character {
char: u8,
attr: u8,
}
static SCREEN_ROWS: uint = 25;
static SCREEN_COLS: uint = 80;
static SCREEN_SIZE: uint = SCREEN_ROWS*SCREEN_COLS;
type screen_buf = [character,..SCREEN_SIZE];
static screen: *mut screen_buf = 0xB8000 as *mut screen_buf;
static mut cur_pos: uint = 0;
pub fn init() |
pub fn puts(string: &str, attr: term::color::Color) {
stdio::puts(string, attr, putc, new_line);
}
pub fn putc(c: char, attr: term::color::Color) {
unsafe {
put_char(cur_pos, character{char: c as u8, attr: attr as u8});
cursor_move(1);
}
}
pub fn new_line() {
unsafe {
cursor_move(SCREEN_COLS - cur_pos % SCREEN_COLS);
}
}
#[inline]
unsafe fn cursor_move(delta: uint) {
cur_pos += delta;
if cur_pos >= SCREEN_SIZE {
cpu::memmove(mem_ptr_of(0, 0), mem_ptr_of(1, 0),
(SCREEN_SIZE - SCREEN_COLS) * mem::size_of::<character>());
let mut i = SCREEN_SIZE - SCREEN_COLS;
while i < SCREEN_SIZE {
            put_char(i, character{char: ' ' as u8, attr: term::color::BLACK as u8});
i += 1;
};
cur_pos -= SCREEN_COLS;
}
cursor_to(cur_pos);
}
#[inline]
unsafe fn put_char(pos: uint, c: character) {
(*screen)[pos] = c;
}
#[inline]
unsafe fn cursor_pos() -> uint {
let mut pos: uint;
io::outb(0x3D4, 14);
pos = (io::inb(0x3D5) as uint) << 8;
io::outb(0x3D4, 15);
pos |= io::inb(0x3D5) as uint;
pos
}
#[inline]
unsafe fn cursor_to(pos: uint) {
io::outb(0x3D4, 14);
io::outb(0x3D5, (pos >> 8) as u8);
io::outb(0x3D4, 15);
io::outb(0x3D5, pos as u8);
}
#[inline]
unsafe fn mem_ptr_of(row: uint, col: uint) -> uint {
screen as uint +
row * SCREEN_COLS * mem::size_of::<character>() +
col * mem::size_of::<character>()
}
| {
unsafe {
cur_pos = cursor_pos();
}
} | identifier_body |
vga.rs | use extra::prelude::*;
use cpu;
use cpu::io;
#[packed]
pub struct character {
char: u8,
attr: u8,
}
static SCREEN_ROWS: uint = 25;
static SCREEN_COLS: uint = 80;
static SCREEN_SIZE: uint = SCREEN_ROWS*SCREEN_COLS;
type screen_buf = [character,..SCREEN_SIZE];
static screen: *mut screen_buf = 0xB8000 as *mut screen_buf;
static mut cur_pos: uint = 0;
pub fn init() {
unsafe {
cur_pos = cursor_pos();
}
}
pub fn puts(string: &str, attr: term::color::Color) {
stdio::puts(string, attr, putc, new_line);
}
pub fn putc(c: char, attr: term::color::Color) {
unsafe {
put_char(cur_pos, character{char: c as u8, attr: attr as u8});
cursor_move(1);
}
}
pub fn new_line() {
unsafe {
cursor_move(SCREEN_COLS - cur_pos % SCREEN_COLS);
}
}
#[inline]
unsafe fn cursor_move(delta: uint) {
cur_pos += delta;
if cur_pos >= SCREEN_SIZE |
cursor_to(cur_pos);
}
#[inline]
unsafe fn put_char(pos: uint, c: character) {
(*screen)[pos] = c;
}
#[inline]
unsafe fn cursor_pos() -> uint {
let mut pos: uint;
io::outb(0x3D4, 14);
pos = (io::inb(0x3D5) as uint) << 8;
io::outb(0x3D4, 15);
pos |= io::inb(0x3D5) as uint;
pos
}
#[inline]
unsafe fn cursor_to(pos: uint) {
io::outb(0x3D4, 14);
io::outb(0x3D5, (pos >> 8) as u8);
io::outb(0x3D4, 15);
io::outb(0x3D5, pos as u8);
}
#[inline]
unsafe fn mem_ptr_of(row: uint, col: uint) -> uint {
screen as uint +
row * SCREEN_COLS * mem::size_of::<character>() +
col * mem::size_of::<character>()
}
| {
cpu::memmove(mem_ptr_of(0, 0), mem_ptr_of(1, 0),
(SCREEN_SIZE - SCREEN_COLS) * mem::size_of::<character>());
let mut i = SCREEN_SIZE - SCREEN_COLS;
while i < SCREEN_SIZE {
put_char(i, character{char: ' ' as u8, attr: term::color::BLACK as u8});
i += 1;
};
cur_pos -= SCREEN_COLS;
} | conditional_block |
lib.rs | pub fn score(word: &str) -> usize | {
// lowercase for case insensitivity
// use map to convert to numbers
// sum them
word.to_lowercase()
.chars()
.map(|c| match c {
'a' | 'e' | 'i' | 'o' | 'u' | 'l' | 'n' | 'r' | 's' | 't' => 1,
'd' | 'g' => 2,
'b' | 'c' | 'm' | 'p' => 3,
'f' | 'h' | 'v' | 'w' | 'y' => 4,
'k' => 5,
'j' | 'x' => 8,
'q' | 'z' => 10,
_ => 0,
})
.fold(0, |accu, x| accu + x)
} | identifier_body |
|
lib.rs | pub fn | (word: &str) -> usize {
// lowercase for case insensitivity
// use map to convert to numbers
// sum them
word.to_lowercase()
.chars()
.map(|c| match c {
            'a' | 'e' | 'i' | 'o' | 'u' | 'l' | 'n' | 'r' | 's' | 't' => 1,
'd' | 'g' => 2,
            'b' | 'c' | 'm' | 'p' => 3,
'f' | 'h' | 'v' | 'w' | 'y' => 4,
'k' => 5,
'j' | 'x' => 8,
'q' | 'z' => 10,
_ => 0,
})
.fold(0, |accu, x| accu + x)
}
| score | identifier_name |
lib.rs | pub fn score(word: &str) -> usize {
// lowercase for case insensitivity
// use map to convert to numbers | 'd' | 'g' => 2,
            'b' | 'c' | 'm' | 'p' => 3,
'f' | 'h' | 'v' | 'w' | 'y' => 4,
'k' => 5,
'j' | 'x' => 8,
'q' | 'z' => 10,
_ => 0,
})
.fold(0, |accu, x| accu + x)
} | // sum them
word.to_lowercase()
.chars()
.map(|c| match c {
'a' | 'e' | 'i' | 'o' | 'u' | 'l' | 'n' | 'r' | 's' | 't' => 1, | random_line_split |
advance.rs | // Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::{Arc, Mutex};
use std::time::Duration;
use collections::HashMap;
use concurrency_manager::ConcurrencyManager;
use engine_traits::KvEngine;
use futures::compat::Future01CompatExt;
use grpcio::{ChannelBuilder, Environment};
use kvproto::kvrpcpb::{CheckLeaderRequest, LeaderInfo};
use kvproto::metapb::{Peer, PeerRole};
use kvproto::tikvpb::TikvClient;
use pd_client::PdClient;
use protobuf::Message;
use raftstore::store::fsm::StoreMeta;
use raftstore::store::util::RegionReadProgressRegistry;
use security::SecurityManager;
use tikv_util::timer::SteadyTimer;
use tikv_util::worker::Scheduler;
use tokio::runtime::{Builder, Runtime};
use txn_types::TimeStamp;
use crate::endpoint::Task;
use crate::errors::Result;
use crate::metrics::{CHECK_LEADER_REQ_ITEM_COUNT_HISTOGRAM, CHECK_LEADER_REQ_SIZE_HISTOGRAM};
const DEFAULT_CHECK_LEADER_TIMEOUT_MILLISECONDS: u64 = 5_000; // 5s
pub struct AdvanceTsWorker<E: KvEngine> {
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
pd_client: Arc<dyn PdClient>,
timer: SteadyTimer,
worker: Runtime,
scheduler: Scheduler<Task<E::Snapshot>>,
/// The concurrency manager for transactions. It's needed for CDC to check locks when
/// calculating resolved_ts.
concurrency_manager: ConcurrencyManager,
// store_id -> client
tikv_clients: Arc<Mutex<HashMap<u64, TikvClient>>>,
env: Arc<Environment>,
security_mgr: Arc<SecurityManager>,
}
impl<E: KvEngine> AdvanceTsWorker<E> {
pub fn new(
pd_client: Arc<dyn PdClient>,
scheduler: Scheduler<Task<E::Snapshot>>,
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
concurrency_manager: ConcurrencyManager,
env: Arc<Environment>,
security_mgr: Arc<SecurityManager>,
) -> Self {
let worker = Builder::new_multi_thread()
.thread_name("advance-ts")
.worker_threads(1)
.enable_time()
.build()
.unwrap();
Self {
env,
security_mgr,
scheduler,
pd_client,
worker,
timer: SteadyTimer::default(),
store_meta,
region_read_progress,
concurrency_manager,
tikv_clients: Arc::new(Mutex::new(HashMap::default())),
}
}
}
impl<E: KvEngine> AdvanceTsWorker<E> {
pub fn advance_ts_for_regions(&self, regions: Vec<u64>) {
let pd_client = self.pd_client.clone();
let scheduler = self.scheduler.clone();
let cm: ConcurrencyManager = self.concurrency_manager.clone();
let env = self.env.clone();
let security_mgr = self.security_mgr.clone();
let store_meta = self.store_meta.clone();
let tikv_clients = self.tikv_clients.clone();
let region_read_progress = self.region_read_progress.clone();
let fut = async move {
// Ignore get tso errors since we will retry every `advance_ts_interval`.
let mut min_ts = pd_client.get_tso().await.unwrap_or_default();
// Sync with concurrency manager so that it can work correctly when optimizations
// like async commit is enabled.
// Note: This step must be done before scheduling `Task::MinTS` task, and the
// resolver must be checked in or after `Task::MinTS`' execution.
cm.update_max_ts(min_ts);
if let Some(min_mem_lock_ts) = cm.global_min_lock_ts() {
if min_mem_lock_ts < min_ts {
min_ts = min_mem_lock_ts;
}
}
let regions = Self::region_resolved_ts_store(
regions,
store_meta,
region_read_progress,
pd_client,
security_mgr,
env,
tikv_clients,
min_ts,
)
.await;
            if !regions.is_empty() {
if let Err(e) = scheduler.schedule(Task::AdvanceResolvedTs {
regions,
ts: min_ts,
}) {
info!("failed to schedule advance event"; "err" =>?e);
}
}
};
self.worker.spawn(fut);
}
pub fn register_next_event(&self, advance_ts_interval: Duration, cfg_version: usize) {
let scheduler = self.scheduler.clone();
let timeout = self.timer.delay(advance_ts_interval);
let fut = async move {
let _ = timeout.compat().await;
if let Err(e) = scheduler.schedule(Task::RegisterAdvanceEvent { cfg_version }) {
info!("failed to schedule register advance event"; "err" =>?e);
}
};
self.worker.spawn(fut);
}
// Confirms leadership of region peer before trying to advance resolved ts.
// This function broadcasts a special message to all stores, get the leader id of them to confirm whether
// current peer has a quorum which accept its leadership.
async fn region_resolved_ts_store(
regions: Vec<u64>,
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
pd_client: Arc<dyn PdClient>,
security_mgr: Arc<SecurityManager>,
env: Arc<Environment>,
cdc_clients: Arc<Mutex<HashMap<u64, TikvClient>>>,
min_ts: TimeStamp,
) -> Vec<u64> {
#[cfg(feature = "failpoint")]
(|| fail_point!("before_sync_replica_read_state", |_| regions))();
let store_id = match store_meta.lock().unwrap().store_id {
Some(id) => id,
None => return vec![],
};
// store_id -> leaders info, record the request to each stores
let mut store_map: HashMap<u64, Vec<LeaderInfo>> = HashMap::default();
// region_id -> region, cache the information of regions
let mut region_map: HashMap<u64, Vec<Peer>> = HashMap::default();
// region_id -> peers id, record the responses
let mut resp_map: HashMap<u64, Vec<u64>> = HashMap::default();
// region_id -> `(Vec<Peer>, LeaderInfo)`
        let info_map = region_read_progress.dump_leader_infos(&regions);
for (region_id, (peer_list, leader_info)) in info_map {
let leader_id = leader_info.get_peer_id();
// Check if the leader in this store
            if find_store_id(&peer_list, leader_id) != Some(store_id) {
continue;
}
for peer in &peer_list {
if peer.store_id == store_id && peer.id == leader_id {
resp_map.entry(region_id).or_default().push(store_id);
continue;
}
store_map
.entry(peer.store_id)
.or_default()
.push(leader_info.clone());
}
region_map.insert(region_id, peer_list);
}
// Approximate `LeaderInfo` size
let leader_info_size = store_map
.values()
.next()
.map_or(0, |regions| regions[0].compute_size());
let stores = store_map.into_iter().map(|(store_id, regions)| {
let cdc_clients = cdc_clients.clone();
let env = env.clone();
let pd_client = pd_client.clone();
let security_mgr = security_mgr.clone();
let region_num = regions.len() as u32;
CHECK_LEADER_REQ_SIZE_HISTOGRAM.observe((leader_info_size * region_num) as f64);
CHECK_LEADER_REQ_ITEM_COUNT_HISTOGRAM.observe(region_num as f64);
async move {
if cdc_clients.lock().unwrap().get(&store_id).is_none() {
let store = box_try!(pd_client.get_store_async(store_id).await);
let cb = ChannelBuilder::new(env.clone());
let channel = security_mgr.connect(cb, &store.address);
cdc_clients
.lock()
.unwrap()
.insert(store_id, TikvClient::new(channel));
}
let client = cdc_clients.lock().unwrap().get(&store_id).unwrap().clone();
let mut req = CheckLeaderRequest::default();
req.set_regions(regions.into());
req.set_ts(min_ts.into_inner());
let res = box_try!(
tokio::time::timeout(
Duration::from_millis(DEFAULT_CHECK_LEADER_TIMEOUT_MILLISECONDS),
box_try!(client.check_leader_async(&req))
)
.await
);
let resp = box_try!(res);
Result::Ok((store_id, resp))
} | .filter_map(|resp| match resp {
Ok(resp) => Some(resp),
Err(e) => {
debug!("resolved-ts check leader error"; "err" =>?e);
None
}
})
.map(|(store_id, resp)| {
resp.regions
.into_iter()
.map(move |region_id| (store_id, region_id))
})
.flatten()
.for_each(|(store_id, region_id)| {
resp_map.entry(region_id).or_default().push(store_id);
});
resp_map
.into_iter()
.filter_map(|(region_id, stores)| {
if region_has_quorum(&region_map[&region_id], &stores) {
Some(region_id)
} else {
debug!(
"resolved-ts cannot get quorum for resolved ts";
"region_id" => region_id,
"stores" =>?stores,
"region" =>?®ion_map[®ion_id]
);
None
}
})
.collect()
}
}
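// A note on the check below: during a joint-consensus membership change a region may
// hold both `IncomingVoter` and `DemotingVoter` peers, so the responding stores must
// form a majority in the incoming configuration (Voter + IncomingVoter) and,
// independently, in the outgoing configuration (Voter + DemotingVoter); learners are
// never counted towards either majority.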
fn region_has_quorum(peers: &[Peer], stores: &[u64]) -> bool {
let mut voters = 0;
let mut incoming_voters = 0;
let mut demoting_voters = 0;
let mut resp_voters = 0;
let mut resp_incoming_voters = 0;
let mut resp_demoting_voters = 0;
peers.iter().for_each(|peer| {
let mut in_resp = false;
for store_id in stores {
if *store_id == peer.store_id {
in_resp = true;
break;
}
}
match peer.get_role() {
PeerRole::Voter => {
voters += 1;
if in_resp {
resp_voters += 1;
}
}
PeerRole::IncomingVoter => {
incoming_voters += 1;
if in_resp {
resp_incoming_voters += 1;
}
}
PeerRole::DemotingVoter => {
demoting_voters += 1;
if in_resp {
resp_demoting_voters += 1;
}
}
PeerRole::Learner => (),
}
});
let has_incoming_majority =
(resp_voters + resp_incoming_voters) >= ((voters + incoming_voters) / 2 + 1);
let has_demoting_majority =
(resp_voters + resp_demoting_voters) >= ((voters + demoting_voters) / 2 + 1);
has_incoming_majority && has_demoting_majority
}
fn find_store_id(peer_list: &[Peer], peer_id: u64) -> Option<u64> {
for peer in peer_list {
if peer.id == peer_id {
return Some(peer.store_id);
}
}
None
} | });
let resps = futures::future::join_all(stores).await;
resps
.into_iter() | random_line_split |
advance.rs | // Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::{Arc, Mutex};
use std::time::Duration;
use collections::HashMap;
use concurrency_manager::ConcurrencyManager;
use engine_traits::KvEngine;
use futures::compat::Future01CompatExt;
use grpcio::{ChannelBuilder, Environment};
use kvproto::kvrpcpb::{CheckLeaderRequest, LeaderInfo};
use kvproto::metapb::{Peer, PeerRole};
use kvproto::tikvpb::TikvClient;
use pd_client::PdClient;
use protobuf::Message;
use raftstore::store::fsm::StoreMeta;
use raftstore::store::util::RegionReadProgressRegistry;
use security::SecurityManager;
use tikv_util::timer::SteadyTimer;
use tikv_util::worker::Scheduler;
use tokio::runtime::{Builder, Runtime};
use txn_types::TimeStamp;
use crate::endpoint::Task;
use crate::errors::Result;
use crate::metrics::{CHECK_LEADER_REQ_ITEM_COUNT_HISTOGRAM, CHECK_LEADER_REQ_SIZE_HISTOGRAM};
const DEFAULT_CHECK_LEADER_TIMEOUT_MILLISECONDS: u64 = 5_000; // 5s
pub struct AdvanceTsWorker<E: KvEngine> {
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
pd_client: Arc<dyn PdClient>,
timer: SteadyTimer,
worker: Runtime,
scheduler: Scheduler<Task<E::Snapshot>>,
/// The concurrency manager for transactions. It's needed for CDC to check locks when
/// calculating resolved_ts.
concurrency_manager: ConcurrencyManager,
// store_id -> client
tikv_clients: Arc<Mutex<HashMap<u64, TikvClient>>>,
env: Arc<Environment>,
security_mgr: Arc<SecurityManager>,
}
impl<E: KvEngine> AdvanceTsWorker<E> {
pub fn new(
pd_client: Arc<dyn PdClient>,
scheduler: Scheduler<Task<E::Snapshot>>,
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
concurrency_manager: ConcurrencyManager,
env: Arc<Environment>,
security_mgr: Arc<SecurityManager>,
) -> Self |
}
impl<E: KvEngine> AdvanceTsWorker<E> {
pub fn advance_ts_for_regions(&self, regions: Vec<u64>) {
let pd_client = self.pd_client.clone();
let scheduler = self.scheduler.clone();
let cm: ConcurrencyManager = self.concurrency_manager.clone();
let env = self.env.clone();
let security_mgr = self.security_mgr.clone();
let store_meta = self.store_meta.clone();
let tikv_clients = self.tikv_clients.clone();
let region_read_progress = self.region_read_progress.clone();
let fut = async move {
// Ignore errors from getting the TSO since we will retry every `advance_ts_interval`.
let mut min_ts = pd_client.get_tso().await.unwrap_or_default();
// Sync with concurrency manager so that it can work correctly when optimizations
// like async commit are enabled.
// Note: This step must be done before scheduling `Task::MinTS` task, and the
// resolver must be checked in or after `Task::MinTS`' execution.
cm.update_max_ts(min_ts);
if let Some(min_mem_lock_ts) = cm.global_min_lock_ts() {
if min_mem_lock_ts < min_ts {
min_ts = min_mem_lock_ts;
}
}
let regions = Self::region_resolved_ts_store(
regions,
store_meta,
region_read_progress,
pd_client,
security_mgr,
env,
tikv_clients,
min_ts,
)
.await;
if !regions.is_empty() {
if let Err(e) = scheduler.schedule(Task::AdvanceResolvedTs {
regions,
ts: min_ts,
}) {
info!("failed to schedule advance event"; "err" =>?e);
}
}
};
self.worker.spawn(fut);
}
pub fn register_next_event(&self, advance_ts_interval: Duration, cfg_version: usize) {
let scheduler = self.scheduler.clone();
let timeout = self.timer.delay(advance_ts_interval);
let fut = async move {
let _ = timeout.compat().await;
if let Err(e) = scheduler.schedule(Task::RegisterAdvanceEvent { cfg_version }) {
info!("failed to schedule register advance event"; "err" =>?e);
}
};
self.worker.spawn(fut);
}
// Confirms leadership of region peer before trying to advance resolved ts.
// This function broadcasts a special message to all stores, collecting the leader id from each of them
// to confirm whether the current peer has a quorum that accepts its leadership.
async fn region_resolved_ts_store(
regions: Vec<u64>,
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
pd_client: Arc<dyn PdClient>,
security_mgr: Arc<SecurityManager>,
env: Arc<Environment>,
cdc_clients: Arc<Mutex<HashMap<u64, TikvClient>>>,
min_ts: TimeStamp,
) -> Vec<u64> {
#[cfg(feature = "failpoint")]
(|| fail_point!("before_sync_replica_read_state", |_| regions))();
let store_id = match store_meta.lock().unwrap().store_id {
Some(id) => id,
None => return vec![],
};
// store_id -> leader info, records the request sent to each store
let mut store_map: HashMap<u64, Vec<LeaderInfo>> = HashMap::default();
// region_id -> region peers, caches the information of each region
let mut region_map: HashMap<u64, Vec<Peer>> = HashMap::default();
// region_id -> peer ids, records the responses
let mut resp_map: HashMap<u64, Vec<u64>> = HashMap::default();
// region_id -> `(Vec<Peer>, LeaderInfo)`
let info_map = region_read_progress.dump_leader_infos(&regions);
for (region_id, (peer_list, leader_info)) in info_map {
let leader_id = leader_info.get_peer_id();
// Check if the leader is in this store
if find_store_id(&peer_list, leader_id) != Some(store_id) {
continue;
}
for peer in &peer_list {
if peer.store_id == store_id && peer.id == leader_id {
resp_map.entry(region_id).or_default().push(store_id);
continue;
}
store_map
.entry(peer.store_id)
.or_default()
.push(leader_info.clone());
}
region_map.insert(region_id, peer_list);
}
// Approximate `LeaderInfo` size
let leader_info_size = store_map
.values()
.next()
.map_or(0, |regions| regions[0].compute_size());
let stores = store_map.into_iter().map(|(store_id, regions)| {
let cdc_clients = cdc_clients.clone();
let env = env.clone();
let pd_client = pd_client.clone();
let security_mgr = security_mgr.clone();
let region_num = regions.len() as u32;
CHECK_LEADER_REQ_SIZE_HISTOGRAM.observe((leader_info_size * region_num) as f64);
CHECK_LEADER_REQ_ITEM_COUNT_HISTOGRAM.observe(region_num as f64);
async move {
if cdc_clients.lock().unwrap().get(&store_id).is_none() {
let store = box_try!(pd_client.get_store_async(store_id).await);
let cb = ChannelBuilder::new(env.clone());
let channel = security_mgr.connect(cb, &store.address);
cdc_clients
.lock()
.unwrap()
.insert(store_id, TikvClient::new(channel));
}
let client = cdc_clients.lock().unwrap().get(&store_id).unwrap().clone();
let mut req = CheckLeaderRequest::default();
req.set_regions(regions.into());
req.set_ts(min_ts.into_inner());
let res = box_try!(
tokio::time::timeout(
Duration::from_millis(DEFAULT_CHECK_LEADER_TIMEOUT_MILLISECONDS),
box_try!(client.check_leader_async(&req))
)
.await
);
let resp = box_try!(res);
Result::Ok((store_id, resp))
}
});
let resps = futures::future::join_all(stores).await;
resps
.into_iter()
.filter_map(|resp| match resp {
Ok(resp) => Some(resp),
Err(e) => {
debug!("resolved-ts check leader error"; "err" =>?e);
None
}
})
.map(|(store_id, resp)| {
resp.regions
.into_iter()
.map(move |region_id| (store_id, region_id))
})
.flatten()
.for_each(|(store_id, region_id)| {
resp_map.entry(region_id).or_default().push(store_id);
});
resp_map
.into_iter()
.filter_map(|(region_id, stores)| {
if region_has_quorum(&region_map[&region_id], &stores) {
Some(region_id)
} else {
debug!(
"resolved-ts cannot get quorum for resolved ts";
"region_id" => region_id,
"stores" =>?stores,
"region" =>?®ion_map[®ion_id]
);
None
}
})
.collect()
}
}
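// A note on the check below: during a joint-consensus membership change a region may
// hold both `IncomingVoter` and `DemotingVoter` peers, so the responding stores must
// form a majority in the incoming configuration (Voter + IncomingVoter) and,
// independently, in the outgoing configuration (Voter + DemotingVoter); learners are
// never counted towards either majority.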
fn region_has_quorum(peers: &[Peer], stores: &[u64]) -> bool {
let mut voters = 0;
let mut incoming_voters = 0;
let mut demoting_voters = 0;
let mut resp_voters = 0;
let mut resp_incoming_voters = 0;
let mut resp_demoting_voters = 0;
peers.iter().for_each(|peer| {
let mut in_resp = false;
for store_id in stores {
if *store_id == peer.store_id {
in_resp = true;
break;
}
}
match peer.get_role() {
PeerRole::Voter => {
voters += 1;
if in_resp {
resp_voters += 1;
}
}
PeerRole::IncomingVoter => {
incoming_voters += 1;
if in_resp {
resp_incoming_voters += 1;
}
}
PeerRole::DemotingVoter => {
demoting_voters += 1;
if in_resp {
resp_demoting_voters += 1;
}
}
PeerRole::Learner => (),
}
});
let has_incoming_majority =
(resp_voters + resp_incoming_voters) >= ((voters + incoming_voters) / 2 + 1);
let has_demoting_majority =
(resp_voters + resp_demoting_voters) >= ((voters + demoting_voters) / 2 + 1);
has_incoming_majority && has_demoting_majority
}
fn find_store_id(peer_list: &[Peer], peer_id: u64) -> Option<u64> {
for peer in peer_list {
if peer.id == peer_id {
return Some(peer.store_id);
}
}
None
}
| {
let worker = Builder::new_multi_thread()
.thread_name("advance-ts")
.worker_threads(1)
.enable_time()
.build()
.unwrap();
Self {
env,
security_mgr,
scheduler,
pd_client,
worker,
timer: SteadyTimer::default(),
store_meta,
region_read_progress,
concurrency_manager,
tikv_clients: Arc::new(Mutex::new(HashMap::default())),
}
} | identifier_body |
advance.rs | // Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::{Arc, Mutex};
use std::time::Duration;
use collections::HashMap;
use concurrency_manager::ConcurrencyManager;
use engine_traits::KvEngine;
use futures::compat::Future01CompatExt;
use grpcio::{ChannelBuilder, Environment};
use kvproto::kvrpcpb::{CheckLeaderRequest, LeaderInfo};
use kvproto::metapb::{Peer, PeerRole};
use kvproto::tikvpb::TikvClient;
use pd_client::PdClient;
use protobuf::Message;
use raftstore::store::fsm::StoreMeta;
use raftstore::store::util::RegionReadProgressRegistry;
use security::SecurityManager;
use tikv_util::timer::SteadyTimer;
use tikv_util::worker::Scheduler;
use tokio::runtime::{Builder, Runtime};
use txn_types::TimeStamp;
use crate::endpoint::Task;
use crate::errors::Result;
use crate::metrics::{CHECK_LEADER_REQ_ITEM_COUNT_HISTOGRAM, CHECK_LEADER_REQ_SIZE_HISTOGRAM};
const DEFAULT_CHECK_LEADER_TIMEOUT_MILLISECONDS: u64 = 5_000; // 5s
pub struct | <E: KvEngine> {
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
pd_client: Arc<dyn PdClient>,
timer: SteadyTimer,
worker: Runtime,
scheduler: Scheduler<Task<E::Snapshot>>,
/// The concurrency manager for transactions. It's needed for CDC to check locks when
/// calculating resolved_ts.
concurrency_manager: ConcurrencyManager,
// store_id -> client
tikv_clients: Arc<Mutex<HashMap<u64, TikvClient>>>,
env: Arc<Environment>,
security_mgr: Arc<SecurityManager>,
}
impl<E: KvEngine> AdvanceTsWorker<E> {
pub fn new(
pd_client: Arc<dyn PdClient>,
scheduler: Scheduler<Task<E::Snapshot>>,
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
concurrency_manager: ConcurrencyManager,
env: Arc<Environment>,
security_mgr: Arc<SecurityManager>,
) -> Self {
let worker = Builder::new_multi_thread()
.thread_name("advance-ts")
.worker_threads(1)
.enable_time()
.build()
.unwrap();
Self {
env,
security_mgr,
scheduler,
pd_client,
worker,
timer: SteadyTimer::default(),
store_meta,
region_read_progress,
concurrency_manager,
tikv_clients: Arc::new(Mutex::new(HashMap::default())),
}
}
}
impl<E: KvEngine> AdvanceTsWorker<E> {
pub fn advance_ts_for_regions(&self, regions: Vec<u64>) {
let pd_client = self.pd_client.clone();
let scheduler = self.scheduler.clone();
let cm: ConcurrencyManager = self.concurrency_manager.clone();
let env = self.env.clone();
let security_mgr = self.security_mgr.clone();
let store_meta = self.store_meta.clone();
let tikv_clients = self.tikv_clients.clone();
let region_read_progress = self.region_read_progress.clone();
let fut = async move {
// Ignore errors from getting the TSO since we will retry every `advance_ts_interval`.
let mut min_ts = pd_client.get_tso().await.unwrap_or_default();
// Sync with concurrency manager so that it can work correctly when optimizations
// like async commit are enabled.
// Note: This step must be done before scheduling `Task::MinTS` task, and the
// resolver must be checked in or after `Task::MinTS`' execution.
cm.update_max_ts(min_ts);
if let Some(min_mem_lock_ts) = cm.global_min_lock_ts() {
if min_mem_lock_ts < min_ts {
min_ts = min_mem_lock_ts;
}
}
let regions = Self::region_resolved_ts_store(
regions,
store_meta,
region_read_progress,
pd_client,
security_mgr,
env,
tikv_clients,
min_ts,
)
.await;
if !regions.is_empty() {
if let Err(e) = scheduler.schedule(Task::AdvanceResolvedTs {
regions,
ts: min_ts,
}) {
info!("failed to schedule advance event"; "err" =>?e);
}
}
};
self.worker.spawn(fut);
}
pub fn register_next_event(&self, advance_ts_interval: Duration, cfg_version: usize) {
let scheduler = self.scheduler.clone();
let timeout = self.timer.delay(advance_ts_interval);
let fut = async move {
let _ = timeout.compat().await;
if let Err(e) = scheduler.schedule(Task::RegisterAdvanceEvent { cfg_version }) {
info!("failed to schedule register advance event"; "err" =>?e);
}
};
self.worker.spawn(fut);
}
// Confirms leadership of region peer before trying to advance resolved ts.
// This function broadcasts a special message to all stores, collecting the leader id from each of them
// to confirm whether the current peer has a quorum that accepts its leadership.
async fn region_resolved_ts_store(
regions: Vec<u64>,
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
pd_client: Arc<dyn PdClient>,
security_mgr: Arc<SecurityManager>,
env: Arc<Environment>,
cdc_clients: Arc<Mutex<HashMap<u64, TikvClient>>>,
min_ts: TimeStamp,
) -> Vec<u64> {
#[cfg(feature = "failpoint")]
(|| fail_point!("before_sync_replica_read_state", |_| regions))();
let store_id = match store_meta.lock().unwrap().store_id {
Some(id) => id,
None => return vec![],
};
// store_id -> leader info, records the request sent to each store
let mut store_map: HashMap<u64, Vec<LeaderInfo>> = HashMap::default();
// region_id -> region peers, caches the information of each region
let mut region_map: HashMap<u64, Vec<Peer>> = HashMap::default();
// region_id -> peer ids, records the responses
let mut resp_map: HashMap<u64, Vec<u64>> = HashMap::default();
// region_id -> `(Vec<Peer>, LeaderInfo)`
let info_map = region_read_progress.dump_leader_infos(&regions);
for (region_id, (peer_list, leader_info)) in info_map {
let leader_id = leader_info.get_peer_id();
// Check if the leader is in this store
if find_store_id(&peer_list, leader_id) != Some(store_id) {
continue;
}
for peer in &peer_list {
if peer.store_id == store_id && peer.id == leader_id {
resp_map.entry(region_id).or_default().push(store_id);
continue;
}
store_map
.entry(peer.store_id)
.or_default()
.push(leader_info.clone());
}
region_map.insert(region_id, peer_list);
}
// Approximate `LeaderInfo` size
let leader_info_size = store_map
.values()
.next()
.map_or(0, |regions| regions[0].compute_size());
let stores = store_map.into_iter().map(|(store_id, regions)| {
let cdc_clients = cdc_clients.clone();
let env = env.clone();
let pd_client = pd_client.clone();
let security_mgr = security_mgr.clone();
let region_num = regions.len() as u32;
CHECK_LEADER_REQ_SIZE_HISTOGRAM.observe((leader_info_size * region_num) as f64);
CHECK_LEADER_REQ_ITEM_COUNT_HISTOGRAM.observe(region_num as f64);
async move {
if cdc_clients.lock().unwrap().get(&store_id).is_none() {
let store = box_try!(pd_client.get_store_async(store_id).await);
let cb = ChannelBuilder::new(env.clone());
let channel = security_mgr.connect(cb, &store.address);
cdc_clients
.lock()
.unwrap()
.insert(store_id, TikvClient::new(channel));
}
let client = cdc_clients.lock().unwrap().get(&store_id).unwrap().clone();
let mut req = CheckLeaderRequest::default();
req.set_regions(regions.into());
req.set_ts(min_ts.into_inner());
let res = box_try!(
tokio::time::timeout(
Duration::from_millis(DEFAULT_CHECK_LEADER_TIMEOUT_MILLISECONDS),
box_try!(client.check_leader_async(&req))
)
.await
);
let resp = box_try!(res);
Result::Ok((store_id, resp))
}
});
let resps = futures::future::join_all(stores).await;
resps
.into_iter()
.filter_map(|resp| match resp {
Ok(resp) => Some(resp),
Err(e) => {
debug!("resolved-ts check leader error"; "err" =>?e);
None
}
})
.map(|(store_id, resp)| {
resp.regions
.into_iter()
.map(move |region_id| (store_id, region_id))
})
.flatten()
.for_each(|(store_id, region_id)| {
resp_map.entry(region_id).or_default().push(store_id);
});
resp_map
.into_iter()
.filter_map(|(region_id, stores)| {
if region_has_quorum(&region_map[&region_id], &stores) {
Some(region_id)
} else {
debug!(
"resolved-ts cannot get quorum for resolved ts";
"region_id" => region_id,
"stores" =>?stores,
"region" =>?®ion_map[®ion_id]
);
None
}
})
.collect()
}
}
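// A note on the check below: during a joint-consensus membership change a region may
// hold both `IncomingVoter` and `DemotingVoter` peers, so the responding stores must
// form a majority in the incoming configuration (Voter + IncomingVoter) and,
// independently, in the outgoing configuration (Voter + DemotingVoter); learners are
// never counted towards either majority.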
fn region_has_quorum(peers: &[Peer], stores: &[u64]) -> bool {
let mut voters = 0;
let mut incoming_voters = 0;
let mut demoting_voters = 0;
let mut resp_voters = 0;
let mut resp_incoming_voters = 0;
let mut resp_demoting_voters = 0;
peers.iter().for_each(|peer| {
let mut in_resp = false;
for store_id in stores {
if *store_id == peer.store_id {
in_resp = true;
break;
}
}
match peer.get_role() {
PeerRole::Voter => {
voters += 1;
if in_resp {
resp_voters += 1;
}
}
PeerRole::IncomingVoter => {
incoming_voters += 1;
if in_resp {
resp_incoming_voters += 1;
}
}
PeerRole::DemotingVoter => {
demoting_voters += 1;
if in_resp {
resp_demoting_voters += 1;
}
}
PeerRole::Learner => (),
}
});
let has_incoming_majority =
(resp_voters + resp_incoming_voters) >= ((voters + incoming_voters) / 2 + 1);
let has_demoting_majority =
(resp_voters + resp_demoting_voters) >= ((voters + demoting_voters) / 2 + 1);
has_incoming_majority && has_demoting_majority
}
fn find_store_id(peer_list: &[Peer], peer_id: u64) -> Option<u64> {
for peer in peer_list {
if peer.id == peer_id {
return Some(peer.store_id);
}
}
None
}
| AdvanceTsWorker | identifier_name |
day_5.rs | pub use tdd_kata::lcd_kata::day_5::Display;
pub use tdd_kata::lcd_kata::day_5::Number::{One, Two, Three, Four, Five, Six, Seven, Eight, Nine, Zero};
pub use tdd_kata::lcd_kata::day_5::Data::{NotANumber, Output};
pub use expectest::prelude::be_equal_to;
describe! lcd_tests {
before_each {
let mut display = Display::new();
}
it "should output nothing on new display " {
expect!(display.output()).to(be_equal_to(Output(vec![])));
}
it "should output nothing with empty input" {
display.input("");
expect!(display.output()).to(be_equal_to(Output(vec![])));
}
it "should output one" {
display.input("1");
expect!(display.output()).to(be_equal_to(Output(vec![One])));
}
| it "should output all numbers" {
display.input("1234567890");
expect!(display.output()).to(be_equal_to(Output(vec![One, Two, Three, Four, Five, Six, Seven, Eight, Nine, Zero])));
}
it "should show error when input is not a number" {
display.input("abc");
expect!(display.output()).to(be_equal_to(NotANumber));
}
} | random_line_split |
|
print_with_newline.rs | // FIXME: Ideally these suggestions would be fixed via rustfix. Blocked by rust-lang/rust#53934
// // run-rustfix
#![allow(clippy::print_literal)]
#![warn(clippy::print_with_newline)]
fn | () {
print!("Hello\n");
print!("Hello {}\n", "world");
print!("Hello {} {}\n", "world", "#2");
print!("{}\n", 1265);
print!("\n");
// these are all fine
print!("");
print!("Hello");
println!("Hello");
println!("Hello\n");
println!("Hello {}\n", "world");
print!("Issue\n{}", 1265);
print!("{}", 1265);
print!("\n{}", 1275);
print!("\n\n");
print!("like eof\n\n");
print!("Hello {} {}\n\n", "world", "#2");
println!("\ndon't\nwarn\nfor\nmultiple\nnewlines\n"); // #3126
println!("\nbla\n\n"); // #3126
// Escaping
print!("\\n"); // #3514
print!("\\\n"); // should fail
print!("\\\\n");
// Raw strings
print!(r"\n"); // #3778
// Literal newlines should also fail
print!(
"
"
);
print!(
r"
"
);
// Don't warn on CRLF (#4208)
print!("\r\n");
print!("foo\r\n");
print!("\\r\n"); //~ ERROR
print!("foo\rbar\n") // ~ ERROR
}
| main | identifier_name |
print_with_newline.rs | // FIXME: Ideally these suggestions would be fixed via rustfix. Blocked by rust-lang/rust#53934
// // run-rustfix
#![allow(clippy::print_literal)]
#![warn(clippy::print_with_newline)]
fn main() {
print!("Hello\n");
print!("Hello {}\n", "world");
print!("Hello {} {}\n", "world", "#2");
print!("{}\n", 1265);
print!("\n");
// these are all fine
print!("");
print!("Hello");
println!("Hello");
println!("Hello\n");
println!("Hello {}\n", "world");
print!("Issue\n{}", 1265);
print!("{}", 1265);
print!("\n{}", 1275);
print!("\n\n");
print!("like eof\n\n");
print!("Hello {} {}\n\n", "world", "#2");
println!("\ndon't\nwarn\nfor\nmultiple\nnewlines\n"); // #3126
println!("\nbla\n\n"); // #3126
// Escaping
print!("\\n"); // #3514
print!("\\\n"); // should fail
print!("\\\\n");
// Raw strings
print!(r"\n"); // #3778
// Literal newlines should also fail | );
print!(
r"
"
);
// Don't warn on CRLF (#4208)
print!("\r\n");
print!("foo\r\n");
print!("\\r\n"); //~ ERROR
print!("foo\rbar\n") // ~ ERROR
} | print!(
"
" | random_line_split |
print_with_newline.rs | // FIXME: Ideally these suggestions would be fixed via rustfix. Blocked by rust-lang/rust#53934
// // run-rustfix
#![allow(clippy::print_literal)]
#![warn(clippy::print_with_newline)]
fn main() | println!("\nbla\n\n"); // #3126
// Escaping
print!("\\n"); // #3514
print!("\\\n"); // should fail
print!("\\\\n");
// Raw strings
print!(r"\n"); // #3778
// Literal newlines should also fail
print!(
"
"
);
print!(
r"
"
);
// Don't warn on CRLF (#4208)
print!("\r\n");
print!("foo\r\n");
print!("\\r\n"); //~ ERROR
print!("foo\rbar\n") // ~ ERROR
}
| {
print!("Hello\n");
print!("Hello {}\n", "world");
print!("Hello {} {}\n", "world", "#2");
print!("{}\n", 1265);
print!("\n");
// these are all fine
print!("");
print!("Hello");
println!("Hello");
println!("Hello\n");
println!("Hello {}\n", "world");
print!("Issue\n{}", 1265);
print!("{}", 1265);
print!("\n{}", 1275);
print!("\n\n");
print!("like eof\n\n");
print!("Hello {} {}\n\n", "world", "#2");
println!("\ndon't\nwarn\nfor\nmultiple\nnewlines\n"); // #3126 | identifier_body |
cache.rs | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use async_trait::async_trait;
use kvproto::coprocessor::Response;
use crate::coprocessor::RequestHandler;
use crate::coprocessor::*;
use crate::storage::Snapshot;
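/// A request handler that serves a coprocessor cache hit: it responds with
/// `is_cache_hit = true` and, when the snapshot has one, attaches the data version
/// as `cache_last_version` instead of building a full response.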
pub struct CachedRequestHandler {
data_version: Option<u64>,
}
impl CachedRequestHandler {
pub fn new<S: Snapshot>(snap: S) -> Self {
Self {
data_version: snap.get_data_version(),
}
}
pub fn builder<S: Snapshot>() -> RequestHandlerBuilder<S> {
Box::new(|snap, _req_ctx: &ReqContext| Ok(CachedRequestHandler::new(snap).into_boxed()))
}
}
#[async_trait]
impl RequestHandler for CachedRequestHandler {
async fn handle_request(&mut self) -> Result<Response> |
}
| {
let mut resp = Response::default();
resp.set_is_cache_hit(true);
if let Some(v) = self.data_version {
resp.set_cache_last_version(v);
}
Ok(resp)
} | identifier_body |
cache.rs | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use async_trait::async_trait;
use kvproto::coprocessor::Response;
use crate::coprocessor::RequestHandler;
use crate::coprocessor::*;
use crate::storage::Snapshot;
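/// A request handler that serves a coprocessor cache hit: it responds with
/// `is_cache_hit = true` and, when the snapshot has one, attaches the data version
/// as `cache_last_version` instead of building a full response.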
pub struct CachedRequestHandler {
data_version: Option<u64>,
}
impl CachedRequestHandler {
pub fn new<S: Snapshot>(snap: S) -> Self {
Self {
data_version: snap.get_data_version(),
}
}
pub fn builder<S: Snapshot>() -> RequestHandlerBuilder<S> {
Box::new(|snap, _req_ctx: &ReqContext| Ok(CachedRequestHandler::new(snap).into_boxed()))
}
}
#[async_trait]
impl RequestHandler for CachedRequestHandler {
async fn handle_request(&mut self) -> Result<Response> {
let mut resp = Response::default(); | resp.set_cache_last_version(v);
}
Ok(resp)
}
} | resp.set_is_cache_hit(true);
if let Some(v) = self.data_version { | random_line_split |
cache.rs | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use async_trait::async_trait;
use kvproto::coprocessor::Response;
use crate::coprocessor::RequestHandler;
use crate::coprocessor::*;
use crate::storage::Snapshot;
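/// A request handler that serves a coprocessor cache hit: it responds with
/// `is_cache_hit = true` and, when the snapshot has one, attaches the data version
/// as `cache_last_version` instead of building a full response.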
pub struct CachedRequestHandler {
data_version: Option<u64>,
}
impl CachedRequestHandler {
pub fn | <S: Snapshot>(snap: S) -> Self {
Self {
data_version: snap.get_data_version(),
}
}
pub fn builder<S: Snapshot>() -> RequestHandlerBuilder<S> {
Box::new(|snap, _req_ctx: &ReqContext| Ok(CachedRequestHandler::new(snap).into_boxed()))
}
}
#[async_trait]
impl RequestHandler for CachedRequestHandler {
async fn handle_request(&mut self) -> Result<Response> {
let mut resp = Response::default();
resp.set_is_cache_hit(true);
if let Some(v) = self.data_version {
resp.set_cache_last_version(v);
}
Ok(resp)
}
}
| new | identifier_name |
cache.rs | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use async_trait::async_trait;
use kvproto::coprocessor::Response;
use crate::coprocessor::RequestHandler;
use crate::coprocessor::*;
use crate::storage::Snapshot;
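/// A request handler that serves a coprocessor cache hit: it responds with
/// `is_cache_hit = true` and, when the snapshot has one, attaches the data version
/// as `cache_last_version` instead of building a full response.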
pub struct CachedRequestHandler {
data_version: Option<u64>,
}
impl CachedRequestHandler {
pub fn new<S: Snapshot>(snap: S) -> Self {
Self {
data_version: snap.get_data_version(),
}
}
pub fn builder<S: Snapshot>() -> RequestHandlerBuilder<S> {
Box::new(|snap, _req_ctx: &ReqContext| Ok(CachedRequestHandler::new(snap).into_boxed()))
}
}
#[async_trait]
impl RequestHandler for CachedRequestHandler {
async fn handle_request(&mut self) -> Result<Response> {
let mut resp = Response::default();
resp.set_is_cache_hit(true);
if let Some(v) = self.data_version |
Ok(resp)
}
}
| {
resp.set_cache_last_version(v);
} | conditional_block |
error.rs | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::config::ValidationError;
use prometheus::Error as MetricsError;
#[cfg(doc)]
use crate::filters::{Filter, FilterFactory};
/// An error that occurred when attempting to create a [`Filter`] from
/// a [`FilterFactory`].
#[derive(Debug, PartialEq, thiserror::Error)]
pub enum Error {
#[error("filter `{}` not found",.0)]
NotFound(String),
#[error("filter `{}` requires configuration, but none provided",.0)]
MissingConfig(&'static str),
#[error("field `{}` is invalid, reason: {}", field, reason)]
FieldInvalid { field: String, reason: String },
#[error("Deserialization failed: {}",.0)]
DeserializeFailed(String),
#[error("Failed to initialize metrics: {}",.0)]
InitializeMetricsFailed(String),
#[error("Protobuf error: {}",.0)]
ConvertProtoConfig(ConvertProtoConfigError),
}
impl From<Error> for ValidationError {
fn from(error: Error) -> Self {
Self::FilterInvalid(error)
}
}
impl From<MetricsError> for Error {
fn from(error: MetricsError) -> Self |
}
/// An error representing failure to convert a filter's protobuf configuration
/// to its static representation.
#[derive(Debug, PartialEq, thiserror::Error)]
#[error(
"{}failed to convert protobuf config: {}",
self.field.as_ref().map(|f| format!("Field `{f}`")).unwrap_or_default(),
reason
)]
pub struct ConvertProtoConfigError {
/// Reason for the failure.
reason: String,
/// Set if the failure is specific to a single field in the config.
field: Option<String>,
}
impl ConvertProtoConfigError {
pub fn new(reason: impl std::fmt::Display, field: Option<String>) -> Self {
Self {
reason: reason.to_string(),
field,
}
}
}
/// Returns a [`ConvertProtoConfigError`] with an error message when
/// an invalid proto enum value was provided in a filter's proto config.
#[macro_export]
macro_rules! enum_no_match_error {
(
field = $field:literal,
invalid_value = $invalid_value:ident,
enum_type = $enum_type:ty,
allowed_values = [ $( $allowed_value:tt ),+ ]
) => {
Err($crate::filters::error::ConvertProtoConfigError::new(
format!(
"invalid value `{}` provided: allowed values are {}",
$invalid_value,
vec![
$( (stringify!($allowed_value), <$enum_type>::$allowed_value as i32) ),+
]
.into_iter()
.map(|(a, b)| format!("{a} => {}", b as i32))
.collect::<Vec<_>>()
.join(", ")
),
Some($field.into()),
))
};
}
/// Maps an integer from a protobuf enum value to a target enum variant.
/// Both protobuf and target enum must have similar variants.
/// The protobuf enum variant should be cast-able to an i32
/// Returns an `OK` Result with the target enum variant otherwise [`ConvertProtoConfigError`]
/// if the provided value does not map to any enum variant.
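///
/// A hypothetical usage sketch (the `ProtoMode` and `Mode` enums below are
/// illustrative placeholders, not types defined in this crate):
///
/// ```ignore
/// let mode = map_proto_enum!(
/// value = proto_config.mode,
/// field = "mode",
/// proto_enum_type = ProtoMode,
/// target_enum_type = Mode,
/// variants = [Append, Overwrite]
/// )?;
/// ```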
#[macro_export]
macro_rules! map_proto_enum {
(
value = $value:expr,
field = $field:literal,
proto_enum_type = $proto_enum_type:ty,
target_enum_type = $target_enum_type:ty,
variants = [ $( $variant:tt ),+ ]
) => {
match $value {
$( v if v == <$proto_enum_type>::$variant as i32 => Ok(<$target_enum_type>::$variant) ),+,
invalid => $crate::enum_no_match_error!(
field = $field,
invalid_value = invalid,
enum_type = $proto_enum_type,
allowed_values = [ $( $variant ),+ ]
)
}
}
}
| {
Error::InitializeMetricsFailed(error.to_string())
} | identifier_body |
error.rs | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::config::ValidationError;
use prometheus::Error as MetricsError;
#[cfg(doc)]
use crate::filters::{Filter, FilterFactory};
/// An error that occurred when attempting to create a [`Filter`] from | #[derive(Debug, PartialEq, thiserror::Error)]
pub enum Error {
#[error("filter `{}` not found",.0)]
NotFound(String),
#[error("filter `{}` requires configuration, but none provided",.0)]
MissingConfig(&'static str),
#[error("field `{}` is invalid, reason: {}", field, reason)]
FieldInvalid { field: String, reason: String },
#[error("Deserialization failed: {}",.0)]
DeserializeFailed(String),
#[error("Failed to initialize metrics: {}",.0)]
InitializeMetricsFailed(String),
#[error("Protobuf error: {}",.0)]
ConvertProtoConfig(ConvertProtoConfigError),
}
impl From<Error> for ValidationError {
fn from(error: Error) -> Self {
Self::FilterInvalid(error)
}
}
impl From<MetricsError> for Error {
fn from(error: MetricsError) -> Self {
Error::InitializeMetricsFailed(error.to_string())
}
}
/// An error representing failure to convert a filter's protobuf configuration
/// to its static representation.
#[derive(Debug, PartialEq, thiserror::Error)]
#[error(
"{}failed to convert protobuf config: {}",
self.field.as_ref().map(|f| format!("Field `{f}`")).unwrap_or_default(),
reason
)]
pub struct ConvertProtoConfigError {
/// Reason for the failure.
reason: String,
/// Set if the failure is specific to a single field in the config.
field: Option<String>,
}
impl ConvertProtoConfigError {
pub fn new(reason: impl std::fmt::Display, field: Option<String>) -> Self {
Self {
reason: reason.to_string(),
field,
}
}
}
/// Returns a [`ConvertProtoConfigError`] with an error message when
/// an invalid proto enum value was provided in a filter's proto config.
#[macro_export]
macro_rules! enum_no_match_error {
(
field = $field:literal,
invalid_value = $invalid_value:ident,
enum_type = $enum_type:ty,
allowed_values = [ $( $allowed_value:tt ),+ ]
) => {
Err($crate::filters::error::ConvertProtoConfigError::new(
format!(
"invalid value `{}` provided: allowed values are {}",
$invalid_value,
vec![
$( (stringify!($allowed_value), <$enum_type>::$allowed_value as i32) ),+
]
.into_iter()
.map(|(a, b)| format!("{a} => {}", b as i32))
.collect::<Vec<_>>()
.join(", ")
),
Some($field.into()),
))
};
}
/// Maps an integer from a protobuf enum value to a target enum variant.
/// Both protobuf and target enum must have similar variants.
/// The protobuf enum variant should be cast-able to an i32
/// Returns an `OK` Result with the target enum variant otherwise [`ConvertProtoConfigError`]
/// if the provided value does not map to any enum variant.
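///
/// A hypothetical usage sketch (the `ProtoMode` and `Mode` enums below are
/// illustrative placeholders, not types defined in this crate):
///
/// ```ignore
/// let mode = map_proto_enum!(
/// value = proto_config.mode,
/// field = "mode",
/// proto_enum_type = ProtoMode,
/// target_enum_type = Mode,
/// variants = [Append, Overwrite]
/// )?;
/// ```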
#[macro_export]
macro_rules! map_proto_enum {
(
value = $value:expr,
field = $field:literal,
proto_enum_type = $proto_enum_type:ty,
target_enum_type = $target_enum_type:ty,
variants = [ $( $variant:tt ),+ ]
) => {
match $value {
$( v if v == <$proto_enum_type>::$variant as i32 => Ok(<$target_enum_type>::$variant) ),+,
invalid => $crate::enum_no_match_error!(
field = $field,
invalid_value = invalid,
enum_type = $proto_enum_type,
allowed_values = [ $( $variant ),+ ]
)
}
}
} | /// a [`FilterFactory`]. | random_line_split |
error.rs | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::config::ValidationError;
use prometheus::Error as MetricsError;
#[cfg(doc)]
use crate::filters::{Filter, FilterFactory};
/// An error that occurred when attempting to create a [`Filter`] from
/// a [`FilterFactory`].
#[derive(Debug, PartialEq, thiserror::Error)]
pub enum Error {
#[error("filter `{}` not found",.0)]
NotFound(String),
#[error("filter `{}` requires configuration, but none provided",.0)]
MissingConfig(&'static str),
#[error("field `{}` is invalid, reason: {}", field, reason)]
FieldInvalid { field: String, reason: String },
#[error("Deserialization failed: {}",.0)]
DeserializeFailed(String),
#[error("Failed to initialize metrics: {}",.0)]
InitializeMetricsFailed(String),
#[error("Protobuf error: {}",.0)]
ConvertProtoConfig(ConvertProtoConfigError),
}
impl From<Error> for ValidationError {
fn from(error: Error) -> Self {
Self::FilterInvalid(error)
}
}
impl From<MetricsError> for Error {
fn from(error: MetricsError) -> Self {
Error::InitializeMetricsFailed(error.to_string())
}
}
/// An error representing failure to convert a filter's protobuf configuration
/// to its static representation.
#[derive(Debug, PartialEq, thiserror::Error)]
#[error(
"{}failed to convert protobuf config: {}",
self.field.as_ref().map(|f| format!("Field `{f}`")).unwrap_or_default(),
reason
)]
pub struct | {
/// Reason for the failure.
reason: String,
/// Set if the failure is specific to a single field in the config.
field: Option<String>,
}
impl ConvertProtoConfigError {
pub fn new(reason: impl std::fmt::Display, field: Option<String>) -> Self {
Self {
reason: reason.to_string(),
field,
}
}
}
/// Returns a [`ConvertProtoConfigError`] with an error message when
/// an invalid proto enum value was provided in a filter's proto config.
#[macro_export]
macro_rules! enum_no_match_error {
(
field = $field:literal,
invalid_value = $invalid_value:ident,
enum_type = $enum_type:ty,
allowed_values = [ $( $allowed_value:tt ),+ ]
) => {
Err($crate::filters::error::ConvertProtoConfigError::new(
format!(
"invalid value `{}` provided: allowed values are {}",
$invalid_value,
vec![
$( (stringify!($allowed_value), <$enum_type>::$allowed_value as i32) ),+
]
.into_iter()
.map(|(a, b)| format!("{a} => {}", b as i32))
.collect::<Vec<_>>()
.join(", ")
),
Some($field.into()),
))
};
}
/// Maps an integer from a protobuf enum value to a target enum variant.
/// Both protobuf and target enum must have similar variants.
/// The protobuf enum variant should be cast-able to an i32
/// Returns an `OK` Result with the target enum variant otherwise [`ConvertProtoConfigError`]
/// if the provided value does not map to any enum variant.
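///
/// A hypothetical usage sketch (the `ProtoMode` and `Mode` enums below are
/// illustrative placeholders, not types defined in this crate):
///
/// ```ignore
/// let mode = map_proto_enum!(
/// value = proto_config.mode,
/// field = "mode",
/// proto_enum_type = ProtoMode,
/// target_enum_type = Mode,
/// variants = [Append, Overwrite]
/// )?;
/// ```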
#[macro_export]
macro_rules! map_proto_enum {
(
value = $value:expr,
field = $field:literal,
proto_enum_type = $proto_enum_type:ty,
target_enum_type = $target_enum_type:ty,
variants = [ $( $variant:tt ),+ ]
) => {
match $value {
$( v if v == <$proto_enum_type>::$variant as i32 => Ok(<$target_enum_type>::$variant) ),+,
invalid => $crate::enum_no_match_error!(
field = $field,
invalid_value = invalid,
enum_type = $proto_enum_type,
allowed_values = [ $( $variant ),+ ]
)
}
}
}
| ConvertProtoConfigError | identifier_name |
mod.rs | use std::rc::Rc;
use std::cell::RefCell;
use rand;
use rand::XorShiftRng;
use rand::Rng;
use gmath::vectors::Vec2;
use game::entity::{Object, Physics};
use game::entity::creature::Creature;
use keyboard::KeyboardState;
use sdl2::keycode;
pub trait Controller<A> {
/// Update the controller
/// # Arguments
/// `object` - The object to control
/// `secs` - The time elapsed since last update
fn update(&mut self, _object: &mut A, _secs: f32) {
}
}
pub struct NoneController<A>;
impl<A: Object> NoneController<A> {
pub fn new() -> NoneController<A> {
NoneController
}
}
impl<A: Object> Controller<A> for NoneController<A> {
// Just use default trait implementations
}
/// A controller that controls objects using the keyboard
pub struct KeyboardController {
keyboard: Rc<RefCell<KeyboardState>>,
}
impl KeyboardController {
pub fn new(keyboard: Rc<RefCell<KeyboardState>>) -> KeyboardController {
KeyboardController {
keyboard: keyboard,
}
}
}
impl Controller<Creature> for KeyboardController {
fn | (&mut self, object: &mut Creature, _: f32) {
let keyboard = self.keyboard.borrow();
let move_accel = object.move_accel;
let x_accel =
if keyboard.is_keydown(keycode::LeftKey) {
-move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else if keyboard.is_keydown(keycode::RightKey) {
move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else {
0.0
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
let jump_accel = object.jump_accel;
if object.is_on_ground() && keyboard.is_keydown(keycode::UpKey) {
let new_velocity = object.velocity() + Vec2::new(0.0, -jump_accel);
object.set_velocity(new_velocity);
}
}
}
/// A controller that controls objects using randomness
pub struct RandomController {
rng: XorShiftRng,
move_time: f32,
wait_time: f32,
}
impl RandomController {
pub fn new(move_time: f32) -> RandomController {
RandomController {
rng: rand::weak_rng(),
move_time: move_time,
wait_time: 0.0,
}
}
}
impl Controller<Creature> for RandomController {
fn update(&mut self, object: &mut Creature, secs: f32) {
self.wait_time += secs;
if self.wait_time > self.move_time {
let move_accel = object.move_accel;
let x_accel = match self.rng.gen::<f32>() {
dir if dir < 0.5 => 0.0,
dir if dir < 0.75 => move_accel,
_ => -move_accel,
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
self.wait_time -= self.move_time;
}
}
}
| update | identifier_name |
mod.rs | use std::rc::Rc;
use std::cell::RefCell;
use rand;
use rand::XorShiftRng;
use rand::Rng;
use gmath::vectors::Vec2;
use game::entity::{Object, Physics};
use game::entity::creature::Creature;
use keyboard::KeyboardState;
use sdl2::keycode;
pub trait Controller<A> {
/// Update the controller
/// # Arguments
/// `object` - The object to control
/// `secs` - The time elapsed since last update
fn update(&mut self, _object: &mut A, _secs: f32) {
}
}
pub struct NoneController<A>;
impl<A: Object> NoneController<A> {
pub fn new() -> NoneController<A> {
NoneController
}
}
impl<A: Object> Controller<A> for NoneController<A> {
// Just use default trait implementations
}
/// A controller that controls objects using the keyboard
pub struct KeyboardController {
keyboard: Rc<RefCell<KeyboardState>>,
}
impl KeyboardController {
pub fn new(keyboard: Rc<RefCell<KeyboardState>>) -> KeyboardController {
KeyboardController {
keyboard: keyboard,
}
}
}
impl Controller<Creature> for KeyboardController {
fn update(&mut self, object: &mut Creature, _: f32) {
let keyboard = self.keyboard.borrow();
let move_accel = object.move_accel;
let x_accel =
if keyboard.is_keydown(keycode::LeftKey) |
else if keyboard.is_keydown(keycode::RightKey) {
move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else {
0.0
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
let jump_accel = object.jump_accel;
if object.is_on_ground() && keyboard.is_keydown(keycode::UpKey) {
let new_velocity = object.velocity() + Vec2::new(0.0, -jump_accel);
object.set_velocity(new_velocity);
}
}
}
/// A controller that controls objects using randomness
pub struct RandomController {
rng: XorShiftRng,
move_time: f32,
wait_time: f32,
}
impl RandomController {
pub fn new(move_time: f32) -> RandomController {
RandomController {
rng: rand::weak_rng(),
move_time: move_time,
wait_time: 0.0,
}
}
}
impl Controller<Creature> for RandomController {
fn update(&mut self, object: &mut Creature, secs: f32) {
self.wait_time += secs;
if self.wait_time > self.move_time {
let move_accel = object.move_accel;
let x_accel = match self.rng.gen::<f32>() {
dir if dir < 0.5 => 0.0,
dir if dir < 0.75 => move_accel,
_ => -move_accel,
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
self.wait_time -= self.move_time;
}
}
}
| {
-move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
} | conditional_block |
mod.rs | use std::rc::Rc;
use std::cell::RefCell;
use rand;
use rand::XorShiftRng;
use rand::Rng;
use gmath::vectors::Vec2;
use game::entity::{Object, Physics};
use game::entity::creature::Creature;
use keyboard::KeyboardState;
use sdl2::keycode;
pub trait Controller<A> {
/// Update the controller
/// # Arguments
/// `object` - The object to control
/// `secs` - The time elapsed since last update
fn update(&mut self, _object: &mut A, _secs: f32) {
}
}
pub struct NoneController<A>;
impl<A: Object> NoneController<A> {
pub fn new() -> NoneController<A> {
NoneController
}
}
impl<A: Object> Controller<A> for NoneController<A> {
// Just use default trait implementations
}
/// A controller that controls objects using the keyboard
pub struct KeyboardController {
keyboard: Rc<RefCell<KeyboardState>>,
}
impl KeyboardController {
pub fn new(keyboard: Rc<RefCell<KeyboardState>>) -> KeyboardController {
KeyboardController {
keyboard: keyboard,
}
} | impl Controller<Creature> for KeyboardController {
fn update(&mut self, object: &mut Creature, _: f32) {
let keyboard = self.keyboard.borrow();
let move_accel = object.move_accel;
let x_accel =
if keyboard.is_keydown(keycode::LeftKey) {
-move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else if keyboard.is_keydown(keycode::RightKey) {
move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else {
0.0
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
let jump_accel = object.jump_accel;
if object.is_on_ground() && keyboard.is_keydown(keycode::UpKey) {
let new_velocity = object.velocity() + Vec2::new(0.0, -jump_accel);
object.set_velocity(new_velocity);
}
}
}
/// A controller that controls objects using randomness
pub struct RandomController {
rng: XorShiftRng,
move_time: f32,
wait_time: f32,
}
impl RandomController {
pub fn new(move_time: f32) -> RandomController {
RandomController {
rng: rand::weak_rng(),
move_time: move_time,
wait_time: 0.0,
}
}
}
impl Controller<Creature> for RandomController {
fn update(&mut self, object: &mut Creature, secs: f32) {
self.wait_time += secs;
if self.wait_time > self.move_time {
let move_accel = object.move_accel;
let x_accel = match self.rng.gen::<f32>() {
dir if dir < 0.5 => 0.0,
dir if dir < 0.75 => move_accel,
_ => -move_accel,
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
self.wait_time -= self.move_time;
}
}
} | }
| random_line_split |
mod.rs | use std::rc::Rc;
use std::cell::RefCell;
use rand;
use rand::XorShiftRng;
use rand::Rng;
use gmath::vectors::Vec2;
use game::entity::{Object, Physics};
use game::entity::creature::Creature;
use keyboard::KeyboardState;
use sdl2::keycode;
pub trait Controller<A> {
/// Update the controller
/// # Arguments
/// `object` - The object to control
/// `secs` - The time elapsed since last update
fn update(&mut self, _object: &mut A, _secs: f32) |
}
pub struct NoneController<A>;
impl<A: Object> NoneController<A> {
pub fn new() -> NoneController<A> {
NoneController
}
}
impl<A: Object> Controller<A> for NoneController<A> {
// Just use default trait implementations
}
/// A controller that controls objects using the keyboard
pub struct KeyboardController {
keyboard: Rc<RefCell<KeyboardState>>,
}
impl KeyboardController {
pub fn new(keyboard: Rc<RefCell<KeyboardState>>) -> KeyboardController {
KeyboardController {
keyboard: keyboard,
}
}
}
impl Controller<Creature> for KeyboardController {
fn update(&mut self, object: &mut Creature, _: f32) {
let keyboard = self.keyboard.borrow();
let move_accel = object.move_accel;
let x_accel =
if keyboard.is_keydown(keycode::LeftKey) {
-move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else if keyboard.is_keydown(keycode::RightKey) {
move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else {
0.0
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
let jump_accel = object.jump_accel;
if object.is_on_ground() && keyboard.is_keydown(keycode::UpKey) {
let new_velocity = object.velocity() + Vec2::new(0.0, -jump_accel);
object.set_velocity(new_velocity);
}
}
}
/// A controller that controls objects using randomness
pub struct RandomController {
rng: XorShiftRng,
move_time: f32,
wait_time: f32,
}
impl RandomController {
pub fn new(move_time: f32) -> RandomController {
RandomController {
rng: rand::weak_rng(),
move_time: move_time,
wait_time: 0.0,
}
}
}
impl Controller<Creature> for RandomController {
fn update(&mut self, object: &mut Creature, secs: f32) {
self.wait_time += secs;
if self.wait_time > self.move_time {
let move_accel = object.move_accel;
let x_accel = match self.rng.gen::<f32>() {
dir if dir < 0.5 => 0.0,
dir if dir < 0.75 => move_accel,
_ => -move_accel,
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
self.wait_time -= self.move_time;
}
}
}
| {
} | identifier_body |
p_2_0_01.rs | // P_2_0_01
//
// Generative Gestaltung – Creative Coding im Web
// ISBN: 978-3-87439-902-9, First Edition, Hermann Schmidt, Mainz, 2018
// Benedikt Groß, Hartmut Bohnacker, Julia Laub, Claudius Lazzeroni
// with contributions by Joey Lee and Niels Poldervaart
// Copyright 2018
//
// http://www.generative-gestaltung.de
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* drawing a filled circle with lines.
*
* MOUSE
* position x : length
* position y : thickness and number of lines
*
* KEYS
* s : save png
*/
extern crate nannou;
use nannou::prelude::*;
fn main() {
nannou::sketch(view).size(550, 550).run();
}
fn view(app: &App, frame: Frame) {
// Prepare to draw.
let draw = app.draw();
let win = app.window_rect();
let circle_resolution = map_range(app.mouse.y, win.top(), win.bottom(), 2, 80);
let radius = app.mouse.x - win.left();
let angle = TAU / circle_resolution as f32;
draw.background().color(BLACK);
for i in 0..circle_resolution {
let x = (angle * i as f32).cos() * radius;
let y = (angle * i as f32).sin() * radius;
draw.line()
.start(pt2(0.0, 0.0))
.end(pt2(x, y))
.stroke_weight(app.mouse.y / 20.0)
.caps_round()
.color(WHITE);
}
// Write to the window frame.
draw.to_frame(app, &frame).unwrap();
if app.keys.down.contains(&Key::S) {
| app.main_window()
.capture_frame(app.exe_name().unwrap() + ".png");
}
}
| conditional_block |
|
p_2_0_01.rs | // P_2_0_01
//
// Generative Gestaltung – Creative Coding im Web
// ISBN: 978-3-87439-902-9, First Edition, Hermann Schmidt, Mainz, 2018
// Benedikt Groß, Hartmut Bohnacker, Julia Laub, Claudius Lazzeroni
// with contributions by Joey Lee and Niels Poldervaart
// Copyright 2018
//
// http://www.generative-gestaltung.de
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* drawing a filled circle with lines.
*
* MOUSE
* position x : length
* position y : thickness and number of lines
*
* KEYS
* s : save png
*/
extern crate nannou;
use nannou::prelude::*;
fn main() {
nannou::sketch(view).size(550, 550).run();
}
fn view(app: &App, frame: Frame) {
| draw.to_frame(app, &frame).unwrap();
if app.keys.down.contains(&Key::S) {
app.main_window()
.capture_frame(app.exe_name().unwrap() + ".png");
}
}
| // Prepare to draw.
let draw = app.draw();
let win = app.window_rect();
let circle_resolution = map_range(app.mouse.y, win.top(), win.bottom(), 2, 80);
let radius = app.mouse.x - win.left();
let angle = TAU / circle_resolution as f32;
draw.background().color(BLACK);
for i in 0..circle_resolution {
let x = (angle * i as f32).cos() * radius;
let y = (angle * i as f32).sin() * radius;
draw.line()
.start(pt2(0.0, 0.0))
.end(pt2(x, y))
.stroke_weight(app.mouse.y / 20.0)
.caps_round()
.color(WHITE);
}
// Write to the window frame. | identifier_body |
p_2_0_01.rs | // P_2_0_01
//
// Generative Gestaltung – Creative Coding im Web
// ISBN: 978-3-87439-902-9, First Edition, Hermann Schmidt, Mainz, 2018
// Benedikt Groß, Hartmut Bohnacker, Julia Laub, Claudius Lazzeroni
// with contributions by Joey Lee and Niels Poldervaart
// Copyright 2018
//
// http://www.generative-gestaltung.de
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* drawing a filled circle with lines.
*
* MOUSE
* position x : length
* position y : thickness and number of lines
*
* KEYS
* s : save png
*/
extern crate nannou;
use nannou::prelude::*;
fn main() {
nannou::sketch(view).size(550, 550).run();
}
fn vie | p: &App, frame: Frame) {
// Prepare to draw.
let draw = app.draw();
let win = app.window_rect();
let circle_resolution = map_range(app.mouse.y, win.top(), win.bottom(), 2, 80);
let radius = app.mouse.x - win.left();
let angle = TAU / circle_resolution as f32;
draw.background().color(BLACK);
for i in 0..circle_resolution {
let x = (angle * i as f32).cos() * radius;
let y = (angle * i as f32).sin() * radius;
draw.line()
.start(pt2(0.0, 0.0))
.end(pt2(x, y))
.stroke_weight(app.mouse.y / 20.0)
.caps_round()
.color(WHITE);
}
// Write to the window frame.
draw.to_frame(app, &frame).unwrap();
if app.keys.down.contains(&Key::S) {
app.main_window()
.capture_frame(app.exe_name().unwrap() + ".png");
}
}
| w(ap | identifier_name |
p_2_0_01.rs | // P_2_0_01
//
// Generative Gestaltung – Creative Coding im Web
// ISBN: 978-3-87439-902-9, First Edition, Hermann Schmidt, Mainz, 2018
// Benedikt Groß, Hartmut Bohnacker, Julia Laub, Claudius Lazzeroni
// with contributions by Joey Lee and Niels Poldervaart
// Copyright 2018
//
// http://www.generative-gestaltung.de
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* drawing a filled circle with lines.
*
* MOUSE
* position x : length
* position y : thickness and number of lines
*
* KEYS
* s : save png
*/
extern crate nannou;
use nannou::prelude::*;
fn main() {
nannou::sketch(view).size(550, 550).run();
}
fn view(app: &App, frame: Frame) {
// Prepare to draw.
let draw = app.draw();
let win = app.window_rect();
let circle_resolution = map_range(app.mouse.y, win.top(), win.bottom(), 2, 80);
let radius = app.mouse.x - win.left();
let angle = TAU / circle_resolution as f32;
draw.background().color(BLACK);
for i in 0..circle_resolution {
let x = (angle * i as f32).cos() * radius;
let y = (angle * i as f32).sin() * radius;
draw.line()
.start(pt2(0.0, 0.0))
.end(pt2(x, y)) | .caps_round()
.color(WHITE);
}
// Write to the window frame.
draw.to_frame(app, &frame).unwrap();
if app.keys.down.contains(&Key::S) {
app.main_window()
.capture_frame(app.exe_name().unwrap() + ".png");
}
} | .stroke_weight(app.mouse.y / 20.0) | random_line_split |
convert.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Traits for conversions between types.
//!
//! The traits in this module provide a general way to talk about conversions
//! from one type to another. They follow the standard Rust conventions of
//! `as`/`into`/`from`.
//!
//! Like many traits, these are often used as bounds for generic functions, to
//! support arguments of multiple types.
//!
//! See each trait for usage examples.
#![stable(feature = "rust1", since = "1.0.0")]
use marker::Sized;
/// A cheap, reference-to-reference conversion.
///
/// `AsRef` is very similar to, but different than, `Borrow`. See
/// [the book][book] for more.
///
/// [book]: ../../book/borrow-and-asref.html
///
/// # Examples
///
/// Both `String` and `&str` implement `AsRef<str>`:
///
/// ```
/// fn is_hello<T: AsRef<str>>(s: T) {
/// assert_eq!("hello", s.as_ref());
/// }
///
/// let s = "hello";
/// is_hello(s);
///
/// let s = "hello".to_string();
/// is_hello(s);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait AsRef<T: ?Sized> {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn as_ref(&self) -> &T;
}
/// A cheap, mutable reference-to-mutable reference conversion.
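///
/// # Examples
///
/// An illustrative sketch: mutable slices implement `AsMut<[T]>` (see the impls
/// further down in this module), so a generic function can modify data through it:
///
/// ```
/// fn bump_first<T: AsMut<[u8]>>(mut data: T) {
/// if let Some(first) = data.as_mut().first_mut() {
/// *first += 1;
/// }
/// }
///
/// let mut buf = [0u8, 1, 2];
/// bump_first(&mut buf[..]);
/// assert_eq!(buf[0], 1);
/// ```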
#[stable(feature = "rust1", since = "1.0.0")]
pub trait AsMut<T:?Sized> {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn as_mut(&mut self) -> &mut T;
}
/// A conversion that consumes `self`, which may or may not be expensive.
///
/// # Examples
///
/// `String` implements `Into<Vec<u8>>`:
///
/// ```
/// fn is_hello<T: Into<Vec<u8>>>(s: T) {
/// let bytes = b"hello".to_vec();
/// assert_eq!(bytes, s.into());
/// }
///
/// let s = "hello".to_string();
/// is_hello(s);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Into<T>: Sized {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn into(self) -> T;
}
/// Construct `Self` via a conversion.
///
/// # Examples
///
/// `String` implements `From<&str>`:
///
/// ```
/// let string = "hello".to_string();
/// let other_string = String::from("hello");
///
/// assert_eq!(string, other_string);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait From<T>: Sized {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn from(T) -> Self;
}
////////////////////////////////////////////////////////////////////////////////
// GENERIC IMPLS
////////////////////////////////////////////////////////////////////////////////
// As lifts over &
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T:?Sized, U:?Sized> AsRef<U> for &'a T where T: AsRef<U> {
fn as_ref(&self) -> &U {
<T as AsRef<U>>::as_ref(*self)
}
}
// As lifts over &mut
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T:?Sized, U:?Sized> AsRef<U> for &'a mut T where T: AsRef<U> {
fn as_ref(&self) -> &U {
<T as AsRef<U>>::as_ref(*self)
}
}
// FIXME (#23442): replace the above impls for &/&mut with the following more general one:
// // As lifts over Deref
// impl<D:?Sized + Deref, U:?Sized> AsRef<U> for D where D::Target: AsRef<U> {
// fn as_ref(&self) -> &U {
// self.deref().as_ref()
// }
// }
// AsMut lifts over &mut
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T:?Sized, U:?Sized> AsMut<U> for &'a mut T where T: AsMut<U> {
fn as_mut(&mut self) -> &mut U {
    (*self).as_mut()
}
}
// FIXME (#23442): replace the above impl for &mut with the following more general one:
// // AsMut lifts over DerefMut
// impl<D:?Sized + Deref, U:?Sized> AsMut<U> for D where D::Target: AsMut<U> {
// fn as_mut(&mut self) -> &mut U {
// self.deref_mut().as_mut()
// }
// }
// From implies Into
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, U> Into<U> for T where U: From<T> {
fn into(self) -> U {
U::from(self)
}
}
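// A hedged sketch of what the blanket impl above provides to downstream code:
// implementing `From` for your own types yields the matching `Into` for free.
// The `Celsius`/`Fahrenheit` types below are made-up examples, not part of
// this module:
//
//     struct Celsius(f64);
//     struct Fahrenheit(f64);
//
//     impl From<Celsius> for Fahrenheit {
//         fn from(c: Celsius) -> Fahrenheit {
//             Fahrenheit(c.0 * 9.0 / 5.0 + 32.0)
//         }
//     }
//
//     // `.into()` works here solely because of the blanket impl above.
//     let f: Fahrenheit = Celsius(100.0).into();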
// From (and thus Into) is reflexive
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> From<T> for T {
fn from(t: T) -> T { t }
}
////////////////////////////////////////////////////////////////////////////////
// CONCRETE IMPLS
////////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> AsRef<[T]> for [T] {
fn as_ref(&self) -> &[T] {
self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> AsMut<[T]> for [T] {
fn as_mut(&mut self) -> &mut [T] {
self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl AsRef<str> for str {
#[inline]
fn as_ref(&self) -> &str {
self
}
}
convert.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Traits for conversions between types.
//!
//! The traits in this module provide a general way to talk about conversions
//! from one type to another. They follow the standard Rust conventions of
//! `as`/`into`/`from`.
//!
//! Like many traits, these are often used as bounds for generic functions, to
//! support arguments of multiple types.
//!
//! See each trait for usage examples.
#![stable(feature = "rust1", since = "1.0.0")]
use marker::Sized;
/// A cheap, reference-to-reference conversion.
///
/// `AsRef` is very similar to, but different than, `Borrow`. See
/// [the book][book] for more.
///
/// [book]:../../book/borrow-and-asref.html
///
/// # Examples
///
/// Both `String` and `&str` implement `AsRef<str>`:
///
/// ```
/// fn is_hello<T: AsRef<str>>(s: T) {
/// assert_eq!("hello", s.as_ref());
/// }
///
/// let s = "hello";
/// is_hello(s);
///
/// let s = "hello".to_string();
/// is_hello(s);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait AsRef<T:?Sized> {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn as_ref(&self) -> &T;
}
/// A cheap, mutable reference-to-mutable reference conversion.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait AsMut<T:?Sized> {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn as_mut(&mut self) -> &mut T;
}
/// A conversion that consumes `self`, which may or may not be expensive.
///
/// # Examples
///
/// `String` implements `Into<Vec<u8>>`:
///
/// ```
/// fn is_hello<T: Into<Vec<u8>>>(s: T) {
/// let bytes = b"hello".to_vec();
/// assert_eq!(bytes, s.into());
/// }
///
/// let s = "hello".to_string();
/// is_hello(s);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Into<T>: Sized {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn into(self) -> T;
}
/// Construct `Self` via a conversion.
///
/// # Examples
///
/// `String` implements `From<&str>`:
///
/// ```
/// let string = "hello".to_string();
/// let other_string = String::from("hello");
///
/// assert_eq!(string, other_string);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait From<T>: Sized {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn from(T) -> Self;
}
////////////////////////////////////////////////////////////////////////////////
// GENERIC IMPLS
////////////////////////////////////////////////////////////////////////////////
// As lifts over &
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T:?Sized, U:?Sized> AsRef<U> for &'a T where T: AsRef<U> {
fn as_ref(&self) -> &U {
<T as AsRef<U>>::as_ref(*self)
}
}
// As lifts over &mut
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T:?Sized, U:?Sized> AsRef<U> for &'a mut T where T: AsRef<U> {
fn as_ref(&self) -> &U {
<T as AsRef<U>>::as_ref(*self)
}
}
// FIXME (#23442): replace the above impls for &/&mut with the following more general one:
// // As lifts over Deref
// impl<D:?Sized + Deref, U:?Sized> AsRef<U> for D where D::Target: AsRef<U> {
// fn as_ref(&self) -> &U {
// self.deref().as_ref()
// }
// }
// AsMut lifts over &mut
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T:?Sized, U:?Sized> AsMut<U> for &'a mut T where T: AsMut<U> {
fn as_mut(&mut self) -> &mut U {
(*self).as_mut()
}
}
// FIXME (#23442): replace the above impl for &mut with the following more general one:
// // AsMut lifts over DerefMut
// impl<D:?Sized + Deref, U:?Sized> AsMut<U> for D where D::Target: AsMut<U> {
// fn as_mut(&mut self) -> &mut U {
// self.deref_mut().as_mut()
// }
// }
// From implies Into
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, U> Into<U> for T where U: From<T> {
fn into(self) -> U {
U::from(self)
}
}
// From (and thus Into) is reflexive
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> From<T> for T {
fn from(t: T) -> T { t }
}
////////////////////////////////////////////////////////////////////////////////
// CONCRETE IMPLS
////////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> AsRef<[T]> for [T] {
fn as_ref(&self) -> &[T] {
self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> AsMut<[T]> for [T] {
fn as_mut(&mut self) -> &mut [T] {
self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl AsRef<str> for str {
#[inline]
fn as_ref(&self) -> &str {
self
}
}
convert.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Traits for conversions between types.
//!
//! The traits in this module provide a general way to talk about conversions
//! from one type to another. They follow the standard Rust conventions of
//! `as`/`into`/`from`.
//!
//! Like many traits, these are often used as bounds for generic functions, to
//! support arguments of multiple types.
//!
//! See each trait for usage examples.
#![stable(feature = "rust1", since = "1.0.0")]
use marker::Sized;
/// A cheap, reference-to-reference conversion.
///
/// `AsRef` is very similar to, but different than, `Borrow`. See
/// [the book][book] for more.
///
/// [book]:../../book/borrow-and-asref.html | /// Both `String` and `&str` implement `AsRef<str>`:
///
/// ```
/// fn is_hello<T: AsRef<str>>(s: T) {
/// assert_eq!("hello", s.as_ref());
/// }
///
/// let s = "hello";
/// is_hello(s);
///
/// let s = "hello".to_string();
/// is_hello(s);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait AsRef<T:?Sized> {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn as_ref(&self) -> &T;
}
/// A cheap, mutable reference-to-mutable reference conversion.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait AsMut<T:?Sized> {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn as_mut(&mut self) -> &mut T;
}
/// A conversion that consumes `self`, which may or may not be expensive.
///
/// # Examples
///
/// `String` implements `Into<Vec<u8>>`:
///
/// ```
/// fn is_hello<T: Into<Vec<u8>>>(s: T) {
/// let bytes = b"hello".to_vec();
/// assert_eq!(bytes, s.into());
/// }
///
/// let s = "hello".to_string();
/// is_hello(s);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Into<T>: Sized {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn into(self) -> T;
}
/// Construct `Self` via a conversion.
///
/// # Examples
///
/// `String` implements `From<&str>`:
///
/// ```
/// let string = "hello".to_string();
/// let other_string = String::from("hello");
///
/// assert_eq!(string, other_string);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait From<T>: Sized {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn from(T) -> Self;
}
////////////////////////////////////////////////////////////////////////////////
// GENERIC IMPLS
////////////////////////////////////////////////////////////////////////////////
// As lifts over &
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T:?Sized, U:?Sized> AsRef<U> for &'a T where T: AsRef<U> {
fn as_ref(&self) -> &U {
<T as AsRef<U>>::as_ref(*self)
}
}
// As lifts over &mut
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T:?Sized, U:?Sized> AsRef<U> for &'a mut T where T: AsRef<U> {
fn as_ref(&self) -> &U {
<T as AsRef<U>>::as_ref(*self)
}
}
// FIXME (#23442): replace the above impls for &/&mut with the following more general one:
// // As lifts over Deref
// impl<D:?Sized + Deref, U:?Sized> AsRef<U> for D where D::Target: AsRef<U> {
// fn as_ref(&self) -> &U {
// self.deref().as_ref()
// }
// }
// AsMut lifts over &mut
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T:?Sized, U:?Sized> AsMut<U> for &'a mut T where T: AsMut<U> {
fn as_mut(&mut self) -> &mut U {
(*self).as_mut()
}
}
// FIXME (#23442): replace the above impl for &mut with the following more general one:
// // AsMut lifts over DerefMut
// impl<D:?Sized + Deref, U:?Sized> AsMut<U> for D where D::Target: AsMut<U> {
// fn as_mut(&mut self) -> &mut U {
// self.deref_mut().as_mut()
// }
// }
// From implies Into
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, U> Into<U> for T where U: From<T> {
fn into(self) -> U {
U::from(self)
}
}
// From (and thus Into) is reflexive
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> From<T> for T {
fn from(t: T) -> T { t }
}
////////////////////////////////////////////////////////////////////////////////
// CONCRETE IMPLS
////////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> AsRef<[T]> for [T] {
fn as_ref(&self) -> &[T] {
self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> AsMut<[T]> for [T] {
fn as_mut(&mut self) -> &mut [T] {
self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl AsRef<str> for str {
#[inline]
fn as_ref(&self) -> &str {
self
}
} | ///
/// # Examples
/// | random_line_split |
dirichlet.rs | // Copyright 2018 Developers of the Rand project.
// Copyright 2013 The Rust Project Developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The dirichlet distribution.
#![cfg(feature = "alloc")]
use num_traits::Float;
use crate::{Distribution, Exp1, Gamma, Open01, StandardNormal};
use rand::Rng;
use core::fmt;
use alloc::{boxed::Box, vec, vec::Vec};
/// The Dirichlet distribution `Dirichlet(alpha)`.
///
/// The Dirichlet distribution is a family of continuous multivariate
/// probability distributions parameterized by a vector alpha of positive reals.
/// It is a multivariate generalization of the beta distribution.
/// | /// # Example
///
/// ```
/// use rand::prelude::*;
/// use rand_distr::Dirichlet;
///
/// let dirichlet = Dirichlet::new(&[1.0, 2.0, 3.0]).unwrap();
/// let samples = dirichlet.sample(&mut rand::thread_rng());
/// println!("{:?} is from a Dirichlet([1.0, 2.0, 3.0]) distribution", samples);
/// ```
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
#[derive(Clone, Debug)]
pub struct Dirichlet<F>
where
F: Float,
StandardNormal: Distribution<F>,
Exp1: Distribution<F>,
Open01: Distribution<F>,
{
/// Concentration parameters (alpha)
alpha: Box<[F]>,
}
/// Error type returned from `Dirchlet::new`.
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Error {
/// `alpha.len() < 2`.
AlphaTooShort,
/// `alpha <= 0.0` or `nan`.
AlphaTooSmall,
/// `size < 2`.
SizeTooSmall,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(match self {
Error::AlphaTooShort | Error::SizeTooSmall => {
"less than 2 dimensions in Dirichlet distribution"
}
Error::AlphaTooSmall => "alpha is not positive in Dirichlet distribution",
})
}
}
#[cfg(feature = "std")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
impl std::error::Error for Error {}
impl<F> Dirichlet<F>
where
F: Float,
StandardNormal: Distribution<F>,
Exp1: Distribution<F>,
Open01: Distribution<F>,
{
/// Construct a new `Dirichlet` with the given alpha parameter `alpha`.
///
/// Requires `alpha.len() >= 2`.
#[inline]
pub fn new(alpha: &[F]) -> Result<Dirichlet<F>, Error> {
if alpha.len() < 2 {
return Err(Error::AlphaTooShort);
}
for &ai in alpha.iter() {
if!(ai > F::zero()) {
return Err(Error::AlphaTooSmall);
}
}
Ok(Dirichlet { alpha: alpha.to_vec().into_boxed_slice() })
}
/// Construct a new `Dirichlet` with the given shape parameter `alpha` and `size`.
///
/// Requires `size >= 2`.
#[inline]
pub fn new_with_size(alpha: F, size: usize) -> Result<Dirichlet<F>, Error> {
if!(alpha > F::zero()) {
return Err(Error::AlphaTooSmall);
}
if size < 2 {
return Err(Error::SizeTooSmall);
}
Ok(Dirichlet {
alpha: vec![alpha; size].into_boxed_slice(),
})
}
}
impl<F> Distribution<Vec<F>> for Dirichlet<F>
where
F: Float,
StandardNormal: Distribution<F>,
Exp1: Distribution<F>,
Open01: Distribution<F>,
{
fn sample<R: Rng +?Sized>(&self, rng: &mut R) -> Vec<F> {
let n = self.alpha.len();
let mut samples = vec![F::zero(); n];
let mut sum = F::zero();
for (s, &a) in samples.iter_mut().zip(self.alpha.iter()) {
let g = Gamma::new(a, F::one()).unwrap();
*s = g.sample(rng);
sum = sum + (*s);
}
let invacc = F::one() / sum;
for s in samples.iter_mut() {
*s = (*s)*invacc;
}
samples
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_dirichlet() {
let d = Dirichlet::new(&[1.0, 2.0, 3.0]).unwrap();
let mut rng = crate::test::rng(221);
let samples = d.sample(&mut rng);
let _: Vec<f64> = samples
.into_iter()
.map(|x| {
assert!(x > 0.0);
x
})
.collect();
}
#[test]
fn test_dirichlet_with_param() {
let alpha = 0.5f64;
let size = 2;
let d = Dirichlet::new_with_size(alpha, size).unwrap();
let mut rng = crate::test::rng(221);
let samples = d.sample(&mut rng);
let _: Vec<f64> = samples
.into_iter()
.map(|x| {
assert!(x > 0.0);
x
})
.collect();
}
#[test]
#[should_panic]
fn test_dirichlet_invalid_length() {
Dirichlet::new_with_size(0.5f64, 1).unwrap();
}
#[test]
#[should_panic]
fn test_dirichlet_invalid_alpha() {
Dirichlet::new_with_size(0.0f64, 2).unwrap();
}
} | random_line_split |
|
dirichlet.rs | // Copyright 2018 Developers of the Rand project.
// Copyright 2013 The Rust Project Developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The dirichlet distribution.
#![cfg(feature = "alloc")]
use num_traits::Float;
use crate::{Distribution, Exp1, Gamma, Open01, StandardNormal};
use rand::Rng;
use core::fmt;
use alloc::{boxed::Box, vec, vec::Vec};
/// The Dirichlet distribution `Dirichlet(alpha)`.
///
/// The Dirichlet distribution is a family of continuous multivariate
/// probability distributions parameterized by a vector alpha of positive reals.
/// It is a multivariate generalization of the beta distribution.
///
/// # Example
///
/// ```
/// use rand::prelude::*;
/// use rand_distr::Dirichlet;
///
/// let dirichlet = Dirichlet::new(&[1.0, 2.0, 3.0]).unwrap();
/// let samples = dirichlet.sample(&mut rand::thread_rng());
/// println!("{:?} is from a Dirichlet([1.0, 2.0, 3.0]) distribution", samples);
/// ```
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
#[derive(Clone, Debug)]
pub struct Dirichlet<F>
where
F: Float,
StandardNormal: Distribution<F>,
Exp1: Distribution<F>,
Open01: Distribution<F>,
{
/// Concentration parameters (alpha)
alpha: Box<[F]>,
}
/// Error type returned from `Dirichlet::new`.
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Error {
/// `alpha.len() < 2`.
AlphaTooShort,
/// `alpha <= 0.0` or `nan`.
AlphaTooSmall,
/// `size < 2`.
SizeTooSmall,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(match self {
Error::AlphaTooShort | Error::SizeTooSmall => {
"less than 2 dimensions in Dirichlet distribution"
}
Error::AlphaTooSmall => "alpha is not positive in Dirichlet distribution",
})
}
}
#[cfg(feature = "std")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
impl std::error::Error for Error {}
impl<F> Dirichlet<F>
where
F: Float,
StandardNormal: Distribution<F>,
Exp1: Distribution<F>,
Open01: Distribution<F>,
{
/// Construct a new `Dirichlet` with the given alpha parameter `alpha`.
///
/// Requires `alpha.len() >= 2`.
#[inline]
pub fn new(alpha: &[F]) -> Result<Dirichlet<F>, Error> {
if alpha.len() < 2 {
return Err(Error::AlphaTooShort);
}
for &ai in alpha.iter() {
if!(ai > F::zero()) {
return Err(Error::AlphaTooSmall);
}
}
Ok(Dirichlet { alpha: alpha.to_vec().into_boxed_slice() })
}
/// Construct a new `Dirichlet` with the given shape parameter `alpha` and `size`.
///
/// Requires `size >= 2`.
#[inline]
pub fn new_with_size(alpha: F, size: usize) -> Result<Dirichlet<F>, Error> {
if!(alpha > F::zero()) {
return Err(Error::AlphaTooSmall);
}
if size < 2 {
return Err(Error::SizeTooSmall);
}
Ok(Dirichlet {
alpha: vec![alpha; size].into_boxed_slice(),
})
}
}
impl<F> Distribution<Vec<F>> for Dirichlet<F>
where
F: Float,
StandardNormal: Distribution<F>,
Exp1: Distribution<F>,
Open01: Distribution<F>,
{
fn sample<R: Rng +?Sized>(&self, rng: &mut R) -> Vec<F> {
let n = self.alpha.len();
let mut samples = vec![F::zero(); n];
let mut sum = F::zero();
for (s, &a) in samples.iter_mut().zip(self.alpha.iter()) {
let g = Gamma::new(a, F::one()).unwrap();
*s = g.sample(rng);
sum = sum + (*s);
}
let invacc = F::one() / sum;
for s in samples.iter_mut() {
*s = (*s)*invacc;
}
samples
}
}
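// The sampler above uses the standard construction: draw `G_i ~ Gamma(alpha_i, 1)`
// independently and divide by their sum, which yields a Dirichlet(alpha) vector
// with positive components summing to one. A minimal call-site sketch of that
// property (illustrative only; it mirrors the doc example at the top of this file):
//
//     let d = Dirichlet::new(&[1.0f64, 2.0, 3.0]).unwrap();
//     let v = d.sample(&mut rand::thread_rng());
//     let total: f64 = v.iter().sum();
//     assert!((total - 1.0).abs() < 1e-9);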
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_dirichlet() {
let d = Dirichlet::new(&[1.0, 2.0, 3.0]).unwrap();
let mut rng = crate::test::rng(221);
let samples = d.sample(&mut rng);
let _: Vec<f64> = samples
.into_iter()
.map(|x| {
assert!(x > 0.0);
x
})
.collect();
}
#[test]
fn test_dirichlet_with_param() {
let alpha = 0.5f64;
let size = 2;
let d = Dirichlet::new_with_size(alpha, size).unwrap();
let mut rng = crate::test::rng(221);
let samples = d.sample(&mut rng);
let _: Vec<f64> = samples
.into_iter()
.map(|x| {
assert!(x > 0.0);
x
})
.collect();
}
#[test]
#[should_panic]
fn test_dirichlet_invalid_length() {
Dirichlet::new_with_size(0.5f64, 1).unwrap();
}
#[test]
#[should_panic]
fn test_dirichlet_invalid_alpha() {
Dirichlet::new_with_size(0.0f64, 2).unwrap();
}
}
dirichlet.rs | // Copyright 2018 Developers of the Rand project.
// Copyright 2013 The Rust Project Developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The dirichlet distribution.
#![cfg(feature = "alloc")]
use num_traits::Float;
use crate::{Distribution, Exp1, Gamma, Open01, StandardNormal};
use rand::Rng;
use core::fmt;
use alloc::{boxed::Box, vec, vec::Vec};
/// The Dirichlet distribution `Dirichlet(alpha)`.
///
/// The Dirichlet distribution is a family of continuous multivariate
/// probability distributions parameterized by a vector alpha of positive reals.
/// It is a multivariate generalization of the beta distribution.
///
/// # Example
///
/// ```
/// use rand::prelude::*;
/// use rand_distr::Dirichlet;
///
/// let dirichlet = Dirichlet::new(&[1.0, 2.0, 3.0]).unwrap();
/// let samples = dirichlet.sample(&mut rand::thread_rng());
/// println!("{:?} is from a Dirichlet([1.0, 2.0, 3.0]) distribution", samples);
/// ```
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
#[derive(Clone, Debug)]
pub struct Dirichlet<F>
where
F: Float,
StandardNormal: Distribution<F>,
Exp1: Distribution<F>,
Open01: Distribution<F>,
{
/// Concentration parameters (alpha)
alpha: Box<[F]>,
}
/// Error type returned from `Dirichlet::new`.
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Error {
/// `alpha.len() < 2`.
AlphaTooShort,
/// `alpha <= 0.0` or `nan`.
AlphaTooSmall,
/// `size < 2`.
SizeTooSmall,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(match self {
Error::AlphaTooShort | Error::SizeTooSmall => {
"less than 2 dimensions in Dirichlet distribution"
}
Error::AlphaTooSmall => "alpha is not positive in Dirichlet distribution",
})
}
}
#[cfg(feature = "std")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
impl std::error::Error for Error {}
impl<F> Dirichlet<F>
where
F: Float,
StandardNormal: Distribution<F>,
Exp1: Distribution<F>,
Open01: Distribution<F>,
{
/// Construct a new `Dirichlet` with the given alpha parameter `alpha`.
///
/// Requires `alpha.len() >= 2`.
#[inline]
pub fn new(alpha: &[F]) -> Result<Dirichlet<F>, Error> {
if alpha.len() < 2 {
return Err(Error::AlphaTooShort);
}
for &ai in alpha.iter() {
if !(ai > F::zero()) {
    return Err(Error::AlphaTooSmall);
}
}
Ok(Dirichlet { alpha: alpha.to_vec().into_boxed_slice() })
}
/// Construct a new `Dirichlet` with the given shape parameter `alpha` and `size`.
///
/// Requires `size >= 2`.
#[inline]
pub fn new_with_size(alpha: F, size: usize) -> Result<Dirichlet<F>, Error> {
if!(alpha > F::zero()) {
return Err(Error::AlphaTooSmall);
}
if size < 2 {
return Err(Error::SizeTooSmall);
}
Ok(Dirichlet {
alpha: vec![alpha; size].into_boxed_slice(),
})
}
}
impl<F> Distribution<Vec<F>> for Dirichlet<F>
where
F: Float,
StandardNormal: Distribution<F>,
Exp1: Distribution<F>,
Open01: Distribution<F>,
{
fn sample<R: Rng +?Sized>(&self, rng: &mut R) -> Vec<F> {
let n = self.alpha.len();
let mut samples = vec![F::zero(); n];
let mut sum = F::zero();
for (s, &a) in samples.iter_mut().zip(self.alpha.iter()) {
let g = Gamma::new(a, F::one()).unwrap();
*s = g.sample(rng);
sum = sum + (*s);
}
let invacc = F::one() / sum;
for s in samples.iter_mut() {
*s = (*s)*invacc;
}
samples
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_dirichlet() {
let d = Dirichlet::new(&[1.0, 2.0, 3.0]).unwrap();
let mut rng = crate::test::rng(221);
let samples = d.sample(&mut rng);
let _: Vec<f64> = samples
.into_iter()
.map(|x| {
assert!(x > 0.0);
x
})
.collect();
}
#[test]
fn test_dirichlet_with_param() {
let alpha = 0.5f64;
let size = 2;
let d = Dirichlet::new_with_size(alpha, size).unwrap();
let mut rng = crate::test::rng(221);
let samples = d.sample(&mut rng);
let _: Vec<f64> = samples
.into_iter()
.map(|x| {
assert!(x > 0.0);
x
})
.collect();
}
#[test]
#[should_panic]
fn test_dirichlet_invalid_length() {
Dirichlet::new_with_size(0.5f64, 1).unwrap();
}
#[test]
#[should_panic]
fn test_dirichlet_invalid_alpha() {
Dirichlet::new_with_size(0.0f64, 2).unwrap();
}
}
dirichlet.rs | // Copyright 2018 Developers of the Rand project.
// Copyright 2013 The Rust Project Developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The dirichlet distribution.
#![cfg(feature = "alloc")]
use num_traits::Float;
use crate::{Distribution, Exp1, Gamma, Open01, StandardNormal};
use rand::Rng;
use core::fmt;
use alloc::{boxed::Box, vec, vec::Vec};
/// The Dirichlet distribution `Dirichlet(alpha)`.
///
/// The Dirichlet distribution is a family of continuous multivariate
/// probability distributions parameterized by a vector alpha of positive reals.
/// It is a multivariate generalization of the beta distribution.
///
/// # Example
///
/// ```
/// use rand::prelude::*;
/// use rand_distr::Dirichlet;
///
/// let dirichlet = Dirichlet::new(&[1.0, 2.0, 3.0]).unwrap();
/// let samples = dirichlet.sample(&mut rand::thread_rng());
/// println!("{:?} is from a Dirichlet([1.0, 2.0, 3.0]) distribution", samples);
/// ```
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
#[derive(Clone, Debug)]
pub struct Dirichlet<F>
where
F: Float,
StandardNormal: Distribution<F>,
Exp1: Distribution<F>,
Open01: Distribution<F>,
{
/// Concentration parameters (alpha)
alpha: Box<[F]>,
}
/// Error type returned from `Dirichlet::new`.
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Error {
/// `alpha.len() < 2`.
AlphaTooShort,
/// `alpha <= 0.0` or `nan`.
AlphaTooSmall,
/// `size < 2`.
SizeTooSmall,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(match self {
Error::AlphaTooShort | Error::SizeTooSmall => {
"less than 2 dimensions in Dirichlet distribution"
}
Error::AlphaTooSmall => "alpha is not positive in Dirichlet distribution",
})
}
}
#[cfg(feature = "std")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "std")))]
impl std::error::Error for Error {}
impl<F> Dirichlet<F>
where
F: Float,
StandardNormal: Distribution<F>,
Exp1: Distribution<F>,
Open01: Distribution<F>,
{
/// Construct a new `Dirichlet` with the given alpha parameter `alpha`.
///
/// Requires `alpha.len() >= 2`.
#[inline]
pub fn new(alpha: &[F]) -> Result<Dirichlet<F>, Error> {
if alpha.len() < 2 {
return Err(Error::AlphaTooShort);
}
for &ai in alpha.iter() {
if!(ai > F::zero()) {
return Err(Error::AlphaTooSmall);
}
}
Ok(Dirichlet { alpha: alpha.to_vec().into_boxed_slice() })
}
/// Construct a new `Dirichlet` with the given shape parameter `alpha` and `size`.
///
/// Requires `size >= 2`.
#[inline]
pub fn new_with_size(alpha: F, size: usize) -> Result<Dirichlet<F>, Error> {
if!(alpha > F::zero()) {
return Err(Error::AlphaTooSmall);
}
if size < 2 {
return Err(Error::SizeTooSmall);
}
Ok(Dirichlet {
alpha: vec![alpha; size].into_boxed_slice(),
})
}
}
impl<F> Distribution<Vec<F>> for Dirichlet<F>
where
F: Float,
StandardNormal: Distribution<F>,
Exp1: Distribution<F>,
Open01: Distribution<F>,
{
fn sample<R: Rng +?Sized>(&self, rng: &mut R) -> Vec<F> {
let n = self.alpha.len();
let mut samples = vec![F::zero(); n];
let mut sum = F::zero();
for (s, &a) in samples.iter_mut().zip(self.alpha.iter()) {
let g = Gamma::new(a, F::one()).unwrap();
*s = g.sample(rng);
sum = sum + (*s);
}
let invacc = F::one() / sum;
for s in samples.iter_mut() {
*s = (*s)*invacc;
}
samples
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_dirichlet() {
    let d = Dirichlet::new(&[1.0, 2.0, 3.0]).unwrap();
    let mut rng = crate::test::rng(221);
    let samples = d.sample(&mut rng);
    let _: Vec<f64> = samples
        .into_iter()
        .map(|x| {
            assert!(x > 0.0);
            x
        })
        .collect();
}
#[test]
fn test_dirichlet_with_param() {
let alpha = 0.5f64;
let size = 2;
let d = Dirichlet::new_with_size(alpha, size).unwrap();
let mut rng = crate::test::rng(221);
let samples = d.sample(&mut rng);
let _: Vec<f64> = samples
.into_iter()
.map(|x| {
assert!(x > 0.0);
x
})
.collect();
}
#[test]
#[should_panic]
fn test_dirichlet_invalid_length() {
Dirichlet::new_with_size(0.5f64, 1).unwrap();
}
#[test]
#[should_panic]
fn test_dirichlet_invalid_alpha() {
Dirichlet::new_with_size(0.0f64, 2).unwrap();
}
}
disk.rs | #![feature(plugin, custom_derive, custom_attribute)]
#![plugin(serde_macros)]
extern crate drum;
extern crate serde;
use drum::*;
use std::io::*;
use std::collections::*;
use std::fs::{OpenOptions};
#[derive(PartialEq, Ord, Eq, PartialOrd, Serialize, Deserialize)]
enum Value {
Array(Vec<Value>),
Object(BTreeMap<Value, Value>),
String(String),
Number(i64)
}
fn run() -> Result<()> {
let msg = "Hello World";
let file =
try!(OpenOptions::new()
.read(true)
.write(true)
.create(true)
.append(true)
.open("test.db"));
let mut store = try!(Store::reopen(file));
for key in store.keys() {
println!("{}", key)
}
let previous = try!(store.get(&String::from(msg)));
try!(store.insert(
String::from(msg),
Value::Array(vec![Value::Number(100)]))
);
match previous {
Some(Value::Array(vec)) => {
match vec[0] {
Value::Number(num) => {
    println!("previous: {}", num);
},
_ => panic!()
}
},
_ => ()
}
Ok(())
}
fn main() {
run().unwrap();
return;
}
disk.rs | #![feature(plugin, custom_derive, custom_attribute)]
#![plugin(serde_macros)]
extern crate drum;
extern crate serde;
use drum::*;
use std::io::*;
use std::collections::*;
use std::fs::{OpenOptions};
#[derive(PartialEq, Ord, Eq, PartialOrd, Serialize, Deserialize)]
enum Value {
Array(Vec<Value>),
Object(BTreeMap<Value, Value>),
String(String),
Number(i64)
}
fn run() -> Result<()> {
let msg = "Hello World";
let file =
try!(OpenOptions::new()
.read(true)
.write(true)
.create(true)
.append(true)
.open("test.db"));
let mut store = try!(Store::reopen(file));
for key in store.keys() {
println!("{}", key)
}
let previous = try!(store.get(&String::from(msg)));
try!(store.insert(
String::from(msg),
Value::Array(vec![Value::Number(100)]))
);
match previous {
Some(Value::Array(vec)) => {
match vec[0] {
Value::Number(num) => {
println!("previous: {}", num);
},
_ => panic!()
}
},
_ => ()
}
Ok(())
}
fn main() {
run().unwrap();
return;
}
disk.rs | #![feature(plugin, custom_derive, custom_attribute)]
#![plugin(serde_macros)]
extern crate drum;
extern crate serde;
use drum::*;
use std::io::*;
use std::collections::*;
use std::fs::{OpenOptions};
#[derive(PartialEq, Ord, Eq, PartialOrd, Serialize, Deserialize)]
enum Value {
Array(Vec<Value>),
Object(BTreeMap<Value, Value>),
String(String),
Number(i64)
}
fn run() -> Result<()> {
let msg = "Hello World";
let file =
try!(OpenOptions::new()
.read(true)
.write(true)
.create(true)
.append(true)
.open("test.db"));
let mut store = try!(Store::reopen(file));
for key in store.keys() {
println!("{}", key)
}
let previous = try!(store.get(&String::from(msg)));
try!(store.insert(
String::from(msg),
Value::Array(vec![Value::Number(100)]))
);
match previous {
Some(Value::Array(vec)) => {
match vec[0] {
Value::Number(num) => {
println!("previous: {}", num);
},
_ => panic!()
}
},
_ => ()
}
Ok(())
}
fn main() {
run().unwrap();
return;
}
|
disk.rs | #![feature(plugin, custom_derive, custom_attribute)]
#![plugin(serde_macros)]
extern crate drum;
extern crate serde;
use drum::*;
use std::io::*;
use std::collections::*;
use std::fs::{OpenOptions};
#[derive(PartialEq, Ord, Eq, PartialOrd, Serialize, Deserialize)]
enum Value {
Array(Vec<Value>),
Object(BTreeMap<Value, Value>),
String(String),
Number(i64)
}
fn run() -> Result<()> {
let msg = "Hello World";
let file =
try!(OpenOptions::new()
.read(true)
.write(true)
.create(true)
.append(true)
.open("test.db"));
let mut store = try!(Store::reopen(file));
for key in store.keys() {
println!("{}", key)
}
let previous = try!(store.get(&String::from(msg)));
try!(store.insert(
String::from(msg),
Value::Array(vec![Value::Number(100)]))
);
match previous {
Some(Value::Array(vec)) => {
match vec[0] {
Value::Number(num) => {
println!("previous: {}", num);
},
_ => panic!()
}
},
_ => ()
}
Ok(())
}
fn main() {
run().unwrap();
return; | } | random_line_split |
|
builders.rs | use std::fmt;
use std::rc::Rc;
use quire::validate as V;
use serde::de::{self, Deserializer, Deserialize, EnumAccess, VariantAccess, Visitor};
use serde::ser::{Serializer, Serialize};
use crate::build_step::{Step, BuildStep};
use crate::builder::commands as cmd;
macro_rules! define_commands {
($($module: ident :: $item: ident,)*) => {
const COMMANDS: &'static [&'static str] = &[
$(stringify!($item),)*
];
pub enum CommandName {
$($item,)*
}
pub fn builder_validator<'x>() -> V::Enum<'x> {
V::Enum::new()
$(
.option(stringify!($item), cmd::$module::$item::config())
)*
}
impl<'a> Visitor<'a> for NameVisitor {
type Value = CommandName;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "build step is one of {}", COMMANDS.join(", "))
}
fn visit_str<E: de::Error>(self, val: &str)
-> Result<CommandName, E>
{
use self::CommandName::*;
let res = match val {
$(
stringify!($item) => $item,
)*
_ => return Err(E::custom("invalid build step")),
};
Ok(res)
}
}
impl<'a> Visitor<'a> for StepVisitor {
type Value = Step;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "build step is one of {}", COMMANDS.join(", "))
}
fn visit_enum<A>(self, data: A) -> Result<Step, A::Error>
where A: EnumAccess<'a>,
{
use self::CommandName::*;
let (tag, v) = data.variant()?;
match tag {
$(
$item => decode::<cmd::$module::$item, _>(v),
)*
}
}
}
impl Serialize for Step {
fn serialize<S: Serializer>(&self, s: S)
    -> Result<S::Ok, S::Error>
{
    if false { unreachable!() }
$(
else if let Some(b) =
self.0.downcast_ref::<cmd::$module::$item>()
{
b.serialize(s)
}
)*
else {
unreachable!("all steps should be serializeable");
}
}
}
}
}
define_commands! {
alpine::Alpine,
alpine::AlpineRepo,
ubuntu::Ubuntu,
ubuntu::UbuntuRepo,
ubuntu::UbuntuRelease,
ubuntu::UbuntuPPA,
ubuntu::UbuntuUniverse,
ubuntu::AptTrust,
packaging::Repo,
packaging::Install,
packaging::BuildDeps,
vcs::Git,
vcs::GitInstall,
vcs::GitDescribe,
pip::PipConfig,
pip::Py2Install,
pip::Py2Requirements,
pip::Py3Install,
pip::Py3Requirements,
tarcmd::Tar,
tarcmd::TarInstall,
unzip::Unzip,
generic::Sh,
generic::Cmd,
generic::RunAs,
generic::Env,
text::Text,
copy::Copy,
download::Download,
dirs::EnsureDir,
dirs::CacheDirs,
dirs::EmptyDir,
dirs::Remove,
copy::Depends,
subcontainer::Container,
subcontainer::Build,
subcontainer::SubConfig,
npm::NpmConfig,
npm::NpmDependencies,
npm::YarnDependencies,
npm::NpmInstall,
gem::GemInstall,
gem::GemBundle,
gem::GemConfig,
composer::ComposerInstall,
composer::ComposerDependencies,
composer::ComposerConfig,
}
pub struct NameVisitor;
pub struct StepVisitor;
fn decode<'x, T, V>(v: V)
-> Result<Step, V::Error>
where
T: BuildStep + Deserialize<'x> +'static,
V: VariantAccess<'x>,
{
v.newtype_variant::<T>().map(|x| Step(Rc::new(x) as Rc<dyn BuildStep>))
}
impl<'a> Deserialize<'a> for CommandName {
fn deserialize<D: Deserializer<'a>>(d: D) -> Result<CommandName, D::Error>
{
d.deserialize_identifier(NameVisitor)
}
}
impl<'a> Deserialize<'a> for Step {
fn deserialize<D: Deserializer<'a>>(d: D) -> Result<Step, D::Error> {
d.deserialize_enum("BuildStep", COMMANDS, StepVisitor)
}
} | -> Result<S::Ok, S::Error> | random_line_split |
builders.rs | use std::fmt;
use std::rc::Rc;
use quire::validate as V;
use serde::de::{self, Deserializer, Deserialize, EnumAccess, VariantAccess, Visitor};
use serde::ser::{Serializer, Serialize};
use crate::build_step::{Step, BuildStep};
use crate::builder::commands as cmd;
macro_rules! define_commands {
($($module: ident :: $item: ident,)*) => {
const COMMANDS: &'static [&'static str] = &[
$(stringify!($item),)*
];
pub enum CommandName {
$($item,)*
}
pub fn builder_validator<'x>() -> V::Enum<'x> {
V::Enum::new()
$(
.option(stringify!($item), cmd::$module::$item::config())
)*
}
impl<'a> Visitor<'a> for NameVisitor {
type Value = CommandName;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "build step is one of {}", COMMANDS.join(", "))
}
fn visit_str<E: de::Error>(self, val: &str)
-> Result<CommandName, E>
{
use self::CommandName::*;
let res = match val {
$(
stringify!($item) => $item,
)*
_ => return Err(E::custom("invalid build step")),
};
Ok(res)
}
}
impl<'a> Visitor<'a> for StepVisitor {
type Value = Step;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "build step is one of {}", COMMANDS.join(", "))
}
fn visit_enum<A>(self, data: A) -> Result<Step, A::Error>
where A: EnumAccess<'a>,
{
use self::CommandName::*;
let (tag, v) = data.variant()?;
match tag {
$(
$item => decode::<cmd::$module::$item, _>(v),
)*
}
}
}
impl Serialize for Step {
fn serialize<S: Serializer>(&self, s: S)
-> Result<S::Ok, S::Error>
{
if false { unreachable!() }
$(
else if let Some(b) =
self.0.downcast_ref::<cmd::$module::$item>()
{
b.serialize(s)
}
)*
else {
unreachable!("all steps should be serializeable");
}
}
}
}
}
define_commands! {
alpine::Alpine,
alpine::AlpineRepo,
ubuntu::Ubuntu,
ubuntu::UbuntuRepo,
ubuntu::UbuntuRelease,
ubuntu::UbuntuPPA,
ubuntu::UbuntuUniverse,
ubuntu::AptTrust,
packaging::Repo,
packaging::Install,
packaging::BuildDeps,
vcs::Git,
vcs::GitInstall,
vcs::GitDescribe,
pip::PipConfig,
pip::Py2Install,
pip::Py2Requirements,
pip::Py3Install,
pip::Py3Requirements,
tarcmd::Tar,
tarcmd::TarInstall,
unzip::Unzip,
generic::Sh,
generic::Cmd,
generic::RunAs,
generic::Env,
text::Text,
copy::Copy,
download::Download,
dirs::EnsureDir,
dirs::CacheDirs,
dirs::EmptyDir,
dirs::Remove,
copy::Depends,
subcontainer::Container,
subcontainer::Build,
subcontainer::SubConfig,
npm::NpmConfig,
npm::NpmDependencies,
npm::YarnDependencies,
npm::NpmInstall,
gem::GemInstall,
gem::GemBundle,
gem::GemConfig,
composer::ComposerInstall,
composer::ComposerDependencies,
composer::ComposerConfig,
}
pub struct NameVisitor;
pub struct StepVisitor;
fn decode<'x, T, V>(v: V)
-> Result<Step, V::Error>
where
T: BuildStep + Deserialize<'x> +'static,
V: VariantAccess<'x>,
{
v.newtype_variant::<T>().map(|x| Step(Rc::new(x) as Rc<dyn BuildStep>))
}
impl<'a> Deserialize<'a> for CommandName {
fn deserialize<D: Deserializer<'a>>(d: D) -> Result<CommandName, D::Error>
{
d.deserialize_identifier(NameVisitor)
}
}
impl<'a> Deserialize<'a> for Step {
fn deserialize<D: Deserializer<'a>>(d: D) -> Result<Step, D::Error> {
d.deserialize_enum("BuildStep", COMMANDS, StepVisitor)
}
}
| StepVisitor | identifier_name |
builders.rs | use std::fmt;
use std::rc::Rc;
use quire::validate as V;
use serde::de::{self, Deserializer, Deserialize, EnumAccess, VariantAccess, Visitor};
use serde::ser::{Serializer, Serialize};
use crate::build_step::{Step, BuildStep};
use crate::builder::commands as cmd;
macro_rules! define_commands {
($($module: ident :: $item: ident,)*) => {
const COMMANDS: &'static [&'static str] = &[
$(stringify!($item),)*
];
pub enum CommandName {
$($item,)*
}
pub fn builder_validator<'x>() -> V::Enum<'x> {
V::Enum::new()
$(
.option(stringify!($item), cmd::$module::$item::config())
)*
}
impl<'a> Visitor<'a> for NameVisitor {
type Value = CommandName;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "build step is one of {}", COMMANDS.join(", "))
}
fn visit_str<E: de::Error>(self, val: &str)
-> Result<CommandName, E>
{
use self::CommandName::*;
let res = match val {
$(
stringify!($item) => $item,
)*
_ => return Err(E::custom("invalid build step")),
};
Ok(res)
}
}
impl<'a> Visitor<'a> for StepVisitor {
type Value = Step;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "build step is one of {}", COMMANDS.join(", "))
}
fn visit_enum<A>(self, data: A) -> Result<Step, A::Error>
where A: EnumAccess<'a>,
{
use self::CommandName::*;
let (tag, v) = data.variant()?;
match tag {
$(
$item => decode::<cmd::$module::$item, _>(v),
)*
}
}
}
impl Serialize for Step {
fn serialize<S: Serializer>(&self, s: S)
-> Result<S::Ok, S::Error>
{
if false { unreachable!() }
$(
else if let Some(b) =
self.0.downcast_ref::<cmd::$module::$item>()
{
b.serialize(s)
}
)*
else {
unreachable!("all steps should be serializeable");
}
}
}
}
}
define_commands! {
alpine::Alpine,
alpine::AlpineRepo,
ubuntu::Ubuntu,
ubuntu::UbuntuRepo,
ubuntu::UbuntuRelease,
ubuntu::UbuntuPPA,
ubuntu::UbuntuUniverse,
ubuntu::AptTrust,
packaging::Repo,
packaging::Install,
packaging::BuildDeps,
vcs::Git,
vcs::GitInstall,
vcs::GitDescribe,
pip::PipConfig,
pip::Py2Install,
pip::Py2Requirements,
pip::Py3Install,
pip::Py3Requirements,
tarcmd::Tar,
tarcmd::TarInstall,
unzip::Unzip,
generic::Sh,
generic::Cmd,
generic::RunAs,
generic::Env,
text::Text,
copy::Copy,
download::Download,
dirs::EnsureDir,
dirs::CacheDirs,
dirs::EmptyDir,
dirs::Remove,
copy::Depends,
subcontainer::Container,
subcontainer::Build,
subcontainer::SubConfig,
npm::NpmConfig,
npm::NpmDependencies,
npm::YarnDependencies,
npm::NpmInstall,
gem::GemInstall,
gem::GemBundle,
gem::GemConfig,
composer::ComposerInstall,
composer::ComposerDependencies,
composer::ComposerConfig,
}
pub struct NameVisitor;
pub struct StepVisitor;
fn decode<'x, T, V>(v: V)
-> Result<Step, V::Error>
where
T: BuildStep + Deserialize<'x> +'static,
V: VariantAccess<'x>,
{
v.newtype_variant::<T>().map(|x| Step(Rc::new(x) as Rc<dyn BuildStep>))
}
impl<'a> Deserialize<'a> for CommandName {
fn deserialize<D: Deserializer<'a>>(d: D) -> Result<CommandName, D::Error>
{
    d.deserialize_identifier(NameVisitor)
}
}
impl<'a> Deserialize<'a> for Step {
fn deserialize<D: Deserializer<'a>>(d: D) -> Result<Step, D::Error> {
d.deserialize_enum("BuildStep", COMMANDS, StepVisitor)
}
}
| {
d.deserialize_identifier(NameVisitor)
} | identifier_body |
custom_build.rs | use std::collections::{HashMap, BTreeSet, HashSet};
use std::fs;
use std::path::{PathBuf, Path};
use std::str;
use std::sync::{Mutex, Arc};
use package_id::PackageId;
use util::{CraftResult, Human, Freshness, internal, ChainError, profile, paths};
use super::job::Work;
use super::{fingerprint, Kind, Context, Unit};
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug, Hash)]
pub struct BuildOutput {
/// Paths to pass to cc with the `-L` flag
pub library_paths: Vec<PathBuf>,
/// Names and link kinds of libraries, suitable for the `-l` flag
pub library_links: Vec<String>,
/// Metadata to pass to the immediate dependencies
pub metadata: Vec<(String, String)>,
/// Glob paths to trigger a rerun of this build script.
pub rerun_if_changed: Vec<String>,
/// Warnings generated by this build,
pub warnings: Vec<String>,
}
pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
pub struct BuildState {
pub outputs: Mutex<BuildMap>,
overrides: HashMap<(String, Kind), BuildOutput>,
}
#[derive(Default)]
pub struct BuildScripts {
// Craft will use this `to_link` vector to add -L flags to compiles as we
// propagate them upwards towards the final build. Note, however, that we
// need to preserve the ordering of `to_link` to be topologically sorted.
// This will ensure that build scripts which print their paths properly will
// correctly pick up the files they generated (if there are duplicates
// elsewhere).
//
// To preserve this ordering, the (id, kind) is stored in two places, once
// in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain
// this as we're building interactively below to ensure that the memory
// usage here doesn't blow up too much.
//
// For more information, see #2354
pub to_link: Vec<(PackageId, Kind)>,
seen_to_link: HashSet<(PackageId, Kind)>,
pub plugins: BTreeSet<PackageId>,
}
/// Prepares a `Work` that executes the target as a custom build script.
///
/// The `req` given is the requirement which this run of the build script will
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CraftResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}", unit.pkg, unit.target.name()));
let overridden = cx.build_state.has_override(unit);
let (work_dirty, work_fresh) = if overridden {
(Work::new(|_| Ok(())), Work::new(|_| Ok(())))
} else {
build_work(cx, unit)?
};
// Now that we've prep'd our work, build the work needed to manage the
// fingerprint and then start returning that upwards.
let (freshness, dirty, fresh) = fingerprint::prepare_build_cmd(cx, unit)?;
Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
}
fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CraftResult<(Work, Work)> {
let host_unit = Unit { kind: Kind::Host,..*unit };
let (script_output, build_output) = {
(cx.layout(&host_unit).build(unit.pkg), cx.layout(unit).build_out(unit.pkg))
};
// Building the command to execute
let to_exec = script_output.join(unit.target.name());
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set with this the build script's profile but rather the
// package's library profile.
let profile = cx.lib_profile(unit.pkg.package_id());
let to_exec = to_exec.into_os_string();
let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?;
cmd.env("OUT_DIR", &build_output)
.env("CRAFT_MANIFEST_DIR", unit.pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
.env("TARGET",
&match unit.kind {
Kind::Host => cx.host_triple(),
Kind::Target => cx.target_triple(),
})
.env("DEBUG", &profile.debuginfo.to_string())
.env("OPT_LEVEL", &profile.opt_level)
.env("PROFILE",
if cx.build_config.release {
"release"
} else {
"debug"
})
.env("HOST", cx.host_triple())
.env("CC", &cx.config.cc()?.path)
.env("DOC", &*cx.config.doc()?);
if let Some(links) = unit.pkg.manifest().links() {
cmd.env("CRAFT_MANIFEST_LINKS", links);
}
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
if let Some(features) = cx.resolve.features(unit.pkg.package_id()) {
for feat in features.iter() {
cmd.env(&format!("CRAFT_FEATURE_{}", super::envify(feat)), "1");
}
}
// Gather the set of native dependencies that this package has along with
// some other variables to close over.
//
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
cx.dep_run_custom_build(unit)?
.iter()
.filter_map(|unit| {
if unit.profile.run_custom_build {
Some((unit.pkg.manifest().links().unwrap().to_string(), unit.pkg.package_id().clone()))
} else {
None
}
})
.collect::<Vec<_>>()
};
let pkg_name = unit.pkg.to_string();
let build_state = cx.build_state.clone();
let id = unit.pkg.package_id().clone();
let output_file = build_output.parent().unwrap().join("output");
let all = (id.clone(), pkg_name.clone(), build_state.clone(), output_file.clone());
let build_scripts = super::load_build_deps(cx, unit);
let kind = unit.kind;
// Check to see if the build script as already run, and if it has keep
// track of whether it has told us about some explicit dependencies
let prev_output = BuildOutput::parse_file(&output_file, &pkg_name).ok();
let rerun_if_changed = match prev_output {
Some(ref prev) => prev.rerun_if_changed.clone(),
None => Vec::new(),
};
cx.build_explicit_deps.insert(*unit, (output_file.clone(), rerun_if_changed));
fs::create_dir_all(&cx.layout(&host_unit).build(unit.pkg))?;
fs::create_dir_all(&cx.layout(unit).build(unit.pkg))?;
// Prepare the unit of "dirty work" which will actually run the custom build
// command.
//
// Note that this has to do some extra work just before running the command
// to determine extra environment variables and such.
let dirty = Work::new(move |state| {
// Make sure that OUT_DIR exists.
//
// If we have an old build directory, then just move it into place,
// otherwise create it!
if fs::metadata(&build_output).is_err() {
fs::create_dir(&build_output)
.chain_error(|| internal("failed to create script output directory for build command"))?;
}
// For all our native lib dependencies, pick up their metadata to pass
// along to this custom build command. We're also careful to augment our
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
{
let build_state = build_state.outputs.lock().unwrap();
for (name, id) in lib_deps {
let key = (id.clone(), kind);
let state = build_state.get(&key)
.chain_error(|| {
internal(format!("failed to locate build state for env vars: {}/{:?}",
id,
kind))
})?;
let data = &state.metadata;
for &(ref key, ref value) in data.iter() {
cmd.env(&format!("DEP_{}_{}", super::envify(&name), super::envify(key)),
value);
}
}
if let Some(build_scripts) = build_scripts {
super::add_plugin_deps(&mut cmd, &build_state, &build_scripts)?;
}
}
// And now finally, run the build command itself!
state.running(&cmd);
let output = cmd.exec_with_streaming(&mut |out_line| {
state.stdout(out_line);
Ok(())
},
&mut |err_line| {
state.stderr(err_line);
Ok(())
})
.map_err(|mut e| {
e.desc = format!("failed to run custom build command for `{}`\n{}",
pkg_name,
e.desc);
Human(e)
})?;
paths::write(&output_file, &output.stdout)?;
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
//
// This is also the location where we provide feedback into the build
// state informing what variables were discovered via our script as
// well.
let parsed_output = BuildOutput::parse(&output.stdout, &pkg_name)?;
build_state.insert(id, kind, parsed_output);
Ok(())
});
// Now that we've prepared our work-to-do, we need to prepare the fresh work
// itself to run when we actually end up just discarding what we calculated
// above.
let fresh = Work::new(move |_tx| {
let (id, pkg_name, build_state, output_file) = all;
let output = match prev_output {
Some(output) => output,
None => BuildOutput::parse_file(&output_file, &pkg_name)?,
};
build_state.insert(id, kind, output);
Ok(())
});
Ok((dirty, fresh))
}
impl BuildState {
pub fn new(config: &super::BuildConfig) -> BuildState {
let mut overrides = HashMap::new();
let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));
let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
overrides.insert((name.clone(), kind), output.clone());
}
BuildState {
outputs: Mutex::new(HashMap::new()),
overrides: overrides,
}
}
fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {
self.outputs.lock().unwrap().insert((id, kind), output);
}
fn has_override(&self, unit: &Unit) -> bool {
let key = unit.pkg.manifest().links().map(|l| (l.to_string(), unit.kind));
match key.and_then(|k| self.overrides.get(&k)) {
Some(output) => {
self.insert(unit.pkg.package_id().clone(), unit.kind, output.clone());
true
}
None => false,
}
}
}
impl BuildOutput {
pub fn parse_file(path: &Path, pkg_name: &str) -> CraftResult<BuildOutput> {
    let contents = paths::read_bytes(path)?;
    BuildOutput::parse(&contents, pkg_name)
}
// Parses the output of a script.
// The `pkg_name` is used for error messages.
pub fn parse(input: &[u8], pkg_name: &str) -> CraftResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut metadata = Vec::new();
let mut rerun_if_changed = Vec::new();
let mut warnings = Vec::new();
let whence = format!("build script of `{}`", pkg_name);
for line in input.split(|b| *b == b'\n') {
let line = match str::from_utf8(line) {
Ok(line) => line.trim(),
Err(..) => continue,
};
let mut iter = line.splitn(2, ':');
if iter.next()!= Some("craft") {
// skip this line since it doesn't start with "craft:"
continue;
}
let data = match iter.next() {
Some(val) => val,
None => continue,
};
// getting the `key=value` part of the line
let mut iter = data.splitn(2, '=');
let key = iter.next();
let value = iter.next();
let (key, value) = match (key, value) {
(Some(a), Some(b)) => (a, b.trim_right()),
// line started with `craft:` but didn't match `key=value`
_ => bail!("Wrong output in {}: `{}`", whence, line),
};
match key {
"cc-flags" => {
let (libs, links) = BuildOutput::parse_cc_flags(value, &whence)?;
library_links.extend(links.into_iter());
library_paths.extend(libs.into_iter());
}
"cc-link-lib" => library_links.push(value.to_string()),
"cc-link-search" => library_paths.push(PathBuf::from(value)),
"warning" => warnings.push(value.to_string()),
"rerun-if-changed" => rerun_if_changed.push(value.to_string()),
_ => metadata.push((key.to_string(), value.to_string())),
}
}
Ok(BuildOutput {
library_paths: library_paths,
library_links: library_links,
metadata: metadata,
rerun_if_changed: rerun_if_changed,
warnings: warnings,
})
}
pub fn parse_cc_flags(value: &str, whence: &str) -> CraftResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim();
let mut flags_iter = value.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c|!c.is_whitespace()));
let (mut library_links, mut library_paths) = (Vec::new(), Vec::new());
loop {
let flag = match flags_iter.next() {
Some(f) => f,
None => break,
};
if flag!= "-l" && flag!= "-L" {
bail!("Only `-l` and `-L` flags are allowed in {}: `{}`",
whence,
value)
}
let value = match flags_iter.next() {
Some(v) => v,
None => {
bail!("Flag in cc-flags has no value in {}: `{}`",
whence,
value)
}
};
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// was already checked above
_ => bail!("only -l and -L flags are allowed"),
};
}
Ok((library_paths, library_links))
}
}
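// An illustrative sketch of the `craft:key=value` protocol handled by
// `BuildOutput::parse` above. This test module is an illustrative sketch with
// made-up names and values; it only exercises behaviour already implemented in
// this file.
#[cfg(test)]
mod build_output_parse_sketch {
    use super::BuildOutput;

    #[test]
    fn parses_links_paths_and_metadata() {
        // Two recognised keys, one arbitrary metadata key, and one line that
        // does not start with `craft:` and is therefore ignored.
        let raw = b"craft:cc-link-lib=z\ncraft:cc-link-search=/opt/zlib/lib\ncraft:answer=42\njust some build noise\n";
        let out = BuildOutput::parse(raw, "demo-pkg").ok().expect("parse failed");
        assert_eq!(out.library_links, vec!["z".to_string()]);
        assert_eq!(out.library_paths.len(), 1);
        assert_eq!(out.metadata, vec![("answer".to_string(), "42".to_string())]);
    }
}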
/// Compute the `build_scripts` map in the `Context` which tracks what build
/// scripts each package depends on.
///
/// The global `build_scripts` map lists for all (package, kind) tuples what set
/// of packages' build script outputs must be considered. For example this lists
/// all dependencies' `-L` flags which need to be propagated transitively.
///
/// The given set of targets to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> CraftResult<()> {
let mut ret = HashMap::new();
for unit in units {
build(&mut ret, cx, unit)?;
}
cx.build_scripts.extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v))));
return Ok(());
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<Unit<'b>, BuildScripts>,
cx: &Context<'b, 'cfg>,
unit: &Unit<'b>)
-> CraftResult<&'a BuildScripts> {
// Do a quick pre-flight check to see if we've already calculated the
// set of dependencies.
if out.contains_key(unit) {
return Ok(&out[unit]);
}
let mut ret = BuildScripts::default();
if!unit.target.is_custom_build() && unit.pkg.has_custom_build() {
add_to_link(&mut ret, unit.pkg.package_id(), unit.kind);
}
for unit in cx.dep_targets(unit)?.iter() {
let dep_scripts = build(out, cx, unit)?;
if unit.target.for_host() {
ret.plugins.extend(dep_scripts.to_link
.iter()
.map(|p| &p.0)
.cloned());
} else if unit.target.linkable() {
for &(ref pkg, kind) in dep_scripts.to_link.iter() {
add_to_link(&mut ret, pkg, kind);
}
}
}
let prev = out.entry(*unit).or_insert(BuildScripts::default());
for (pkg, kind) in ret.to_link {
add_to_link(prev, &pkg, kind);
}
prev.plugins.extend(ret.plugins);
Ok(prev)
}
// When adding an entry to 'to_link' we only actually push it on if the
// script hasn't seen it yet (i.e. we don't push on duplicates).
fn add_to_link(scripts: &mut BuildScripts, pkg: &PackageId, kind: Kind) {
if scripts.seen_to_link.insert((pkg.clone(), kind)) {
scripts.to_link.push((pkg.clone(), kind));
}
}
}
| {
let contents = paths::read_bytes(path)?;
BuildOutput::parse(&contents, pkg_name)
} | identifier_body |
custom_build.rs | use std::collections::{HashMap, BTreeSet, HashSet};
use std::fs;
use std::path::{PathBuf, Path};
use std::str;
use std::sync::{Mutex, Arc};
use package_id::PackageId;
use util::{CraftResult, Human, Freshness, internal, ChainError, profile, paths};
use super::job::Work;
use super::{fingerprint, Kind, Context, Unit};
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug, Hash)]
pub struct BuildOutput {
/// Paths to pass to cc with the `-L` flag
pub library_paths: Vec<PathBuf>,
/// Names and link kinds of libraries, suitable for the `-l` flag
pub library_links: Vec<String>,
/// Metadata to pass to the immediate dependencies
pub metadata: Vec<(String, String)>,
/// Glob paths to trigger a rerun of this build script.
pub rerun_if_changed: Vec<String>,
/// Warnings generated by this build.
pub warnings: Vec<String>,
}
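// Illustrative mapping, added for clarity and not part of the original
// source: how the directives a build script prints line up with the fields
// above. The directive names come from `BuildOutput::parse` below; the
// concrete values are made-up examples.
//
//   craft:cc-link-search=/opt/foo/lib       -> library_paths
//   craft:cc-link-lib=foo                   -> library_links
//   craft:cc-flags=-L /opt/foo/lib -l foo   -> library_paths + library_links
//   craft:rerun-if-changed=build.rs         -> rerun_if_changed
//   craft:warning=something looks off       -> warnings
//   craft:root=/opt/foo                     -> metadata ("root", "/opt/foo")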
pub type BuildMap = HashMap<(PackageId, Kind), BuildOutput>;
pub struct BuildState {
pub outputs: Mutex<BuildMap>,
overrides: HashMap<(String, Kind), BuildOutput>,
}
#[derive(Default)]
pub struct BuildScripts {
// Craft will use this `to_link` vector to add -L flags to compiles as we
// propagate them upwards towards the final build. Note, however, that we
// need to preserve the ordering of `to_link` to be topologically sorted.
// This will ensure that build scripts which print their paths properly will
// correctly pick up the files they generated (if there are duplicates
// elsewhere).
//
// To preserve this ordering, the (id, kind) is stored in two places, once
// in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain
// this as we're building interactively below to ensure that the memory
// usage here doesn't blow up too much.
//
// For more information, see #2354
pub to_link: Vec<(PackageId, Kind)>,
seen_to_link: HashSet<(PackageId, Kind)>,
pub plugins: BTreeSet<PackageId>,
}
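// A minimal sketch, not in the original file, of the invariant maintained by
// the `add_to_link` helper defined at the bottom of this file: `to_link`
// keeps first-seen (topological) order while `seen_to_link` filters out
// duplicates, so inserting the same (PackageId, Kind) twice is a no-op.
// `pkg_a` and `pkg_b` below are hypothetical `PackageId`s.
//
// let mut scripts = BuildScripts::default();
// add_to_link(&mut scripts, &pkg_a, Kind::Target);
// add_to_link(&mut scripts, &pkg_b, Kind::Target);
// add_to_link(&mut scripts, &pkg_a, Kind::Target); // already seen: ignored
// assert_eq!(scripts.to_link.len(), 2);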
/// Prepares a `Work` that executes the target as a custom build script.
///
/// The `req` given is the requirement which this run of the build script will
/// prepare work for. If the requirement is specified as both the target and the
/// host platforms it is assumed that the two are equal and the build script is
/// only run once (not twice).
pub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CraftResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}", unit.pkg, unit.target.name()));
let overridden = cx.build_state.has_override(unit);
let (work_dirty, work_fresh) = if overridden {
(Work::new(|_| Ok(())), Work::new(|_| Ok(())))
} else {
build_work(cx, unit)?
};
// Now that we've prep'd our work, build the work needed to manage the
// fingerprint and then start returning that upwards.
let (freshness, dirty, fresh) = fingerprint::prepare_build_cmd(cx, unit)?;
Ok((work_dirty.then(dirty), work_fresh.then(fresh), freshness))
}
fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CraftResult<(Work, Work)> {
let host_unit = Unit { kind: Kind::Host, ..*unit };
let (script_output, build_output) = {
(cx.layout(&host_unit).build(unit.pkg), cx.layout(unit).build_out(unit.pkg))
};
// Building the command to execute
let to_exec = script_output.join(unit.target.name());
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set from the build script's own profile but rather the
// package's library profile.
let profile = cx.lib_profile(unit.pkg.package_id());
let to_exec = to_exec.into_os_string();
let mut cmd = cx.compilation.host_process(to_exec, unit.pkg)?;
cmd.env("OUT_DIR", &build_output)
.env("CRAFT_MANIFEST_DIR", unit.pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
.env("TARGET",
&match unit.kind {
Kind::Host => cx.host_triple(),
Kind::Target => cx.target_triple(),
})
.env("DEBUG", &profile.debuginfo.to_string())
.env("OPT_LEVEL", &profile.opt_level)
.env("PROFILE",
if cx.build_config.release {
"release"
} else {
"debug"
})
.env("HOST", cx.host_triple())
.env("CC", &cx.config.cc()?.path)
.env("DOC", &*cx.config.doc()?);
if let Some(links) = unit.pkg.manifest().links() {
cmd.env("CRAFT_MANIFEST_LINKS", links);
}
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
if let Some(features) = cx.resolve.features(unit.pkg.package_id()) {
for feat in features.iter() {
cmd.env(&format!("CRAFT_FEATURE_{}", super::envify(feat)), "1");
}
}
// Gather the set of native dependencies that this package has along with
// some other variables to close over.
//
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
cx.dep_run_custom_build(unit)?
.iter()
.filter_map(|unit| {
if unit.profile.run_custom_build {
Some((unit.pkg.manifest().links().unwrap().to_string(), unit.pkg.package_id().clone()))
} else {
None
}
})
.collect::<Vec<_>>()
};
let pkg_name = unit.pkg.to_string();
let build_state = cx.build_state.clone();
let id = unit.pkg.package_id().clone();
let output_file = build_output.parent().unwrap().join("output");
let all = (id.clone(), pkg_name.clone(), build_state.clone(), output_file.clone());
let build_scripts = super::load_build_deps(cx, unit);
let kind = unit.kind;
// Check to see if the build script has already run, and if it has, keep
// track of whether it has told us about some explicit dependencies
let prev_output = BuildOutput::parse_file(&output_file, &pkg_name).ok();
let rerun_if_changed = match prev_output {
Some(ref prev) => prev.rerun_if_changed.clone(),
None => Vec::new(),
};
cx.build_explicit_deps.insert(*unit, (output_file.clone(), rerun_if_changed));
fs::create_dir_all(&cx.layout(&host_unit).build(unit.pkg))?;
fs::create_dir_all(&cx.layout(unit).build(unit.pkg))?;
// Prepare the unit of "dirty work" which will actually run the custom build
// command.
//
// Note that this has to do some extra work just before running the command
// to determine extra environment variables and such.
let dirty = Work::new(move |state| {
// Make sure that OUT_DIR exists.
//
// If we have an old build directory, then just move it into place,
// otherwise create it!
if fs::metadata(&build_output).is_err() {
fs::create_dir(&build_output)
.chain_error(|| internal("failed to create script output directory for build command"))?;
}
// For all our native lib dependencies, pick up their metadata to pass
// along to this custom build command. We're also careful to augment our
// dynamic library search path in case the build script depended on any
// native dynamic libraries.
{
let build_state = build_state.outputs.lock().unwrap();
for (name, id) in lib_deps {
let key = (id.clone(), kind);
let state = build_state.get(&key)
.chain_error(|| {
internal(format!("failed to locate build state for env vars: {}/{:?}",
id,
kind))
})?;
let data = &state.metadata;
for &(ref key, ref value) in data.iter() {
cmd.env(&format!("DEP_{}_{}", super::envify(&name), super::envify(key)),
value);
}
}
if let Some(build_scripts) = build_scripts {
super::add_plugin_deps(&mut cmd, &build_state, &build_scripts)?;
}
}
// And now finally, run the build command itself!
state.running(&cmd);
let output = cmd.exec_with_streaming(&mut |out_line| {
state.stdout(out_line);
Ok(())
},
&mut |err_line| {
state.stderr(err_line);
Ok(())
})
.map_err(|mut e| {
e.desc = format!("failed to run custom build command for `{}`\n{}",
pkg_name,
e.desc);
Human(e)
})?;
paths::write(&output_file, &output.stdout)?;
// After the build command has finished running, we need to be sure to
// remember all of its output so we can later discover precisely what it
// was, even if we don't run the build command again (due to freshness).
//
// This is also the location where we provide feedback into the build
// state informing what variables were discovered via our script as
// well.
let parsed_output = BuildOutput::parse(&output.stdout, &pkg_name)?;
build_state.insert(id, kind, parsed_output);
Ok(())
});
// Now that we've prepared our work-to-do, we need to prepare the fresh work
// itself to run when we actually end up just discarding what we calculated
// above.
let fresh = Work::new(move |_tx| {
let (id, pkg_name, build_state, output_file) = all;
let output = match prev_output {
Some(output) => output,
None => BuildOutput::parse_file(&output_file, &pkg_name)?,
};
build_state.insert(id, kind, output);
Ok(())
});
Ok((dirty, fresh))
}
impl BuildState {
pub fn new(config: &super::BuildConfig) -> BuildState {
let mut overrides = HashMap::new();
let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host));
let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target));
for ((name, output), kind) in i1.chain(i2) {
overrides.insert((name.clone(), kind), output.clone());
}
BuildState {
outputs: Mutex::new(HashMap::new()),
overrides: overrides,
}
}
fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {
self.outputs.lock().unwrap().insert((id, kind), output);
}
fn has_override(&self, unit: &Unit) -> bool {
let key = unit.pkg.manifest().links().map(|l| (l.to_string(), unit.kind));
match key.and_then(|k| self.overrides.get(&k)) {
Some(output) => {
self.insert(unit.pkg.package_id().clone(), unit.kind, output.clone());
true
}
None => false,
}
}
}
impl BuildOutput {
pub fn parse_file(path: &Path, pkg_name: &str) -> CraftResult<BuildOutput> {
let contents = paths::read_bytes(path)?;
BuildOutput::parse(&contents, pkg_name)
}
// Parses the output of a script.
// The `pkg_name` is used for error messages.
pub fn parse(input: &[u8], pkg_name: &str) -> CraftResult<BuildOutput> {
let mut library_paths = Vec::new();
let mut library_links = Vec::new();
let mut metadata = Vec::new();
let mut rerun_if_changed = Vec::new();
let mut warnings = Vec::new();
let whence = format!("build script of `{}`", pkg_name);
for line in input.split(|b| *b == b'\n') {
let line = match str::from_utf8(line) {
Ok(line) => line.trim(),
Err(..) => continue,
};
let mut iter = line.splitn(2, ':');
if iter.next() != Some("craft") {
// skip this line since it doesn't start with "craft:"
continue;
}
let data = match iter.next() {
Some(val) => val,
None => continue,
};
// getting the `key=value` part of the line
let mut iter = data.splitn(2, '=');
let key = iter.next();
let value = iter.next();
let (key, value) = match (key, value) {
(Some(a), Some(b)) => (a, b.trim_right()),
// line started with `craft:` but didn't match `key=value`
_ => bail!("Wrong output in {}: `{}`", whence, line),
};
match key {
"cc-flags" => {
let (libs, links) = BuildOutput::parse_cc_flags(value, &whence)?;
library_links.extend(links.into_iter());
library_paths.extend(libs.into_iter());
}
"cc-link-lib" => library_links.push(value.to_string()),
"cc-link-search" => library_paths.push(PathBuf::from(value)),
"warning" => warnings.push(value.to_string()),
"rerun-if-changed" => rerun_if_changed.push(value.to_string()),
_ => metadata.push((key.to_string(), value.to_string())),
}
}
Ok(BuildOutput {
library_paths: library_paths,
library_links: library_links,
metadata: metadata,
rerun_if_changed: rerun_if_changed,
warnings: warnings,
})
}
pub fn parse_cc_flags(value: &str, whence: &str) -> CraftResult<(Vec<PathBuf>, Vec<String>)> {
let value = value.trim();
let mut flags_iter = value.split(|c: char| c.is_whitespace())
.filter(|w| w.chars().any(|c| !c.is_whitespace()));
let (mut library_links, mut library_paths) = (Vec::new(), Vec::new());
loop {
let flag = match flags_iter.next() {
Some(f) => f,
None => break,
};
if flag != "-l" && flag != "-L" {
bail!("Only `-l` and `-L` flags are allowed in {}: `{}`",
whence,
value)
}
let value = match flags_iter.next() {
Some(v) => v,
None => {
bail!("Flag in cc-flags has no value in {}: `{}`",
whence,
value)
}
};
match flag {
"-l" => library_links.push(value.to_string()),
"-L" => library_paths.push(PathBuf::from(value)),
// was already checked above
_ => bail!("only -l and -L flags are allowed"),
};
}
Ok((library_paths, library_links))
}
}
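// A small test sketch for `BuildOutput::parse`, added for illustration and
// not part of the original file. The input lines and package name are made
// up, but the expected results follow directly from the parsing rules above
// (lines must start with `craft:` and carry a `key=value` pair).
#[cfg(test)]
mod parse_sketch {
    use super::BuildOutput;
    use std::path::PathBuf;

    #[test]
    fn picks_up_link_directives_and_skips_other_lines() {
        let input: &[u8] = b"craft:cc-link-lib=foo\ncraft:cc-link-search=/tmp/foo\ncraft:root=/tmp/foo\nnot a directive\n";
        let out = match BuildOutput::parse(input, "example-pkg") {
            Ok(out) => out,
            Err(..) => panic!("parse failed"),
        };
        assert_eq!(out.library_links, vec!["foo".to_string()]);
        assert_eq!(out.library_paths, vec![PathBuf::from("/tmp/foo")]);
        assert_eq!(out.metadata, vec![("root".to_string(), "/tmp/foo".to_string())]);
    }
}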
/// Compute the `build_scripts` map in the `Context` which tracks what build
/// scripts each package depends on.
///
/// The global `build_scripts` map lists for all (package, kind) tuples what set
/// of packages' build script outputs must be considered. For example this lists
/// all dependencies' `-L` flags which need to be propagated transitively.
///
/// The given set of targets to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> CraftResult<()> {
let mut ret = HashMap::new();
for unit in units {
build(&mut ret, cx, unit)?;
}
cx.build_scripts.extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v))));
return Ok(());
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
fn | <'a, 'b, 'cfg>(out: &'a mut HashMap<Unit<'b>, BuildScripts>,
cx: &Context<'b, 'cfg>,
unit: &Unit<'b>)
-> CraftResult<&'a BuildScripts> {
// Do a quick pre-flight check to see if we've already calculated the
// set of dependencies.
if out.contains_key(unit) {
return Ok(&out[unit]);
}
let mut ret = BuildScripts::default();
if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
add_to_link(&mut ret, unit.pkg.package_id(), unit.kind);
}
for unit in cx.dep_targets(unit)?.iter() {
let dep_scripts = build(out, cx, unit)?;
if unit.target.for_host() {
ret.plugins.extend(dep_scripts.to_link
.iter()
.map(|p| &p.0)
.cloned());
} else if unit.target.linkable() {
for &(ref pkg, kind) in dep_scripts.to_link.iter() {
add_to_link(&mut ret, pkg, kind);
}
}
}
let prev = out.entry(*unit).or_insert(BuildScripts::default());
for (pkg, kind) in ret.to_link {
add_to_link(prev, &pkg, kind);
}
prev.plugins.extend(ret.plugins);
Ok(prev)
}
// When adding an entry to 'to_link' we only actually push it on if the
// script hasn't seen it yet (i.e. we don't push on duplicates).
fn add_to_link(scripts: &mut BuildScripts, pkg: &PackageId, kind: Kind) {
if scripts.seen_to_link.insert((pkg.clone(), kind)) {
scripts.to_link.push((pkg.clone(), kind));
}
}
}
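// Illustrative note, not from the original source: after `build_map` runs,
// `cx.build_scripts` maps each compiled Unit to the topologically ordered
// list of (PackageId, Kind) build-script outputs whose `-L`/`-l` flags must
// be applied, plus the set of plugin packages relevant on the host. A
// hypothetical consumer might look the data up roughly like this:
//
// if let Some(scripts) = cx.build_scripts.get(unit) {
//     for &(ref pkg, kind) in scripts.to_link.iter() {
//         // apply the library_paths / library_links recorded for (pkg, kind)
//     }
// }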
| build | identifier_name |