| file_name (large_string, lengths 4-69) | prefix (large_string, lengths 0-26.7k) | suffix (large_string, lengths 0-24.8k) | middle (large_string, lengths 0-2.12k) | fim_type (large_string, 4 classes) |
|---|---|---|---|---|
main.rs | extern crate sdl2;
extern crate rustc_serialize;
use sdl2::keycode::KeyCode;
use sdl2::event::Event;
use sdl2::timer::get_ticks;
mod sprite;
mod assets;
mod draw;
mod player;
mod tile;
mod map;
mod physics;
use sprite::Sprite;
use player::Player;
use player::PlayerStatus;
use draw::Draw;
fn main() { | let window = sdl_context.window("Rust-Man", 640, 480)
.position_centered()
.build()
.ok().expect("Failed to create window.");
//create a renderer
let mut renderer = window.renderer().accelerated().build()
.ok().expect("Failed to create accelerated renderer.");
//create a new player
let mut player = Player::new(Sprite::new_from_file("sonic.bmp", &renderer),
PlayerStatus::Stationary);
//start drawing
let mut drawer = renderer.drawer();
drawer.clear();
drawer.present();
//event loop stuff
let mut running = true;
let mut event_pump = sdl_context.event_pump();
let mut prev_time = get_ticks();
let mut delta_t = get_ticks() - prev_time;
while running {
//timer stuff
delta_t = get_ticks() - prev_time;
//limit to 60fps
if delta_t > 16 {
//handle event queue
for event in event_pump.poll_iter() {
match event {
Event::Quit {..} | Event::KeyDown {keycode: KeyCode::Escape, .. } => {
running = false
},
Event::KeyDown {keycode: KeyCode::Left, .. } => {
player.status = PlayerStatus::MovingLeft
},
Event::KeyDown {keycode: KeyCode::Right, .. } => {
player.status = PlayerStatus::MovingRight
},
Event::KeyUp {keycode: KeyCode::Left, .. } => {
player.status = PlayerStatus::Decelerating
},
Event::KeyUp {keycode: KeyCode::Right, .. } => {
player.status = PlayerStatus::Decelerating
},
_ => {}
}
}
//move player
player.update();
//draw
drawer.clear();
player.draw(&mut drawer, &None);
drawer.present();
//more timer stuff
prev_time = get_ticks();
}
}
println!("Goodbye, world!");
} | //initialize sdl
let sdl_context = sdl2::init().video().events().build()
.ok().expect("Failed to initialize SDL.");
//create a window | random_line_split |
main.rs | extern crate sdl2;
extern crate rustc_serialize;
use sdl2::keycode::KeyCode;
use sdl2::event::Event;
use sdl2::timer::get_ticks;
mod sprite;
mod assets;
mod draw;
mod player;
mod tile;
mod map;
mod physics;
use sprite::Sprite;
use player::Player;
use player::PlayerStatus;
use draw::Draw;
fn main() {
//initialize sdl
let sdl_context = sdl2::init().video().events().build()
.ok().expect("Failed to initialize SDL.");
//create a window
let window = sdl_context.window("Rust-Man", 640, 480)
.position_centered()
.build()
.ok().expect("Failed to create window.");
//create a renderer
let mut renderer = window.renderer().accelerated().build()
.ok().expect("Failed to create accelerated renderer.");
//create a new player
let mut player = Player::new(Sprite::new_from_file("sonic.bmp", &renderer),
PlayerStatus::Stationary);
//start drawing
let mut drawer = renderer.drawer();
drawer.clear();
drawer.present();
//event loop stuff
let mut running = true;
let mut event_pump = sdl_context.event_pump();
let mut prev_time = get_ticks();
let mut delta_t = get_ticks() - prev_time;
while running {
//timer stuff
delta_t = get_ticks() - prev_time;
//limit to 60fps
if delta_t > 16 | }
}
//move player
player.update();
//draw
drawer.clear();
player.draw(&mut drawer, &None);
drawer.present();
//more timer stuff
prev_time = get_ticks();
}
}
println!("Goodbye, world!");
}
| {
//handle event queue
for event in event_pump.poll_iter() {
match event {
Event::Quit {..} | Event::KeyDown {keycode: KeyCode::Escape, .. } => {
running = false
},
Event::KeyDown {keycode: KeyCode::Left, .. } => {
player.status = PlayerStatus::MovingLeft
},
Event::KeyDown {keycode: KeyCode::Right, .. } => {
player.status = PlayerStatus::MovingRight
},
Event::KeyUp {keycode: KeyCode::Left, .. } => {
player.status = PlayerStatus::Decelerating
},
Event::KeyUp {keycode: KeyCode::Right, .. } => {
player.status = PlayerStatus::Decelerating
},
_ => {} | conditional_block |
unsafe_lib.rs | use std::collections::HashMap;
use std::cell::{Cell, RefCell};
use std::hash::Hash;
use std::ops::{Index, IndexMut};
use std::fmt::Debug;
pub struct MutMap<K, V: Default> {
map: HashMap<K, RefCell<V>>,
}
impl<K: Eq + Hash, V: Default> MutMap<K, V> {
pub fn new() -> Self {
MutMap { map: HashMap::new() }
}
}
impl<K: Hash + Eq + Clone + Debug, V: Default> Index<K> for MutMap<K, V> {
type Output = V;
fn index(&self, idx: K) -> &Self::Output {
let map = &self.map;
if !map.contains_key(&idx) {
panic!("{:?} not found", idx)
}
let cntp = map[&idx].as_ptr();
unsafe { &*cntp }
}
}
impl<K: Hash + Eq + Clone + Debug, V: Default> IndexMut<K> for MutMap<K, V> {
fn index_mut(&mut self, idx: K) -> &mut Self::Output {
let map = &mut self.map;
if !map.contains_key(&idx) {
map.insert(idx.clone(), RefCell::new(V::default()));
}
let cntp = map[&idx].as_ptr();
unsafe { &mut *cntp }
}
}
// Pythonesque Counter implementation
// XXX Move to a separate module
static ZERO: usize = 0;
pub struct Counter<T: Hash + Eq + Clone> {
pub map: HashMap<T, Cell<usize>>,
}
impl<T: Hash + Eq + Clone> Counter<T> {
pub fn new() -> Self {
Counter { map: HashMap::new() }
}
pub fn len(&self) -> usize {
self.map.len()
}
pub fn remove(&mut self, idx: &T) {
self.map.remove(idx);
}
}
impl<T: Hash + Eq + Clone> Index<T> for Counter<T> {
type Output = usize;
fn index(&self, idx: T) -> &Self::Output |
}
impl<T: Hash + Eq + Clone> IndexMut<T> for Counter<T> {
fn index_mut(&mut self, idx: T) -> &mut Self::Output {
if self.map.contains_key(&idx) {
let cntp = self.map[&idx].as_ptr();
unsafe { &mut *cntp }
} else {
self.map.insert(idx.clone(), Cell::new(0));
let cntp = self.map[&idx].as_ptr();
unsafe { &mut *cntp }
}
}
}
| {
if self.map.contains_key(&idx) {
let cntp = self.map[&idx].as_ptr();
unsafe { &*cntp }
} else {
//map.insert(idx, Cell::new(0));
//let mut cntp = map[&idx].as_ptr();
//unsafe {& *cntp}
&ZERO
}
} | identifier_body |
unsafe_lib.rs | use std::collections::HashMap;
use std::cell::{Cell, RefCell};
use std::hash::Hash;
use std::ops::{Index, IndexMut};
use std::fmt::Debug;
pub struct MutMap<K, V: Default> {
map: HashMap<K, RefCell<V>>,
}
impl<K: Eq + Hash, V: Default> MutMap<K, V> {
pub fn new() -> Self {
MutMap { map: HashMap::new() }
}
}
impl<K: Hash + Eq + Clone + Debug, V: Default> Index<K> for MutMap<K, V> {
type Output = V;
fn index(&self, idx: K) -> &Self::Output {
let map = &self.map;
if !map.contains_key(&idx) {
panic!("{:?} not found", idx)
}
let cntp = map[&idx].as_ptr();
unsafe { &*cntp }
}
}
impl<K: Hash + Eq + Clone + Debug, V: Default> IndexMut<K> for MutMap<K, V> {
fn index_mut(&mut self, idx: K) -> &mut Self::Output {
let map = &mut self.map;
if !map.contains_key(&idx) {
map.insert(idx.clone(), RefCell::new(V::default()));
}
let cntp = map[&idx].as_ptr();
unsafe { &mut *cntp }
} | // Pythonesque Counter implementation
// XXX Move to a separate module
static ZERO: usize = 0;
pub struct Counter<T: Hash + Eq + Clone> {
pub map: HashMap<T, Cell<usize>>,
}
impl<T: Hash + Eq + Clone> Counter<T> {
pub fn new() -> Self {
Counter { map: HashMap::new() }
}
pub fn len(&self) -> usize {
self.map.len()
}
pub fn remove(&mut self, idx: &T) {
self.map.remove(idx);
}
}
impl<T: Hash + Eq + Clone> Index<T> for Counter<T> {
type Output = usize;
fn index(&self, idx: T) -> &Self::Output {
if self.map.contains_key(&idx) {
let cntp = self.map[&idx].as_ptr();
unsafe { &*cntp }
} else {
//map.insert(idx, Cell::new(0));
//let mut cntp = map[&idx].as_ptr();
//unsafe {& *cntp}
&ZERO
}
}
}
impl<T: Hash + Eq + Clone> IndexMut<T> for Counter<T> {
fn index_mut(&mut self, idx: T) -> &mut Self::Output {
if self.map.contains_key(&idx) {
let cntp = self.map[&idx].as_ptr();
unsafe { &mut *cntp }
} else {
self.map.insert(idx.clone(), Cell::new(0));
let cntp = self.map[&idx].as_ptr();
unsafe { &mut *cntp }
}
}
} | }
| random_line_split |
unsafe_lib.rs | use std::collections::HashMap;
use std::cell::{Cell, RefCell};
use std::hash::Hash;
use std::ops::{Index, IndexMut};
use std::fmt::Debug;
pub struct MutMap<K, V: Default> {
map: HashMap<K, RefCell<V>>,
}
impl<K: Eq + Hash, V: Default> MutMap<K, V> {
pub fn new() -> Self {
MutMap { map: HashMap::new() }
}
}
impl<K: Hash + Eq + Clone + Debug, V: Default> Index<K> for MutMap<K, V> {
type Output = V;
fn index(&self, idx: K) -> &Self::Output {
let map = &self.map;
if !map.contains_key(&idx) {
panic!("{:?} not found", idx)
}
let cntp = map[&idx].as_ptr();
unsafe { &*cntp }
}
}
impl<K: Hash + Eq + Clone + Debug, V: Default> IndexMut<K> for MutMap<K, V> {
fn index_mut(&mut self, idx: K) -> &mut Self::Output {
let map = &mut self.map;
if !map.contains_key(&idx) {
map.insert(idx.clone(), RefCell::new(V::default()));
}
let cntp = map[&idx].as_ptr();
unsafe { &mut *cntp }
}
}
// Pythonesque Counter implementation
// XXX Move to a separate module
static ZERO: usize = 0;
pub struct Counter<T: Hash + Eq + Clone> {
pub map: HashMap<T, Cell<usize>>,
}
impl<T: Hash + Eq + Clone> Counter<T> {
pub fn | () -> Self {
Counter { map: HashMap::new() }
}
pub fn len(&self) -> usize {
self.map.len()
}
pub fn remove(&mut self, idx: &T) {
self.map.remove(idx);
}
}
impl<T: Hash + Eq + Clone> Index<T> for Counter<T> {
type Output = usize;
fn index(&self, idx: T) -> &Self::Output {
if self.map.contains_key(&idx) {
let cntp = self.map[&idx].as_ptr();
unsafe { &*cntp }
} else {
//map.insert(idx, Cell::new(0));
//let mut cntp = map[&idx].as_ptr();
//unsafe {& *cntp}
&ZERO
}
}
}
impl<T: Hash + Eq + Clone> IndexMut<T> for Counter<T> {
fn index_mut(&mut self, idx: T) -> &mut Self::Output {
if self.map.contains_key(&idx) {
let cntp = self.map[&idx].as_ptr();
unsafe { &mut *cntp }
} else {
self.map.insert(idx.clone(), Cell::new(0));
let cntp = self.map[&idx].as_ptr();
unsafe { &mut *cntp }
}
}
}
| new | identifier_name |
unsafe_lib.rs | use std::collections::HashMap;
use std::cell::{Cell, RefCell};
use std::hash::Hash;
use std::ops::{Index, IndexMut};
use std::fmt::Debug;
pub struct MutMap<K, V: Default> {
map: HashMap<K, RefCell<V>>,
}
impl<K: Eq + Hash, V: Default> MutMap<K, V> {
pub fn new() -> Self {
MutMap { map: HashMap::new() }
}
}
impl<K: Hash + Eq + Clone + Debug, V: Default> Index<K> for MutMap<K, V> {
type Output = V;
fn index(&self, idx: K) -> &Self::Output {
let map = &self.map;
if !map.contains_key(&idx) {
panic!("{:?} not found", idx)
}
let cntp = map[&idx].as_ptr();
unsafe { &*cntp }
}
}
impl<K: Hash + Eq + Clone + Debug, V: Default> IndexMut<K> for MutMap<K, V> {
fn index_mut(&mut self, idx: K) -> &mut Self::Output {
let map = &mut self.map;
if !map.contains_key(&idx) {
map.insert(idx.clone(), RefCell::new(V::default()));
}
let cntp = map[&idx].as_ptr();
unsafe { &mut *cntp }
}
}
// Pythonesque Counter implementation
// XXX Move to a separate module
static ZERO: usize = 0;
pub struct Counter<T: Hash + Eq + Clone> {
pub map: HashMap<T, Cell<usize>>,
}
impl<T: Hash + Eq + Clone> Counter<T> {
pub fn new() -> Self {
Counter { map: HashMap::new() }
}
pub fn len(&self) -> usize {
self.map.len()
}
pub fn remove(&mut self, idx: &T) {
self.map.remove(idx);
}
}
impl<T: Hash + Eq + Clone> Index<T> for Counter<T> {
type Output = usize;
fn index(&self, idx: T) -> &Self::Output {
if self.map.contains_key(&idx) {
let cntp = self.map[&idx].as_ptr();
unsafe { &*cntp }
} else {
//map.insert(idx, Cell::new(0));
//let mut cntp = map[&idx].as_ptr();
//unsafe {& *cntp}
&ZERO
}
}
}
impl<T: Hash + Eq + Clone> IndexMut<T> for Counter<T> {
fn index_mut(&mut self, idx: T) -> &mut Self::Output {
if self.map.contains_key(&idx) | else {
self.map.insert(idx.clone(), Cell::new(0));
let cntp = self.map[&idx].as_ptr();
unsafe { &mut *cntp }
}
}
}
| {
let cntp = self.map[&idx].as_ptr();
unsafe { &mut *cntp }
} | conditional_block |
c_win32.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! C definitions used by libnative that don't belong in liblibc
#![allow(type_overflow)]
use libc;
pub static WSADESCRIPTION_LEN: uint = 256;
pub static WSASYS_STATUS_LEN: uint = 128;
pub static FIONBIO: libc::c_long = 0x8004667e;
static FD_SETSIZE: uint = 64;
pub static MSG_DONTWAIT: libc::c_int = 0;
#[repr(C)]
pub struct WSADATA {
pub wVersion: libc::WORD,
pub wHighVersion: libc::WORD,
pub szDescription: [u8, ..WSADESCRIPTION_LEN + 1],
pub szSystemStatus: [u8, ..WSASYS_STATUS_LEN + 1],
pub iMaxSockets: u16,
pub iMaxUdpDg: u16,
pub lpVendorInfo: *mut u8,
}
pub type LPWSADATA = *mut WSADATA;
#[repr(C)]
pub struct fd_set {
fd_count: libc::c_uint,
fd_array: [libc::SOCKET, ..FD_SETSIZE],
}
pub fn fd_set(set: &mut fd_set, s: libc::SOCKET) {
set.fd_array[set.fd_count as uint] = s;
set.fd_count += 1;
}
#[link(name = "ws2_32")]
extern "system" {
pub fn WSAStartup(wVersionRequested: libc::WORD,
lpWSAData: LPWSADATA) -> libc::c_int;
pub fn WSAGetLastError() -> libc::c_int;
pub fn ioctlsocket(s: libc::SOCKET, cmd: libc::c_long,
argp: *mut libc::c_ulong) -> libc::c_int;
pub fn select(nfds: libc::c_int,
readfds: *mut fd_set,
writefds: *mut fd_set,
exceptfds: *mut fd_set,
timeout: *mut libc::timeval) -> libc::c_int;
pub fn getsockopt(sockfd: libc::SOCKET,
level: libc::c_int,
optname: libc::c_int,
optval: *mut libc::c_char,
optlen: *mut libc::c_int) -> libc::c_int;
pub fn CancelIo(hFile: libc::HANDLE) -> libc::BOOL;
pub fn CancelIoEx(hFile: libc::HANDLE,
lpOverlapped: libc::LPOVERLAPPED) -> libc::BOOL;
}
pub mod compat {
use std::intrinsics::{atomic_store_relaxed, transmute};
use std::iter::Iterator;
use libc::types::os::arch::extra::{LPCWSTR, HMODULE, LPCSTR, LPVOID};
extern "system" {
fn GetModuleHandleW(lpModuleName: LPCWSTR) -> HMODULE;
fn GetProcAddress(hModule: HMODULE, lpProcName: LPCSTR) -> LPVOID;
}
// store_func() is idempotent, so using relaxed ordering for the atomics
// should be enough. This way, calling a function in this compatibility
// layer (after it's loaded) shouldn't be any slower than a regular DLL
// call.
unsafe fn store_func(ptr: *mut uint, module: &str, symbol: &str, fallback: uint) {
let module: Vec<u16> = module.utf16_units().collect();
let module = module.append_one(0);
symbol.with_c_str(|symbol| {
let handle = GetModuleHandleW(module.as_ptr());
let func: uint = transmute(GetProcAddress(handle, symbol));
atomic_store_relaxed(ptr, if func == 0 | else {
func
})
})
}
/// Macro for creating a compatibility fallback for a Windows function
///
/// # Example
/// ```
/// compat_fn!(adll32::SomeFunctionW(_arg: LPCWSTR) {
/// // Fallback implementation
/// })
/// ```
///
/// Note that arguments unused by the fallback implementation should not be called `_` as
/// they need to be passed to the real function if available.
macro_rules! compat_fn(
($module:ident::$symbol:ident($($argname:ident: $argtype:ty),*)
-> $rettype:ty $fallback:block) => (
#[inline(always)]
pub unsafe fn $symbol($($argname: $argtype),*) -> $rettype {
static mut ptr: extern "system" fn($($argname: $argtype),*) -> $rettype = thunk;
extern "system" fn thunk($($argname: $argtype),*) -> $rettype {
unsafe {
::io::c::compat::store_func(&mut ptr as *mut _ as *mut uint,
stringify!($module),
stringify!($symbol),
fallback as uint);
::std::intrinsics::atomic_load_relaxed(&ptr)($($argname),*)
}
}
extern "system" fn fallback($($argname: $argtype),*) -> $rettype $fallback
::std::intrinsics::atomic_load_relaxed(&ptr)($($argname),*)
}
);
($module:ident::$symbol:ident($($argname:ident: $argtype:ty),*) $fallback:block) => (
compat_fn!($module::$symbol($($argname: $argtype),*) -> () $fallback)
)
)
/// Compatibility layer for functions in `kernel32.dll`
///
/// Latest versions of Windows this is needed for:
///
/// * `CreateSymbolicLinkW`: Windows XP, Windows Server 2003
/// * `GetFinalPathNameByHandleW`: Windows XP, Windows Server 2003
pub mod kernel32 {
use libc::types::os::arch::extra::{DWORD, LPCWSTR, BOOLEAN, HANDLE};
use libc::consts::os::extra::ERROR_CALL_NOT_IMPLEMENTED;
extern "system" {
fn SetLastError(dwErrCode: DWORD);
}
compat_fn!(kernel32::CreateSymbolicLinkW(_lpSymlinkFileName: LPCWSTR,
_lpTargetFileName: LPCWSTR,
_dwFlags: DWORD) -> BOOLEAN {
unsafe { SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); }
0
})
compat_fn!(kernel32::GetFinalPathNameByHandleW(_hFile: HANDLE,
_lpszFilePath: LPCWSTR,
_cchFilePath: DWORD,
_dwFlags: DWORD) -> DWORD {
unsafe { SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); }
0
})
}
}
| {
fallback
} | conditional_block |
c_win32.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! C definitions used by libnative that don't belong in liblibc
#![allow(type_overflow)]
use libc;
pub static WSADESCRIPTION_LEN: uint = 256;
pub static WSASYS_STATUS_LEN: uint = 128;
pub static FIONBIO: libc::c_long = 0x8004667e;
static FD_SETSIZE: uint = 64;
pub static MSG_DONTWAIT: libc::c_int = 0;
#[repr(C)]
pub struct WSADATA {
pub wVersion: libc::WORD,
pub wHighVersion: libc::WORD,
pub szDescription: [u8, ..WSADESCRIPTION_LEN + 1],
pub szSystemStatus: [u8, ..WSASYS_STATUS_LEN + 1],
pub iMaxSockets: u16,
pub iMaxUdpDg: u16,
pub lpVendorInfo: *mut u8,
}
pub type LPWSADATA = *mut WSADATA;
#[repr(C)]
pub struct fd_set {
fd_count: libc::c_uint,
fd_array: [libc::SOCKET, ..FD_SETSIZE],
}
pub fn fd_set(set: &mut fd_set, s: libc::SOCKET) {
set.fd_array[set.fd_count as uint] = s;
set.fd_count += 1;
}
#[link(name = "ws2_32")]
extern "system" {
pub fn WSAStartup(wVersionRequested: libc::WORD,
lpWSAData: LPWSADATA) -> libc::c_int;
pub fn WSAGetLastError() -> libc::c_int;
pub fn ioctlsocket(s: libc::SOCKET, cmd: libc::c_long,
argp: *mut libc::c_ulong) -> libc::c_int;
pub fn select(nfds: libc::c_int,
readfds: *mut fd_set,
writefds: *mut fd_set,
exceptfds: *mut fd_set,
timeout: *mut libc::timeval) -> libc::c_int;
pub fn getsockopt(sockfd: libc::SOCKET,
level: libc::c_int,
optname: libc::c_int,
optval: *mut libc::c_char,
optlen: *mut libc::c_int) -> libc::c_int;
pub fn CancelIo(hFile: libc::HANDLE) -> libc::BOOL;
pub fn CancelIoEx(hFile: libc::HANDLE,
lpOverlapped: libc::LPOVERLAPPED) -> libc::BOOL;
}
pub mod compat {
use std::intrinsics::{atomic_store_relaxed, transmute};
use std::iter::Iterator;
use libc::types::os::arch::extra::{LPCWSTR, HMODULE, LPCSTR, LPVOID};
extern "system" {
fn GetModuleHandleW(lpModuleName: LPCWSTR) -> HMODULE;
fn GetProcAddress(hModule: HMODULE, lpProcName: LPCSTR) -> LPVOID;
}
// store_func() is idempotent, so using relaxed ordering for the atomics
// should be enough. This way, calling a function in this compatibility
// layer (after it's loaded) shouldn't be any slower than a regular DLL
// call.
unsafe fn store_func(ptr: *mut uint, module: &str, symbol: &str, fallback: uint) {
let module: Vec<u16> = module.utf16_units().collect();
let module = module.append_one(0);
symbol.with_c_str(|symbol| {
let handle = GetModuleHandleW(module.as_ptr());
let func: uint = transmute(GetProcAddress(handle, symbol));
atomic_store_relaxed(ptr, if func == 0 {
fallback
} else {
func
})
})
}
/// Macro for creating a compatibility fallback for a Windows function
///
/// # Example
/// ```
/// compat_fn!(adll32::SomeFunctionW(_arg: LPCWSTR) {
/// // Fallback implementation
/// })
/// ```
///
/// Note that arguments unused by the fallback implementation should not be called `_` as
/// they need to be passed to the real function if available.
macro_rules! compat_fn(
($module:ident::$symbol:ident($($argname:ident: $argtype:ty),*)
-> $rettype:ty $fallback:block) => (
#[inline(always)]
pub unsafe fn $symbol($($argname: $argtype),*) -> $rettype {
static mut ptr: extern "system" fn($($argname: $argtype),*) -> $rettype = thunk;
extern "system" fn thunk($($argname: $argtype),*) -> $rettype {
unsafe {
::io::c::compat::store_func(&mut ptr as *mut _ as *mut uint,
stringify!($module),
stringify!($symbol),
fallback as uint);
::std::intrinsics::atomic_load_relaxed(&ptr)($($argname),*)
}
}
extern "system" fn fallback($($argname: $argtype),*) -> $rettype $fallback
::std::intrinsics::atomic_load_relaxed(&ptr)($($argname),*)
}
);
($module:ident::$symbol:ident($($argname:ident: $argtype:ty),*) $fallback:block) => (
compat_fn!($module::$symbol($($argname: $argtype),*) -> () $fallback)
)
)
/// Compatibility layer for functions in `kernel32.dll`
///
/// Latest versions of Windows this is needed for:
///
/// * `CreateSymbolicLinkW`: Windows XP, Windows Server 2003
/// * `GetFinalPathNameByHandleW`: Windows XP, Windows Server 2003
pub mod kernel32 {
use libc::types::os::arch::extra::{DWORD, LPCWSTR, BOOLEAN, HANDLE};
use libc::consts::os::extra::ERROR_CALL_NOT_IMPLEMENTED;
extern "system" {
fn SetLastError(dwErrCode: DWORD);
}
| unsafe { SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); }
0
})
compat_fn!(kernel32::GetFinalPathNameByHandleW(_hFile: HANDLE,
_lpszFilePath: LPCWSTR,
_cchFilePath: DWORD,
_dwFlags: DWORD) -> DWORD {
unsafe { SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); }
0
})
}
} | compat_fn!(kernel32::CreateSymbolicLinkW(_lpSymlinkFileName: LPCWSTR,
_lpTargetFileName: LPCWSTR,
_dwFlags: DWORD) -> BOOLEAN { | random_line_split |
c_win32.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! C definitions used by libnative that don't belong in liblibc
#![allow(type_overflow)]
use libc;
pub static WSADESCRIPTION_LEN: uint = 256;
pub static WSASYS_STATUS_LEN: uint = 128;
pub static FIONBIO: libc::c_long = 0x8004667e;
static FD_SETSIZE: uint = 64;
pub static MSG_DONTWAIT: libc::c_int = 0;
#[repr(C)]
pub struct WSADATA {
pub wVersion: libc::WORD,
pub wHighVersion: libc::WORD,
pub szDescription: [u8, ..WSADESCRIPTION_LEN + 1],
pub szSystemStatus: [u8, ..WSASYS_STATUS_LEN + 1],
pub iMaxSockets: u16,
pub iMaxUdpDg: u16,
pub lpVendorInfo: *mut u8,
}
pub type LPWSADATA = *mut WSADATA;
#[repr(C)]
pub struct fd_set {
fd_count: libc::c_uint,
fd_array: [libc::SOCKET, ..FD_SETSIZE],
}
pub fn fd_set(set: &mut fd_set, s: libc::SOCKET) {
set.fd_array[set.fd_count as uint] = s;
set.fd_count += 1;
}
#[link(name = "ws2_32")]
extern "system" {
pub fn WSAStartup(wVersionRequested: libc::WORD,
lpWSAData: LPWSADATA) -> libc::c_int;
pub fn WSAGetLastError() -> libc::c_int;
pub fn ioctlsocket(s: libc::SOCKET, cmd: libc::c_long,
argp: *mut libc::c_ulong) -> libc::c_int;
pub fn select(nfds: libc::c_int,
readfds: *mut fd_set,
writefds: *mut fd_set,
exceptfds: *mut fd_set,
timeout: *mut libc::timeval) -> libc::c_int;
pub fn getsockopt(sockfd: libc::SOCKET,
level: libc::c_int,
optname: libc::c_int,
optval: *mut libc::c_char,
optlen: *mut libc::c_int) -> libc::c_int;
pub fn CancelIo(hFile: libc::HANDLE) -> libc::BOOL;
pub fn CancelIoEx(hFile: libc::HANDLE,
lpOverlapped: libc::LPOVERLAPPED) -> libc::BOOL;
}
pub mod compat {
use std::intrinsics::{atomic_store_relaxed, transmute};
use std::iter::Iterator;
use libc::types::os::arch::extra::{LPCWSTR, HMODULE, LPCSTR, LPVOID};
extern "system" {
fn GetModuleHandleW(lpModuleName: LPCWSTR) -> HMODULE;
fn GetProcAddress(hModule: HMODULE, lpProcName: LPCSTR) -> LPVOID;
}
// store_func() is idempotent, so using relaxed ordering for the atomics
// should be enough. This way, calling a function in this compatibility
// layer (after it's loaded) shouldn't be any slower than a regular DLL
// call.
unsafe fn | (ptr: *mut uint, module: &str, symbol: &str, fallback: uint) {
let module: Vec<u16> = module.utf16_units().collect();
let module = module.append_one(0);
symbol.with_c_str(|symbol| {
let handle = GetModuleHandleW(module.as_ptr());
let func: uint = transmute(GetProcAddress(handle, symbol));
atomic_store_relaxed(ptr, if func == 0 {
fallback
} else {
func
})
})
}
/// Macro for creating a compatibility fallback for a Windows function
///
/// # Example
/// ```
/// compat_fn!(adll32::SomeFunctionW(_arg: LPCWSTR) {
/// // Fallback implementation
/// })
/// ```
///
/// Note that arguments unused by the fallback implementation should not be called `_` as
/// they need to be passed to the real function if available.
macro_rules! compat_fn(
($module:ident::$symbol:ident($($argname:ident: $argtype:ty),*)
-> $rettype:ty $fallback:block) => (
#[inline(always)]
pub unsafe fn $symbol($($argname: $argtype),*) -> $rettype {
static mut ptr: extern "system" fn($($argname: $argtype),*) -> $rettype = thunk;
extern "system" fn thunk($($argname: $argtype),*) -> $rettype {
unsafe {
::io::c::compat::store_func(&mut ptr as *mut _ as *mut uint,
stringify!($module),
stringify!($symbol),
fallback as uint);
::std::intrinsics::atomic_load_relaxed(&ptr)($($argname),*)
}
}
extern "system" fn fallback($($argname: $argtype),*) -> $rettype $fallback
::std::intrinsics::atomic_load_relaxed(&ptr)($($argname),*)
}
);
($module:ident::$symbol:ident($($argname:ident: $argtype:ty),*) $fallback:block) => (
compat_fn!($module::$symbol($($argname: $argtype),*) -> () $fallback)
)
)
/// Compatibility layer for functions in `kernel32.dll`
///
/// Latest versions of Windows this is needed for:
///
/// * `CreateSymbolicLinkW`: Windows XP, Windows Server 2003
/// * `GetFinalPathNameByHandleW`: Windows XP, Windows Server 2003
pub mod kernel32 {
use libc::types::os::arch::extra::{DWORD, LPCWSTR, BOOLEAN, HANDLE};
use libc::consts::os::extra::ERROR_CALL_NOT_IMPLEMENTED;
extern "system" {
fn SetLastError(dwErrCode: DWORD);
}
compat_fn!(kernel32::CreateSymbolicLinkW(_lpSymlinkFileName: LPCWSTR,
_lpTargetFileName: LPCWSTR,
_dwFlags: DWORD) -> BOOLEAN {
unsafe { SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); }
0
})
compat_fn!(kernel32::GetFinalPathNameByHandleW(_hFile: HANDLE,
_lpszFilePath: LPCWSTR,
_cchFilePath: DWORD,
_dwFlags: DWORD) -> DWORD {
unsafe { SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); }
0
})
}
}
| store_func | identifier_name |
video.rs | const VI_V_CURRENT_REG: u32 = 0x10;
const VI_INTR_REG: u32 = 0x0c;
const VI_H_START_REG: u32 = 0x24;
#[derive(Default, Debug)]
pub struct Video {
intr_half_line: u32,
horizontal_video_start: u16,
horizontal_video_end: u16,
current_vertical_line: u16,
}
impl Video {
pub fn read(&self, addr: u32) -> u32 {
match addr {
VI_INTR_REG => self.read_halfline(),
VI_H_START_REG => self.read_h_video(),
VI_V_CURRENT_REG => self.read_current_vertical_line() as u32,
_ => panic!("Unknown address in Video {:#x}", addr),
}
}
pub fn write(&mut self, addr: u32, value: u32) {
match addr {
VI_INTR_REG => self.write_halfline(value),
VI_H_START_REG => self.write_h_video(value),
VI_V_CURRENT_REG => self.write_current_vertical_line(value),
_ => {
panic!("Cannot write to register in Video {:#x} <- {:#x}",
addr,
value)
}
}
}
fn read_halfline(&self) -> u32 {
self.intr_half_line
}
fn write_halfline(&mut self, value: u32) {
self.intr_half_line = value & 0x3ff;
}
fn read_h_video(&self) -> u32 {
(self.horizontal_video_start as u32) << 16 | (self.horizontal_video_end as u32)
}
fn write_h_video(&mut self, value: u32) {
self.horizontal_video_start = (value >> 16 & 0x3ff) as u16;
self.horizontal_video_end = (value & 0x3ff) as u16;
}
fn read_current_vertical_line(&self) -> u16 { | self.current_vertical_line = (value & 0x3ff) as u16;
// TODO clear interrupt line
}
} | self.current_vertical_line & 0x3ff
}
fn write_current_vertical_line(&mut self, value: u32) { | random_line_split |
video.rs | const VI_V_CURRENT_REG: u32 = 0x10;
const VI_INTR_REG: u32 = 0x0c;
const VI_H_START_REG: u32 = 0x24;
#[derive(Default, Debug)]
pub struct Video {
intr_half_line: u32,
horizontal_video_start: u16,
horizontal_video_end: u16,
current_vertical_line: u16,
}
impl Video {
pub fn read(&self, addr: u32) -> u32 {
match addr {
VI_INTR_REG => self.read_halfline(),
VI_H_START_REG => self.read_h_video(),
VI_V_CURRENT_REG => self.read_current_vertical_line() as u32,
_ => panic!("Unknown address in Video {:#x}", addr),
}
}
pub fn | (&mut self, addr: u32, value: u32) {
match addr {
VI_INTR_REG => self.write_halfline(value),
VI_H_START_REG => self.write_h_video(value),
VI_V_CURRENT_REG => self.write_current_vertical_line(value),
_ => {
panic!("Cannot write to register in Video {:#x} <- {:#x}",
addr,
value)
}
}
}
fn read_halfline(&self) -> u32 {
self.intr_half_line
}
fn write_halfline(&mut self, value: u32) {
self.intr_half_line = value & 0x3ff;
}
fn read_h_video(&self) -> u32 {
(self.horizontal_video_start as u32) << 16 | (self.horizontal_video_end as u32)
}
fn write_h_video(&mut self, value: u32) {
self.horizontal_video_start = (value >> 16 & 0x3ff) as u16;
self.horizontal_video_end = (value & 0x3ff) as u16;
}
fn read_current_vertical_line(&self) -> u16 {
self.current_vertical_line & 0x3ff
}
fn write_current_vertical_line(&mut self, value: u32) {
self.current_vertical_line = (value & 0x3ff) as u16;
// TODO clear interrupt line
}
}
| write | identifier_name |
video.rs | const VI_V_CURRENT_REG: u32 = 0x10;
const VI_INTR_REG: u32 = 0x0c;
const VI_H_START_REG: u32 = 0x24;
#[derive(Default, Debug)]
pub struct Video {
intr_half_line: u32,
horizontal_video_start: u16,
horizontal_video_end: u16,
current_vertical_line: u16,
}
impl Video {
pub fn read(&self, addr: u32) -> u32 {
match addr {
VI_INTR_REG => self.read_halfline(),
VI_H_START_REG => self.read_h_video(),
VI_V_CURRENT_REG => self.read_current_vertical_line() as u32,
_ => panic!("Unknown address in Video {:#x}", addr),
}
}
pub fn write(&mut self, addr: u32, value: u32) {
match addr {
VI_INTR_REG => self.write_halfline(value),
VI_H_START_REG => self.write_h_video(value),
VI_V_CURRENT_REG => self.write_current_vertical_line(value),
_ => {
panic!("Cannot write to register in Video {:#x} <- {:#x}",
addr,
value)
}
}
}
fn read_halfline(&self) -> u32 |
fn write_halfline(&mut self, value: u32) {
self.intr_half_line = value & 0x3ff;
}
fn read_h_video(&self) -> u32 {
(self.horizontal_video_start as u32) << 16 | (self.horizontal_video_end as u32)
}
fn write_h_video(&mut self, value: u32) {
self.horizontal_video_start = (value >> 16 & 0x3ff) as u16;
self.horizontal_video_end = (value & 0x3ff) as u16;
}
fn read_current_vertical_line(&self) -> u16 {
self.current_vertical_line & 0x3ff
}
fn write_current_vertical_line(&mut self, value: u32) {
self.current_vertical_line = (value & 0x3ff) as u16;
// TODO clear interrupt line
}
}
| {
self.intr_half_line
} | identifier_body |
uid_filter.rs | use filter;
use filter::Filter;
use walkdir::DirEntry;
use std::os::unix::fs::MetadataExt;
use std::process;
pub struct UidFilter {
uid: u32,
comp_op: filter::CompOp,
}
impl UidFilter {
pub fn new(comp_op: filter::CompOp, uid: u32) -> UidFilter {
UidFilter{comp_op: comp_op, uid: uid}
}
}
impl Filter for UidFilter {
fn test(&self, dir_entry: &DirEntry) -> bool {
match self.comp_op {
filter::CompOp::Equal => self.uid == dir_entry.metadata().unwrap().uid(), | _ => {
eprintln!("Operator {:?} not covered for attribute uid!", self.comp_op);
process::exit(1);
},
}
}
} | filter::CompOp::Unequal => self.uid != dir_entry.metadata().unwrap().uid(), | random_line_split |
uid_filter.rs | use filter;
use filter::Filter;
use walkdir::DirEntry;
use std::os::unix::fs::MetadataExt;
use std::process;
pub struct UidFilter {
uid: u32,
comp_op: filter::CompOp,
}
impl UidFilter {
pub fn new(comp_op: filter::CompOp, uid: u32) -> UidFilter {
UidFilter{comp_op: comp_op, uid: uid}
}
}
impl Filter for UidFilter {
fn | (&self, dir_entry: &DirEntry) -> bool {
match self.comp_op {
filter::CompOp::Equal => self.uid == dir_entry.metadata().unwrap().uid(),
filter::CompOp::Unequal => self.uid != dir_entry.metadata().unwrap().uid(),
_ => {
eprintln!("Operator {:?} not covered for attribute uid!", self.comp_op);
process::exit(1);
},
}
}
}
| test | identifier_name |
tail.rs | #![crate_name = "tail"]
#![feature(collections, core, old_io, old_path, rustc_private, std_misc)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Morten Olsen Lysgaard <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
*/
extern crate getopts;
use std::char::CharExt;
use std::old_io::{stdin, stdout};
use std::old_io::{BufferedReader, BytesReader};
use std::old_io::fs::File;
use std::old_path::Path;
use std::str::from_utf8;
use getopts::{optopt, optflag, getopts, usage};
use std::collections::VecDeque;
use std::old_io::timer::sleep;
use std::time::duration::Duration;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
static NAME: &'static str = "tail";
static VERSION: &'static str = "0.0.1";
pub fn uumain(args: Vec<String>) -> i32 {
let mut beginning = false;
let mut lines = true;
let mut byte_count = 0usize;
let mut line_count = 10usize;
let mut sleep_msec = 1000u64;
// handle obsolete -number syntax
let options = match obsolete(args.tail()) {
(args, Some(n)) => { line_count = n; args },
(args, None) => args
};
let args = options;
let possible_options = [
optopt("c", "bytes", "Number of bytes to print", "k"),
optopt("n", "lines", "Number of lines to print", "k"),
optflag("f", "follow", "Print the file as it grows"),
optopt("s", "sleep-interval", "Number or seconds to sleep between polling the file when running with -f", "n"),
optflag("h", "help", "help"),
optflag("V", "version", "version"),
];
let given_options = match getopts(args.as_slice(), &possible_options) {
Ok (m) => { m }
Err(_) => {
println!("{}", usage(NAME, &possible_options));
return 1;
}
};
if given_options.opt_present("h") {
println!("{}", usage(NAME, &possible_options));
return 0;
}
if given_options.opt_present("V") { version(); return 0 }
let follow = given_options.opt_present("f");
if follow {
match given_options.opt_str("s") {
Some(n) => {
let parsed: Option<u64> = n.parse().ok();
match parsed {
Some(m) => { sleep_msec = m * 1000 }
None => {}
}
}
None => {}
};
}
match given_options.opt_str("n") {
Some(n) => {
let mut slice = n.as_slice();
if slice.len() > 0 && slice.char_at(0) == '+' {
beginning = true;
slice = &slice[1..];
}
line_count = match parse_size(slice) {
Some(m) => m,
None => {
show_error!("invalid number of lines ({})", slice);
return 1;
}
};
}
None => match given_options.opt_str("c") {
Some(n) => {
let mut slice = n.as_slice();
if slice.len() > 0 && slice.char_at(0) == '+' {
beginning = true;
slice = &slice[1..];
}
byte_count = match parse_size(slice) {
Some(m) => m,
None => {
show_error!("invalid number of bytes ({})", slice);
return 1;
}
};
lines = false;
}
None => { }
}
};
let files = given_options.free;
if files.is_empty() {
let mut buffer = BufferedReader::new(stdin());
tail(&mut buffer, line_count, byte_count, beginning, lines, follow, sleep_msec);
} else {
let mut multiple = false;
let mut firstime = true;
if files.len() > 1 {
multiple = true;
}
for file in files.iter() {
if multiple {
if !firstime { println!(""); }
println!("==> {} <==", file.as_slice());
}
firstime = false;
let path = Path::new(file.as_slice());
let reader = File::open(&path).unwrap();
let mut buffer = BufferedReader::new(reader);
tail(&mut buffer, line_count, byte_count, beginning, lines, follow, sleep_msec);
}
}
0
}
fn parse_size(mut size_slice: &str) -> Option<usize> {
let mut base =
if size_slice.len() > 0 && size_slice.char_at(size_slice.len() - 1) == 'B' {
size_slice = &size_slice[..size_slice.len() - 1];
1000usize
} else {
1024usize
};
let exponent =
if size_slice.len() > 0 {
let mut has_suffix = true;
let exp = match size_slice.char_at(size_slice.len() - 1) {
'K' => 1usize,
'M' => 2usize,
'G' => 3usize,
'T' => 4usize,
'P' => 5usize,
'E' => 6usize,
'Z' => 7usize,
'Y' => 8usize,
'b' => {
base = 512usize;
1usize
}
_ => {
has_suffix = false;
0usize
}
};
if has_suffix {
size_slice = &size_slice[..size_slice.len() - 1];
}
exp
} else {
0usize
};
let mut multiplier = 1usize;
for _ in range(0usize, exponent) {
multiplier *= base;
}
if base == 1000usize && exponent == 0usize {
// sole B is not a valid suffix
None
} else {
let value = size_slice.parse();
match value {
Ok(v) => Some(multiplier * v),
_ => None
}
}
}
// It searches for an option in the form of -123123
//
// In case is found, the options vector will get rid of that object so that
// getopts works correctly.
fn obsolete(options: &[String]) -> (Vec<String>, Option<usize>) {
let mut options: Vec<String> = options.to_vec();
let mut a = 0;
let b = options.len();
while a < b {
let current = options[a].clone();
let current = current.as_bytes();
if current.len() > 1 && current[0] == '-' as u8 {
let len = current.len();
for pos in range(1, len) {
// Ensure that the argument is only made out of digits
if !(current[pos] as char).is_numeric() { break; }
// If this is the last number
if pos == len - 1 {
options.remove(a);
let number: Option<usize> = from_utf8(¤t[1..len]).unwrap().parse().ok();
return (options, Some(number.unwrap()));
}
}
}
a += 1;
};
(options, None)
}
macro_rules! tail_impl (
($kind:ty, $kindfn:ident, $kindprint:ident, $reader:ident, $count:ident, $beginning:ident) => ({
// read through each line and store them in a ringbuffer that always contains
// count lines/chars. When reaching the end of file, output the data in the
// ringbuf.
let mut ringbuf: VecDeque<$kind> = VecDeque::new();
let data = $reader.$kindfn().skip(
if $beginning {
let temp = $count;
$count = ::std::usize::MAX;
temp - 1
} else {
0
}
);
for io_datum in data {
match io_datum {
Ok(datum) => {
if $count <= ringbuf.len() {
ringbuf.pop_front();
}
ringbuf.push_back(datum);
}
Err(err) => panic!(err)
}
}
let mut stdout = stdout();
for datum in ringbuf.iter() {
$kindprint(&mut stdout, datum);
}
})
);
fn tail<T: Reader>(reader: &mut BufferedReader<T>, mut line_count: usize, mut byte_count: usize, beginning: bool, lines: bool, follow: bool, sleep_msec: u64) |
#[inline]
fn print_byte<T: Writer>(stdout: &mut T, ch: &u8) {
if let Err(err) = stdout.write_u8(*ch) {
crash!(1, "{}", err);
}
}
#[inline]
fn print_string<T: Writer>(_: &mut T, s: &String) {
print!("{}", s);
}
fn version () {
println!("{} v{}", NAME, VERSION);
}
| {
if lines {
tail_impl!(String, lines, print_string, reader, line_count, beginning);
} else {
tail_impl!(u8, bytes, print_byte, reader, byte_count, beginning);
}
// if we follow the file, sleep a bit and print the rest if the file has grown.
while follow {
sleep(Duration::milliseconds(sleep_msec as i64));
for io_line in reader.lines() {
match io_line {
Ok(line) => print!("{}", line),
Err(err) => panic!(err)
}
}
}
} | identifier_body |
tail.rs | #![crate_name = "tail"]
#![feature(collections, core, old_io, old_path, rustc_private, std_misc)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Morten Olsen Lysgaard <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
*/
extern crate getopts;
use std::char::CharExt;
use std::old_io::{stdin, stdout};
use std::old_io::{BufferedReader, BytesReader};
use std::old_io::fs::File;
use std::old_path::Path;
use std::str::from_utf8;
use getopts::{optopt, optflag, getopts, usage};
use std::collections::VecDeque;
use std::old_io::timer::sleep;
use std::time::duration::Duration;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
static NAME: &'static str = "tail";
static VERSION: &'static str = "0.0.1";
pub fn uumain(args: Vec<String>) -> i32 {
let mut beginning = false;
let mut lines = true;
let mut byte_count = 0usize;
let mut line_count = 10usize;
let mut sleep_msec = 1000u64;
// handle obsolete -number syntax
let options = match obsolete(args.tail()) {
(args, Some(n)) => { line_count = n; args },
(args, None) => args
};
let args = options;
let possible_options = [
optopt("c", "bytes", "Number of bytes to print", "k"),
optopt("n", "lines", "Number of lines to print", "k"),
optflag("f", "follow", "Print the file as it grows"),
optopt("s", "sleep-interval", "Number or seconds to sleep between polling the file when running with -f", "n"),
optflag("h", "help", "help"),
optflag("V", "version", "version"),
];
let given_options = match getopts(args.as_slice(), &possible_options) {
Ok (m) => { m }
Err(_) => {
println!("{}", usage(NAME, &possible_options));
return 1;
}
};
if given_options.opt_present("h") {
println!("{}", usage(NAME, &possible_options));
return 0;
}
if given_options.opt_present("V") { version(); return 0 }
let follow = given_options.opt_present("f");
if follow {
match given_options.opt_str("s") {
Some(n) => {
let parsed: Option<u64> = n.parse().ok();
match parsed {
Some(m) => { sleep_msec = m * 1000 }
None => {}
}
}
None => {}
};
}
match given_options.opt_str("n") {
Some(n) => |
None => match given_options.opt_str("c") {
Some(n) => {
let mut slice = n.as_slice();
if slice.len() > 0 && slice.char_at(0) == '+' {
beginning = true;
slice = &slice[1..];
}
byte_count = match parse_size(slice) {
Some(m) => m,
None => {
show_error!("invalid number of bytes ({})", slice);
return 1;
}
};
lines = false;
}
None => { }
}
};
let files = given_options.free;
if files.is_empty() {
let mut buffer = BufferedReader::new(stdin());
tail(&mut buffer, line_count, byte_count, beginning, lines, follow, sleep_msec);
} else {
let mut multiple = false;
let mut firstime = true;
if files.len() > 1 {
multiple = true;
}
for file in files.iter() {
if multiple {
if !firstime { println!(""); }
println!("==> {} <==", file.as_slice());
}
firstime = false;
let path = Path::new(file.as_slice());
let reader = File::open(&path).unwrap();
let mut buffer = BufferedReader::new(reader);
tail(&mut buffer, line_count, byte_count, beginning, lines, follow, sleep_msec);
}
}
0
}
fn parse_size(mut size_slice: &str) -> Option<usize> {
let mut base =
if size_slice.len() > 0 && size_slice.char_at(size_slice.len() - 1) == 'B' {
size_slice = &size_slice[..size_slice.len() - 1];
1000usize
} else {
1024usize
};
let exponent =
if size_slice.len() > 0 {
let mut has_suffix = true;
let exp = match size_slice.char_at(size_slice.len() - 1) {
'K' => 1usize,
'M' => 2usize,
'G' => 3usize,
'T' => 4usize,
'P' => 5usize,
'E' => 6usize,
'Z' => 7usize,
'Y' => 8usize,
'b' => {
base = 512usize;
1usize
}
_ => {
has_suffix = false;
0usize
}
};
if has_suffix {
size_slice = &size_slice[..size_slice.len() - 1];
}
exp
} else {
0usize
};
let mut multiplier = 1usize;
for _ in range(0usize, exponent) {
multiplier *= base;
}
if base == 1000usize && exponent == 0usize {
// sole B is not a valid suffix
None
} else {
let value = size_slice.parse();
match value {
Ok(v) => Some(multiplier * v),
_ => None
}
}
}
// It searches for an option in the form of -123123
//
// In case is found, the options vector will get rid of that object so that
// getopts works correctly.
fn obsolete(options: &[String]) -> (Vec<String>, Option<usize>) {
let mut options: Vec<String> = options.to_vec();
let mut a = 0;
let b = options.len();
while a < b {
let current = options[a].clone();
let current = current.as_bytes();
if current.len() > 1 && current[0] == '-' as u8 {
let len = current.len();
for pos in range(1, len) {
// Ensure that the argument is only made out of digits
if !(current[pos] as char).is_numeric() { break; }
// If this is the last number
if pos == len - 1 {
options.remove(a);
let number: Option<usize> = from_utf8(¤t[1..len]).unwrap().parse().ok();
return (options, Some(number.unwrap()));
}
}
}
a += 1;
};
(options, None)
}
macro_rules! tail_impl (
($kind:ty, $kindfn:ident, $kindprint:ident, $reader:ident, $count:ident, $beginning:ident) => ({
// read through each line and store them in a ringbuffer that always contains
// count lines/chars. When reaching the end of file, output the data in the
// ringbuf.
let mut ringbuf: VecDeque<$kind> = VecDeque::new();
let data = $reader.$kindfn().skip(
if $beginning {
let temp = $count;
$count = ::std::usize::MAX;
temp - 1
} else {
0
}
);
for io_datum in data {
match io_datum {
Ok(datum) => {
if $count <= ringbuf.len() {
ringbuf.pop_front();
}
ringbuf.push_back(datum);
}
Err(err) => panic!(err)
}
}
let mut stdout = stdout();
for datum in ringbuf.iter() {
$kindprint(&mut stdout, datum);
}
})
);
fn tail<T: Reader>(reader: &mut BufferedReader<T>, mut line_count: usize, mut byte_count: usize, beginning: bool, lines: bool, follow: bool, sleep_msec: u64) {
if lines {
tail_impl!(String, lines, print_string, reader, line_count, beginning);
} else {
tail_impl!(u8, bytes, print_byte, reader, byte_count, beginning);
}
// if we follow the file, sleep a bit and print the rest if the file has grown.
while follow {
sleep(Duration::milliseconds(sleep_msec as i64));
for io_line in reader.lines() {
match io_line {
Ok(line) => print!("{}", line),
Err(err) => panic!(err)
}
}
}
}
#[inline]
fn print_byte<T: Writer>(stdout: &mut T, ch: &u8) {
if let Err(err) = stdout.write_u8(*ch) {
crash!(1, "{}", err);
}
}
#[inline]
fn print_string<T: Writer>(_: &mut T, s: &String) {
print!("{}", s);
}
fn version () {
println!("{} v{}", NAME, VERSION);
}
| {
let mut slice = n.as_slice();
if slice.len() > 0 && slice.char_at(0) == '+' {
beginning = true;
slice = &slice[1..];
}
line_count = match parse_size(slice) {
Some(m) => m,
None => {
show_error!("invalid number of lines ({})", slice);
return 1;
}
};
} | conditional_block |
tail.rs | #![crate_name = "tail"]
#![feature(collections, core, old_io, old_path, rustc_private, std_misc)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Morten Olsen Lysgaard <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
*/
extern crate getopts;
use std::char::CharExt;
use std::old_io::{stdin, stdout};
use std::old_io::{BufferedReader, BytesReader};
use std::old_io::fs::File;
use std::old_path::Path;
use std::str::from_utf8;
use getopts::{optopt, optflag, getopts, usage};
use std::collections::VecDeque;
use std::old_io::timer::sleep;
use std::time::duration::Duration;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
static NAME: &'static str = "tail";
static VERSION: &'static str = "0.0.1";
pub fn uumain(args: Vec<String>) -> i32 {
let mut beginning = false;
let mut lines = true;
let mut byte_count = 0usize;
let mut line_count = 10usize;
let mut sleep_msec = 1000u64;
// handle obsolete -number syntax
let options = match obsolete(args.tail()) {
(args, Some(n)) => { line_count = n; args },
(args, None) => args
};
let args = options;
let possible_options = [
optopt("c", "bytes", "Number of bytes to print", "k"),
optopt("n", "lines", "Number of lines to print", "k"),
optflag("f", "follow", "Print the file as it grows"),
optopt("s", "sleep-interval", "Number or seconds to sleep between polling the file when running with -f", "n"),
optflag("h", "help", "help"),
optflag("V", "version", "version"),
];
let given_options = match getopts(args.as_slice(), &possible_options) {
Ok (m) => { m }
Err(_) => {
println!("{}", usage(NAME, &possible_options));
return 1;
}
};
if given_options.opt_present("h") {
println!("{}", usage(NAME, &possible_options));
return 0;
}
if given_options.opt_present("V") { version(); return 0 }
let follow = given_options.opt_present("f");
if follow {
match given_options.opt_str("s") {
Some(n) => {
let parsed: Option<u64> = n.parse().ok();
match parsed {
Some(m) => { sleep_msec = m * 1000 }
None => {}
}
}
None => {}
};
}
match given_options.opt_str("n") {
Some(n) => {
let mut slice = n.as_slice();
if slice.len() > 0 && slice.char_at(0) == '+' {
beginning = true;
slice = &slice[1..];
}
line_count = match parse_size(slice) {
Some(m) => m,
None => {
show_error!("invalid number of lines ({})", slice);
return 1;
}
};
}
None => match given_options.opt_str("c") {
Some(n) => {
let mut slice = n.as_slice();
if slice.len() > 0 && slice.char_at(0) == '+' {
beginning = true;
slice = &slice[1..];
}
byte_count = match parse_size(slice) {
Some(m) => m,
None => {
show_error!("invalid number of bytes ({})", slice);
return 1;
}
};
lines = false;
}
None => { }
}
};
let files = given_options.free;
if files.is_empty() {
let mut buffer = BufferedReader::new(stdin());
tail(&mut buffer, line_count, byte_count, beginning, lines, follow, sleep_msec);
} else {
let mut multiple = false;
let mut firstime = true;
if files.len() > 1 {
multiple = true;
}
for file in files.iter() {
if multiple {
if !firstime { println!(""); }
println!("==> {} <==", file.as_slice());
}
firstime = false;
let path = Path::new(file.as_slice());
let reader = File::open(&path).unwrap();
let mut buffer = BufferedReader::new(reader);
tail(&mut buffer, line_count, byte_count, beginning, lines, follow, sleep_msec);
}
}
0
}
fn parse_size(mut size_slice: &str) -> Option<usize> {
let mut base =
if size_slice.len() > 0 && size_slice.char_at(size_slice.len() - 1) == 'B' {
size_slice = &size_slice[..size_slice.len() - 1];
1000usize
} else {
1024usize
};
let exponent =
if size_slice.len() > 0 {
let mut has_suffix = true;
let exp = match size_slice.char_at(size_slice.len() - 1) {
'K' => 1usize,
'M' => 2usize,
'G' => 3usize,
'T' => 4usize,
'P' => 5usize,
'E' => 6usize,
'Z' => 7usize,
'Y' => 8usize,
'b' => {
base = 512usize;
1usize
}
_ => {
has_suffix = false;
0usize
}
};
if has_suffix {
size_slice = &size_slice[..size_slice.len() - 1];
}
exp
} else {
0usize
};
let mut multiplier = 1usize;
for _ in range(0usize, exponent) {
multiplier *= base;
}
if base == 1000usize && exponent == 0usize {
// sole B is not a valid suffix
None
} else {
let value = size_slice.parse();
match value {
Ok(v) => Some(multiplier * v),
_ => None
}
}
}
// It searches for an option in the form of -123123
//
// In case is found, the options vector will get rid of that object so that
// getopts works correctly.
fn obsolete(options: &[String]) -> (Vec<String>, Option<usize>) {
let mut options: Vec<String> = options.to_vec();
let mut a = 0;
let b = options.len();
while a < b {
let current = options[a].clone();
let current = current.as_bytes();
if current.len() > 1 && current[0] == '-' as u8 {
let len = current.len();
for pos in range(1, len) {
// Ensure that the argument is only made out of digits
if !(current[pos] as char).is_numeric() { break; }
// If this is the last number
if pos == len - 1 {
options.remove(a);
let number: Option<usize> = from_utf8(¤t[1..len]).unwrap().parse().ok();
return (options, Some(number.unwrap()));
}
}
}
a += 1;
};
(options, None)
}
macro_rules! tail_impl (
($kind:ty, $kindfn:ident, $kindprint:ident, $reader:ident, $count:ident, $beginning:ident) => ({
// read through each line and store them in a ringbuffer that always contains
// count lines/chars. When reaching the end of file, output the data in the
// ringbuf.
let mut ringbuf: VecDeque<$kind> = VecDeque::new();
let data = $reader.$kindfn().skip(
if $beginning {
let temp = $count;
$count = ::std::usize::MAX;
temp - 1
} else {
0
}
);
for io_datum in data {
match io_datum {
Ok(datum) => {
if $count <= ringbuf.len() {
ringbuf.pop_front();
}
ringbuf.push_back(datum);
}
Err(err) => panic!(err)
}
}
let mut stdout = stdout();
for datum in ringbuf.iter() {
$kindprint(&mut stdout, datum);
}
})
);
fn tail<T: Reader>(reader: &mut BufferedReader<T>, mut line_count: usize, mut byte_count: usize, beginning: bool, lines: bool, follow: bool, sleep_msec: u64) {
if lines {
tail_impl!(String, lines, print_string, reader, line_count, beginning); |
// if we follow the file, sleep a bit and print the rest if the file has grown.
while follow {
sleep(Duration::milliseconds(sleep_msec as i64));
for io_line in reader.lines() {
match io_line {
Ok(line) => print!("{}", line),
Err(err) => panic!(err)
}
}
}
}
#[inline]
fn print_byte<T: Writer>(stdout: &mut T, ch: &u8) {
if let Err(err) = stdout.write_u8(*ch) {
crash!(1, "{}", err);
}
}
#[inline]
fn print_string<T: Writer>(_: &mut T, s: &String) {
print!("{}", s);
}
fn version () {
println!("{} v{}", NAME, VERSION);
} | } else {
tail_impl!(u8, bytes, print_byte, reader, byte_count, beginning);
} | random_line_split |
tail.rs | #![crate_name = "tail"]
#![feature(collections, core, old_io, old_path, rustc_private, std_misc)]
/*
* This file is part of the uutils coreutils package.
*
* (c) Morten Olsen Lysgaard <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
*/
extern crate getopts;
use std::char::CharExt;
use std::old_io::{stdin, stdout};
use std::old_io::{BufferedReader, BytesReader};
use std::old_io::fs::File;
use std::old_path::Path;
use std::str::from_utf8;
use getopts::{optopt, optflag, getopts, usage};
use std::collections::VecDeque;
use std::old_io::timer::sleep;
use std::time::duration::Duration;
#[path = "../common/util.rs"]
#[macro_use]
mod util;
static NAME: &'static str = "tail";
static VERSION: &'static str = "0.0.1";
pub fn uumain(args: Vec<String>) -> i32 {
let mut beginning = false;
let mut lines = true;
let mut byte_count = 0usize;
let mut line_count = 10usize;
let mut sleep_msec = 1000u64;
// handle obsolete -number syntax
let options = match obsolete(args.tail()) {
(args, Some(n)) => { line_count = n; args },
(args, None) => args
};
let args = options;
let possible_options = [
optopt("c", "bytes", "Number of bytes to print", "k"),
optopt("n", "lines", "Number of lines to print", "k"),
optflag("f", "follow", "Print the file as it grows"),
optopt("s", "sleep-interval", "Number or seconds to sleep between polling the file when running with -f", "n"),
optflag("h", "help", "help"),
optflag("V", "version", "version"),
];
let given_options = match getopts(args.as_slice(), &possible_options) {
Ok (m) => { m }
Err(_) => {
println!("{}", usage(NAME, &possible_options));
return 1;
}
};
if given_options.opt_present("h") {
println!("{}", usage(NAME, &possible_options));
return 0;
}
if given_options.opt_present("V") { version(); return 0 }
let follow = given_options.opt_present("f");
if follow {
match given_options.opt_str("s") {
Some(n) => {
let parsed: Option<u64> = n.parse().ok();
match parsed {
Some(m) => { sleep_msec = m * 1000 }
None => {}
}
}
None => {}
};
}
match given_options.opt_str("n") {
Some(n) => {
let mut slice = n.as_slice();
if slice.len() > 0 && slice.char_at(0) == '+' {
beginning = true;
slice = &slice[1..];
}
line_count = match parse_size(slice) {
Some(m) => m,
None => {
show_error!("invalid number of lines ({})", slice);
return 1;
}
};
}
None => match given_options.opt_str("c") {
Some(n) => {
let mut slice = n.as_slice();
if slice.len() > 0 && slice.char_at(0) == '+' {
beginning = true;
slice = &slice[1..];
}
byte_count = match parse_size(slice) {
Some(m) => m,
None => {
show_error!("invalid number of bytes ({})", slice);
return 1;
}
};
lines = false;
}
None => { }
}
};
let files = given_options.free;
if files.is_empty() {
let mut buffer = BufferedReader::new(stdin());
tail(&mut buffer, line_count, byte_count, beginning, lines, follow, sleep_msec);
} else {
let mut multiple = false;
let mut firstime = true;
if files.len() > 1 {
multiple = true;
}
for file in files.iter() {
if multiple {
if !firstime { println!(""); }
println!("==> {} <==", file.as_slice());
}
firstime = false;
let path = Path::new(file.as_slice());
let reader = File::open(&path).unwrap();
let mut buffer = BufferedReader::new(reader);
tail(&mut buffer, line_count, byte_count, beginning, lines, follow, sleep_msec);
}
}
0
}
fn parse_size(mut size_slice: &str) -> Option<usize> {
let mut base =
if size_slice.len() > 0 && size_slice.char_at(size_slice.len() - 1) == 'B' {
size_slice = &size_slice[..size_slice.len() - 1];
1000usize
} else {
1024usize
};
let exponent =
if size_slice.len() > 0 {
let mut has_suffix = true;
let exp = match size_slice.char_at(size_slice.len() - 1) {
'K' => 1usize,
'M' => 2usize,
'G' => 3usize,
'T' => 4usize,
'P' => 5usize,
'E' => 6usize,
'Z' => 7usize,
'Y' => 8usize,
'b' => {
base = 512usize;
1usize
}
_ => {
has_suffix = false;
0usize
}
};
if has_suffix {
size_slice = &size_slice[..size_slice.len() - 1];
}
exp
} else {
0usize
};
let mut multiplier = 1usize;
for _ in range(0usize, exponent) {
multiplier *= base;
}
if base == 1000usize && exponent == 0usize {
// sole B is not a valid suffix
None
} else {
let value = size_slice.parse();
match value {
Ok(v) => Some(multiplier * v),
_ => None
}
}
}
// It searches for an option in the form of -123123
//
// If such an option is found, it is removed from the options vector so that
// getopts can parse the remaining arguments correctly.
fn obsolete(options: &[String]) -> (Vec<String>, Option<usize>) {
let mut options: Vec<String> = options.to_vec();
let mut a = 0;
let b = options.len();
while a < b {
let current = options[a].clone();
let current = current.as_bytes();
if current.len() > 1 && current[0] == '-' as u8 {
let len = current.len();
for pos in range(1, len) {
// Ensure that the argument is only made out of digits
if !(current[pos] as char).is_numeric() { break; }
// If this is the last number
if pos == len - 1 {
options.remove(a);
let number: Option<usize> = from_utf8(¤t[1..len]).unwrap().parse().ok();
return (options, Some(number.unwrap()));
}
}
}
a += 1;
};
(options, None)
}
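// Illustrative sketch of the rewrite performed above for an invocation such as
// `tail -20 foo.txt` (hypothetical argument values):
//
//     let args = vec!["-20".to_string(), "foo.txt".to_string()];
//     let (rest, count) = obsolete(&args);
//     // rest == ["foo.txt"], count == Some(20), later applied as the line count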
macro_rules! tail_impl (
($kind:ty, $kindfn:ident, $kindprint:ident, $reader:ident, $count:ident, $beginning:ident) => ({
// read through the input and store the lines/chars in a ringbuffer that always
// contains at most `count` of them. When reaching the end of file, output the
// data in the ringbuf.
let mut ringbuf: VecDeque<$kind> = VecDeque::new();
let data = $reader.$kindfn().skip(
if $beginning {
let temp = $count;
$count = ::std::usize::MAX;
temp - 1
} else {
0
}
);
for io_datum in data {
match io_datum {
Ok(datum) => {
if $count <= ringbuf.len() {
ringbuf.pop_front();
}
ringbuf.push_back(datum);
}
Err(err) => panic!(err)
}
}
let mut stdout = stdout();
for datum in ringbuf.iter() {
$kindprint(&mut stdout, datum);
}
})
);
fn | <T: Reader>(reader: &mut BufferedReader<T>, mut line_count: usize, mut byte_count: usize, beginning: bool, lines: bool, follow: bool, sleep_msec: u64) {
if lines {
tail_impl!(String, lines, print_string, reader, line_count, beginning);
} else {
tail_impl!(u8, bytes, print_byte, reader, byte_count, beginning);
}
// if we follow the file, sleep a bit and print the rest if the file has grown.
while follow {
sleep(Duration::milliseconds(sleep_msec as i64));
for io_line in reader.lines() {
match io_line {
Ok(line) => print!("{}", line),
Err(err) => panic!(err)
}
}
}
}
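// The ring-buffer strategy used by `tail_impl!` above, shown as a standalone sketch
// against modern std (assumed helper, not part of this module's API):
//
//     use std::collections::VecDeque;
//
//     fn last_n<T>(items: impl IntoIterator<Item = T>, n: usize) -> VecDeque<T> {
//         let mut ring = VecDeque::new();
//         for item in items {
//             if n <= ring.len() {
//                 ring.pop_front(); // drop the oldest entry once the window is full
//             }
//             ring.push_back(item);
//         }
//         ring // for n >= 1, the last `n` items in input order
//     }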
#[inline]
fn print_byte<T: Writer>(stdout: &mut T, ch: &u8) {
if let Err(err) = stdout.write_u8(*ch) {
crash!(1, "{}", err);
}
}
#[inline]
fn print_string<T: Writer>(_: &mut T, s: &String) {
print!("{}", s);
}
fn version () {
println!("{} v{}", NAME, VERSION);
}
| tail | identifier_name |
textedit.rs | //! Editing text in this library is handled by either `nk_edit_string` or
//! `nk_edit_buffer`. But like almost everything in this library there are multiple
//! ways of doing it and a balance between control and ease of use with memory
//! as well as functionality controlled by flags.
//!
//! This library generally allows three different levels of memory control:
//! The first is the most basic way of just providing a simple char array with
//! string length. This method is probably the easiest way of handling simple
//! user text input. Main upside is complete control over memory while the biggest
//! downside in comparison with the other two approaches is missing undo/redo.
//!
//! For UIs that require undo/redo the second way was created. It is based on
//! a fixed size nk_text_edit struct, which has an internal undo/redo stack.
//! This is mainly useful if you want something more like a text editor but don't want
//! to have a dynamically growing buffer.
//!
| //! The final way is using a dynamically growing nk_text_edit struct, which
//! has both a default version if you don't care where memory comes from and an
//! allocator version if you do. While the text editor is quite powerful for its
//! complexity I would not recommend editing gigabytes of data with it.
//! It is rather designed for use cases which make sense for a GUI library, not for
//! a full-blown text editor. | random_line_split
|
context.rs | //
// SOS: the Stupid Operating System
// by Eliza Weisman ([email protected])
//
// Copyright (c) 2015-2017 Eliza Weisman
// Released under the terms of the MIT license. See `LICENSE` in the root
// directory of this repository for more information.
//
//! `x86_64` execution contexts.
//!
//! This is intended to be general-purpose and composable, so that the same
//! code can be reused for interrupts and for multithreading.
use core::mem;
use core::fmt;
use super::flags::{Flags as RFlags};
use super::segment;
/// Registers pushed to the stack when handling an interrupt or context switch.
#[repr(C, packed)]
#[derive(Copy, Clone)]
pub struct Registers { pub rsi: u64
, pub rdi: u64
, pub r11: u64
, pub r10: u64
, pub r9: u64
, pub r8: u64
, pub rdx: u64
, pub rcx: u64
, pub rax: u64
}
impl Registers {
/// Transform this struct into an array of `u64`s
/// (if you would ever want to do this)
/// TODO: rewrite this to be a `convert::Into` implementation.
// - eliza, 03/09/2017
pub unsafe fn to_array(&self) -> [u64; 9] {
// [ self.rsi, self.rdi, self.r11
// , self.r10, self.r9, self.r8
// , self.rdx, self.rcx, self.rax
// ]
// using transmute is probably faster and we're already unsafe...
mem::transmute(*self)
}
/// Create a new empty set of Registers
pub const fn empty() -> Self {
Registers { rsi: 0, rdi: 0, r11: 0
, r10: 0, r9: 0, r8: 0
, rdx: 0, rcx: 0, rax: 0
}
}
/// Push the caller-saved registers to the stack
/// (such as when handling a context switch or interrupt).
///
/// THIS FUNCTION IS NAKED. DO NOT CALL IT NORMALLY.
#[naked]
#[inline(always)]
pub unsafe fn push() {
asm!( "push rax
push rcx
push rdx
push r8
push r9
push r10
push r11
push rdi
push rsi"
:::: "intel"
, "volatile");
}
/// Pop the caller-saved registers off the stack
/// (such as when handling a context switch or interrupt).
///
/// THIS FUNCTION IS NAKED. DO NOT CALL IT NORMALLY.
#[naked]
#[inline(always)]
pub unsafe fn pop() {
asm!( "pop rsi
pop rdi
pop r11
pop r10
pop r9
pop r8
pop rdx
pop rcx
pop rax"
:::: "intel"
, "volatile");
}
}
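// A possible shape for the `convert::Into` rewrite mentioned in the TODO above
// (sketch only; keeps the same nine-u64 layout assumption as `to_array`):
//
//     impl From<Registers> for [u64; 9] {
//         fn from(regs: Registers) -> [u64; 9] {
//             unsafe { mem::transmute(regs) }
//         }
//     }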
impl fmt::Debug for Registers {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!( f
, " RSI: {:#018x} RDI: {:#018x} R11: {:#018x}\n \
R10: {:#018x} R9: {:#018x} R8: {:#018x}\n \
RDX: {:#018x} RCX: {:#018x} RAX: {:#018x}"
, self.rsi, self.rdi, self.r11
, self.r10, self.r9, self.r8
, self.rdx, self.rcx, self.rax)
}
}
#[repr(C, packed)]
pub struct InterruptFrame {
// this is the actual value of the interrupt stack frame context,
// not the old one (which is wrong). note that the old one seems to cause
// stack misalignment.
// -- eliza, october 4th, 2016
/// Value of the instruction pointer (`$rip`) register
pub rip: *const u8
, /// Value of the code segment (`$cs`) register
pub cs: segment::Selector
, __pad_1: u32
, __pad_2: u16
, /// Value of the CPU flags (`$rflags`) register
pub rflags: RFlags
, /// Value of the stack pointer (`$rsp`) register
// TODO: should this actually be a pointer?
pub rsp: *const u8
, /// Value of the stack segment (`$ss`) register
pub ss: segment::Selector
, __pad_3: u32
, __pad_4: u16
}
| use super::InterruptFrame;
assert_eq!(size_of::<InterruptFrame>(), 32);
}
}
impl fmt::Debug for InterruptFrame {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!( f
, "Interrupt Frame: \
\n instruction pointer: {:p} \
\n code segment: {} \
\n rflags: {:?} \
\n stack pointer: {:p} \
\n stack segment: {}"
, self.rip
// , self.__pad_1, self.__pad_2
, self.cs
, self.rflags
, self.rsp
// , self.__pad_3, self.__pad_4
, self.ss)
}
}
/// Thread execution context
#[repr(C, packed)]
pub struct Context { /// Value of the stack pointer (`rsp`) register
pub rsp: *mut u8
, /// Value of the caller-saved registers
pub registers: Registers
, /// Value of the instruction pointer (`rip`) register
pub rip: *mut u8
//, pub stack: [u8] // TODO: should be box
}
impl Context {
pub fn empty() -> Self {
unsafe {
Context { rsp: mem::transmute(0u64)
, registers: Registers::empty()
, rip: mem::transmute(0u64)
//, stack: [0u8; 8]
}
}
}
} | #[cfg(test)]
mod test {
#[test]
fn test_interrupt_frame_correct_size() {
use core::mem::size_of; | random_line_split |
context.rs | //
// SOS: the Stupid Operating System
// by Eliza Weisman ([email protected])
//
// Copyright (c) 2015-2017 Eliza Weisman
// Released under the terms of the MIT license. See `LICENSE` in the root
// directory of this repository for more information.
//
//! `x86_64` execution contexts.
//!
//! This is intended to be general-purpose and composable, so that the same
//! code can be reused for interrupts and for multithreading.
use core::mem;
use core::fmt;
use super::flags::{Flags as RFlags};
use super::segment;
/// Registers pushed to the stack when handling an interrupt or context switch.
#[repr(C, packed)]
#[derive(Copy, Clone)]
pub struct Registers { pub rsi: u64
, pub rdi: u64
, pub r11: u64
, pub r10: u64
, pub r9: u64
, pub r8: u64
, pub rdx: u64
, pub rcx: u64
, pub rax: u64
}
impl Registers {
/// Transform this struct into an array of `u64`s
/// (if you would ever want to do this)
/// TODO: rewrite this to be a `convert::Into` implementation.
// - eliza, 03/09/2017
pub unsafe fn to_array(&self) -> [u64; 9] {
// [ self.rsi, self.rdi, self.r11
// , self.r10, self.r9, self.r8
// , self.rdx, self.rcx, self.rax
// ]
// using transmute is probably faster and we're already unsafe...
mem::transmute(*self)
}
/// Create a new empty set of Registers
pub const fn empty() -> Self {
Registers { rsi: 0, rdi: 0, r11: 0
, r10: 0, r9: 0, r8: 0
, rdx: 0, rcx: 0, rax: 0
}
}
/// Push the caller-saved registers to the stack
/// (such as when handling a context switch or interrupt).
///
/// THIS FUNCTION IS NAKED. DO NOT CALL IT NORMALLY.
#[naked]
#[inline(always)]
pub unsafe fn push() {
asm!( "push rax
push rcx
push rdx
push r8
push r9
push r10
push r11
push rdi
push rsi"
:::: "intel"
, "volatile");
}
/// Pop the caller-saved registers off the stack
/// (such as when handling a context switch or interrupt).
///
/// THIS FUNCTION IS NAKED. DO NOT CALL IT NORMALLY.
#[naked]
#[inline(always)]
pub unsafe fn pop() {
asm!( "pop rsi
pop rdi
pop r11
pop r10
pop r9
pop r8
pop rdx
pop rcx
pop rax"
:::: "intel"
, "volatile");
}
}
impl fmt::Debug for Registers {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!( f
, " RSI: {:#018x} RDI: {:#018x} R11: {:#018x}\n \
R10: {:#018x} R9: {:#018x} R8: {:#018x}\n \
RDX: {:#018x} RCX: {:#018x} RAX: {:#018x}"
, self.rsi, self.rdi, self.r11
, self.r10, self.r9, self.r8
, self.rdx, self.rcx, self.rax)
}
}
#[repr(C, packed)]
pub struct InterruptFrame {
// this is the actual value of the interrupt stack frame context,
// not the old one (which is wrong). note that the old one seems to cause
// stack misalignment.
// -- eliza, october 4th, 2016
/// Value of the instruction pointer (`$rip`) register
pub rip: *const u8
, /// Value of the code segment (`$cs`) register
pub cs: segment::Selector
, __pad_1: u32
, __pad_2: u16
, /// Value of the CPU flags (`$rflags`) register
pub rflags: RFlags
, /// Value of the stack pointer (`$rsp`) register
// TODO: should this actually be a pointer?
pub rsp: *const u8
, /// Value of the stack segment (`$ss`) register
pub ss: segment::Selector
, __pad_3: u32
, __pad_4: u16
}
#[cfg(test)]
mod test {
#[test]
fn | () {
use core::mem::size_of;
use super::InterruptFrame;
assert_eq!(size_of::<InterruptFrame>(), 32);
}
}
impl fmt::Debug for InterruptFrame {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!( f
, "Interrupt Frame: \
\n instruction pointer: {:p} \
\n code segment: {} \
\n rflags: {:?} \
\n stack pointer: {:p} \
\n stack segment: {}"
, self.rip
// , self.__pad_1, self.__pad_2
, self.cs
, self.rflags
, self.rsp
// , self.__pad_3, self.__pad_4
, self.ss)
}
}
/// Thread execution context
#[repr(C, packed)]
pub struct Context { /// Value of the stack pointer (`rsp`) register
pub rsp: *mut u8
, /// Value of the caller-saved registers
pub registers: Registers
, /// Value of the instruction pointer (`rip`) register
pub rip: *mut u8
//, pub stack: [u8] // TODO: should be box
}
impl Context {
pub fn empty() -> Self {
unsafe {
Context { rsp: mem::transmute(0u64)
, registers: Registers::empty()
, rip: mem::transmute(0u64)
//, stack: [0u8; 8]
}
}
}
}
| test_interrupt_frame_correct_size | identifier_name |
mod.rs | // Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! VM Output display utils.
use std::time::Duration;
use bigint::prelude::U256;
pub mod json;
pub mod simple;
/// Formats duration into human readable format.
pub fn | (time: &Duration) -> String {
format!("{}.{:.9}s", time.as_secs(), time.subsec_nanos())
}
/// Formats the time as microseconds.
pub fn as_micros(time: &Duration) -> u64 {
time.as_secs() * 1_000_000 + time.subsec_nanos() as u64 / 1_000
}
/// Converts U256 into string.
/// TODO Overcomes: https://github.com/paritytech/bigint/issues/13
pub fn u256_as_str(v: &U256) -> String {
if v.is_zero() {
"\"0x0\"".into()
} else {
format!("\"{:x}\"", v)
}
}
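// Illustrative usage of the helpers above (values chosen for the example):
//
//     let t = Duration::new(1, 500_000_000);
//     assert_eq!(as_micros(&t), 1_500_000);
//     assert_eq!(format_time(&t), "1.500000000s");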
| format_time | identifier_name |
mod.rs | // Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! VM Output display utils.
use std::time::Duration;
use bigint::prelude::U256;
pub mod json;
pub mod simple;
/// Formats duration into human readable format.
pub fn format_time(time: &Duration) -> String {
format!("{}.{:.9}s", time.as_secs(), time.subsec_nanos())
}
/// Formats the time as microseconds.
pub fn as_micros(time: &Duration) -> u64 {
time.as_secs() * 1_000_000 + time.subsec_nanos() as u64 / 1_000
}
/// Converts U256 into string.
/// TODO Overcomes: https://github.com/paritytech/bigint/issues/13
pub fn u256_as_str(v: &U256) -> String {
if v.is_zero() | else {
format!("\"{:x}\"", v)
}
}
| {
"\"0x0\"".into()
} | conditional_block |
mod.rs | // Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity. | // (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! VM Output display utils.
use std::time::Duration;
use bigint::prelude::U256;
pub mod json;
pub mod simple;
/// Formats duration into human readable format.
pub fn format_time(time: &Duration) -> String {
format!("{}.{:.9}s", time.as_secs(), time.subsec_nanos())
}
/// Formats the time as microseconds.
pub fn as_micros(time: &Duration) -> u64 {
time.as_secs() * 1_000_000 + time.subsec_nanos() as u64 / 1_000
}
/// Converts U256 into string.
/// TODO Overcomes: https://github.com/paritytech/bigint/issues/13
pub fn u256_as_str(v: &U256) -> String {
if v.is_zero() {
"\"0x0\"".into()
} else {
format!("\"{:x}\"", v)
}
} |
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or | random_line_split |
mod.rs | // Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! VM Output display utils.
use std::time::Duration;
use bigint::prelude::U256;
pub mod json;
pub mod simple;
/// Formats duration into human readable format.
pub fn format_time(time: &Duration) -> String |
/// Formats the time as microseconds.
pub fn as_micros(time: &Duration) -> u64 {
time.as_secs() * 1_000_000 + time.subsec_nanos() as u64 / 1_000
}
/// Converts U256 into string.
/// TODO Overcomes: https://github.com/paritytech/bigint/issues/13
pub fn u256_as_str(v: &U256) -> String {
if v.is_zero() {
"\"0x0\"".into()
} else {
format!("\"{:x}\"", v)
}
}
| {
format!("{}.{:.9}s", time.as_secs(), time.subsec_nanos())
} | identifier_body |
lib.rs | //! A Rust library for allocation-limited computation of the Discrete Cosine Transform.
//!
//! 1D DCTs are allocation-free but 2D requires allocation.
//!
//! Features:
//!
//! * `simd`: use SIMD types to speed computation (2D DCT only)
//! * `cos-approx`: use a Taylor series approximation of cosine instead of the stdlib
//! implementation (which is usually much slower but also higher precision)
use std::f64::consts::{PI, SQRT_2};
use std::ops::Range;
/// An allocation-free one-dimensional Discrete Cosine Transform.
///
/// Each iteration produces the next DCT value in the sequence.
#[derive(Clone, Debug)]
pub struct DCT1D<'a> {
data: &'a [f64],
curr: Range<usize>,
}
impl<'a> DCT1D<'a> {
/// Create a new DCT 1D adaptor from a 1D vector of data.
pub fn new(data: &[f64]) -> DCT1D {
let curr = 0.. data.len();
DCT1D {
data: data,
curr: curr,
}
}
// Converted from the C implementation here:
// http://unix4lyfe.org/dct/listing2.c
// Source page:
// http://unix4lyfe.org/dct/ (Accessed 8/10/2014)
fn next_dct_val(&mut self) -> Option<f64> {
self.curr.next().map(|u| {
let mut z = 0.0;
let data_len = self.data.len();
for (x_idx, &x) in self.data.iter().enumerate() {
z += x * cos(
PI * u as f64 * (2 * x_idx + 1) as f64
/ (2 * data_len) as f64
);
}
if u == 0 {
z *= 1.0 / SQRT_2;
}
z / 2.0
})
}
}
impl<'a> Iterator for DCT1D<'a> {
type Item = f64;
fn next(&mut self) -> Option<f64> {
self.next_dct_val()
}
}
/// An implementation of cosine that switches to a Taylor-series approximation when throughput is
/// preferred over precision.
#[inline(always)]
pub fn cos(x: f64) -> f64 {
// This branch should be optimized out.
if cfg!(feature = "cos-approx") {
// Normalize to [-pi, pi) or else the Taylor series spits out very wrong results.
let x = (x.abs() + PI) % (2.0 * PI) - PI;
// Approximate the cosine of `val` using a 4-term Taylor series.
// Can be expanded for higher precision.
let x2 = x.powi(2);
let x4 = x.powi(4);
let x6 = x.powi(6);
let x8 = x.powi(8);
1.0 - (x2 / 2.0) + (x4 / 24.0) - (x6 / 720.0) + (x8 / 40320.0)
} else {
x.cos()
}
}
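// For reference, the truncated Maclaurin series the `cos-approx` branch evaluates:
//
//     cos x ~= 1 - x^2/2! + x^4/4! - x^6/6! + x^8/8!
//           =  1 - x^2/2  + x^4/24 - x^6/720 + x^8/40320
//
// The input is first folded into [-pi, pi), where the truncation error is smallest
// near 0 and largest near +-pi.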
/// Perform a 2D DCT on a 1D-packed vector with a given rowstride.
///
/// E.g. a vector of length 9 with a rowstride of 3 will be processed as a 3x3 matrix.
///
/// Returns a vector of the same size packed in the same way.
pub fn dct_2d(packed_2d: &[f64], rowstride: usize) -> Vec<f64> {
assert_eq!(packed_2d.len() % rowstride, 0);
let mut row_dct: Vec<f64> = packed_2d
.chunks(rowstride)
.flat_map(DCT1D::new)
.collect();
swap_rows_columns(&mut row_dct, rowstride);
let mut column_dct: Vec<f64> = packed_2d
.chunks(rowstride)
.flat_map(DCT1D::new)
.collect();
swap_rows_columns(&mut column_dct, rowstride);
column_dct
}
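// Illustrative usage (a 3x3 block packed row-major, rowstride = 3):
//
//     let block = [
//         1.0, 2.0, 3.0,
//         4.0, 5.0, 6.0,
//         7.0, 8.0, 9.0,
//     ];
//     let coeffs = dct_2d(&block, 3);
//     assert_eq!(coeffs.len(), 9); // same packing, now holding DCT coefficients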
fn swap_rows_columns(data: &mut [f64], rowstride: usize) {
let height = data.len() / rowstride;
for y in 0.. height {
for x in 0.. rowstride {
data.swap(y * rowstride + x, x * rowstride + y);
}
}
}
#[cfg_attr(all(test, feature = "cos-approx"), test)]
#[cfg_attr(not(all(test, feature = "cos-approx")), allow(dead_code))]
fn test_cos_approx() {
const ERROR: f64 = 0.05;
fn test_cos_approx(x: f64) {
let approx = cos(x);
let cos = x.cos();
assert!(
approx.abs_sub(x.cos()) <= ERROR,
"Approximation cos({x}) = {approx} was outside a tolerance of {error}; control value: {cos}",
x = x, approx = approx, error = ERROR, cos = cos,
);
}
let test_values = [PI, PI / 2.0, PI / 4.0, 1.0, -1.0, 2.0 * PI, 3.0 * PI, 4.0 / 3.0 * PI];
for &x in &test_values {
test_cos_approx(x);
test_cos_approx(-x);
}
}
/*
#[cfg(feature = "simd")]
mod dct_simd {
use simdty::f64x2;
use std::f64::consts::{PI, SQRT_2};
macro_rules! valx2 ( ($val:expr) => ( ::simdty::f64x2($val, $val) ) );
const PI: f64x2 = valx2!(PI);
const ONE_DIV_SQRT_2: f64x2 = valx2!(1 / SQRT_2);
const SQRT_2: f64x2 = valx2!(SQRT_2);
pub dct_rows(vals: &[Vec<f64>]) -> Vec<Vec<f64>> {
let mut out = Vec::with_capacity(vals.len());
for pair in vals.iter().chunks(2) {
if pair.len() == 2 {
let vals = pair[0].iter().cloned().zip(pair[1].iter().cloned()) |
}
}
fn dct_1dx2(vec: Vec<f64x2>) -> Vec<f64x2> {
let mut out = Vec::with_capacity(vec.len());
for u in 0.. vec.len() {
let mut z = valx2!(0.0);
for x in 0.. vec.len() {
z += vec[x] * cos_approx(
PI * valx2!(
u as f64 * (2 * x + 1) as f64
/ (2 * vec.len()) as f64
)
);
}
if u == 0 {
z *= ONE_DIV_SQRT_2;
}
out.insert(u, z / valx2!(2.0));
}
out
}
fn cos_approx(x2: f64x2) -> f64x2 {
#[inline(always)]
fn powi(val: f64x2, pow: i32) -> f64x2 {
unsafe { llvmint::powi_v2f64(val, pow) }
}
let x2 = powi(val, 2);
let x4 = powi(val, 4);
let x6 = powi(val, 6);
let x8 = powi(val, 8);
valx2!(1.0) - (x2 / valx2!(2.0)) + (x4 / valx2!(24.0))
- (x6 / valx2!(720.0)) + (x8 / valx2!(40320.0))
}
}
*/ | .map(f64x2)
.collect();
dct_1dx2(vals); | random_line_split |
lib.rs | //! A Rust library for allocation-limited computation of the Discrete Cosine Transform.
//!
//! 1D DCTs are allocation-free but 2D requires allocation.
//!
//! Features:
//!
//! * `simd`: use SIMD types to speed computation (2D DCT only)
//! * `cos-approx`: use a Taylor series approximation of cosine instead of the stdlib
//! implementation (which is usually much slower but also higher precision)
use std::f64::consts::{PI, SQRT_2};
use std::ops::Range;
/// An allocation-free one-dimensional Discrete Cosine Transform.
///
/// Each iteration produces the next DCT value in the sequence.
#[derive(Clone, Debug)]
pub struct DCT1D<'a> {
data: &'a [f64],
curr: Range<usize>,
}
impl<'a> DCT1D<'a> {
/// Create a new DCT 1D adaptor from a 1D vector of data.
pub fn new(data: &[f64]) -> DCT1D {
let curr = 0.. data.len();
DCT1D {
data: data,
curr: curr,
}
}
// Converted from the C implementation here:
// http://unix4lyfe.org/dct/listing2.c
// Source page:
// http://unix4lyfe.org/dct/ (Accessed 8/10/2014)
fn next_dct_val(&mut self) -> Option<f64> {
self.curr.next().map(|u| {
let mut z = 0.0;
let data_len = self.data.len();
for (x_idx, &x) in self.data.iter().enumerate() {
z += x * cos(
PI * u as f64 * (2 * x_idx + 1) as f64
/ (2 * data_len) as f64
);
}
if u == 0 {
z *= 1.0 / SQRT_2;
}
z / 2.0
})
}
}
impl<'a> Iterator for DCT1D<'a> {
type Item = f64;
fn next(&mut self) -> Option<f64> {
self.next_dct_val()
}
}
/// An implementation of cosine that switches to a Taylor-series approximation when throughput is
/// preferred over precision.
#[inline(always)]
pub fn cos(x: f64) -> f64 {
// This branch should be optimized out.
if cfg!(feature = "cos-approx") {
// Normalize to [-pi, pi) or else the Taylor series spits out very wrong results.
let x = (x.abs() + PI) % (2.0 * PI) - PI;
// Approximate the cosine of `val` using a 4-term Taylor series.
// Can be expanded for higher precision.
let x2 = x.powi(2);
let x4 = x.powi(4);
let x6 = x.powi(6);
let x8 = x.powi(8);
1.0 - (x2 / 2.0) + (x4 / 24.0) - (x6 / 720.0) + (x8 / 40320.0)
} else {
x.cos()
}
}
/// Perform a 2D DCT on a 1D-packed vector with a given rowstride.
///
/// E.g. a vector of length 9 with a rowstride of 3 will be processed as a 3x3 matrix.
///
/// Returns a vector of the same size packed in the same way.
pub fn dct_2d(packed_2d: &[f64], rowstride: usize) -> Vec<f64> {
assert_eq!(packed_2d.len() % rowstride, 0);
let mut row_dct: Vec<f64> = packed_2d
.chunks(rowstride)
.flat_map(DCT1D::new)
.collect();
swap_rows_columns(&mut row_dct, rowstride);
let mut column_dct: Vec<f64> = packed_2d
.chunks(rowstride)
.flat_map(DCT1D::new)
.collect();
swap_rows_columns(&mut column_dct, rowstride);
column_dct
}
fn swap_rows_columns(data: &mut [f64], rowstride: usize) {
let height = data.len() / rowstride;
for y in 0.. height {
for x in 0.. rowstride {
data.swap(y * rowstride + x, x * rowstride + y);
}
}
}
#[cfg_attr(all(test, feature = "cos-approx"), test)]
#[cfg_attr(not(all(test, feature = "cos-approx")), allow(dead_code))]
fn test_cos_approx() {
const ERROR: f64 = 0.05;
fn test_cos_approx(x: f64) |
let test_values = [PI, PI / 2.0, PI / 4.0, 1.0, -1.0, 2.0 * PI, 3.0 * PI, 4.0 / 3.0 * PI];
for &x in &test_values {
test_cos_approx(x);
test_cos_approx(-x);
}
}
/*
#[cfg(feature = "simd")]
mod dct_simd {
use simdty::f64x2;
use std::f64::consts::{PI, SQRT_2};
macro_rules! valx2 ( ($val:expr) => ( ::simdty::f64x2($val, $val) ) );
const PI: f64x2 = valx2!(PI);
const ONE_DIV_SQRT_2: f64x2 = valx2!(1 / SQRT_2);
const SQRT_2: f64x2 = valx2!(SQRT_2);
pub dct_rows(vals: &[Vec<f64>]) -> Vec<Vec<f64>> {
let mut out = Vec::with_capacity(vals.len());
for pair in vals.iter().chunks(2) {
if pair.len() == 2 {
let vals = pair[0].iter().cloned().zip(pair[1].iter().cloned())
.map(f64x2)
.collect();
dct_1dx2(vals);
}
}
fn dct_1dx2(vec: Vec<f64x2>) -> Vec<f64x2> {
let mut out = Vec::with_capacity(vec.len());
for u in 0.. vec.len() {
let mut z = valx2!(0.0);
for x in 0.. vec.len() {
z += vec[x] * cos_approx(
PI * valx2!(
u as f64 * (2 * x + 1) as f64
/ (2 * vec.len()) as f64
)
);
}
if u == 0 {
z *= ONE_DIV_SQRT_2;
}
out.insert(u, z / valx2!(2.0));
}
out
}
fn cos_approx(x2: f64x2) -> f64x2 {
#[inline(always)]
fn powi(val: f64x2, pow: i32) -> f64x2 {
unsafe { llvmint::powi_v2f64(val, pow) }
}
let x2 = powi(val, 2);
let x4 = powi(val, 4);
let x6 = powi(val, 6);
let x8 = powi(val, 8);
valx2!(1.0) - (x2 / valx2!(2.0)) + (x4 / valx2!(24.0))
- (x6 / valx2!(720.0)) + (x8 / valx2!(40320.0))
}
}
*/
| {
let approx = cos(x);
let cos = x.cos();
assert!(
approx.abs_sub(x.cos()) <= ERROR,
"Approximation cos({x}) = {approx} was outside a tolerance of {error}; control value: {cos}",
x = x, approx = approx, error = ERROR, cos = cos,
);
} | identifier_body |
lib.rs | //! A Rust library for allocation-limited computation of the Discrete Cosine Transform.
//!
//! 1D DCTs are allocation-free but 2D requires allocation.
//!
//! Features:
//!
//! * `simd`: use SIMD types to speed computation (2D DCT only)
//! * `cos-approx`: use a Taylor series approximation of cosine instead of the stdlib
//! implementation (which is usually much slower but also higher precision)
use std::f64::consts::{PI, SQRT_2};
use std::ops::Range;
/// An allocation-free one-dimensional Discrete Cosine Transform.
///
/// Each iteration produces the next DCT value in the sequence.
#[derive(Clone, Debug)]
pub struct DCT1D<'a> {
data: &'a [f64],
curr: Range<usize>,
}
impl<'a> DCT1D<'a> {
/// Create a new DCT 1D adaptor from a 1D vector of data.
pub fn new(data: &[f64]) -> DCT1D {
let curr = 0.. data.len();
DCT1D {
data: data,
curr: curr,
}
}
// Converted from the C implementation here:
// http://unix4lyfe.org/dct/listing2.c
// Source page:
// http://unix4lyfe.org/dct/ (Accessed 8/10/2014)
fn next_dct_val(&mut self) -> Option<f64> {
self.curr.next().map(|u| {
let mut z = 0.0;
let data_len = self.data.len();
for (x_idx, &x) in self.data.iter().enumerate() {
z += x * cos(
PI * u as f64 * (2 * x_idx + 1) as f64
/ (2 * data_len) as f64
);
}
if u == 0 {
z *= 1.0 / SQRT_2;
}
z / 2.0
})
}
}
impl<'a> Iterator for DCT1D<'a> {
type Item = f64;
fn next(&mut self) -> Option<f64> {
self.next_dct_val()
}
}
/// An implementation of cosine that switches to a Taylor-series approximation when throughput is
/// preferred over precision.
#[inline(always)]
pub fn cos(x: f64) -> f64 {
// This branch should be optimized out.
if cfg!(feature = "cos-approx") | else {
x.cos()
}
}
/// Perform a 2D DCT on a 1D-packed vector with a given rowstride.
///
/// E.g. a vector of length 9 with a rowstride of 3 will be processed as a 3x3 matrix.
///
/// Returns a vector of the same size packed in the same way.
pub fn dct_2d(packed_2d: &[f64], rowstride: usize) -> Vec<f64> {
assert_eq!(packed_2d.len() % rowstride, 0);
let mut row_dct: Vec<f64> = packed_2d
.chunks(rowstride)
.flat_map(DCT1D::new)
.collect();
swap_rows_columns(&mut row_dct, rowstride);
let mut column_dct: Vec<f64> = packed_2d
.chunks(rowstride)
.flat_map(DCT1D::new)
.collect();
swap_rows_columns(&mut column_dct, rowstride);
column_dct
}
fn swap_rows_columns(data: &mut [f64], rowstride: usize) {
let height = data.len() / rowstride;
for y in 0.. height {
for x in 0.. rowstride {
data.swap(y * rowstride + x, x * rowstride + y);
}
}
}
#[cfg_attr(all(test, feature = "cos-approx"), test)]
#[cfg_attr(not(all(test, feature = "cos-approx")), allow(dead_code))]
fn test_cos_approx() {
const ERROR: f64 = 0.05;
fn test_cos_approx(x: f64) {
let approx = cos(x);
let cos = x.cos();
assert!(
approx.abs_sub(x.cos()) <= ERROR,
"Approximation cos({x}) = {approx} was outside a tolerance of {error}; control value: {cos}",
x = x, approx = approx, error = ERROR, cos = cos,
);
}
let test_values = [PI, PI / 2.0, PI / 4.0, 1.0, -1.0, 2.0 * PI, 3.0 * PI, 4.0 / 3.0 * PI];
for &x in &test_values {
test_cos_approx(x);
test_cos_approx(-x);
}
}
/*
#[cfg(feature = "simd")]
mod dct_simd {
use simdty::f64x2;
use std::f64::consts::{PI, SQRT_2};
macro_rules! valx2 ( ($val:expr) => ( ::simdty::f64x2($val, $val) ) );
const PI: f64x2 = valx2!(PI);
const ONE_DIV_SQRT_2: f64x2 = valx2!(1 / SQRT_2);
const SQRT_2: f64x2 = valx2!(SQRT_2);
pub dct_rows(vals: &[Vec<f64>]) -> Vec<Vec<f64>> {
let mut out = Vec::with_capacity(vals.len());
for pair in vals.iter().chunks(2) {
if pair.len() == 2 {
let vals = pair[0].iter().cloned().zip(pair[1].iter().cloned())
.map(f64x2)
.collect();
dct_1dx2(vals);
}
}
fn dct_1dx2(vec: Vec<f64x2>) -> Vec<f64x2> {
let mut out = Vec::with_capacity(vec.len());
for u in 0.. vec.len() {
let mut z = valx2!(0.0);
for x in 0.. vec.len() {
z += vec[x] * cos_approx(
PI * valx2!(
u as f64 * (2 * x + 1) as f64
/ (2 * vec.len()) as f64
)
);
}
if u == 0 {
z *= ONE_DIV_SQRT_2;
}
out.insert(u, z / valx2!(2.0));
}
out
}
fn cos_approx(x2: f64x2) -> f64x2 {
#[inline(always)]
fn powi(val: f64x2, pow: i32) -> f64x2 {
unsafe { llvmint::powi_v2f64(val, pow) }
}
let x2 = powi(val, 2);
let x4 = powi(val, 4);
let x6 = powi(val, 6);
let x8 = powi(val, 8);
valx2!(1.0) - (x2 / valx2!(2.0)) + (x4 / valx2!(24.0))
- (x6 / valx2!(720.0)) + (x8 / valx2!(40320.0))
}
}
*/
| {
// Normalize to [-pi, pi) or else the Taylor series spits out very wrong results.
let x = (x.abs() + PI) % (2.0 * PI) - PI;
// Approximate the cosine of `val` using a 4-term Taylor series.
// Can be expanded for higher precision.
let x2 = x.powi(2);
let x4 = x.powi(4);
let x6 = x.powi(6);
let x8 = x.powi(8);
1.0 - (x2 / 2.0) + (x4 / 24.0) - (x6 / 720.0) + (x8 / 40320.0)
} | conditional_block |
lib.rs | //! A Rust library for allocation-limited computation of the Discrete Cosine Transform.
//!
//! 1D DCTs are allocation-free but 2D requires allocation.
//!
//! Features:
//!
//! * `simd`: use SIMD types to speed computation (2D DCT only)
//! * `cos-approx`: use a Taylor series approximation of cosine instead of the stdlib
//! implementation (which is usually much slower but also higher precision)
use std::f64::consts::{PI, SQRT_2};
use std::ops::Range;
/// An allocation-free one-dimensional Discrete Cosine Transform.
///
/// Each iteration produces the next DCT value in the sequence.
#[derive(Clone, Debug)]
pub struct DCT1D<'a> {
data: &'a [f64],
curr: Range<usize>,
}
impl<'a> DCT1D<'a> {
/// Create a new DCT 1D adaptor from a 1D vector of data.
pub fn new(data: &[f64]) -> DCT1D {
let curr = 0.. data.len();
DCT1D {
data: data,
curr: curr,
}
}
// Converted from the C implementation here:
// http://unix4lyfe.org/dct/listing2.c
// Source page:
// http://unix4lyfe.org/dct/ (Accessed 8/10/2014)
fn next_dct_val(&mut self) -> Option<f64> {
self.curr.next().map(|u| {
let mut z = 0.0;
let data_len = self.data.len();
for (x_idx, &x) in self.data.iter().enumerate() {
z += x * cos(
PI * u as f64 * (2 * x_idx + 1) as f64
/ (2 * data_len) as f64
);
}
if u == 0 {
z *= 1.0 / SQRT_2;
}
z / 2.0
})
}
}
impl<'a> Iterator for DCT1D<'a> {
type Item = f64;
fn next(&mut self) -> Option<f64> {
self.next_dct_val()
}
}
/// An implementation of cosine that switches to a Taylor-series approximation when throughput is
/// preferred over precision.
#[inline(always)]
pub fn cos(x: f64) -> f64 {
// This branch should be optimized out.
if cfg!(feature = "cos-approx") {
// Normalize to [-pi, pi) or else the Taylor series spits out very wrong results.
let x = (x.abs() + PI) % (2.0 * PI) - PI;
// Approximate the cosine of `val` using a 4-term Taylor series.
// Can be expanded for higher precision.
let x2 = x.powi(2);
let x4 = x.powi(4);
let x6 = x.powi(6);
let x8 = x.powi(8);
1.0 - (x2 / 2.0) + (x4 / 24.0) - (x6 / 720.0) + (x8 / 40320.0)
} else {
x.cos()
}
}
/// Perform a 2D DCT on a 1D-packed vector with a given rowstride.
///
/// E.g. a vector of length 9 with a rowstride of 3 will be processed as a 3x3 matrix.
///
/// Returns a vector of the same size packed in the same way.
pub fn | (packed_2d: &[f64], rowstride: usize) -> Vec<f64> {
assert_eq!(packed_2d.len() % rowstride, 0);
let mut row_dct: Vec<f64> = packed_2d
.chunks(rowstride)
.flat_map(DCT1D::new)
.collect();
swap_rows_columns(&mut row_dct, rowstride);
let mut column_dct: Vec<f64> = packed_2d
.chunks(rowstride)
.flat_map(DCT1D::new)
.collect();
swap_rows_columns(&mut column_dct, rowstride);
column_dct
}
fn swap_rows_columns(data: &mut [f64], rowstride: usize) {
let height = data.len() / rowstride;
for y in 0.. height {
for x in 0.. rowstride {
data.swap(y * rowstride + x, x * rowstride + y);
}
}
}
#[cfg_attr(all(test, feature = "cos-approx"), test)]
#[cfg_attr(not(all(test, feature = "cos-approx")), allow(dead_code))]
fn test_cos_approx() {
const ERROR: f64 = 0.05;
fn test_cos_approx(x: f64) {
let approx = cos(x);
let cos = x.cos();
assert!(
approx.abs_sub(x.cos()) <= ERROR,
"Approximation cos({x}) = {approx} was outside a tolerance of {error}; control value: {cos}",
x = x, approx = approx, error = ERROR, cos = cos,
);
}
let test_values = [PI, PI / 2.0, PI / 4.0, 1.0, -1.0, 2.0 * PI, 3.0 * PI, 4.0 / 3.0 * PI];
for &x in &test_values {
test_cos_approx(x);
test_cos_approx(-x);
}
}
/*
#[cfg(feature = "simd")]
mod dct_simd {
use simdty::f64x2;
use std::f64::consts::{PI, SQRT_2};
macro_rules! valx2 ( ($val:expr) => ( ::simdty::f64x2($val, $val) ) );
const PI: f64x2 = valx2!(PI);
const ONE_DIV_SQRT_2: f64x2 = valx2!(1 / SQRT_2);
const SQRT_2: f64x2 = valx2!(SQRT_2);
pub dct_rows(vals: &[Vec<f64>]) -> Vec<Vec<f64>> {
let mut out = Vec::with_capacity(vals.len());
for pair in vals.iter().chunks(2) {
if pair.len() == 2 {
let vals = pair[0].iter().cloned().zip(pair[1].iter().cloned())
.map(f64x2)
.collect();
dct_1dx2(vals);
}
}
fn dct_1dx2(vec: Vec<f64x2>) -> Vec<f64x2> {
let mut out = Vec::with_capacity(vec.len());
for u in 0.. vec.len() {
let mut z = valx2!(0.0);
for x in 0.. vec.len() {
z += vec[x] * cos_approx(
PI * valx2!(
u as f64 * (2 * x + 1) as f64
/ (2 * vec.len()) as f64
)
);
}
if u == 0 {
z *= ONE_DIV_SQRT_2;
}
out.insert(u, z / valx2!(2.0));
}
out
}
fn cos_approx(x2: f64x2) -> f64x2 {
#[inline(always)]
fn powi(val: f64x2, pow: i32) -> f64x2 {
unsafe { llvmint::powi_v2f64(val, pow) }
}
let x2 = powi(val, 2);
let x4 = powi(val, 4);
let x6 = powi(val, 6);
let x8 = powi(val, 8);
valx2!(1.0) - (x2 / valx2!(2.0)) + (x4 / valx2!(24.0))
- (x6 / valx2!(720.0)) + (x8 / valx2!(40320.0))
}
}
*/
| dct_2d | identifier_name |
constants.rs | // The MIT License (MIT)
// Copyright © 2014-2018 Miguel Peláez <[email protected]>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
// files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy,
// modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
// is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
/// Fancy logo
pub const ASCII_ART: &str = r"
_ _ _ __ _
| | (_) |_ ___ __ _ _ __ _ / _| |_
| |__| | _/ -_) _| '_/ _` | _| _|
|____|_|\__\___\__|_| \__,_|_| \__|
";
/// Config file to lookup
pub const CONFIG_FILE: &str = "litecraft.yml";
/// Client version
pub const LITECRAFT_VERSION: &str = "A1";
/// Compatible Minecraft server version
pub const MINECRAFT_VERSION: &str = "1.13.1";
| pub const VERSION_TEXT: &str = "Litecraft A1\nMinecraft 1.13.1"; | /// Debug version string | random_line_split |
real_ints.rs | //! Defines basic operations defined under Real_Ints theory in SMTLIB2.
use std::fmt;
#[macro_use]
use crate::backends::backend::SMTNode;
#[derive(Clone, Debug)]
pub enum OpCodes {
Neg,
Sub,
Add,
Mul,
Div,
Lte,
Lt,
Gte,
Gt,
ToReal,
ToInt,
IsInt,
ConstInt(u64),
ConstReal(f64),
FreeVar(String),
}
impl fmt::Display for OpCodes {
fn | (&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = match *self {
OpCodes::Neg => "-".to_owned(),
OpCodes::Sub => "-".to_owned(),
OpCodes::Add => "+".to_owned(),
OpCodes::Mul => "*".to_owned(),
OpCodes::Div => "/".to_owned(),
OpCodes::Lte => "<=".to_owned(),
OpCodes::Lt => "<".to_owned(),
OpCodes::Gte => ">=".to_owned(),
OpCodes::Gt => ">".to_owned(),
OpCodes::ToReal => "to_real".to_owned(),
OpCodes::ToInt => "to_int".to_owned(),
OpCodes::IsInt => "is_int".to_owned(),
OpCodes::ConstInt(ref val) => format!("{}", val),
OpCodes::ConstReal(ref val) => format!("{}", val),
OpCodes::FreeVar(ref name) => format!("{}", name),
};
write!(f, "{}", s)
}
}
impl_smt_node!(OpCodes, define vars [OpCodes::FreeVar(_)], define consts [OpCodes::ConstInt(_), OpCodes::ConstReal(_)]);
#[derive(Clone,Debug)]
pub enum Sorts {
Real,
Int
}
impl fmt::Display for Sorts {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = match *self {
Sorts::Real => "Real",
Sorts::Int => "Int"
};
write!(f, "{}", "s")
}
}
| fmt | identifier_name |
real_ints.rs | //! Defines basic operations defined under Real_Ints theory in SMTLIB2.
use std::fmt;
#[macro_use]
use crate::backends::backend::SMTNode;
#[derive(Clone, Debug)]
pub enum OpCodes { | Neg,
Sub,
Add,
Mul,
Div,
Lte,
Lt,
Gte,
Gt,
ToReal,
ToInt,
IsInt,
ConstInt(u64),
ConstReal(f64),
FreeVar(String),
}
impl fmt::Display for OpCodes {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = match *self {
OpCodes::Neg => "-".to_owned(),
OpCodes::Sub => "-".to_owned(),
OpCodes::Add => "+".to_owned(),
OpCodes::Mul => "*".to_owned(),
OpCodes::Div => "/".to_owned(),
OpCodes::Lte => "<=".to_owned(),
OpCodes::Lt => "<".to_owned(),
OpCodes::Gte => ">=".to_owned(),
OpCodes::Gt => ">".to_owned(),
OpCodes::ToReal => "to_real".to_owned(),
OpCodes::ToInt => "to_int".to_owned(),
OpCodes::IsInt => "is_int".to_owned(),
OpCodes::ConstInt(ref val) => format!("{}", val),
OpCodes::ConstReal(ref val) => format!("{}", val),
OpCodes::FreeVar(ref name) => format!("{}", name),
};
write!(f, "{}", s)
}
}
impl_smt_node!(OpCodes, define vars [OpCodes::FreeVar(_)], define consts [OpCodes::ConstInt(_), OpCodes::ConstReal(_)]);
#[derive(Clone,Debug)]
pub enum Sorts {
Real,
Int
}
impl fmt::Display for Sorts {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = match *self {
Sorts::Real => "Real",
Sorts::Int => "Int"
};
write!(f, "{}", "s")
}
} | random_line_split |
|
http.rs | use crate::real_std::{
fmt, fs,
path::PathBuf,
pin::Pin,
sync::{Arc, Mutex},
};
use {
collect_mac::collect,
futures::{
future::{self, BoxFuture},
prelude::*,
ready,
task::{self, Poll},
},
http::{
header::{HeaderMap, HeaderName, HeaderValue},
StatusCode,
},
hyper::{body::Bytes, Server},
pin_project_lite::pin_project,
};
use crate::base::types::{ArcType, Type};
use crate::{
vm::{
self,
api::{
generic, Collect, Eff, Function, Getable, OpaqueValue, PushAsRef, Pushable, VmType,
WithVM, IO,
},
thread::{ActiveThread, RootedThread, Thread},
ExternModule, Variants,
},
Error,
};
macro_rules! try_future {
($e:expr) => {
try_future!($e, Box::pin)
};
($e:expr, $f:expr) => {
match $e {
Ok(x) => x,
Err(err) => return $f(::futures::future::err(err.into())),
}
};
}
pub struct HttpEffect;
impl VmType for HttpEffect {
type Type = Self;
fn | (vm: &Thread) -> ArcType {
let r = generic::R::make_type(vm);
Type::app(
vm.find_type_info("std.http.types.HttpEffect")
.map(|alias| alias.into_type())
.unwrap_or_else(|_| Type::hole()),
collect![r],
)
}
}
pub type EffectHandler<T> = Eff<HttpEffect, T>;
pub struct Headers(HeaderMap);
impl VmType for Headers {
type Type = Vec<(String, Vec<u8>)>;
fn make_type(vm: &Thread) -> ArcType {
Vec::<(String, Vec<u8>)>::make_type(vm)
}
}
impl<'vm> Pushable<'vm> for Headers {
fn vm_push(self, context: &mut ActiveThread<'vm>) -> vm::Result<()> {
Collect::new(
self.0
.iter()
.map(|(name, value)| (name.as_str(), value.as_bytes())),
)
.vm_push(context)
}
}
impl<'vm, 'value> Getable<'vm, 'value> for Headers {
impl_getable_simple!();
fn from_value(vm: &'vm Thread, value: Variants<'value>) -> Self {
Headers(
Collect::from_value(vm, value)
// TODO Error somehow on invalid headers
.filter_map(|(name, value): (&str, &[u8])| {
match (
HeaderName::from_bytes(name.as_bytes()),
HeaderValue::from_bytes(value),
) {
(Ok(name), Ok(value)) => Some((name, value)),
_ => None,
}
})
.collect(),
)
}
}
// By implementing `Userdata` on `Body` it can be automatically pushed and retrieved from gluon
// threads
#[derive(Userdata, Trace, VmType, Clone)]
#[gluon(vm_type = "std.http.types.Body")]
#[gluon(crate_name = "::vm")]
#[gluon_userdata(clone)]
#[gluon_trace(skip)]
// Representation of an http body that is in the process of being read
pub struct Body(
Arc<Mutex<Pin<Box<dyn Stream<Item = Result<PushAsRef<Bytes, [u8]>, vm::Error>> + Send>>>>,
);
// Types implementing `Userdata` require a `std::fmt::Debug` implementation so they can be displayed
impl fmt::Debug for Body {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "hyper::Body")
}
}
// Since `Body` implements `Userdata` gluon will automatically marshal the gluon representation
// into `&Body` argument
fn read_chunk(body: &Body) -> impl Future<Output = IO<Option<PushAsRef<Bytes, [u8]>>>> {
use futures::future::poll_fn;
let body = body.0.clone();
poll_fn(move |cx| {
let mut stream = body.lock().unwrap();
Poll::Ready(IO::Value(
if let Some(result) = ready!(stream.as_mut().poll_next(cx)) {
match result {
Ok(chunk) => Some(chunk),
Err(err) => return IO::Exception(err.to_string()).into(),
}
} else {
None
},
))
})
}
// A http body that is being written
#[derive(Userdata, Trace, VmType, Clone)]
#[gluon(vm_type = "std.http.types.ResponseBody")]
#[gluon(crate_name = "::vm")]
#[gluon_userdata(clone)]
#[gluon_trace(skip)]
pub struct ResponseBody(Arc<Mutex<Option<hyper::body::Sender>>>);
impl fmt::Debug for ResponseBody {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "ResponseBody")
}
}
fn write_response(response: &ResponseBody, bytes: &[u8]) -> impl Future<Output = IO<()>> {
use futures::future::poll_fn;
// Turn `bytes` into a `Bytes` which can be sent to the http body
let mut unsent_chunk = Some(bytes.to_owned().into());
let response = response.0.clone();
poll_fn(move |cx| {
info!("Starting response send");
let mut sender = response.lock().unwrap();
let sender = sender
.as_mut()
.expect("Sender has been dropped while still in use");
let chunk = unsent_chunk
.take()
.expect("Attempt to poll after chunk is sent");
match sender.poll_ready(cx) {
Poll::Pending => {
unsent_chunk = Some(chunk);
return Poll::Pending;
}
Poll::Ready(Ok(_)) => (),
Poll::Ready(Err(err)) => {
info!("Could not send http response {}", err);
return IO::Exception(err.to_string()).into();
}
}
match sender.try_send_data(chunk) {
Ok(()) => Poll::Ready(IO::Value(())),
Err(chunk) => {
unsent_chunk = Some(chunk);
IO::Exception("Could not send http response".into()).into()
}
}
})
}
#[derive(Debug, Userdata, Trace, VmType, Clone)]
#[gluon(vm_type = "std.http.types.Uri")]
#[gluon(crate_name = "::vm")]
#[gluon_trace(skip)]
#[gluon_userdata(clone)]
struct Uri(http::Uri);
// Next we define some record types which are marshalled to and from gluon. These have equivalent
// definitions in http_types.glu
field_decl! { http, method, uri, status, body, request, response, headers }
type Request = record_type! {
method => String,
uri => Uri,
body => Body
};
pub type Response = record_type! {
status => u16,
headers => Headers
};
type HttpState = record_type! {
request => Request,
response => ResponseBody
};
#[derive(Getable, VmType)]
#[gluon(crate_name = "::vm")]
struct Settings {
port: u16,
tls_cert: Option<PathBuf>,
}
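// `Settings` derives `Getable`, so it is marshalled from a gluon record with matching
// field names; the Rust-side equivalent of such a value would be (hypothetical values):
//
//     let settings = Settings { port: 8080, tls_cert: None };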
fn listen(
settings: Settings,
WithVM { vm, value }: WithVM<OpaqueValue<RootedThread, EffectHandler<Response>>>,
) -> impl Future<Output = IO<()>> + Send + 'static {
let vm = vm.root_thread();
listen_(settings, vm, value).map(IO::from)
}
async fn listen_(
settings: Settings,
thread: RootedThread,
handler: OpaqueValue<RootedThread, EffectHandler<Response>>,
) -> vm::Result<()> {
let thread = match thread.new_thread() {
Ok(thread) => thread,
Err(err) => return Err(err),
};
impl tower_service::Service<hyper::Request<hyper::Body>> for Handler {
type Response = hyper::Response<hyper::Body>;
type Error = Error;
type Future = BoxFuture<'static, Result<http::Response<hyper::Body>, Error>>;
fn poll_ready(&mut self, _cx: &mut task::Context<'_>) -> Poll<Result<(), Self::Error>> {
Ok(()).into()
}
fn call(&mut self, request: hyper::Request<hyper::Body>) -> Self::Future {
let (parts, body) = request.into_parts();
self.handle(parts.method, parts.uri, body)
}
}
let addr = format!("0.0.0.0:{}", settings.port).parse().unwrap();
let listener = Handler::new(&thread, handler);
if let Some(cert_path) = &settings.tls_cert {
let identity = fs::read(cert_path).map_err(|err| {
vm::Error::Message(format!(
"Unable to open certificate `{}`: {}",
cert_path.display(),
err
))
})?;
let identity = native_tls::Identity::from_pkcs12(&identity, "")
.map_err(|err| vm::Error::Message(err.to_string()))?;
let acceptor = tokio_native_tls::TlsAcceptor::from(
native_tls::TlsAcceptor::new(identity)
.map_err(|err| vm::Error::Message(err.to_string()))?,
);
let http = hyper::server::conn::Http::new();
let tcp_listener = tokio::net::TcpListener::bind(&addr)
.map_err(|err| vm::Error::Message(err.to_string()))
.await?;
let incoming = tokio_stream::wrappers::TcpListenerStream::new(tcp_listener)
.err_into()
.and_then(|stream| {
acceptor.accept(stream).map_err(|err| {
info!("Unable to accept TLS connection: {}", err);
Box::new(err) as Box<dyn ::std::error::Error + Send + Sync>
})
});
pin_project! {
struct Acceptor<S> {
#[pin]
incoming: S,
}
}
impl<S, T, E> hyper::server::accept::Accept for Acceptor<S>
where
S: Stream<Item = Result<T, E>>,
{
type Conn = T;
type Error = E;
fn poll_accept(
self: Pin<&mut Self>,
cx: &mut task::Context<'_>,
) -> Poll<Option<Result<Self::Conn, Self::Error>>> {
self.project().incoming.poll_next(cx)
}
}
return hyper::server::Builder::new(Acceptor { incoming }, http)
.serve(hyper::service::make_service_fn(move |_| {
future::ready(Ok::<_, hyper::Error>(listener.clone()))
}))
.map_err(|err| vm::Error::from(format!("Server error: {}", err)))
.await;
}
Server::bind(&addr)
.serve(hyper::service::make_service_fn(move |_| {
future::ready(Ok::<_, hyper::Error>(listener.clone()))
}))
.map_err(|err| vm::Error::from(format!("Server error: {}", err)))
.map_ok(|_| ())
.await
}
type ListenFn = fn(OpaqueValue<RootedThread, EffectHandler<Response>>, HttpState) -> IO<Response>;
#[derive(Clone)]
pub struct Handler {
handle: Function<RootedThread, ListenFn>,
handler: OpaqueValue<RootedThread, EffectHandler<Response>>,
}
impl Handler {
pub fn new(
thread: &Thread,
handler: OpaqueValue<RootedThread, EffectHandler<Response>>,
) -> Self {
// Retrieve the `handle` function from the http module which we use to evaluate values of type
// `EffectHandler Response`
let handle: Function<RootedThread, ListenFn> = thread
.get_global("std.http.handle")
.unwrap_or_else(|err| panic!("{}", err));
Self { handle, handler }
}
pub fn handle<E>(
&mut self,
method: http::Method,
uri: http::Uri,
body: impl Stream<Item = Result<Bytes, E>> + Send + 'static,
) -> BoxFuture<'static, crate::Result<hyper::Response<hyper::Body>>>
where
E: fmt::Display + Send + 'static,
{
let child_thread = try_future!(self.handle.vm().new_thread());
let mut handle = try_future!(self.handle.re_root(child_thread));
let gluon_request = record_no_decl! {
method => method.as_str().to_owned(),
uri => Uri(uri),
// Since `Body` implements `Userdata` it can be directly pushed to gluon
body => Body(Arc::new(Mutex::new(Box::pin(
body
.map_err(|err| vm::Error::Message(format!("{}", err)))
// `PushAsRef` makes the `body` parameter act as a `&[u8]` which means it is
// marshalled to `Array Byte` in gluon
.map_ok(PushAsRef::<_, [u8]>::new)
))))
};
let (response_sender, response_body) = hyper::Body::channel();
let response_sender = Arc::new(Mutex::new(Some(response_sender)));
let http_state = record_no_decl! {
request => gluon_request,
response => ResponseBody(response_sender.clone())
};
let handler = self.handler.clone();
Box::pin(async move {
handle
.call_async(handler, http_state)
.map(move |result| match result {
Ok(value) => {
match value {
IO::Value(record_p! { status, headers }) => {
// Drop the sender so that the receiver stops waiting for
// more chunks
*response_sender.lock().unwrap() = None;
let status = StatusCode::from_u16(status)
.unwrap_or(StatusCode::INTERNAL_SERVER_ERROR);
let mut response = http::Response::builder()
.status(status)
.body(response_body)
.unwrap();
*response.headers_mut() = headers.0;
Ok(response)
}
IO::Exception(err) => {
info!("{}", err);
Ok(http::Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body("".into())
.unwrap())
}
}
}
Err(err) => {
info!("{}", err);
Ok(http::Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body("".into())
.unwrap())
}
})
.await
})
}
}
// To let the `http_types` module refer to `Body` and `ResponseBody` we register these types in a
// separate function which is called before loading `http_types`
pub fn load_types(vm: &Thread) -> vm::Result<ExternModule> {
vm.register_type::<Body>("std.http.types.Body", &[])?;
vm.register_type::<ResponseBody>("std.http.types.ResponseBody", &[])?;
vm.register_type::<Uri>("std.http.types.Uri", &[])?;
ExternModule::new(
vm,
record! {
// Define the types so that they can be used from gluon
type std::http::types::Body => Body,
type std::http::types::ResponseBody => ResponseBody,
type std::http::types::Uri => Uri,
type std::http::Method => String,
type std::http::StatusCode => u16,
type std::http::Request => Request,
type std::http::Response => Response,
type std::http::Headers => Headers,
type std::http::HttpState => HttpState
},
)
}
macro_rules! uri_binds {
($($id: ident)*) => {
record!{
$(
$id => primitive!(1, concat!("std.http.prim.uri.", stringify!($id)), |u: &Uri| (u.0).$id())
),*
}
}
}
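// For reference, `uri_binds!(path host query to_string)` below expands to roughly:
//
//     record! {
//         path => primitive!(1, "std.http.prim.uri.path", |u: &Uri| (u.0).path()),
//         host => primitive!(1, "std.http.prim.uri.host", |u: &Uri| (u.0).host()),
//         query => primitive!(1, "std.http.prim.uri.query", |u: &Uri| (u.0).query()),
//         to_string => primitive!(1, "std.http.prim.uri.to_string", |u: &Uri| (u.0).to_string())
//     }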
mod std {
pub(crate) mod http {
pub(crate) use crate::std_lib::http as prim;
}
}
pub fn load(vm: &Thread) -> vm::Result<ExternModule> {
ExternModule::new(
vm,
record! {
listen => primitive!(2, async fn std::http::prim::listen),
read_chunk => primitive!(1, async fn std::http::prim::read_chunk),
write_response => primitive!(2, async fn std::http::prim::write_response),
port => primitive!(1, "std.http.prim.uri.port", |u: &Uri| (u.0).port().map(|p| p.as_u16())),
uri => uri_binds!(path host query to_string)
},
)
}
| make_type | identifier_name |
http.rs | use crate::real_std::{
fmt, fs,
path::PathBuf,
pin::Pin,
sync::{Arc, Mutex},
};
use {
collect_mac::collect,
futures::{
future::{self, BoxFuture},
prelude::*,
ready,
task::{self, Poll},
},
http::{
header::{HeaderMap, HeaderName, HeaderValue},
StatusCode,
},
hyper::{body::Bytes, Server},
pin_project_lite::pin_project,
};
use crate::base::types::{ArcType, Type};
use crate::{
vm::{
self,
api::{
generic, Collect, Eff, Function, Getable, OpaqueValue, PushAsRef, Pushable, VmType,
WithVM, IO,
},
thread::{ActiveThread, RootedThread, Thread},
ExternModule, Variants,
},
Error,
};
macro_rules! try_future {
($e:expr) => {
try_future!($e, Box::pin)
};
($e:expr, $f:expr) => {
match $e {
Ok(x) => x,
Err(err) => return $f(::futures::future::err(err.into())),
}
};
}
pub struct HttpEffect;
impl VmType for HttpEffect {
type Type = Self;
fn make_type(vm: &Thread) -> ArcType {
let r = generic::R::make_type(vm);
Type::app(
vm.find_type_info("std.http.types.HttpEffect")
.map(|alias| alias.into_type())
.unwrap_or_else(|_| Type::hole()),
collect![r],
)
}
}
pub type EffectHandler<T> = Eff<HttpEffect, T>;
pub struct Headers(HeaderMap);
impl VmType for Headers {
type Type = Vec<(String, Vec<u8>)>;
fn make_type(vm: &Thread) -> ArcType {
Vec::<(String, Vec<u8>)>::make_type(vm)
}
}
impl<'vm> Pushable<'vm> for Headers {
fn vm_push(self, context: &mut ActiveThread<'vm>) -> vm::Result<()> {
Collect::new(
self.0
.iter()
.map(|(name, value)| (name.as_str(), value.as_bytes())),
)
.vm_push(context)
}
}
impl<'vm, 'value> Getable<'vm, 'value> for Headers {
impl_getable_simple!();
fn from_value(vm: &'vm Thread, value: Variants<'value>) -> Self {
Headers(
Collect::from_value(vm, value)
// TODO Error somehow on invalid headers
.filter_map(|(name, value): (&str, &[u8])| {
match (
HeaderName::from_bytes(name.as_bytes()),
HeaderValue::from_bytes(value),
) {
(Ok(name), Ok(value)) => Some((name, value)),
_ => None,
}
})
.collect(),
)
}
}
// By implementing `Userdata` on `Body` it can be automatically pushed and retrieved from gluon
// threads
#[derive(Userdata, Trace, VmType, Clone)]
#[gluon(vm_type = "std.http.types.Body")]
#[gluon(crate_name = "::vm")]
#[gluon_userdata(clone)]
#[gluon_trace(skip)]
// Representation of an http body that is in the process of being read
pub struct Body(
Arc<Mutex<Pin<Box<dyn Stream<Item = Result<PushAsRef<Bytes, [u8]>, vm::Error>> + Send>>>>,
);
// Types implementing `Userdata` requires a `std::fmt::Debug` implementation so it can be displayed | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "hyper::Body")
}
}
// Since `Body` implements `Userdata` gluon will automatically marshal the gluon representation
// into `&Body` argument
fn read_chunk(body: &Body) -> impl Future<Output = IO<Option<PushAsRef<Bytes, [u8]>>>> {
use futures::future::poll_fn;
let body = body.0.clone();
poll_fn(move |cx| {
let mut stream = body.lock().unwrap();
Poll::Ready(IO::Value(
if let Some(result) = ready!(stream.as_mut().poll_next(cx)) {
match result {
Ok(chunk) => Some(chunk),
Err(err) => return IO::Exception(err.to_string()).into(),
}
} else {
None
},
))
})
}
// A http body that is being written
#[derive(Userdata, Trace, VmType, Clone)]
#[gluon(vm_type = "std.http.types.ResponseBody")]
#[gluon(crate_name = "::vm")]
#[gluon_userdata(clone)]
#[gluon_trace(skip)]
pub struct ResponseBody(Arc<Mutex<Option<hyper::body::Sender>>>);
impl fmt::Debug for ResponseBody {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "ResponseBody")
}
}
fn write_response(response: &ResponseBody, bytes: &[u8]) -> impl Future<Output = IO<()>> {
use futures::future::poll_fn;
// Turn `bytes` into a `Bytes` which can be sent to the http body
let mut unsent_chunk = Some(bytes.to_owned().into());
let response = response.0.clone();
poll_fn(move |cx| {
info!("Starting response send");
let mut sender = response.lock().unwrap();
let sender = sender
.as_mut()
.expect("Sender has been dropped while still in use");
let chunk = unsent_chunk
.take()
.expect("Attempt to poll after chunk is sent");
match sender.poll_ready(cx) {
Poll::Pending => {
unsent_chunk = Some(chunk);
return Poll::Pending;
}
Poll::Ready(Ok(_)) => (),
Poll::Ready(Err(err)) => {
info!("Could not send http response {}", err);
return IO::Exception(err.to_string()).into();
}
}
match sender.try_send_data(chunk) {
Ok(()) => Poll::Ready(IO::Value(())),
Err(chunk) => {
unsent_chunk = Some(chunk);
IO::Exception("Could not send http response".into()).into()
}
}
})
}
#[derive(Debug, Userdata, Trace, VmType, Clone)]
#[gluon(vm_type = "std.http.types.Uri")]
#[gluon(crate_name = "::vm")]
#[gluon_trace(skip)]
#[gluon_userdata(clone)]
struct Uri(http::Uri);
// Next we define some record types which are marshalled to and from gluon. These have equivalent
// definitions in http_types.glu
field_decl! { http, method, uri, status, body, request, response, headers }
type Request = record_type! {
method => String,
uri => Uri,
body => Body
};
pub type Response = record_type! {
status => u16,
headers => Headers
};
type HttpState = record_type! {
request => Request,
response => ResponseBody
};
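// On the gluon side these are plain records; `Request`, for example,
// corresponds roughly to `{ method : String, uri : Uri, body : Body }`. The
// authoritative definitions live in http_types.glu rather than here.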
#[derive(Getable, VmType)]
#[gluon(crate_name = "::vm")]
struct Settings {
port: u16,
tls_cert: Option<PathBuf>,
}
fn listen(
settings: Settings,
WithVM { vm, value }: WithVM<OpaqueValue<RootedThread, EffectHandler<Response>>>,
) -> impl Future<Output = IO<()>> + Send + 'static {
let vm = vm.root_thread();
listen_(settings, vm, value).map(IO::from)
}
async fn listen_(
settings: Settings,
thread: RootedThread,
handler: OpaqueValue<RootedThread, EffectHandler<Response>>,
) -> vm::Result<()> {
let thread = match thread.new_thread() {
Ok(thread) => thread,
Err(err) => return Err(err),
};
impl tower_service::Service<hyper::Request<hyper::Body>> for Handler {
type Response = hyper::Response<hyper::Body>;
type Error = Error;
type Future = BoxFuture<'static, Result<http::Response<hyper::Body>, Error>>;
fn poll_ready(&mut self, _cx: &mut task::Context<'_>) -> Poll<Result<(), Self::Error>> {
Ok(()).into()
}
fn call(&mut self, request: hyper::Request<hyper::Body>) -> Self::Future {
let (parts, body) = request.into_parts();
self.handle(parts.method, parts.uri, body)
}
}
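    // Each connection receives its own clone of `Handler` from the
    // `make_service_fn` closures below; hyper then calls `call` once per
    // request on that clone.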
let addr = format!("0.0.0.0:{}", settings.port).parse().unwrap();
let listener = Handler::new(&thread, handler);
if let Some(cert_path) = &settings.tls_cert {
let identity = fs::read(cert_path).map_err(|err| {
vm::Error::Message(format!(
"Unable to open certificate `{}`: {}",
cert_path.display(),
err
))
})?;
let identity = native_tls::Identity::from_pkcs12(&identity, "")
.map_err(|err| vm::Error::Message(err.to_string()))?;
let acceptor = tokio_native_tls::TlsAcceptor::from(
native_tls::TlsAcceptor::new(identity)
.map_err(|err| vm::Error::Message(err.to_string()))?,
);
let http = hyper::server::conn::Http::new();
let tcp_listener = tokio::net::TcpListener::bind(&addr)
.map_err(|err| vm::Error::Message(err.to_string()))
.await?;
let incoming = tokio_stream::wrappers::TcpListenerStream::new(tcp_listener)
.err_into()
.and_then(|stream| {
acceptor.accept(stream).map_err(|err| {
info!("Unable to accept TLS connection: {}", err);
Box::new(err) as Box<dyn ::std::error::Error + Send + Sync>
})
});
pin_project! {
struct Acceptor<S> {
#[pin]
incoming: S,
}
}
impl<S, T, E> hyper::server::accept::Accept for Acceptor<S>
where
S: Stream<Item = Result<T, E>>,
{
type Conn = T;
type Error = E;
fn poll_accept(
self: Pin<&mut Self>,
cx: &mut task::Context<'_>,
) -> Poll<Option<Result<Self::Conn, Self::Error>>> {
self.project().incoming.poll_next(cx)
}
}
return hyper::server::Builder::new(Acceptor { incoming }, http)
.serve(hyper::service::make_service_fn(move |_| {
future::ready(Ok::<_, hyper::Error>(listener.clone()))
}))
.map_err(|err| vm::Error::from(format!("Server error: {}", err)))
.await;
}
Server::bind(&addr)
.serve(hyper::service::make_service_fn(move |_| {
future::ready(Ok::<_, hyper::Error>(listener.clone()))
}))
.map_err(|err| vm::Error::from(format!("Server error: {}", err)))
.map_ok(|_| ())
.await
}
type ListenFn = fn(OpaqueValue<RootedThread, EffectHandler<Response>>, HttpState) -> IO<Response>;
#[derive(Clone)]
pub struct Handler {
handle: Function<RootedThread, ListenFn>,
handler: OpaqueValue<RootedThread, EffectHandler<Response>>,
}
impl Handler {
pub fn new(
thread: &Thread,
handler: OpaqueValue<RootedThread, EffectHandler<Response>>,
) -> Self {
// Retrieve the `handle` function from the http module which we use to evaluate values of type
// `EffectHandler Response`
let handle: Function<RootedThread, ListenFn> = thread
.get_global("std.http.handle")
.unwrap_or_else(|err| panic!("{}", err));
Self { handle, handler }
}
pub fn handle<E>(
&mut self,
method: http::Method,
uri: http::Uri,
        body: impl Stream<Item = Result<Bytes, E>> + Send + 'static,
) -> BoxFuture<'static, crate::Result<hyper::Response<hyper::Body>>>
where
        E: fmt::Display + Send + 'static,
{
let child_thread = try_future!(self.handle.vm().new_thread());
let mut handle = try_future!(self.handle.re_root(child_thread));
let gluon_request = record_no_decl! {
method => method.as_str().to_owned(),
uri => Uri(uri),
// Since `Body` implements `Userdata` it can be directly pushed to gluon
body => Body(Arc::new(Mutex::new(Box::pin(
body
.map_err(|err| vm::Error::Message(format!("{}", err)))
// `PushAsRef` makes the `body` parameter act as a `&[u8]` which means it is
// marshalled to `Array Byte` in gluon
.map_ok(PushAsRef::<_, [u8]>::new)
))))
};
let (response_sender, response_body) = hyper::Body::channel();
let response_sender = Arc::new(Mutex::new(Some(response_sender)));
let http_state = record_no_decl! {
request => gluon_request,
response => ResponseBody(response_sender.clone())
};
let handler = self.handler.clone();
Box::pin(async move {
handle
.call_async(handler, http_state)
.map(move |result| match result {
Ok(value) => {
match value {
IO::Value(record_p! { status, headers }) => {
                                // Drop the sender so that the receiver stops waiting for
                                // more chunks
*response_sender.lock().unwrap() = None;
let status = StatusCode::from_u16(status)
.unwrap_or(StatusCode::INTERNAL_SERVER_ERROR);
let mut response = http::Response::builder()
.status(status)
.body(response_body)
.unwrap();
*response.headers_mut() = headers.0;
Ok(response)
}
IO::Exception(err) => {
info!("{}", err);
Ok(http::Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body("".into())
.unwrap())
}
}
}
Err(err) => {
info!("{}", err);
Ok(http::Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body("".into())
.unwrap())
}
})
.await
})
}
}
// To let the `http_types` module refer to `Body` and `ResponseBody` we register these types in a
// separate function which is called before loading `http_types`
pub fn load_types(vm: &Thread) -> vm::Result<ExternModule> {
vm.register_type::<Body>("std.http.types.Body", &[])?;
vm.register_type::<ResponseBody>("std.http.types.ResponseBody", &[])?;
vm.register_type::<Uri>("std.http.types.Uri", &[])?;
ExternModule::new(
vm,
record! {
// Define the types so that they can be used from gluon
type std::http::types::Body => Body,
type std::http::types::ResponseBody => ResponseBody,
type std::http::types::Uri => Uri,
type std::http::Method => String,
type std::http::StatusCode => u16,
type std::http::Request => Request,
type std::http::Response => Response,
type std::http::Headers => Headers,
type std::http::HttpState => HttpState
},
)
}
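// Hypothetical usage sketch (the registration helper is assumed to be gluon's
// usual `add_extern_module`; adjust to the embedding's actual setup):
// `load_types` has to run before any gluon source that mentions these types,
// e.g.
//
//     add_extern_module(&vm, "std.http.types", load_types);
//     add_extern_module(&vm, "std.http.prim", load);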
macro_rules! uri_binds {
($($id: ident)*) => {
record!{
$(
$id => primitive!(1, concat!("std.http.prim.uri.", stringify!($id)), |u: &Uri| (u.0).$id())
),*
}
}
}
mod std {
pub(crate) mod http {
pub(crate) use crate::std_lib::http as prim;
}
}
pub fn load(vm: &Thread) -> vm::Result<ExternModule> {
ExternModule::new(
vm,
record! {
listen => primitive!(2, async fn std::http::prim::listen),
read_chunk => primitive!(1, async fn std::http::prim::read_chunk),
write_response => primitive!(2, async fn std::http::prim::write_response),
port => primitive!(1, "std.http.prim.uri.port", |u: &Uri| (u.0).port().map(|p| p.as_u16())),
uri => uri_binds!(path host query to_string)
},
)
} | impl fmt::Debug for Body { | random_line_split |
http.rs | use crate::real_std::{
fmt, fs,
path::PathBuf,
pin::Pin,
sync::{Arc, Mutex},
};
use {
collect_mac::collect,
futures::{
future::{self, BoxFuture},
prelude::*,
ready,
task::{self, Poll},
},
http::{
header::{HeaderMap, HeaderName, HeaderValue},
StatusCode,
},
hyper::{body::Bytes, Server},
pin_project_lite::pin_project,
};
use crate::base::types::{ArcType, Type};
use crate::{
vm::{
self,
api::{
generic, Collect, Eff, Function, Getable, OpaqueValue, PushAsRef, Pushable, VmType,
WithVM, IO,
},
thread::{ActiveThread, RootedThread, Thread},
ExternModule, Variants,
},
Error,
};
macro_rules! try_future {
($e:expr) => {
try_future!($e, Box::pin)
};
($e:expr, $f:expr) => {
match $e {
Ok(x) => x,
Err(err) => return $f(::futures::future::err(err.into())),
}
};
}
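// Hypothetical, self-contained illustration of how `try_future!` is meant to
// be used (this demo function is not part of the original module): the
// fallible expression is unwrapped on success, while an error is converted
// with `.into()` and returned as an already-failed boxed future.
#[allow(dead_code)]
fn try_future_demo(thread: &Thread) -> BoxFuture<'static, Result<(), Error>> {
    // `new_thread` returns a `vm::Result`, so a failure here becomes a
    // `crate::Error` via the macro's `err.into()` and is returned immediately.
    let _child = try_future!(thread.new_thread());
    Box::pin(future::ok::<_, Error>(()))
}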
pub struct HttpEffect;
impl VmType for HttpEffect {
type Type = Self;
fn make_type(vm: &Thread) -> ArcType {
let r = generic::R::make_type(vm);
Type::app(
vm.find_type_info("std.http.types.HttpEffect")
.map(|alias| alias.into_type())
.unwrap_or_else(|_| Type::hole()),
collect![r],
)
}
}
pub type EffectHandler<T> = Eff<HttpEffect, T>;
pub struct Headers(HeaderMap);
impl VmType for Headers {
type Type = Vec<(String, Vec<u8>)>;
fn make_type(vm: &Thread) -> ArcType {
Vec::<(String, Vec<u8>)>::make_type(vm)
}
}
impl<'vm> Pushable<'vm> for Headers {
fn vm_push(self, context: &mut ActiveThread<'vm>) -> vm::Result<()> {
Collect::new(
self.0
.iter()
.map(|(name, value)| (name.as_str(), value.as_bytes())),
)
.vm_push(context)
}
}
impl<'vm, 'value> Getable<'vm, 'value> for Headers {
impl_getable_simple!();
fn from_value(vm: &'vm Thread, value: Variants<'value>) -> Self {
Headers(
Collect::from_value(vm, value)
// TODO Error somehow on invalid headers
.filter_map(|(name, value): (&str, &[u8])| {
match (
HeaderName::from_bytes(name.as_bytes()),
HeaderValue::from_bytes(value),
) {
(Ok(name), Ok(value)) => Some((name, value)),
_ => None,
}
})
.collect(),
)
}
}
// By implementing `Userdata` on `Body` it can be automatically pushed and retrieved from gluon
// threads
#[derive(Userdata, Trace, VmType, Clone)]
#[gluon(vm_type = "std.http.types.Body")]
#[gluon(crate_name = "::vm")]
#[gluon_userdata(clone)]
#[gluon_trace(skip)]
// Representation of a http body that is in the process of being read
pub struct Body(
Arc<Mutex<Pin<Box<dyn Stream<Item = Result<PushAsRef<Bytes, [u8]>, vm::Error>> + Send>>>>,
);
// Types implementing `Userdata` require a `std::fmt::Debug` implementation so it can be displayed
impl fmt::Debug for Body {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "hyper::Body")
}
}
// Since `Body` implements `Userdata` gluon will automatically marshal the gluon representation
// into a `&Body` argument
fn read_chunk(body: &Body) -> impl Future<Output = IO<Option<PushAsRef<Bytes, [u8]>>>> {
use futures::future::poll_fn;
let body = body.0.clone();
poll_fn(move |cx| {
let mut stream = body.lock().unwrap();
Poll::Ready(IO::Value(
if let Some(result) = ready!(stream.as_mut().poll_next(cx)) {
match result {
Ok(chunk) => Some(chunk),
Err(err) => return IO::Exception(err.to_string()).into(),
}
} else {
None
},
))
})
}
// A http body that is being written
#[derive(Userdata, Trace, VmType, Clone)]
#[gluon(vm_type = "std.http.types.ResponseBody")]
#[gluon(crate_name = "::vm")]
#[gluon_userdata(clone)]
#[gluon_trace(skip)]
pub struct ResponseBody(Arc<Mutex<Option<hyper::body::Sender>>>);
impl fmt::Debug for ResponseBody {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "ResponseBody")
}
}
fn write_response(response: &ResponseBody, bytes: &[u8]) -> impl Future<Output = IO<()>> {
use futures::future::poll_fn;
    // Turn `bytes` into a `Bytes` which can be sent to the http body
let mut unsent_chunk = Some(bytes.to_owned().into());
let response = response.0.clone();
poll_fn(move |cx| {
info!("Starting response send");
let mut sender = response.lock().unwrap();
let sender = sender
.as_mut()
.expect("Sender has been dropped while still in use");
let chunk = unsent_chunk
.take()
.expect("Attempt to poll after chunk is sent");
match sender.poll_ready(cx) {
Poll::Pending => {
unsent_chunk = Some(chunk);
return Poll::Pending;
}
Poll::Ready(Ok(_)) => (),
Poll::Ready(Err(err)) => {
info!("Could not send http response {}", err);
return IO::Exception(err.to_string()).into();
}
}
match sender.try_send_data(chunk) {
Ok(()) => Poll::Ready(IO::Value(())),
Err(chunk) => {
unsent_chunk = Some(chunk);
IO::Exception("Could not send http response".into()).into()
}
}
})
}
#[derive(Debug, Userdata, Trace, VmType, Clone)]
#[gluon(vm_type = "std.http.types.Uri")]
#[gluon(crate_name = "::vm")]
#[gluon_trace(skip)]
#[gluon_userdata(clone)]
struct Uri(http::Uri);
// Next we define some record types which are marshalled to and from gluon. These have equivalent
// definitions in http_types.glu
field_decl! { http, method, uri, status, body, request, response, headers }
type Request = record_type! {
method => String,
uri => Uri,
body => Body
};
pub type Response = record_type! {
status => u16,
headers => Headers
};
type HttpState = record_type! {
request => Request,
response => ResponseBody
};
#[derive(Getable, VmType)]
#[gluon(crate_name = "::vm")]
struct Settings {
port: u16,
tls_cert: Option<PathBuf>,
}
fn listen(
settings: Settings,
WithVM { vm, value }: WithVM<OpaqueValue<RootedThread, EffectHandler<Response>>>,
) -> impl Future<Output = IO<()>> + Send + 'static {
let vm = vm.root_thread();
listen_(settings, vm, value).map(IO::from)
}
async fn listen_(
settings: Settings,
thread: RootedThread,
handler: OpaqueValue<RootedThread, EffectHandler<Response>>,
) -> vm::Result<()> {
let thread = match thread.new_thread() {
Ok(thread) => thread,
Err(err) => return Err(err),
};
impl tower_service::Service<hyper::Request<hyper::Body>> for Handler {
type Response = hyper::Response<hyper::Body>;
type Error = Error;
type Future = BoxFuture<'static, Result<http::Response<hyper::Body>, Error>>;
fn poll_ready(&mut self, _cx: &mut task::Context<'_>) -> Poll<Result<(), Self::Error>> {
Ok(()).into()
}
fn call(&mut self, request: hyper::Request<hyper::Body>) -> Self::Future {
let (parts, body) = request.into_parts();
self.handle(parts.method, parts.uri, body)
}
}
let addr = format!("0.0.0.0:{}", settings.port).parse().unwrap();
let listener = Handler::new(&thread, handler);
if let Some(cert_path) = &settings.tls_cert {
let identity = fs::read(cert_path).map_err(|err| {
vm::Error::Message(format!(
"Unable to open certificate `{}`: {}",
cert_path.display(),
err
))
})?;
let identity = native_tls::Identity::from_pkcs12(&identity, "")
.map_err(|err| vm::Error::Message(err.to_string()))?;
let acceptor = tokio_native_tls::TlsAcceptor::from(
native_tls::TlsAcceptor::new(identity)
.map_err(|err| vm::Error::Message(err.to_string()))?,
);
let http = hyper::server::conn::Http::new();
let tcp_listener = tokio::net::TcpListener::bind(&addr)
.map_err(|err| vm::Error::Message(err.to_string()))
.await?;
let incoming = tokio_stream::wrappers::TcpListenerStream::new(tcp_listener)
.err_into()
.and_then(|stream| {
acceptor.accept(stream).map_err(|err| {
info!("Unable to accept TLS connection: {}", err);
Box::new(err) as Box<dyn ::std::error::Error + Send + Sync>
})
});
pin_project! {
struct Acceptor<S> {
#[pin]
incoming: S,
}
}
impl<S, T, E> hyper::server::accept::Accept for Acceptor<S>
where
S: Stream<Item = Result<T, E>>,
{
type Conn = T;
type Error = E;
fn poll_accept(
self: Pin<&mut Self>,
cx: &mut task::Context<'_>,
) -> Poll<Option<Result<Self::Conn, Self::Error>>> {
self.project().incoming.poll_next(cx)
}
}
return hyper::server::Builder::new(Acceptor { incoming }, http)
.serve(hyper::service::make_service_fn(move |_| {
future::ready(Ok::<_, hyper::Error>(listener.clone()))
}))
.map_err(|err| vm::Error::from(format!("Server error: {}", err)))
.await;
}
Server::bind(&addr)
.serve(hyper::service::make_service_fn(move |_| {
future::ready(Ok::<_, hyper::Error>(listener.clone()))
}))
.map_err(|err| vm::Error::from(format!("Server error: {}", err)))
.map_ok(|_| ())
.await
}
type ListenFn = fn(OpaqueValue<RootedThread, EffectHandler<Response>>, HttpState) -> IO<Response>;
#[derive(Clone)]
pub struct Handler {
handle: Function<RootedThread, ListenFn>,
handler: OpaqueValue<RootedThread, EffectHandler<Response>>,
}
impl Handler {
pub fn new(
thread: &Thread,
handler: OpaqueValue<RootedThread, EffectHandler<Response>>,
) -> Self { |
pub fn handle<E>(
&mut self,
method: http::Method,
uri: http::Uri,
        body: impl Stream<Item = Result<Bytes, E>> + Send + 'static,
) -> BoxFuture<'static, crate::Result<hyper::Response<hyper::Body>>>
where
        E: fmt::Display + Send + 'static,
{
let child_thread = try_future!(self.handle.vm().new_thread());
let mut handle = try_future!(self.handle.re_root(child_thread));
let gluon_request = record_no_decl! {
method => method.as_str().to_owned(),
uri => Uri(uri),
// Since `Body` implements `Userdata` it can be directly pushed to gluon
body => Body(Arc::new(Mutex::new(Box::pin(
body
.map_err(|err| vm::Error::Message(format!("{}", err)))
// `PushAsRef` makes the `body` parameter act as a `&[u8]` which means it is
// marshalled to `Array Byte` in gluon
.map_ok(PushAsRef::<_, [u8]>::new)
))))
};
let (response_sender, response_body) = hyper::Body::channel();
let response_sender = Arc::new(Mutex::new(Some(response_sender)));
let http_state = record_no_decl! {
request => gluon_request,
response => ResponseBody(response_sender.clone())
};
let handler = self.handler.clone();
Box::pin(async move {
handle
.call_async(handler, http_state)
.map(move |result| match result {
Ok(value) => {
match value {
IO::Value(record_p! { status, headers }) => {
                                // Drop the sender so that the receiver stops waiting for
                                // more chunks
*response_sender.lock().unwrap() = None;
let status = StatusCode::from_u16(status)
.unwrap_or(StatusCode::INTERNAL_SERVER_ERROR);
let mut response = http::Response::builder()
.status(status)
.body(response_body)
.unwrap();
*response.headers_mut() = headers.0;
Ok(response)
}
IO::Exception(err) => {
info!("{}", err);
Ok(http::Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body("".into())
.unwrap())
}
}
}
Err(err) => {
info!("{}", err);
Ok(http::Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body("".into())
.unwrap())
}
})
.await
})
}
}
// To let the `http_types` module refer to `Body` and `ResponseBody` we register these types in a
// separate function which is called before loading `http_types`
pub fn load_types(vm: &Thread) -> vm::Result<ExternModule> {
vm.register_type::<Body>("std.http.types.Body", &[])?;
vm.register_type::<ResponseBody>("std.http.types.ResponseBody", &[])?;
vm.register_type::<Uri>("std.http.types.Uri", &[])?;
ExternModule::new(
vm,
record! {
// Define the types so that they can be used from gluon
type std::http::types::Body => Body,
type std::http::types::ResponseBody => ResponseBody,
type std::http::types::Uri => Uri,
type std::http::Method => String,
type std::http::StatusCode => u16,
type std::http::Request => Request,
type std::http::Response => Response,
type std::http::Headers => Headers,
type std::http::HttpState => HttpState
},
)
}
macro_rules! uri_binds {
($($id: ident)*) => {
record!{
$(
$id => primitive!(1, concat!("std.http.prim.uri.", stringify!($id)), |u: &Uri| (u.0).$id())
),*
}
}
}
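// For reference, the `uri_binds!(path host query to_string)` invocation in
// `load` below expands to one record field per listed identifier, each of the
// shape
//
//     path => primitive!(1, "std.http.prim.uri.path", |u: &Uri| (u.0).path())
//
// i.e. a one-argument primitive forwarding to the same-named method on the
// wrapped `http::Uri`.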
mod std {
pub(crate) mod http {
pub(crate) use crate::std_lib::http as prim;
}
}
pub fn load(vm: &Thread) -> vm::Result<ExternModule> {
ExternModule::new(
vm,
record! {
listen => primitive!(2, async fn std::http::prim::listen),
read_chunk => primitive!(1, async fn std::http::prim::read_chunk),
write_response => primitive!(2, async fn std::http::prim::write_response),
port => primitive!(1, "std.http.prim.uri.port", |u: &Uri| (u.0).port().map(|p| p.as_u16())),
uri => uri_binds!(path host query to_string)
},
)
}
|
// Retrieve the `handle` function from the http module which we use to evaluate values of type
// `EffectHandler Response`
let handle: Function<RootedThread, ListenFn> = thread
.get_global("std.http.handle")
.unwrap_or_else(|err| panic!("{}", err));
Self { handle, handler }
}
| identifier_body |
arc-rw-read-mode-shouldnt-escape.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern mod extra;
use extra::arc;
fn main() | {
let x = ~arc::RWArc::new(1);
let mut y = None;
do x.write_downgrade |write_mode| {
y = Some(x.downgrade(write_mode));
//~^ ERROR cannot infer an appropriate lifetime
}
y.unwrap();
// Adding this line causes a method unification failure instead
// do (&option::unwrap(y)).read |state| { assert!(*state == 1); }
} | identifier_body |
|
arc-rw-read-mode-shouldnt-escape.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern mod extra;
use extra::arc;
fn main() {
let x = ~arc::RWArc::new(1);
let mut y = None;
do x.write_downgrade |write_mode| {
y = Some(x.downgrade(write_mode));
//~^ ERROR cannot infer an appropriate lifetime
}
y.unwrap();
// Adding this line causes a method unification failure instead
// do (&option::unwrap(y)).read |state| { assert!(*state == 1); }
} | // file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license | random_line_split |
arc-rw-read-mode-shouldnt-escape.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern mod extra;
use extra::arc;
fn | () {
let x = ~arc::RWArc::new(1);
let mut y = None;
do x.write_downgrade |write_mode| {
y = Some(x.downgrade(write_mode));
//~^ ERROR cannot infer an appropriate lifetime
}
y.unwrap();
// Adding this line causes a method unification failure instead
// do (&option::unwrap(y)).read |state| { assert!(*state == 1); }
}
| main | identifier_name |
lib.rs | #![feature(if_let)]
use std::os;
use std::io::Command;
use std::io::process::InheritFd;
use std::default::Default;
/// Extra configuration to pass to gcc.
pub struct Config {
/// Directories where gcc will look for header files.
pub include_directories: Vec<Path>,
/// Additional definitions (`-DKEY` or `-DKEY=VALUE`).
pub definitions: Vec<(String, Option<String>)>,
/// Additional object files to link into the final archive
pub objects: Vec<Path>,
}
impl Default for Config {
fn default() -> Config {
Config {
include_directories: Vec::new(),
definitions: Vec::new(),
objects: Vec::new(),
}
}
}
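// Illustrative sketch of filling in `Config` by hand (the paths, definitions
// and file names below are made up):
//
//     let mut config: Config = Default::default();
//     config.include_directories.push(Path::new("vendor/include"));
//     config.definitions.push(("NDEBUG".to_string(), None));
//     config.definitions.push(("VERSION".to_string(), Some("1".to_string())));
//     gcc::compile_library("libvendor.a", &config, &["vendor/foo.c"]);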
/// Compile a library from the given set of input C files.
///
/// This will simply compile all files into object files and then assemble them
/// into the output. This will read the standard environment variables to detect
/// cross compilations and such.
///
/// # Example
///
/// ```no_run
/// use std::default::Default;
/// gcc::compile_library("libfoo.a", &Default::default(), &[
/// "foo.c",
/// "bar.c",
/// ]);
/// ```
pub fn compile_library(output: &str, config: &Config, files: &[&str]) {
assert!(output.starts_with("lib"));
assert!(output.ends_with(".a"));
let target = os::getenv("TARGET").unwrap();
let opt_level = os::getenv("OPT_LEVEL").unwrap();
let mut cmd = Command::new(gcc(target.as_slice()));
cmd.arg(format!("-O{}", opt_level));
cmd.arg("-c");
cmd.arg("-ffunction-sections").arg("-fdata-sections");
cmd.args(cflags().as_slice());
if target.as_slice().contains("-ios") {
cmd.args(ios_flags(target.as_slice()).as_slice());
} else {
if target.as_slice().contains("i686") {
cmd.arg("-m32");
} else if target.as_slice().contains("x86_64") {
cmd.arg("-m64");
}
if!target.as_slice().contains("i686") {
cmd.arg("-fPIC");
}
}
for directory in config.include_directories.iter() {
cmd.arg("-I").arg(directory);
}
for &(ref key, ref value) in config.definitions.iter() {
if let &Some(ref value) = value {
cmd.arg(format!("-D{}={}", key, value));
} else {
cmd.arg(format!("-D{}", key));
}
}
let src = Path::new(os::getenv("CARGO_MANIFEST_DIR").unwrap());
let dst = Path::new(os::getenv("OUT_DIR").unwrap());
let mut objects = Vec::new();
for file in files.iter() {
let obj = dst.join(*file).with_extension("o");
std::io::fs::mkdir_recursive(&obj.dir_path(), std::io::USER_RWX).unwrap();
run(cmd.clone().arg(src.join(*file)).arg("-o").arg(&obj));
objects.push(obj);
}
run(Command::new(ar(target.as_slice())).arg("crus")
.arg(dst.join(output))
.args(objects.as_slice())
.args(config.objects.as_slice()));
println!("cargo:rustc-flags=-L {} -l {}:static",
dst.display(), output.slice(3, output.len() - 2));
}
fn run(cmd: &mut Command) {
println!("running: {}", cmd);
assert!(cmd.stdout(InheritFd(1))
.stderr(InheritFd(2))
.status()
.unwrap()
.success());
}
fn gcc(target: &str) -> String {
let is_android = target.find_str("android").is_some();
os::getenv("CC").unwrap_or(if cfg!(windows) {
"gcc".to_string()
} else if is_android {
format!("{}-gcc", target)
} else {
"cc".to_string()
})
} | os::getenv("AR").unwrap_or(if is_android {
format!("{}-ar", target)
} else {
"ar".to_string()
})
}
fn cflags() -> Vec<String> {
os::getenv("CFLAGS").unwrap_or(String::new())
.as_slice().words().map(|s| s.to_string())
.collect()
}
fn ios_flags(target: &str) -> Vec<String> {
let mut is_device_arch = false;
let mut res = Vec::new();
if target.starts_with("arm-") {
res.push("-arch");
res.push("armv7");
is_device_arch = true;
} else if target.starts_with("arm64-") {
res.push("-arch");
res.push("arm64");
is_device_arch = true;
} else if target.starts_with("i386-") {
res.push("-m32");
} else if target.starts_with("x86_64-") {
res.push("-m64");
}
let sdk = if is_device_arch {"iphoneos"} else {"iphonesimulator"};
println!("Detecting iOS SDK path for {}", sdk);
let sdk_path = Command::new("xcrun")
.arg("--show-sdk-path")
.arg("--sdk")
.arg(sdk)
.stderr(InheritFd(2))
.output()
.unwrap()
.output;
let sdk_path = String::from_utf8(sdk_path).unwrap();
res.push("-isysroot");
res.push(sdk_path.as_slice().trim());
res.iter().map(|s| s.to_string()).collect::<Vec<_>>()
} |
fn ar(target: &str) -> String {
let is_android = target.find_str("android").is_some();
| random_line_split |
lib.rs | #![feature(if_let)]
use std::os;
use std::io::Command;
use std::io::process::InheritFd;
use std::default::Default;
/// Extra configuration to pass to gcc.
pub struct Config {
/// Directories where gcc will look for header files.
pub include_directories: Vec<Path>,
/// Additional definitions (`-DKEY` or `-DKEY=VALUE`).
pub definitions: Vec<(String, Option<String>)>,
/// Additional object files to link into the final archive
pub objects: Vec<Path>,
}
impl Default for Config {
fn default() -> Config {
Config {
include_directories: Vec::new(),
definitions: Vec::new(),
objects: Vec::new(),
}
}
}
/// Compile a library from the given set of input C files.
///
/// This will simply compile all files into object files and then assemble them
/// into the output. This will read the standard environment variables to detect
/// cross compilations and such.
///
/// # Example
///
/// ```no_run
/// use std::default::Default;
/// gcc::compile_library("libfoo.a", &Default::default(), &[
/// "foo.c",
/// "bar.c",
/// ]);
/// ```
pub fn compile_library(output: &str, config: &Config, files: &[&str]) {
assert!(output.starts_with("lib"));
assert!(output.ends_with(".a"));
let target = os::getenv("TARGET").unwrap();
let opt_level = os::getenv("OPT_LEVEL").unwrap();
let mut cmd = Command::new(gcc(target.as_slice()));
cmd.arg(format!("-O{}", opt_level));
cmd.arg("-c");
cmd.arg("-ffunction-sections").arg("-fdata-sections");
cmd.args(cflags().as_slice());
if target.as_slice().contains("-ios") {
cmd.args(ios_flags(target.as_slice()).as_slice());
} else {
if target.as_slice().contains("i686") {
cmd.arg("-m32");
} else if target.as_slice().contains("x86_64") {
cmd.arg("-m64");
}
if!target.as_slice().contains("i686") {
cmd.arg("-fPIC");
}
}
for directory in config.include_directories.iter() {
cmd.arg("-I").arg(directory);
}
for &(ref key, ref value) in config.definitions.iter() {
if let &Some(ref value) = value {
cmd.arg(format!("-D{}={}", key, value));
} else {
cmd.arg(format!("-D{}", key));
}
}
let src = Path::new(os::getenv("CARGO_MANIFEST_DIR").unwrap());
let dst = Path::new(os::getenv("OUT_DIR").unwrap());
let mut objects = Vec::new();
for file in files.iter() {
let obj = dst.join(*file).with_extension("o");
std::io::fs::mkdir_recursive(&obj.dir_path(), std::io::USER_RWX).unwrap();
run(cmd.clone().arg(src.join(*file)).arg("-o").arg(&obj));
objects.push(obj);
}
run(Command::new(ar(target.as_slice())).arg("crus")
.arg(dst.join(output))
.args(objects.as_slice())
.args(config.objects.as_slice()));
println!("cargo:rustc-flags=-L {} -l {}:static",
dst.display(), output.slice(3, output.len() - 2));
}
fn run(cmd: &mut Command) {
println!("running: {}", cmd);
assert!(cmd.stdout(InheritFd(1))
.stderr(InheritFd(2))
.status()
.unwrap()
.success());
}
fn gcc(target: &str) -> String {
let is_android = target.find_str("android").is_some();
os::getenv("CC").unwrap_or(if cfg!(windows) | else if is_android {
format!("{}-gcc", target)
} else {
"cc".to_string()
})
}
fn ar(target: &str) -> String {
let is_android = target.find_str("android").is_some();
os::getenv("AR").unwrap_or(if is_android {
format!("{}-ar", target)
} else {
"ar".to_string()
})
}
fn cflags() -> Vec<String> {
os::getenv("CFLAGS").unwrap_or(String::new())
.as_slice().words().map(|s| s.to_string())
.collect()
}
fn ios_flags(target: &str) -> Vec<String> {
let mut is_device_arch = false;
let mut res = Vec::new();
if target.starts_with("arm-") {
res.push("-arch");
res.push("armv7");
is_device_arch = true;
} else if target.starts_with("arm64-") {
res.push("-arch");
res.push("arm64");
is_device_arch = true;
} else if target.starts_with("i386-") {
res.push("-m32");
} else if target.starts_with("x86_64-") {
res.push("-m64");
}
let sdk = if is_device_arch {"iphoneos"} else {"iphonesimulator"};
println!("Detecting iOS SDK path for {}", sdk);
let sdk_path = Command::new("xcrun")
.arg("--show-sdk-path")
.arg("--sdk")
.arg(sdk)
.stderr(InheritFd(2))
.output()
.unwrap()
.output;
let sdk_path = String::from_utf8(sdk_path).unwrap();
res.push("-isysroot");
res.push(sdk_path.as_slice().trim());
res.iter().map(|s| s.to_string()).collect::<Vec<_>>()
}
| {
"gcc".to_string()
} | conditional_block |
lib.rs | #![feature(if_let)]
use std::os;
use std::io::Command;
use std::io::process::InheritFd;
use std::default::Default;
/// Extra configuration to pass to gcc.
pub struct Config {
/// Directories where gcc will look for header files.
pub include_directories: Vec<Path>,
/// Additional definitions (`-DKEY` or `-DKEY=VALUE`).
pub definitions: Vec<(String, Option<String>)>,
/// Additional object files to link into the final archive
pub objects: Vec<Path>,
}
impl Default for Config {
fn default() -> Config {
Config {
include_directories: Vec::new(),
definitions: Vec::new(),
objects: Vec::new(),
}
}
}
/// Compile a library from the given set of input C files.
///
/// This will simply compile all files into object files and then assemble them
/// into the output. This will read the standard environment variables to detect
/// cross compilations and such.
///
/// # Example
///
/// ```no_run
/// use std::default::Default;
/// gcc::compile_library("libfoo.a", &Default::default(), &[
/// "foo.c",
/// "bar.c",
/// ]);
/// ```
pub fn compile_library(output: &str, config: &Config, files: &[&str]) {
assert!(output.starts_with("lib"));
assert!(output.ends_with(".a"));
let target = os::getenv("TARGET").unwrap();
let opt_level = os::getenv("OPT_LEVEL").unwrap();
let mut cmd = Command::new(gcc(target.as_slice()));
cmd.arg(format!("-O{}", opt_level));
cmd.arg("-c");
cmd.arg("-ffunction-sections").arg("-fdata-sections");
cmd.args(cflags().as_slice());
if target.as_slice().contains("-ios") {
cmd.args(ios_flags(target.as_slice()).as_slice());
} else {
if target.as_slice().contains("i686") {
cmd.arg("-m32");
} else if target.as_slice().contains("x86_64") {
cmd.arg("-m64");
}
if!target.as_slice().contains("i686") {
cmd.arg("-fPIC");
}
}
for directory in config.include_directories.iter() {
cmd.arg("-I").arg(directory);
}
for &(ref key, ref value) in config.definitions.iter() {
if let &Some(ref value) = value {
cmd.arg(format!("-D{}={}", key, value));
} else {
cmd.arg(format!("-D{}", key));
}
}
let src = Path::new(os::getenv("CARGO_MANIFEST_DIR").unwrap());
let dst = Path::new(os::getenv("OUT_DIR").unwrap());
let mut objects = Vec::new();
for file in files.iter() {
let obj = dst.join(*file).with_extension("o");
std::io::fs::mkdir_recursive(&obj.dir_path(), std::io::USER_RWX).unwrap();
run(cmd.clone().arg(src.join(*file)).arg("-o").arg(&obj));
objects.push(obj);
}
run(Command::new(ar(target.as_slice())).arg("crus")
.arg(dst.join(output))
.args(objects.as_slice())
.args(config.objects.as_slice()));
println!("cargo:rustc-flags=-L {} -l {}:static",
dst.display(), output.slice(3, output.len() - 2));
}
fn run(cmd: &mut Command) {
println!("running: {}", cmd);
assert!(cmd.stdout(InheritFd(1))
.stderr(InheritFd(2))
.status()
.unwrap()
.success());
}
fn gcc(target: &str) -> String {
let is_android = target.find_str("android").is_some();
os::getenv("CC").unwrap_or(if cfg!(windows) {
"gcc".to_string()
} else if is_android {
format!("{}-gcc", target)
} else {
"cc".to_string()
})
}
fn ar(target: &str) -> String |
fn cflags() -> Vec<String> {
os::getenv("CFLAGS").unwrap_or(String::new())
.as_slice().words().map(|s| s.to_string())
.collect()
}
fn ios_flags(target: &str) -> Vec<String> {
let mut is_device_arch = false;
let mut res = Vec::new();
if target.starts_with("arm-") {
res.push("-arch");
res.push("armv7");
is_device_arch = true;
} else if target.starts_with("arm64-") {
res.push("-arch");
res.push("arm64");
is_device_arch = true;
} else if target.starts_with("i386-") {
res.push("-m32");
} else if target.starts_with("x86_64-") {
res.push("-m64");
}
let sdk = if is_device_arch {"iphoneos"} else {"iphonesimulator"};
println!("Detecting iOS SDK path for {}", sdk);
let sdk_path = Command::new("xcrun")
.arg("--show-sdk-path")
.arg("--sdk")
.arg(sdk)
.stderr(InheritFd(2))
.output()
.unwrap()
.output;
let sdk_path = String::from_utf8(sdk_path).unwrap();
res.push("-isysroot");
res.push(sdk_path.as_slice().trim());
res.iter().map(|s| s.to_string()).collect::<Vec<_>>()
}
| {
let is_android = target.find_str("android").is_some();
os::getenv("AR").unwrap_or(if is_android {
format!("{}-ar", target)
} else {
"ar".to_string()
})
} | identifier_body |
lib.rs | #![feature(if_let)]
use std::os;
use std::io::Command;
use std::io::process::InheritFd;
use std::default::Default;
/// Extra configuration to pass to gcc.
pub struct | {
/// Directories where gcc will look for header files.
pub include_directories: Vec<Path>,
/// Additional definitions (`-DKEY` or `-DKEY=VALUE`).
pub definitions: Vec<(String, Option<String>)>,
/// Additional object files to link into the final archive
pub objects: Vec<Path>,
}
impl Default for Config {
fn default() -> Config {
Config {
include_directories: Vec::new(),
definitions: Vec::new(),
objects: Vec::new(),
}
}
}
/// Compile a library from the given set of input C files.
///
/// This will simply compile all files into object files and then assemble them
/// into the output. This will read the standard environment variables to detect
/// cross compilations and such.
///
/// # Example
///
/// ```no_run
/// use std::default::Default;
/// gcc::compile_library("libfoo.a", &Default::default(), &[
/// "foo.c",
/// "bar.c",
/// ]);
/// ```
pub fn compile_library(output: &str, config: &Config, files: &[&str]) {
assert!(output.starts_with("lib"));
assert!(output.ends_with(".a"));
let target = os::getenv("TARGET").unwrap();
let opt_level = os::getenv("OPT_LEVEL").unwrap();
let mut cmd = Command::new(gcc(target.as_slice()));
cmd.arg(format!("-O{}", opt_level));
cmd.arg("-c");
cmd.arg("-ffunction-sections").arg("-fdata-sections");
cmd.args(cflags().as_slice());
if target.as_slice().contains("-ios") {
cmd.args(ios_flags(target.as_slice()).as_slice());
} else {
if target.as_slice().contains("i686") {
cmd.arg("-m32");
} else if target.as_slice().contains("x86_64") {
cmd.arg("-m64");
}
if!target.as_slice().contains("i686") {
cmd.arg("-fPIC");
}
}
for directory in config.include_directories.iter() {
cmd.arg("-I").arg(directory);
}
for &(ref key, ref value) in config.definitions.iter() {
if let &Some(ref value) = value {
cmd.arg(format!("-D{}={}", key, value));
} else {
cmd.arg(format!("-D{}", key));
}
}
let src = Path::new(os::getenv("CARGO_MANIFEST_DIR").unwrap());
let dst = Path::new(os::getenv("OUT_DIR").unwrap());
let mut objects = Vec::new();
for file in files.iter() {
let obj = dst.join(*file).with_extension("o");
std::io::fs::mkdir_recursive(&obj.dir_path(), std::io::USER_RWX).unwrap();
run(cmd.clone().arg(src.join(*file)).arg("-o").arg(&obj));
objects.push(obj);
}
run(Command::new(ar(target.as_slice())).arg("crus")
.arg(dst.join(output))
.args(objects.as_slice())
.args(config.objects.as_slice()));
println!("cargo:rustc-flags=-L {} -l {}:static",
dst.display(), output.slice(3, output.len() - 2));
}
fn run(cmd: &mut Command) {
println!("running: {}", cmd);
assert!(cmd.stdout(InheritFd(1))
.stderr(InheritFd(2))
.status()
.unwrap()
.success());
}
fn gcc(target: &str) -> String {
let is_android = target.find_str("android").is_some();
os::getenv("CC").unwrap_or(if cfg!(windows) {
"gcc".to_string()
} else if is_android {
format!("{}-gcc", target)
} else {
"cc".to_string()
})
}
fn ar(target: &str) -> String {
let is_android = target.find_str("android").is_some();
os::getenv("AR").unwrap_or(if is_android {
format!("{}-ar", target)
} else {
"ar".to_string()
})
}
fn cflags() -> Vec<String> {
os::getenv("CFLAGS").unwrap_or(String::new())
.as_slice().words().map(|s| s.to_string())
.collect()
}
fn ios_flags(target: &str) -> Vec<String> {
let mut is_device_arch = false;
let mut res = Vec::new();
if target.starts_with("arm-") {
res.push("-arch");
res.push("armv7");
is_device_arch = true;
} else if target.starts_with("arm64-") {
res.push("-arch");
res.push("arm64");
is_device_arch = true;
} else if target.starts_with("i386-") {
res.push("-m32");
} else if target.starts_with("x86_64-") {
res.push("-m64");
}
let sdk = if is_device_arch {"iphoneos"} else {"iphonesimulator"};
println!("Detecting iOS SDK path for {}", sdk);
let sdk_path = Command::new("xcrun")
.arg("--show-sdk-path")
.arg("--sdk")
.arg(sdk)
.stderr(InheritFd(2))
.output()
.unwrap()
.output;
let sdk_path = String::from_utf8(sdk_path).unwrap();
res.push("-isysroot");
res.push(sdk_path.as_slice().trim());
res.iter().map(|s| s.to_string()).collect::<Vec<_>>()
}
| Config | identifier_name |
vperm2i128.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn vperm2i128_1() {
run_test(&Instruction { mnemonic: Mnemonic::VPERM2I128, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM5)), operand3: Some(Direct(YMM1)), operand4: Some(Literal8(22)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 85, 70, 193, 22], OperandSize::Dword)
}
fn vperm2i128_2() {
run_test(&Instruction { mnemonic: Mnemonic::VPERM2I128, operand1: Some(Direct(YMM6)), operand2: Some(Direct(YMM2)), operand3: Some(IndirectScaledDisplaced(EDX, Two, 676667494, Some(OperandSize::Ymmword), None)), operand4: Some(Literal8(6)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 109, 70, 52, 85, 102, 32, 85, 40, 6], OperandSize::Dword)
}
fn vperm2i128_3() {
run_test(&Instruction { mnemonic: Mnemonic::VPERM2I128, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM1)), operand3: Some(Direct(YMM5)), operand4: Some(Literal8(103)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 117, 70, 229, 103], OperandSize::Qword)
}
fn vperm2i128_4() {
run_test(&Instruction { mnemonic: Mnemonic::VPERM2I128, operand1: Some(Direct(YMM5)), operand2: Some(Direct(YMM4)), operand3: Some(IndirectScaledDisplaced(RCX, Eight, 707114910, Some(OperandSize::Ymmword), None)), operand4: Some(Literal8(11)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 93, 70, 44, 205, 158, 183, 37, 42, 11], OperandSize::Qword) | } | random_line_split |
|
vperm2i128.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn | () {
run_test(&Instruction { mnemonic: Mnemonic::VPERM2I128, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM5)), operand3: Some(Direct(YMM1)), operand4: Some(Literal8(22)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 85, 70, 193, 22], OperandSize::Dword)
}
fn vperm2i128_2() {
run_test(&Instruction { mnemonic: Mnemonic::VPERM2I128, operand1: Some(Direct(YMM6)), operand2: Some(Direct(YMM2)), operand3: Some(IndirectScaledDisplaced(EDX, Two, 676667494, Some(OperandSize::Ymmword), None)), operand4: Some(Literal8(6)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 109, 70, 52, 85, 102, 32, 85, 40, 6], OperandSize::Dword)
}
fn vperm2i128_3() {
run_test(&Instruction { mnemonic: Mnemonic::VPERM2I128, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM1)), operand3: Some(Direct(YMM5)), operand4: Some(Literal8(103)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 117, 70, 229, 103], OperandSize::Qword)
}
fn vperm2i128_4() {
run_test(&Instruction { mnemonic: Mnemonic::VPERM2I128, operand1: Some(Direct(YMM5)), operand2: Some(Direct(YMM4)), operand3: Some(IndirectScaledDisplaced(RCX, Eight, 707114910, Some(OperandSize::Ymmword), None)), operand4: Some(Literal8(11)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 93, 70, 44, 205, 158, 183, 37, 42, 11], OperandSize::Qword)
}
| vperm2i128_1 | identifier_name |
vperm2i128.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn vperm2i128_1() {
run_test(&Instruction { mnemonic: Mnemonic::VPERM2I128, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM5)), operand3: Some(Direct(YMM1)), operand4: Some(Literal8(22)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 85, 70, 193, 22], OperandSize::Dword)
}
fn vperm2i128_2() {
run_test(&Instruction { mnemonic: Mnemonic::VPERM2I128, operand1: Some(Direct(YMM6)), operand2: Some(Direct(YMM2)), operand3: Some(IndirectScaledDisplaced(EDX, Two, 676667494, Some(OperandSize::Ymmword), None)), operand4: Some(Literal8(6)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 109, 70, 52, 85, 102, 32, 85, 40, 6], OperandSize::Dword)
}
fn vperm2i128_3() |
fn vperm2i128_4() {
run_test(&Instruction { mnemonic: Mnemonic::VPERM2I128, operand1: Some(Direct(YMM5)), operand2: Some(Direct(YMM4)), operand3: Some(IndirectScaledDisplaced(RCX, Eight, 707114910, Some(OperandSize::Ymmword), None)), operand4: Some(Literal8(11)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 93, 70, 44, 205, 158, 183, 37, 42, 11], OperandSize::Qword)
}
| {
run_test(&Instruction { mnemonic: Mnemonic::VPERM2I128, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM1)), operand3: Some(Direct(YMM5)), operand4: Some(Literal8(103)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 117, 70, 229, 103], OperandSize::Qword)
} | identifier_body |
mod.rs | #![macro_escape]
use serialize::json::{Json, ParserError};
use url::Url;
use std::collections::HashMap;
use std::io::IoError;
use std::local_data::Ref;
#[cfg(not(teepee))]
pub use self::http::Client;
#[cfg(teepee)]
pub use self::teepee::Client;
mod http;
mod teepee;
macro_rules! params {
{$($key:expr: $val:expr,)+} => (
{
use std::collections::HashMap;
let mut params: HashMap<String, String> = HashMap::new();
$(
params.insert($key.into_string(), $val.to_string());
)+
params
}
);
}
pub static USER_AGENT: &'static str = "rawr v0.1 (github.com/cybergeek94/rawr)";
local_data_key!(_modhash: String)
pub type JsonError = ParserError;
pub type JsonResult<T> = Result<T, JsonError>;
pub trait JsonClient {
/// Make a GET request, returning a Json response. The GET parameters should be in the passed URL.
/// Implementers should update the local modhash by using `set_modhash()`
fn get(&self, url: &Url) -> JsonResult<Json>;
/// Make a POST request, returning the JSON response
fn post(&self, url: &Url, params: HashMap<String, String>) -> JsonResult<Json>;
/// Make a POST request, including the value of `set_modhash` as the `X-Modhash` header
/// and the session cookie
fn post_modhash(&self, url: &Url, params: HashMap<String, String>, session: &str) -> JsonResult<Json>;
}
pub fn set_modhash(modhash: &str) {
_modhash.replace(Some(modhash.into_string()));
}
pub fn get_modhash() -> Option<Ref<String>> |
pub fn has_modhash() -> bool {
_modhash.get().is_some()
}
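// Typical flow (illustrative, not enforced by this module): a successful login
// response carries a modhash, the client stores it with `set_modhash`, and
// later authenticated requests go through `post_modhash`, which is expected to
// read the stored value back via `get_modhash`.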
/// Map a std::io::IoError to a serialize::json::IoError (ParserError variant)
pub fn err_io_to_json_io(err: IoError) -> ParserError {
super::serialize::json::IoError(err.kind, err.desc)
}
#[test]
fn test_params() {
let params = params!{
"hello": "goodbye",
"yes": "no",
};
drop(params);
}
| {
_modhash.get()
} | identifier_body |
mod.rs | #![macro_escape]
use serialize::json::{Json, ParserError};
use url::Url;
use std::collections::HashMap;
use std::io::IoError;
use std::local_data::Ref;
#[cfg(not(teepee))]
pub use self::http::Client;
#[cfg(teepee)]
pub use self::teepee::Client;
mod http;
mod teepee;
macro_rules! params {
{$($key:expr: $val:expr,)+} => (
{
use std::collections::HashMap;
let mut params: HashMap<String, String> = HashMap::new();
$(
params.insert($key.into_string(), $val.to_string());
)+
params
}
);
}
pub static USER_AGENT: &'static str = "rawr v0.1 (github.com/cybergeek94/rawr)";
local_data_key!(_modhash: String)
pub type JsonError = ParserError;
pub type JsonResult<T> = Result<T, JsonError>;
pub trait JsonClient {
/// Make a GET request, returning a Json response. The GET parameters should be in the passed URL.
/// Implementers should update the local modhash by using `set_modhash()`
fn get(&self, url: &Url) -> JsonResult<Json>;
/// Make a POST request, returning the JSON response
fn post(&self, url: &Url, params: HashMap<String, String>) -> JsonResult<Json>;
/// Make a POST request, including the value of `set_modhash` as the `X-Modhash` header
/// and the session cookie
fn post_modhash(&self, url: &Url, params: HashMap<String, String>, session: &str) -> JsonResult<Json>;
}
pub fn set_modhash(modhash: &str) {
_modhash.replace(Some(modhash.into_string()));
}
pub fn get_modhash() -> Option<Ref<String>> {
_modhash.get()
}
pub fn has_modhash() -> bool {
_modhash.get().is_some()
}
/// Map a std::io::IoError to a serialize::json::IoError (ParserError variant)
pub fn err_io_to_json_io(err: IoError) -> ParserError {
super::serialize::json::IoError(err.kind, err.desc)
} |
#[test]
fn test_params() {
let params = params!{
"hello": "goodbye",
"yes": "no",
};
drop(params);
} | random_line_split |
|
mod.rs | #![macro_escape]
use serialize::json::{Json, ParserError};
use url::Url;
use std::collections::HashMap;
use std::io::IoError;
use std::local_data::Ref;
#[cfg(not(teepee))]
pub use self::http::Client;
#[cfg(teepee)]
pub use self::teepee::Client;
mod http;
mod teepee;
macro_rules! params {
{$($key:expr: $val:expr,)+} => (
{
use std::collections::HashMap;
let mut params: HashMap<String, String> = HashMap::new();
$(
params.insert($key.into_string(), $val.to_string());
)+
params
}
);
}
pub static USER_AGENT: &'static str = "rawr v0.1 (github.com/cybergeek94/rawr)";
local_data_key!(_modhash: String)
pub type JsonError = ParserError;
pub type JsonResult<T> = Result<T, JsonError>;
pub trait JsonClient {
/// Make a GET request, returning a Json response. The GET parameters should be in the passed URL.
/// Implementers should update the local modhash by using `set_modhash()`
fn get(&self, url: &Url) -> JsonResult<Json>;
/// Make a POST request, returning the JSON response
fn post(&self, url: &Url, params: HashMap<String, String>) -> JsonResult<Json>;
/// Make a POST request, including the value of `set_modhash` as the `X-Modhash` header
/// and the session cookie
fn post_modhash(&self, url: &Url, params: HashMap<String, String>, session: &str) -> JsonResult<Json>;
}
pub fn | (modhash: &str) {
_modhash.replace(Some(modhash.into_string()));
}
pub fn get_modhash() -> Option<Ref<String>> {
_modhash.get()
}
pub fn has_modhash() -> bool {
_modhash.get().is_some()
}
/// Map a std::io::IoError to a serialize::json::IoError (ParserError variant)
pub fn err_io_to_json_io(err: IoError) -> ParserError {
super::serialize::json::IoError(err.kind, err.desc)
}
#[test]
fn test_params() {
let params = params!{
"hello": "goodbye",
"yes": "no",
};
drop(params);
}
| set_modhash | identifier_name |
single_thread_calculator.rs | use crate::{MonteCarloPiCalculator, gen_random};
use std::sync::Arc;
pub struct SingleThreadCalculator {}
impl SingleThreadCalculator {
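    // Deterministic generation: both coordinates of point `i` are derived from
    // the seed `i / n` by repeatedly applying `gen_random`. The x value is
    // taken after three applications and the y value after ten more, so the
    // two coordinates come from different points of the same chain.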
#[inline]
fn gen_randoms_static(n: usize) -> (Vec<f64>, Vec<f64>) {
let mut xs = vec![0.0; n];
let mut ys = vec![0.0; n];
for i in 0..n {
let mut t = gen_random(i as f64 / n as f64);
t = gen_random(t);
t = gen_random(t);
xs[i] = t;
for _ in 0..10 {
t = gen_random(t);
}
ys[i] = t;
}
return (xs, ys);
}
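    // Monte Carlo step: count how many points fall inside the unit circle
    // (x*x + y*y < 1.0). Assuming `gen_random` yields values in [0, 1), the
    // ratio `cnt / n` approximates π/4.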
#[inline]
#[allow(unused_parens)]
fn cal_static(xs: &Arc<Vec<f64>>, ys: &Arc<Vec<f64>>, n: usize) -> u64 {
let mut cnt = 0;
for i in 0..n {
if (xs[i] * xs[i] + ys[i] * ys[i] < 1.0) |
}
return cnt;
}
}
impl MonteCarloPiCalculator for SingleThreadCalculator {
#[inline]
fn new(_n: usize) -> SingleThreadCalculator {
return SingleThreadCalculator {};
}
#[inline]
fn gen_randoms(&self, n: usize) -> (Vec<f64>, Vec<f64>) {
return SingleThreadCalculator::gen_randoms_static(n);
}
#[inline]
fn cal(&self, xs: &Arc<Vec<f64>>, ys: &Arc<Vec<f64>>, n: usize) -> u64 {
return SingleThreadCalculator::cal_static(xs, ys, n);
}
} | {
cnt += 1;
} | conditional_block |
single_thread_calculator.rs | use crate::{MonteCarloPiCalculator, gen_random};
use std::sync::Arc;
pub struct SingleThreadCalculator {}
impl SingleThreadCalculator {
#[inline]
fn gen_randoms_static(n: usize) -> (Vec<f64>, Vec<f64>) {
let mut xs = vec![0.0; n];
let mut ys = vec![0.0; n];
for i in 0..n {
let mut t = gen_random(i as f64 / n as f64);
t = gen_random(t);
t = gen_random(t);
xs[i] = t;
for _ in 0..10 {
t = gen_random(t);
}
ys[i] = t;
}
return (xs, ys);
}
#[inline]
#[allow(unused_parens)]
fn | (xs: &Arc<Vec<f64>>, ys: &Arc<Vec<f64>>, n: usize) -> u64 {
let mut cnt = 0;
for i in 0..n {
if (xs[i] * xs[i] + ys[i] * ys[i] < 1.0) {
cnt += 1;
}
}
return cnt;
}
}
impl MonteCarloPiCalculator for SingleThreadCalculator {
#[inline]
fn new(_n: usize) -> SingleThreadCalculator {
return SingleThreadCalculator {};
}
#[inline]
fn gen_randoms(&self, n: usize) -> (Vec<f64>, Vec<f64>) {
return SingleThreadCalculator::gen_randoms_static(n);
}
#[inline]
fn cal(&self, xs: &Arc<Vec<f64>>, ys: &Arc<Vec<f64>>, n: usize) -> u64 {
return SingleThreadCalculator::cal_static(xs, ys, n);
}
} | cal_static | identifier_name |
single_thread_calculator.rs | use crate::{MonteCarloPiCalculator, gen_random};
use std::sync::Arc;
pub struct SingleThreadCalculator {}
impl SingleThreadCalculator {
#[inline]
fn gen_randoms_static(n: usize) -> (Vec<f64>, Vec<f64>) {
let mut xs = vec![0.0; n];
let mut ys = vec![0.0; n];
for i in 0..n {
let mut t = gen_random(i as f64 / n as f64);
t = gen_random(t);
t = gen_random(t);
xs[i] = t;
for _ in 0..10 {
t = gen_random(t);
}
ys[i] = t;
}
return (xs, ys);
}
#[inline]
#[allow(unused_parens)]
fn cal_static(xs: &Arc<Vec<f64>>, ys: &Arc<Vec<f64>>, n: usize) -> u64 |
}
impl MonteCarloPiCalculator for SingleThreadCalculator {
#[inline]
fn new(_n: usize) -> SingleThreadCalculator {
return SingleThreadCalculator {};
}
#[inline]
fn gen_randoms(&self, n: usize) -> (Vec<f64>, Vec<f64>) {
return SingleThreadCalculator::gen_randoms_static(n);
}
#[inline]
fn cal(&self, xs: &Arc<Vec<f64>>, ys: &Arc<Vec<f64>>, n: usize) -> u64 {
return SingleThreadCalculator::cal_static(xs, ys, n);
}
} | {
let mut cnt = 0;
for i in 0..n {
if (xs[i] * xs[i] + ys[i] * ys[i] < 1.0) {
cnt += 1;
}
}
return cnt;
} | identifier_body |
single_thread_calculator.rs | use crate::{MonteCarloPiCalculator, gen_random};
use std::sync::Arc;
pub struct SingleThreadCalculator {}
impl SingleThreadCalculator {
#[inline]
fn gen_randoms_static(n: usize) -> (Vec<f64>, Vec<f64>) {
let mut xs = vec![0.0; n];
let mut ys = vec![0.0; n];
for i in 0..n {
let mut t = gen_random(i as f64 / n as f64);
t = gen_random(t);
t = gen_random(t);
xs[i] = t;
for _ in 0..10 {
t = gen_random(t);
}
ys[i] = t;
}
return (xs, ys);
} | fn cal_static(xs: &Arc<Vec<f64>>, ys: &Arc<Vec<f64>>, n: usize) -> u64 {
let mut cnt = 0;
for i in 0..n {
if (xs[i] * xs[i] + ys[i] * ys[i] < 1.0) {
cnt += 1;
}
}
return cnt;
}
}
impl MonteCarloPiCalculator for SingleThreadCalculator {
#[inline]
fn new(_n: usize) -> SingleThreadCalculator {
return SingleThreadCalculator {};
}
#[inline]
fn gen_randoms(&self, n: usize) -> (Vec<f64>, Vec<f64>) {
return SingleThreadCalculator::gen_randoms_static(n);
}
#[inline]
fn cal(&self, xs: &Arc<Vec<f64>>, ys: &Arc<Vec<f64>>, n: usize) -> u64 {
return SingleThreadCalculator::cal_static(xs, ys, n);
}
} |
#[inline]
#[allow(unused_parens)] | random_line_split |
square.rs | use malachite_base::num::arithmetic::traits::UnsignedAbs;
use malachite_base::num::basic::floats::PrimitiveFloat;
use malachite_base::num::basic::integers::PrimitiveInt;
use malachite_base::num::basic::signeds::PrimitiveSigned;
use malachite_base::num::basic::unsigneds::PrimitiveUnsigned;
use malachite_base::num::conversion::traits::WrappingFrom;
use malachite_base::num::float::NiceFloat;
use malachite_base_test_util::generators::{
primitive_float_gen, signed_gen_var_10, unsigned_gen_var_21,
};
#[test]
fn test_square() {
fn test<T: PrimitiveInt>(x: T, out: T) {
assert_eq!(x.square(), out);
let mut x = x;
x.square_assign();
assert_eq!(x, out);
}
test::<u8>(0, 0);
test::<i16>(1, 1);
test::<u32>(2, 4);
test::<i64>(3, 9);
test::<u128>(10, 100);
test::<isize>(123, 15129);
test::<u32>(1000, 1000000);
test::<i16>(-1, 1);
test::<i32>(-2, 4);
test::<i64>(-3, 9);
test::<i128>(-10, 100);
test::<isize>(-123, 15129);
test::<i32>(-1000, 1000000);
}
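// The helpers below check, on generated inputs, that squaring agrees with
// `pow(2)`, that `checked_sqrt` of the square recovers the original value (the
// absolute value for signed inputs), that `checked_log_base(x)` of the square
// is 2 for x > 1, and that negating a float does not change its square.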
fn square_properties_helper_unsigned<T: PrimitiveUnsigned>() {
unsigned_gen_var_21::<T>().test_properties(|x| {
let mut square = x;
square.square_assign();
assert_eq!(square, x.square());
assert_eq!(square, x.pow(2));
assert_eq!(square.checked_sqrt(), Some(x));
if x > T::ONE {
assert_eq!(square.checked_log_base(x), Some(2));
}
});
}
fn square_properties_helper_signed<
U: PrimitiveUnsigned + WrappingFrom<S>,
S: PrimitiveSigned + UnsignedAbs<Output = U> + WrappingFrom<U>,
>() {
signed_gen_var_10::<U, S>().test_properties(|x| {
let mut square = x;
square.square_assign();
assert_eq!(square, x.square());
assert_eq!(square, x.pow(2));
        if x != S::MIN {
assert_eq!((-x).square(), square);
}
assert_eq!(
U::wrapping_from(square).checked_sqrt().unwrap(),
x.unsigned_abs()
);
});
}
fn square_properties_helper_primitive_float<T: PrimitiveFloat>() |
#[test]
fn square_properties() {
apply_fn_to_unsigneds!(square_properties_helper_unsigned);
apply_fn_to_unsigned_signed_pairs!(square_properties_helper_signed);
apply_fn_to_primitive_floats!(square_properties_helper_primitive_float);
}
| {
primitive_float_gen::<T>().test_properties(|x| {
let mut square = x;
square.square_assign();
assert_eq!(NiceFloat(square), NiceFloat(x.square()));
assert_eq!(NiceFloat(square), NiceFloat(x.pow(2)));
assert_eq!(NiceFloat((-x).square()), NiceFloat(square));
});
} | identifier_body |
square.rs | use malachite_base::num::arithmetic::traits::UnsignedAbs;
use malachite_base::num::basic::floats::PrimitiveFloat;
use malachite_base::num::basic::integers::PrimitiveInt;
use malachite_base::num::basic::signeds::PrimitiveSigned;
use malachite_base::num::basic::unsigneds::PrimitiveUnsigned;
use malachite_base::num::conversion::traits::WrappingFrom;
use malachite_base::num::float::NiceFloat;
use malachite_base_test_util::generators::{
primitive_float_gen, signed_gen_var_10, unsigned_gen_var_21,
};
#[test]
fn test_square() {
fn test<T: PrimitiveInt>(x: T, out: T) {
assert_eq!(x.square(), out);
let mut x = x;
x.square_assign();
assert_eq!(x, out);
}
test::<u8>(0, 0);
test::<i16>(1, 1);
test::<u32>(2, 4);
test::<i64>(3, 9);
test::<u128>(10, 100);
test::<isize>(123, 15129);
test::<u32>(1000, 1000000);
test::<i16>(-1, 1);
test::<i32>(-2, 4);
test::<i64>(-3, 9);
test::<i128>(-10, 100);
test::<isize>(-123, 15129);
test::<i32>(-1000, 1000000);
}
fn square_properties_helper_unsigned<T: PrimitiveUnsigned>() {
unsigned_gen_var_21::<T>().test_properties(|x| {
let mut square = x;
square.square_assign();
assert_eq!(square, x.square());
assert_eq!(square, x.pow(2));
assert_eq!(square.checked_sqrt(), Some(x));
if x > T::ONE {
assert_eq!(square.checked_log_base(x), Some(2));
}
});
}
fn square_properties_helper_signed<
U: PrimitiveUnsigned + WrappingFrom<S>,
S: PrimitiveSigned + UnsignedAbs<Output = U> + WrappingFrom<U>,
>() {
signed_gen_var_10::<U, S>().test_properties(|x| {
let mut square = x;
square.square_assign();
assert_eq!(square, x.square());
assert_eq!(square, x.pow(2));
if x!= S::MIN |
assert_eq!(
U::wrapping_from(square).checked_sqrt().unwrap(),
x.unsigned_abs()
);
});
}
fn square_properties_helper_primitive_float<T: PrimitiveFloat>() {
primitive_float_gen::<T>().test_properties(|x| {
let mut square = x;
square.square_assign();
assert_eq!(NiceFloat(square), NiceFloat(x.square()));
assert_eq!(NiceFloat(square), NiceFloat(x.pow(2)));
assert_eq!(NiceFloat((-x).square()), NiceFloat(square));
});
}
#[test]
fn square_properties() {
apply_fn_to_unsigneds!(square_properties_helper_unsigned);
apply_fn_to_unsigned_signed_pairs!(square_properties_helper_signed);
apply_fn_to_primitive_floats!(square_properties_helper_primitive_float);
}
| {
assert_eq!((-x).square(), square);
} | conditional_block |
square.rs | use malachite_base::num::arithmetic::traits::UnsignedAbs;
use malachite_base::num::basic::floats::PrimitiveFloat;
use malachite_base::num::basic::integers::PrimitiveInt;
use malachite_base::num::basic::signeds::PrimitiveSigned;
use malachite_base::num::basic::unsigneds::PrimitiveUnsigned;
use malachite_base::num::conversion::traits::WrappingFrom;
use malachite_base::num::float::NiceFloat;
use malachite_base_test_util::generators::{
primitive_float_gen, signed_gen_var_10, unsigned_gen_var_21,
};
#[test]
fn test_square() {
fn test<T: PrimitiveInt>(x: T, out: T) {
assert_eq!(x.square(), out);
let mut x = x;
x.square_assign();
assert_eq!(x, out);
}
test::<u8>(0, 0);
test::<i16>(1, 1);
test::<u32>(2, 4);
test::<i64>(3, 9);
test::<u128>(10, 100);
test::<isize>(123, 15129);
test::<u32>(1000, 1000000);
test::<i16>(-1, 1);
test::<i32>(-2, 4);
test::<i64>(-3, 9);
test::<i128>(-10, 100);
test::<isize>(-123, 15129);
test::<i32>(-1000, 1000000);
}
fn square_properties_helper_unsigned<T: PrimitiveUnsigned>() {
unsigned_gen_var_21::<T>().test_properties(|x| {
let mut square = x;
square.square_assign();
assert_eq!(square, x.square());
assert_eq!(square, x.pow(2));
assert_eq!(square.checked_sqrt(), Some(x));
if x > T::ONE {
assert_eq!(square.checked_log_base(x), Some(2));
}
});
}
fn square_properties_helper_signed<
U: PrimitiveUnsigned + WrappingFrom<S>,
S: PrimitiveSigned + UnsignedAbs<Output = U> + WrappingFrom<U>,
>() {
signed_gen_var_10::<U, S>().test_properties(|x| {
let mut square = x;
square.square_assign();
assert_eq!(square, x.square());
assert_eq!(square, x.pow(2));
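// Negating S::MIN would overflow, so the (-x) symmetry check below skips that value.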
if x!= S::MIN {
assert_eq!((-x).square(), square);
}
assert_eq!(
U::wrapping_from(square).checked_sqrt().unwrap(),
x.unsigned_abs()
);
});
}
fn | <T: PrimitiveFloat>() {
primitive_float_gen::<T>().test_properties(|x| {
let mut square = x;
square.square_assign();
assert_eq!(NiceFloat(square), NiceFloat(x.square()));
assert_eq!(NiceFloat(square), NiceFloat(x.pow(2)));
assert_eq!(NiceFloat((-x).square()), NiceFloat(square));
});
}
#[test]
fn square_properties() {
apply_fn_to_unsigneds!(square_properties_helper_unsigned);
apply_fn_to_unsigned_signed_pairs!(square_properties_helper_signed);
apply_fn_to_primitive_floats!(square_properties_helper_primitive_float);
}
| square_properties_helper_primitive_float | identifier_name |
square.rs | use malachite_base::num::arithmetic::traits::UnsignedAbs;
use malachite_base::num::basic::floats::PrimitiveFloat;
use malachite_base::num::basic::integers::PrimitiveInt;
use malachite_base::num::basic::signeds::PrimitiveSigned;
use malachite_base::num::basic::unsigneds::PrimitiveUnsigned;
use malachite_base::num::conversion::traits::WrappingFrom;
use malachite_base::num::float::NiceFloat;
use malachite_base_test_util::generators::{
primitive_float_gen, signed_gen_var_10, unsigned_gen_var_21,
};
#[test]
fn test_square() {
fn test<T: PrimitiveInt>(x: T, out: T) {
assert_eq!(x.square(), out);
let mut x = x;
x.square_assign();
assert_eq!(x, out);
}
test::<u8>(0, 0);
test::<i16>(1, 1);
test::<u32>(2, 4);
test::<i64>(3, 9);
test::<u128>(10, 100);
test::<isize>(123, 15129);
test::<u32>(1000, 1000000);
test::<i16>(-1, 1);
test::<i32>(-2, 4);
test::<i64>(-3, 9);
test::<i128>(-10, 100);
test::<isize>(-123, 15129);
test::<i32>(-1000, 1000000);
}
fn square_properties_helper_unsigned<T: PrimitiveUnsigned>() {
unsigned_gen_var_21::<T>().test_properties(|x| {
let mut square = x;
square.square_assign();
assert_eq!(square, x.square());
assert_eq!(square, x.pow(2));
assert_eq!(square.checked_sqrt(), Some(x));
if x > T::ONE { | });
}
fn square_properties_helper_signed<
U: PrimitiveUnsigned + WrappingFrom<S>,
S: PrimitiveSigned + UnsignedAbs<Output = U> + WrappingFrom<U>,
>() {
signed_gen_var_10::<U, S>().test_properties(|x| {
let mut square = x;
square.square_assign();
assert_eq!(square, x.square());
assert_eq!(square, x.pow(2));
if x!= S::MIN {
assert_eq!((-x).square(), square);
}
assert_eq!(
U::wrapping_from(square).checked_sqrt().unwrap(),
x.unsigned_abs()
);
});
}
fn square_properties_helper_primitive_float<T: PrimitiveFloat>() {
primitive_float_gen::<T>().test_properties(|x| {
let mut square = x;
square.square_assign();
assert_eq!(NiceFloat(square), NiceFloat(x.square()));
assert_eq!(NiceFloat(square), NiceFloat(x.pow(2)));
assert_eq!(NiceFloat((-x).square()), NiceFloat(square));
});
}
#[test]
fn square_properties() {
apply_fn_to_unsigneds!(square_properties_helper_unsigned);
apply_fn_to_unsigned_signed_pairs!(square_properties_helper_signed);
apply_fn_to_primitive_floats!(square_properties_helper_primitive_float);
} | assert_eq!(square.checked_log_base(x), Some(2));
} | random_line_split |
token.rs | use std;
/// A token.
#[derive(Clone,Debug,PartialEq,Eq)]
pub enum Token
{
/// A word.
Word(String),
/// A string literal.
String(String),
/// An integer literal.
// TODO: use BigNum
Integer(i64),
/// A comment.
///
/// If the comment is inline, it existed on the same line
/// as the previous statement.
///
/// For example
///
/// ``` ir
/// add 2, 4 ; inline comment goes here
/// ```
Comment {
inline: bool,
text: String,
},
/// A symbol.
Symbol(String),
/// A new line.
NewLine,
}
impl Token
{
pub fn comma() -> Self { Token::symbol(",") }
pub fn colon() -> Self { Token::symbol(":") }
pub fn left_parenthesis() -> Self { Token::symbol("(") }
pub fn right_parenthesis() -> Self { Token::symbol(")") }
pub fn at_sign() -> Self { Token::symbol("@") }
pub fn percent_sign() -> Self { Token::symbol("%") }
pub fn left_curly_brace() -> Self { Token::symbol("{") }
pub fn right_curly_brace() -> Self { Token::symbol("}") }
pub fn equal_sign() -> Self { Token::symbol("=") }
pub fn function_arrow() -> Self { Token::symbol("->") }
pub fn boolean_true() -> Self { Token::word("true") }
pub fn boolean_false() -> Self { Token::word("false") }
pub fn word<S>(word: S) -> Self
where S: Into<String> {
Token::Word(word.into())
}
pub fn string<S>(string: S) -> Self
where S: Into<String> {
Token::String(string.into())
}
pub fn integer<I>(integer: I) -> Self
where I: Into<i64> {
Token::Integer(integer.into())
}
pub fn comment<S>(text: S) -> Self
where S: Into<String> {
Token::Comment {
inline: false,
text: text.into(),
}
}
pub fn inline_comment<S>(text: S) -> Self
where S: Into<String> {
Token::Comment {
inline: true,
text: text.into(),
}
}
pub fn symbol<S>(symbol: S) -> Self
where S: Into<String> {
Token::Symbol(symbol.into())
}
pub fn new_line() -> Self {
Token::NewLine
}
pub fn is_word(&self) -> bool {
if let Token::Word(..) = *self { true } else { false }
}
pub fn is_string(&self) -> bool {
if let Token::String(..) = *self { true } else { false }
} | pub fn is_symbol(&self) -> bool {
if let Token::Symbol(..) = *self { true } else { false }
}
pub fn is_comment(&self) -> bool {
if let Token::Comment {.. } = *self { true } else { false }
}
pub fn is_new_line(&self) -> bool {
if let Token::NewLine = *self { true } else { false }
}
pub fn is_boolean(&self) -> bool {
self == &Token::boolean_true() ||
self == &Token::boolean_false()
}
}
impl std::fmt::Display for Token
{
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
&Token::Word(ref w) => write!(fmt, "{}", w),
&Token::String(ref s) => write!(fmt, "\"{}\"", s),
&Token::Integer(ref i) => write!(fmt, "{}", i),
&Token::Symbol(ref s) => write!(fmt, "{}", s),
&Token::Comment { ref text,.. } => write!(fmt, " {}", text),
&Token::NewLine => write!(fmt, "new line"),
}
}
} |
pub fn is_integer(&self) -> bool {
if let Token::Integer(..) = *self { true } else { false }
}
| random_line_split |
token.rs | use std;
/// A token.
#[derive(Clone,Debug,PartialEq,Eq)]
pub enum Token
{
/// A word.
Word(String),
/// A string literal.
String(String),
/// An integer literal.
// TODO: use BigNum
Integer(i64),
/// A comment.
///
/// If the comment is inline, it existed on the same line
/// as the previous statement.
///
/// For example
///
/// ``` ir
/// add 2, 4 ; inline comment goes here
/// ```
Comment {
inline: bool,
text: String,
},
/// A symbol.
Symbol(String),
/// A new line.
NewLine,
}
impl Token
{
pub fn comma() -> Self { Token::symbol(",") }
pub fn colon() -> Self { Token::symbol(":") }
pub fn left_parenthesis() -> Self { Token::symbol("(") }
pub fn | () -> Self { Token::symbol(")") }
pub fn at_sign() -> Self { Token::symbol("@") }
pub fn percent_sign() -> Self { Token::symbol("%") }
pub fn left_curly_brace() -> Self { Token::symbol("{") }
pub fn right_curly_brace() -> Self { Token::symbol("}") }
pub fn equal_sign() -> Self { Token::symbol("=") }
pub fn function_arrow() -> Self { Token::symbol("->") }
pub fn boolean_true() -> Self { Token::word("true") }
pub fn boolean_false() -> Self { Token::word("false") }
pub fn word<S>(word: S) -> Self
where S: Into<String> {
Token::Word(word.into())
}
pub fn string<S>(string: S) -> Self
where S: Into<String> {
Token::String(string.into())
}
pub fn integer<I>(integer: I) -> Self
where I: Into<i64> {
Token::Integer(integer.into())
}
pub fn comment<S>(text: S) -> Self
where S: Into<String> {
Token::Comment {
inline: false,
text: text.into(),
}
}
pub fn inline_comment<S>(text: S) -> Self
where S: Into<String> {
Token::Comment {
inline: true,
text: text.into(),
}
}
pub fn symbol<S>(symbol: S) -> Self
where S: Into<String> {
Token::Symbol(symbol.into())
}
pub fn new_line() -> Self {
Token::NewLine
}
pub fn is_word(&self) -> bool {
if let Token::Word(..) = *self { true } else { false }
}
pub fn is_string(&self) -> bool {
if let Token::String(..) = *self { true } else { false }
}
pub fn is_integer(&self) -> bool {
if let Token::Integer(..) = *self { true } else { false }
}
pub fn is_symbol(&self) -> bool {
if let Token::Symbol(..) = *self { true } else { false }
}
pub fn is_comment(&self) -> bool {
if let Token::Comment {.. } = *self { true } else { false }
}
pub fn is_new_line(&self) -> bool {
if let Token::NewLine = *self { true } else { false }
}
pub fn is_boolean(&self) -> bool {
self == &Token::boolean_true() ||
self == &Token::boolean_false()
}
}
impl std::fmt::Display for Token
{
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
&Token::Word(ref w) => write!(fmt, "{}", w),
&Token::String(ref s) => write!(fmt, "\"{}\"", s),
&Token::Integer(ref i) => write!(fmt, "{}", i),
&Token::Symbol(ref s) => write!(fmt, "{}", s),
&Token::Comment { ref text,.. } => write!(fmt, " {}", text),
&Token::NewLine => write!(fmt, "new line"),
}
}
}
| right_parenthesis | identifier_name |
token.rs | use std;
/// A token.
#[derive(Clone,Debug,PartialEq,Eq)]
pub enum Token
{
/// A word.
Word(String),
/// A string literal.
String(String),
/// An integer literal.
// TODO: use BigNum
Integer(i64),
/// A comment.
///
/// If the comment is inline, it existed on the same line
/// as the previous statement.
///
/// For example
///
/// ``` ir
/// add 2, 4 ; inline comment goes here
/// ```
Comment {
inline: bool,
text: String,
},
/// A symbol.
Symbol(String),
/// A new line.
NewLine,
}
impl Token
{
pub fn comma() -> Self { Token::symbol(",") }
pub fn colon() -> Self { Token::symbol(":") }
pub fn left_parenthesis() -> Self { Token::symbol("(") }
pub fn right_parenthesis() -> Self { Token::symbol(")") }
pub fn at_sign() -> Self { Token::symbol("@") }
pub fn percent_sign() -> Self { Token::symbol("%") }
pub fn left_curly_brace() -> Self { Token::symbol("{") }
pub fn right_curly_brace() -> Self { Token::symbol("}") }
pub fn equal_sign() -> Self { Token::symbol("=") }
pub fn function_arrow() -> Self { Token::symbol("->") }
pub fn boolean_true() -> Self { Token::word("true") }
pub fn boolean_false() -> Self { Token::word("false") }
pub fn word<S>(word: S) -> Self
where S: Into<String> {
Token::Word(word.into())
}
pub fn string<S>(string: S) -> Self
where S: Into<String> {
Token::String(string.into())
}
pub fn integer<I>(integer: I) -> Self
where I: Into<i64> {
Token::Integer(integer.into())
}
pub fn comment<S>(text: S) -> Self
where S: Into<String> {
Token::Comment {
inline: false,
text: text.into(),
}
}
pub fn inline_comment<S>(text: S) -> Self
where S: Into<String> {
Token::Comment {
inline: true,
text: text.into(),
}
}
pub fn symbol<S>(symbol: S) -> Self
where S: Into<String> {
Token::Symbol(symbol.into())
}
pub fn new_line() -> Self {
Token::NewLine
}
pub fn is_word(&self) -> bool {
if let Token::Word(..) = *self { true } else { false }
}
pub fn is_string(&self) -> bool {
if let Token::String(..) = *self { true } else |
}
pub fn is_integer(&self) -> bool {
if let Token::Integer(..) = *self { true } else { false }
}
pub fn is_symbol(&self) -> bool {
if let Token::Symbol(..) = *self { true } else { false }
}
pub fn is_comment(&self) -> bool {
if let Token::Comment {.. } = *self { true } else { false }
}
pub fn is_new_line(&self) -> bool {
if let Token::NewLine = *self { true } else { false }
}
pub fn is_boolean(&self) -> bool {
self == &Token::boolean_true() ||
self == &Token::boolean_false()
}
}
impl std::fmt::Display for Token
{
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
&Token::Word(ref w) => write!(fmt, "{}", w),
&Token::String(ref s) => write!(fmt, "\"{}\"", s),
&Token::Integer(ref i) => write!(fmt, "{}", i),
&Token::Symbol(ref s) => write!(fmt, "{}", s),
&Token::Comment { ref text,.. } => write!(fmt, " {}", text),
&Token::NewLine => write!(fmt, "new line"),
}
}
}
| { false } | conditional_block |
arguments.rs | use thiserror::Error;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Comm {
Type1,
Type2,
None,
}
/// A type of paired token
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Field {
/// Processes (ex: `$(..)`)
Proc,
/// Literal array (ex: `[ 1.. 3 ]`)
Array,
/// Brace expansion (ex: `{a,b,c,d}`)
Braces,
}
/// The depth of various paired structures
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Levels {
/// Parentheses
parens: u8,
/// Array literals
array: u8,
/// Braces
braces: u8,
}
/// Error with paired tokens
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Error)]
pub enum LevelsError {
/// Unmatched opening parenthese
#[error("unmatched opening parenthese")]
UnmatchedParen,
/// Unmatched opening bracket
#[error("unmatched opening bracket")]
UnmatchedBracket,
/// Unmatched opening brace
#[error("unmatched opening brace")]
UnmatchedBrace,
/// Extra closing parenthese(s)
#[error("extra closing parenthese(s)")]
ExtraParen,
/// Extra closing bracket(s)
#[error("extra closing bracket(s)")]
ExtraBracket,
/// Extra closing brace(s)
#[error("extra closing brace(s)")]
ExtraBrace,
}
impl Levels {
/// Add a new depth level
pub fn up(&mut self, field: Field) {
let level = match field {
Field::Proc => &mut self.parens,
Field::Array => &mut self.array,
Field::Braces => &mut self.braces,
};
*level += 1;
}
/// Close paired tokens
pub fn down(&mut self, field: Field) -> Result<(), LevelsError> {
let level = match field {
Field::Proc if self.parens > 0 => &mut self.parens,
Field::Array if self.array > 0 => &mut self.array,
Field::Braces if self.braces > 0 => &mut self.braces,
// errors
Field::Proc => return Err(LevelsError::ExtraParen),
Field::Array => return Err(LevelsError::ExtraBracket),
Field::Braces => return Err(LevelsError::ExtraBrace),
};
*level -= 1;
Ok(())
}
/// Check if all parens were matched
pub const fn are_rooted(self) -> bool {
self.parens == 0 && self.array == 0 && self.braces == 0
}
/// Check if all is ok
pub const fn check(self) -> Result<(), LevelsError> {
if self.parens > 0 {
Err(LevelsError::UnmatchedParen)
} else if self.array > 0 {
Err(LevelsError::UnmatchedBracket)
} else if self.braces > 0 {
Err(LevelsError::UnmatchedBrace)
} else {
Ok(())
}
}
}
/// An efficient `Iterator` structure for splitting arguments
#[derive(Debug)]
pub struct ArgumentSplitter<'a> {
data: &'a str,
/// Number of bytes read
read: usize,
comm: Comm,
quotes: bool,
variab: bool,
array: bool,
method: bool,
}
impl<'a> ArgumentSplitter<'a> {
/// Create a new argument splitter based on the provided data
pub const fn new(data: &'a str) -> ArgumentSplitter<'a> {
ArgumentSplitter {
data,
read: 0,
comm: Comm::None,
quotes: false,
variab: false,
array: false,
method: false,
}
}
fn | <B: Iterator<Item = u8>>(&mut self, bytes: &mut B) {
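// Advance past a single-quoted span, honoring backslash escapes, until the closing quote.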
while let Some(character) = bytes.next() {
match character {
b'\\' => {
self.read += 2;
let _ = bytes.next();
continue;
}
b'\'' => break,
_ => (),
}
self.read += 1;
}
}
}
impl<'a> Iterator for ArgumentSplitter<'a> {
type Item = &'a str;
fn next(&mut self) -> Option<&'a str> {
let data = self.data.as_bytes();
while let Some(&b' ') = data.get(self.read) {
self.read += 1;
}
let start = self.read;
let mut levels = Levels::default();
let mut bytes = data.iter().skip(self.read).copied();
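// Scan until an unquoted, root-level space; open parens, brackets, braces, quotes, or a method call keep the argument going.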
while let Some(character) = bytes.next() {
match character {
// Skip the next byte.
b'\\' => {
self.read += 2;
let _ = bytes.next();
continue;
}
// Disable COMM_1 and enable COMM_2 + ARRAY.
b'@' => {
self.array = true;
self.comm = Comm::Type2;
self.read += 1;
continue;
}
// Disable COMM_2 and enable COMM_1 + VARIAB.
b'$' => {
self.variab = true;
self.comm = Comm::Type1;
self.read += 1;
continue;
}
b'[' => levels.up(Field::Array),
b']' => {
let _ = levels.down(Field::Array);
}
b'{' => levels.up(Field::Braces),
b'}' => {
// TODO: handle errors here
let _ = levels.down(Field::Braces);
}
b'(' => {
// Disable VARIAB + ARRAY and enable METHOD.
// if variab or array are set
if self.array || self.variab {
self.array = false;
self.variab = false;
self.method = true;
}
levels.up(Field::Proc);
}
b')' => {
self.method = false;
let _ = levels.down(Field::Proc);
}
// Toggle double quote rules.
b'"' => {
self.quotes ^= true;
}
// Loop through characters until single quote rules are completed.
b'\'' if!self.quotes => {
self.scan_singlequotes(&mut bytes);
self.read += 2;
continue;
}
// Break from the loop once a root-level space is found.
b' ' => {
if!self.quotes &&!self.method && levels.are_rooted() {
break;
}
}
_ => (),
}
self.read += 1;
// disable COMM_1 and COMM_2
self.comm = Comm::None;
}
if start == self.read {
None
} else {
Some(&self.data[start..self.read])
}
}
}
#[cfg(test)]
mod tests {
use super::*;
fn compare(input: &str, expected: Vec<&str>) {
let arguments = ArgumentSplitter::new(input).collect::<Vec<&str>>();
for (left, right) in expected.iter().zip(arguments.iter()) {
assert_eq!(left, right);
}
assert_eq!(expected.len(), arguments.len());
}
#[test]
fn methods() {
let input = "echo $join(array, ', ') @split(var, ', ')";
let expected = vec!["echo", "$join(array, ', ')", "@split(var, ', ')"];
compare(input, expected);
}
#[test]
fn processes() {
let input = "echo $(echo one $(echo two)) @[echo one @[echo two]]";
let expected = vec!["echo", "$(echo one $(echo two))", "@[echo one @[echo two]]"];
compare(input, expected);
}
#[test]
fn arrays() {
let input = "echo [ one two @[echo three four] five ] [ six seven ]";
let expected = vec!["echo", "[ one two @[echo three four] five ]", "[ six seven ]"];
compare(input, expected);
}
#[test]
fn quotes() {
let input = "echo 'one two \"three four\"' \"five six 'seven eight'\"";
let expected = vec!["echo", "'one two \"three four\"'", "\"five six 'seven eight'\""];
compare(input, expected);
}
}
| scan_singlequotes | identifier_name |
arguments.rs | use thiserror::Error;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Comm {
Type1,
Type2,
None,
}
/// A type of paired token
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Field {
/// Processes (ex: `$(..)`)
Proc,
/// Literal array (ex: `[ 1.. 3 ]`)
Array,
/// Brace expansion (ex: `{a,b,c,d}`)
Braces,
}
/// The depth of various paired structures
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Levels {
/// Parentheses
parens: u8,
/// Array literals
array: u8,
/// Braces
braces: u8,
}
/// Error with paired tokens
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Error)]
pub enum LevelsError {
/// Unmatched opening parenthese
#[error("unmatched opening parenthese")]
UnmatchedParen,
/// Unmatched opening bracket
#[error("unmatched opening bracket")]
UnmatchedBracket,
/// Unmatched opening brace
#[error("unmatched opening brace")]
UnmatchedBrace,
/// Extra closing parenthese(s)
#[error("extra closing parenthese(s)")]
ExtraParen,
/// Extra closing bracket(s)
#[error("extra closing bracket(s)")]
ExtraBracket,
/// Extra closing brace(s)
#[error("extra closing brace(s)")]
ExtraBrace,
}
impl Levels {
/// Add a new depth level
pub fn up(&mut self, field: Field) {
let level = match field {
Field::Proc => &mut self.parens,
Field::Array => &mut self.array,
Field::Braces => &mut self.braces,
};
*level += 1;
}
/// Close paired tokens
pub fn down(&mut self, field: Field) -> Result<(), LevelsError> {
let level = match field {
Field::Proc if self.parens > 0 => &mut self.parens,
Field::Array if self.array > 0 => &mut self.array,
Field::Braces if self.braces > 0 => &mut self.braces,
// errors
Field::Proc => return Err(LevelsError::ExtraParen),
Field::Array => return Err(LevelsError::ExtraBracket),
Field::Braces => return Err(LevelsError::ExtraBrace),
};
*level -= 1;
Ok(())
}
/// Check if all parens were matched
pub const fn are_rooted(self) -> bool {
self.parens == 0 && self.array == 0 && self.braces == 0
}
/// Check if all is ok
pub const fn check(self) -> Result<(), LevelsError> {
if self.parens > 0 {
Err(LevelsError::UnmatchedParen)
} else if self.array > 0 {
Err(LevelsError::UnmatchedBracket)
} else if self.braces > 0 {
Err(LevelsError::UnmatchedBrace)
} else {
Ok(())
}
}
}
/// An efficient `Iterator` structure for splitting arguments
#[derive(Debug)]
pub struct ArgumentSplitter<'a> {
data: &'a str,
/// Number of bytes read
read: usize,
comm: Comm,
quotes: bool,
variab: bool,
array: bool,
method: bool,
}
impl<'a> ArgumentSplitter<'a> {
/// Create a new argument splitter based on the provided data
pub const fn new(data: &'a str) -> ArgumentSplitter<'a> {
ArgumentSplitter {
data,
read: 0,
comm: Comm::None,
quotes: false,
variab: false,
array: false,
method: false,
}
}
fn scan_singlequotes<B: Iterator<Item = u8>>(&mut self, bytes: &mut B) {
while let Some(character) = bytes.next() {
match character {
b'\\' => {
self.read += 2;
let _ = bytes.next();
continue;
}
b'\'' => break,
_ => (),
}
self.read += 1;
}
}
}
impl<'a> Iterator for ArgumentSplitter<'a> {
type Item = &'a str;
fn next(&mut self) -> Option<&'a str> {
let data = self.data.as_bytes();
while let Some(&b' ') = data.get(self.read) {
self.read += 1;
}
let start = self.read;
let mut levels = Levels::default();
let mut bytes = data.iter().skip(self.read).copied();
while let Some(character) = bytes.next() {
match character {
// Skip the next byte.
b'\\' => {
self.read += 2;
let _ = bytes.next();
continue;
}
// Disable COMM_1 and enable COMM_2 + ARRAY.
b'@' => {
self.array = true;
self.comm = Comm::Type2;
self.read += 1;
continue;
}
// Disable COMM_2 and enable COMM_1 + VARIAB.
b'$' => {
self.variab = true;
self.comm = Comm::Type1;
self.read += 1;
continue;
}
b'[' => levels.up(Field::Array),
b']' => {
let _ = levels.down(Field::Array);
}
b'{' => levels.up(Field::Braces),
b'}' => {
// TODO: handle errors here
let _ = levels.down(Field::Braces);
}
b'(' => {
// Disable VARIAB + ARRAY and enable METHOD.
// if variab or array are set
if self.array || self.variab {
self.array = false;
self.variab = false;
self.method = true;
}
levels.up(Field::Proc);
}
b')' => {
self.method = false;
let _ = levels.down(Field::Proc);
}
// Toggle double quote rules.
b'"' => {
self.quotes ^= true;
}
// Loop through characters until single quote rules are completed.
b'\'' if!self.quotes => {
self.scan_singlequotes(&mut bytes);
self.read += 2;
continue;
}
// Break from the loop once a root-level space is found. | b''=> {
if!self.quotes &&!self.method && levels.are_rooted() {
break;
}
}
_ => (),
}
self.read += 1;
// disable COMM_1 and COMM_2
self.comm = Comm::None;
}
if start == self.read {
None
} else {
Some(&self.data[start..self.read])
}
}
}
#[cfg(test)]
mod tests {
use super::*;
fn compare(input: &str, expected: Vec<&str>) {
let arguments = ArgumentSplitter::new(input).collect::<Vec<&str>>();
for (left, right) in expected.iter().zip(arguments.iter()) {
assert_eq!(left, right);
}
assert_eq!(expected.len(), arguments.len());
}
#[test]
fn methods() {
let input = "echo $join(array, ', ') @split(var, ', ')";
let expected = vec!["echo", "$join(array, ', ')", "@split(var, ', ')"];
compare(input, expected);
}
#[test]
fn processes() {
let input = "echo $(echo one $(echo two)) @[echo one @[echo two]]";
let expected = vec!["echo", "$(echo one $(echo two))", "@[echo one @[echo two]]"];
compare(input, expected);
}
#[test]
fn arrays() {
let input = "echo [ one two @[echo three four] five ] [ six seven ]";
let expected = vec!["echo", "[ one two @[echo three four] five ]", "[ six seven ]"];
compare(input, expected);
}
#[test]
fn quotes() {
let input = "echo 'one two \"three four\"' \"five six 'seven eight'\"";
let expected = vec!["echo", "'one two \"three four\"'", "\"five six 'seven eight'\""];
compare(input, expected);
}
} | random_line_split |
|
arguments.rs | use thiserror::Error;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Comm {
Type1,
Type2,
None,
}
/// A type of paired token
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Field {
/// Processes (ex: `$(..)`)
Proc,
/// Literal array (ex: `[ 1.. 3 ]`)
Array,
/// Brace expansion (ex: `{a,b,c,d}`)
Braces,
}
/// The depth of various paired structures
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Levels {
/// Parentheses
parens: u8,
/// Array literals
array: u8,
/// Braces
braces: u8,
}
/// Error with paired tokens
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Error)]
pub enum LevelsError {
/// Unmatched opening parenthese
#[error("unmatched opening parenthese")]
UnmatchedParen,
/// Unmatched opening bracket
#[error("unmatched opening bracket")]
UnmatchedBracket,
/// Unmatched opening brace
#[error("unmatched opening brace")]
UnmatchedBrace,
/// Extra closing parenthese(s)
#[error("extra closing parenthese(s)")]
ExtraParen,
/// Extra closing bracket(s)
#[error("extra closing bracket(s)")]
ExtraBracket,
/// Extra closing brace(s)
#[error("extra closing brace(s)")]
ExtraBrace,
}
impl Levels {
/// Add a new depth level
pub fn up(&mut self, field: Field) {
let level = match field {
Field::Proc => &mut self.parens,
Field::Array => &mut self.array,
Field::Braces => &mut self.braces,
};
*level += 1;
}
/// Close paired tokens
pub fn down(&mut self, field: Field) -> Result<(), LevelsError> {
let level = match field {
Field::Proc if self.parens > 0 => &mut self.parens,
Field::Array if self.array > 0 => &mut self.array,
Field::Braces if self.braces > 0 => &mut self.braces,
// errors
Field::Proc => return Err(LevelsError::ExtraParen),
Field::Array => return Err(LevelsError::ExtraBracket),
Field::Braces => return Err(LevelsError::ExtraBrace),
};
*level -= 1;
Ok(())
}
/// Check if all parens were matched
pub const fn are_rooted(self) -> bool {
self.parens == 0 && self.array == 0 && self.braces == 0
}
/// Check if all is ok
pub const fn check(self) -> Result<(), LevelsError> {
if self.parens > 0 {
Err(LevelsError::UnmatchedParen)
} else if self.array > 0 {
Err(LevelsError::UnmatchedBracket)
} else if self.braces > 0 {
Err(LevelsError::UnmatchedBrace)
} else {
Ok(())
}
}
}
/// An efficient `Iterator` structure for splitting arguments
#[derive(Debug)]
pub struct ArgumentSplitter<'a> {
data: &'a str,
/// Number of bytes read
read: usize,
comm: Comm,
quotes: bool,
variab: bool,
array: bool,
method: bool,
}
impl<'a> ArgumentSplitter<'a> {
/// Create a new argument splitter based on the provided data
pub const fn new(data: &'a str) -> ArgumentSplitter<'a> {
ArgumentSplitter {
data,
read: 0,
comm: Comm::None,
quotes: false,
variab: false,
array: false,
method: false,
}
}
fn scan_singlequotes<B: Iterator<Item = u8>>(&mut self, bytes: &mut B) {
while let Some(character) = bytes.next() {
match character {
b'\\' => {
self.read += 2;
let _ = bytes.next();
continue;
}
b'\'' => break,
_ => (),
}
self.read += 1;
}
}
}
impl<'a> Iterator for ArgumentSplitter<'a> {
type Item = &'a str;
fn next(&mut self) -> Option<&'a str> {
let data = self.data.as_bytes();
while let Some(&b' ') = data.get(self.read) {
self.read += 1;
}
let start = self.read;
let mut levels = Levels::default();
let mut bytes = data.iter().skip(self.read).copied();
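// Scan until an unquoted, root-level space; open parens, brackets, braces, quotes, or a method call keep the argument going.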
while let Some(character) = bytes.next() {
match character {
// Skip the next byte.
b'\\' => {
self.read += 2;
let _ = bytes.next();
continue;
}
// Disable COMM_1 and enable COMM_2 + ARRAY.
b'@' => {
self.array = true;
self.comm = Comm::Type2;
self.read += 1;
continue;
}
// Disable COMM_2 and enable COMM_1 + VARIAB.
b'$' => {
self.variab = true;
self.comm = Comm::Type1;
self.read += 1;
continue;
}
b'[' => levels.up(Field::Array),
b']' => {
let _ = levels.down(Field::Array);
}
b'{' => levels.up(Field::Braces),
b'}' => {
// TODO: handle errors here
let _ = levels.down(Field::Braces);
}
b'(' => {
// Disable VARIAB + ARRAY and enable METHOD.
// if variab or array are set
if self.array || self.variab {
self.array = false;
self.variab = false;
self.method = true;
}
levels.up(Field::Proc);
}
b')' => {
self.method = false;
let _ = levels.down(Field::Proc);
}
// Toggle double quote rules.
b'"' => {
self.quotes ^= true;
}
// Loop through characters until single quote rules are completed.
b'\'' if!self.quotes => {
self.scan_singlequotes(&mut bytes);
self.read += 2;
continue;
}
// Break from the loop once a root-level space is found.
b' ' => {
if!self.quotes &&!self.method && levels.are_rooted() {
break;
}
}
_ => (),
}
self.read += 1;
// disable COMM_1 and COMM_2
self.comm = Comm::None;
}
if start == self.read {
None
} else {
Some(&self.data[start..self.read])
}
}
}
#[cfg(test)]
mod tests {
use super::*;
fn compare(input: &str, expected: Vec<&str>) {
let arguments = ArgumentSplitter::new(input).collect::<Vec<&str>>();
for (left, right) in expected.iter().zip(arguments.iter()) {
assert_eq!(left, right);
}
assert_eq!(expected.len(), arguments.len());
}
#[test]
fn methods() {
let input = "echo $join(array, ', ') @split(var, ', ')";
let expected = vec!["echo", "$join(array, ', ')", "@split(var, ', ')"];
compare(input, expected);
}
#[test]
fn processes() {
let input = "echo $(echo one $(echo two)) @[echo one @[echo two]]";
let expected = vec!["echo", "$(echo one $(echo two))", "@[echo one @[echo two]]"];
compare(input, expected);
}
#[test]
fn arrays() {
let input = "echo [ one two @[echo three four] five ] [ six seven ]";
let expected = vec!["echo", "[ one two @[echo three four] five ]", "[ six seven ]"];
compare(input, expected);
}
#[test]
fn quotes() |
}
| {
let input = "echo 'one two \"three four\"' \"five six 'seven eight'\"";
let expected = vec!["echo", "'one two \"three four\"'", "\"five six 'seven eight'\""];
compare(input, expected);
} | identifier_body |
lib.rs | //! #rust-hackchat
//! A client library for Hack.chat.
//!
//! This library allows you to make custom clients and bots for Hack.chat using Rust.
//!
//! #Examples
//!
//! ```
//! extern crate hackchat;
//! use hackchat::{ChatClient, ChatEvent};
//!
//! fn main() {
//! let mut conn = ChatClient::new("TestBot", "botDev"); //Connects to the ?botDev channel
//! conn.start_ping_thread(); //Sends ping packets regularly
//!
//! for event in conn.iter() {
//! match event {
//! ChatEvent::Message(nick, message, trip_code) => {
//! println!("<{}> {}", nick, message);
//! },
//! _ => {}
//! }
//! }
//! }
//! ```
extern crate websocket;
#[macro_use] extern crate serde_json;
extern crate rustc_serialize;
use std::thread;
use rustc_serialize::json;
use websocket::{Client, Message, WebSocketStream};
use websocket::message::Type;
use websocket::client::request::Url;
use websocket::sender::Sender;
use websocket::receiver::Receiver;
use websocket::ws::sender::Sender as SenderTrait;
use websocket::ws::receiver::Receiver as ReceiverTrait;
use std::sync::Arc;
use std::sync::Mutex;
/// The main struct responsible for the connection and events.
#[derive(Clone)]
pub struct ChatClient {
nick: String,
channel: String,
sender: Arc<Mutex<Sender<WebSocketStream>>>,
receiver: Arc<Mutex<Receiver<WebSocketStream>>>,
}
impl ChatClient {
/// Creates a new connection to hack.chat.
///
/// ```
/// let mut chat = ChatClient::new("WikiBot", "programming");
/// // Joins ?programming with the nick "WikiBot"
/// ```
pub fn new(nick: &str, channel: &str) -> ChatClient {
let url = Url::parse("wss://hack.chat/chat-ws").unwrap();
let request = Client::connect(url).unwrap();
let response = request.send().unwrap();
let client = response.begin();
let (mut sender, receiver) = client.split();
let join_packet = json!({
"cmd": "join",
"nick": nick,
"channel": channel
});
let message = Message::text(join_packet.to_string());
sender.send_message(&message).unwrap();
return ChatClient {
nick: nick.to_string(),
channel: channel.to_string(),
sender: Arc::new(Mutex::new(sender)),
receiver: Arc::new(Mutex::new(receiver))
};
}
/// Sends a message to the current channel.
///
/// ```
/// let mut chat = ChatClient::new("TestBot", "botDev");
/// chat.send_message("Hello there people".to_string());
/// ```
///
/// ```
/// let mut chat = ChatClient::new("TestBot", "botDev");
///
/// let problem_count = 99;
/// chat.send_message(format!("I got {} problems but Rust ain't one", problem_count));
/// ```
pub fn send_message(&mut self, message: String) {
let chat_packet = json!({
"cmd": "chat",
"text": message
});
let message = Message::text(chat_packet.to_string());
self.sender.lock().unwrap().send_message(&message).unwrap();
}
fn send_ping(&mut self) {
let ping_packet = json!({
"cmd": "ping"
});
let message = Message::text(ping_packet.to_string());
self.sender.lock().unwrap().send_message(&message).unwrap();
}
/// Sends a stats request, which results in an Info event that has the number of connected
/// IPs and channels.
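///
/// ```
/// let mut chat = ChatClient::new("TestBot", "botDev");
/// chat.send_stats_request(); // the reply arrives later as a ChatEvent::Info
/// ```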
pub fn send_stats_request(&mut self) {
let stats_packet = json!({
"cmd": "stats"
});
let message = Message::text(stats_packet.to_string());
self.sender.lock().unwrap().send_message(&message).unwrap();
}
/// Starts the ping thread, which sends regular pings to keep the connection open.
pub fn start_ping_thread(&mut self) {
let mut chat_clone = self.clone();
thread::spawn(move|| {
loop {
thread::sleep_ms(60 * 1000);
chat_clone.send_ping();
}
});
}
/// Returns an iterator of hack.chat events such as messages.
///
/// #Examples
/// ```
/// let mut chat = ChatClient::new("GreetingBot", "botDev");
/// chat.start_ping_thread(); //Start the ping thread so we keep connected
///
/// for event in chat.iter() {
/// match event {
/// ChatEvent::JoinRoom(nick) => {
/// chat.send_message(format!("Welcome to the chat {}!", nick));
/// },
/// ChatEvent::LeaveRoom(nick) => {
/// chat.send_message(format!("Goodbye {}, see you later!", nick));
/// },
/// _ => {}
/// }
/// }
/// ```
pub fn iter(&mut self) -> ChatClient {
return self.clone();
}
}
impl Iterator for ChatClient {
type Item = ChatEvent;
fn next(&mut self) -> Option<ChatEvent> {
loop {
let message: Message = match self.receiver.lock().unwrap().recv_message() {
Ok(message) => message,
Err(e) => {
println!("{}", e);
continue;
}
};
match message.opcode {
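// Text frames carry hack.chat JSON packets; Ping frames are answered with a Pong below.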
Type::Text => {
let data = std::str::from_utf8(&*message.payload).unwrap();
let cmdpacket: serde_json::Value = match serde_json::from_slice(&*message.payload) {
Ok(packet) => packet,
Err(e) => {
println!("{}", e);
continue;
}
};
match cmdpacket.get("cmd").unwrap_or(&serde_json::Value::Null).as_str() {
Some("chat") => {
let decodedpacket: ChatPacket = json::decode(&data).unwrap();
if decodedpacket.nick!= self.nick {
return Some(ChatEvent::Message (
decodedpacket.nick,
decodedpacket.text,
decodedpacket.trip.unwrap_or("".to_string())
));
}else {
continue;
}
},
Some("info") => {
let decodedpacket: InfoWarnPacket = json::decode(&data).unwrap();
return Some(ChatEvent::Info (
decodedpacket.text
));
},
Some("onlineAdd") => {
let decodedpacket: OnlineChangePacket = json::decode(&data).unwrap();
return Some(ChatEvent::JoinRoom (
decodedpacket.nick
));
},
Some("onlineRemove") => {
let decodedpacket: OnlineChangePacket = json::decode(&data).unwrap();
return Some(ChatEvent::LeaveRoom (
decodedpacket.nick
));
},
_ => |
}
},
Type::Ping => {
self.sender.lock().unwrap().send_message(&Message::pong(message.payload)).unwrap();
},
_ => {
return None;
}
};
return None;
}
}
}
/// Various Hack.chat events
pub enum ChatEvent {
/// Raised when there is a new message from the channel
///
/// The format is ChatEvent::Message(nick, text, trip_code)
Message (String, String, String),
/// Raised when someone joins the channel
///
/// The format is ChatEvent::JoinRoom(nick)
JoinRoom (String),
/// Raised when someone leaves the channel
///
/// The format is ChatEvent::LeaveRoom(nick)
LeaveRoom (String),
/// Raised when there is an event from the channel itself.
/// Some examples include:
///
/// * The result of the stats requests
/// * A user being banned.
Info (String)
}
#[derive(RustcEncodable, RustcDecodable)]
struct GenericPacket {
cmd: String
}
#[derive(RustcDecodable)]
struct ChatPacket {
nick: String,
text: String,
trip: Option<String>
}
#[derive(RustcDecodable)]
struct OnlineChangePacket {
nick: String
}
#[derive(RustcDecodable)]
struct InfoWarnPacket {
text: String
}
| {
println!("Unsupported message type");
continue;
} | conditional_block |
lib.rs | //! #rust-hackchat
//! A client library for Hack.chat.
//!
//! This library allows you to make custom clients and bots for Hack.chat using Rust.
//!
//! #Examples
//!
//! ```
//! extern crate hackchat;
//! use hackchat::{ChatClient, ChatEvent};
//!
//! fn main() {
//! let mut conn = ChatClient::new("TestBot", "botDev"); //Connects to the ?botDev channel
//! conn.start_ping_thread(); //Sends ping packets regularly
//!
//! for event in conn.iter() {
//! match event {
//! ChatEvent::Message(nick, message, trip_code) => {
//! println!("<{}> {}", nick, message);
//! },
//! _ => {}
//! }
//! }
//! }
//! ```
extern crate websocket;
#[macro_use] extern crate serde_json;
extern crate rustc_serialize;
use std::thread;
use rustc_serialize::json;
use websocket::{Client, Message, WebSocketStream};
use websocket::message::Type;
use websocket::client::request::Url;
use websocket::sender::Sender;
use websocket::receiver::Receiver;
use websocket::ws::sender::Sender as SenderTrait;
use websocket::ws::receiver::Receiver as ReceiverTrait;
use std::sync::Arc;
use std::sync::Mutex;
/// The main struct responsible for the connection and events.
#[derive(Clone)]
pub struct ChatClient {
nick: String,
channel: String,
sender: Arc<Mutex<Sender<WebSocketStream>>>,
receiver: Arc<Mutex<Receiver<WebSocketStream>>>,
}
impl ChatClient {
/// Creates a new connection to hack.chat.
///
/// ```
/// let mut chat = ChatClient::new("WikiBot", "programming");
/// // Joins ?programming with the nick "WikiBot"
/// ```
pub fn new(nick: &str, channel: &str) -> ChatClient {
let url = Url::parse("wss://hack.chat/chat-ws").unwrap();
let request = Client::connect(url).unwrap();
let response = request.send().unwrap();
let client = response.begin();
let (mut sender, receiver) = client.split();
let join_packet = json!({
"cmd": "join",
"nick": nick,
"channel": channel
});
let message = Message::text(join_packet.to_string());
sender.send_message(&message).unwrap();
return ChatClient {
nick: nick.to_string(),
channel: channel.to_string(),
sender: Arc::new(Mutex::new(sender)),
receiver: Arc::new(Mutex::new(receiver))
};
}
/// Sends a message to the current channel.
///
/// ```
/// let mut chat = ChatClient::new("TestBot", "botDev");
/// chat.send_message("Hello there people".to_string());
/// ```
///
/// ```
/// let mut chat = ChatClient::new("TestBot", "botDev");
///
/// let problem_count = 99;
/// chat.send_message(format!("I got {} problems but Rust ain't one", problem_count));
/// ```
pub fn send_message(&mut self, message: String) {
let chat_packet = json!({
"cmd": "chat",
"text": message
});
let message = Message::text(chat_packet.to_string());
self.sender.lock().unwrap().send_message(&message).unwrap();
}
fn send_ping(&mut self) {
let ping_packet = json!({
"cmd": "ping"
});
let message = Message::text(ping_packet.to_string());
self.sender.lock().unwrap().send_message(&message).unwrap();
}
/// Sends a stats request, which results in an Info event that has the number of connected
/// IPs and channels.
pub fn send_stats_request(&mut self) {
let stats_packet = json!({
"cmd": "stats"
});
let message = Message::text(stats_packet.to_string());
self.sender.lock().unwrap().send_message(&message).unwrap();
}
/// Starts the ping thread, which sends regular pings to keep the connection open.
pub fn start_ping_thread(&mut self) {
let mut chat_clone = self.clone();
thread::spawn(move|| {
loop {
thread::sleep_ms(60 * 1000);
chat_clone.send_ping();
}
});
}
/// Returns an iterator of hack.chat events such as messages.
///
/// #Examples
/// ```
/// let mut chat = ChatClient::new("GreetingBot", "botDev");
/// chat.start_ping_thread(); //Start the ping thread so we keep connected
/// | /// ChatEvent::LeaveRoom(nick) => {
/// chat.send_message(format!("Goodbye {}, see you later!", nick));
/// },
/// _ => {}
/// }
/// }
/// ```
pub fn iter(&mut self) -> ChatClient {
return self.clone();
}
}
impl Iterator for ChatClient {
type Item = ChatEvent;
fn next(&mut self) -> Option<ChatEvent> {
loop {
let message: Message = match self.receiver.lock().unwrap().recv_message() {
Ok(message) => message,
Err(e) => {
println!("{}", e);
continue;
}
};
match message.opcode {
Type::Text => {
let data = std::str::from_utf8(&*message.payload).unwrap();
let cmdpacket: serde_json::Value = match serde_json::from_slice(&*message.payload) {
Ok(packet) => packet,
Err(e) => {
println!("{}", e);
continue;
}
};
match cmdpacket.get("cmd").unwrap_or(&serde_json::Value::Null).as_str() {
Some("chat") => {
let decodedpacket: ChatPacket = json::decode(&data).unwrap();
if decodedpacket.nick!= self.nick {
return Some(ChatEvent::Message (
decodedpacket.nick,
decodedpacket.text,
decodedpacket.trip.unwrap_or("".to_string())
));
}else {
continue;
}
},
Some("info") => {
let decodedpacket: InfoWarnPacket = json::decode(&data).unwrap();
return Some(ChatEvent::Info (
decodedpacket.text
));
},
Some("onlineAdd") => {
let decodedpacket: OnlineChangePacket = json::decode(&data).unwrap();
return Some(ChatEvent::JoinRoom (
decodedpacket.nick
));
},
Some("onlineRemove") => {
let decodedpacket: OnlineChangePacket = json::decode(&data).unwrap();
return Some(ChatEvent::LeaveRoom (
decodedpacket.nick
));
},
_ => {
println!("Unsupported message type");
continue;
}
}
},
Type::Ping => {
self.sender.lock().unwrap().send_message(&Message::pong(message.payload)).unwrap();
},
_ => {
return None;
}
};
return None;
}
}
}
/// Various Hack.chat events
pub enum ChatEvent {
/// Raised when there is a new message from the channel
///
/// The format is ChatEvent::Message(nick, text, trip_code)
Message (String, String, String),
/// Raised when someone joins the channel
///
/// The format is ChatEvent::JoinRoom(nick)
JoinRoom (String),
/// Raised when someone leaves the channel
///
/// The format is ChatEvent::LeaveRoom(nick)
LeaveRoom (String),
/// Raised when there is an event from the channel itself.
/// Some examples include:
///
/// * The result of the stats requests
/// * A user being banned.
Info (String)
}
#[derive(RustcEncodable, RustcDecodable)]
struct GenericPacket {
cmd: String
}
#[derive(RustcDecodable)]
struct ChatPacket {
nick: String,
text: String,
trip: Option<String>
}
#[derive(RustcDecodable)]
struct OnlineChangePacket {
nick: String
}
#[derive(RustcDecodable)]
struct InfoWarnPacket {
text: String
} | /// for event in chat.iter() {
/// match event {
/// ChatEvent::JoinRoom(nick) => {
/// chat.send_message(format!("Welcome to the chat {}!", nick));
/// }, | random_line_split |
lib.rs | //! #rust-hackchat
//! A client library for Hack.chat.
//!
//! This library allows you to make custom clients and bots for Hack.chat using Rust.
//!
//! #Examples
//!
//! ```
//! extern crate hackchat;
//! use hackchat::{ChatClient, ChatEvent};
//!
//! fn main() {
//! let mut conn = ChatClient::new("TestBot", "botDev"); //Connects to the ?botDev channel
//! conn.start_ping_thread(); //Sends ping packets regularly
//!
//! for event in conn.iter() {
//! match event {
//! ChatEvent::Message(nick, message, trip_code) => {
//! println!("<{}> {}", nick, message);
//! },
//! _ => {}
//! }
//! }
//! }
//! ```
extern crate websocket;
#[macro_use] extern crate serde_json;
extern crate rustc_serialize;
use std::thread;
use rustc_serialize::json;
use websocket::{Client, Message, WebSocketStream};
use websocket::message::Type;
use websocket::client::request::Url;
use websocket::sender::Sender;
use websocket::receiver::Receiver;
use websocket::ws::sender::Sender as SenderTrait;
use websocket::ws::receiver::Receiver as ReceiverTrait;
use std::sync::Arc;
use std::sync::Mutex;
/// The main struct responsible for the connection and events.
#[derive(Clone)]
pub struct ChatClient {
nick: String,
channel: String,
sender: Arc<Mutex<Sender<WebSocketStream>>>,
receiver: Arc<Mutex<Receiver<WebSocketStream>>>,
}
impl ChatClient {
/// Creates a new connection to hack.chat.
///
/// ```
/// let mut chat = ChatClient::new("WikiBot", "programming");
/// // Joins ?programming with the nick "WikiBot"
/// ```
pub fn new(nick: &str, channel: &str) -> ChatClient {
let url = Url::parse("wss://hack.chat/chat-ws").unwrap();
let request = Client::connect(url).unwrap();
let response = request.send().unwrap();
let client = response.begin();
let (mut sender, receiver) = client.split();
let join_packet = json!({
"cmd": "join",
"nick": nick,
"channel": channel
});
let message = Message::text(join_packet.to_string());
sender.send_message(&message).unwrap();
return ChatClient {
nick: nick.to_string(),
channel: channel.to_string(),
sender: Arc::new(Mutex::new(sender)),
receiver: Arc::new(Mutex::new(receiver))
};
}
/// Sends a message to the current channel.
///
/// ```
/// let mut chat = ChatClient::new("TestBot", "botDev");
/// chat.send_message("Hello there people".to_string());
/// ```
///
/// ```
/// let mut chat = ChatClient::new("TestBot", "botDev");
///
/// let problem_count = 99;
/// chat.send_message(format!("I got {} problems but Rust ain't one", problem_count));
/// ```
pub fn send_message(&mut self, message: String) {
let chat_packet = json!({
"cmd": "chat",
"text": message
});
let message = Message::text(chat_packet.to_string());
self.sender.lock().unwrap().send_message(&message).unwrap();
}
fn send_ping(&mut self) {
let ping_packet = json!({
"cmd": "ping"
});
let message = Message::text(ping_packet.to_string());
self.sender.lock().unwrap().send_message(&message).unwrap();
}
/// Sends a stats request, which results in an Info event that has the number of connected
/// IPs and channels.
pub fn | (&mut self) {
let stats_packet = json!({
"cmd": "stats"
});
let message = Message::text(stats_packet.to_string());
self.sender.lock().unwrap().send_message(&message).unwrap();
}
/// Starts the ping thread, which sends regular pings to keep the connection open.
pub fn start_ping_thread(&mut self) {
let mut chat_clone = self.clone();
thread::spawn(move|| {
loop {
thread::sleep_ms(60 * 1000);
chat_clone.send_ping();
}
});
}
/// Returns an iterator of hack.chat events such as messages.
///
/// #Examples
/// ```
/// let mut chat = ChatClient::new("GreetingBot", "botDev");
/// chat.start_ping_thread(); //Start the ping thread so we keep connected
///
/// for event in chat.iter() {
/// match event {
/// ChatEvent::JoinRoom(nick) => {
/// chat.send_message(format!("Welcome to the chat {}!", nick));
/// },
/// ChatEvent::LeaveRoom(nick) => {
/// chat.send_message(format!("Goodbye {}, see you later!", nick));
/// },
/// _ => {}
/// }
/// }
/// ```
pub fn iter(&mut self) -> ChatClient {
return self.clone();
}
}
impl Iterator for ChatClient {
type Item = ChatEvent;
fn next(&mut self) -> Option<ChatEvent> {
loop {
let message: Message = match self.receiver.lock().unwrap().recv_message() {
Ok(message) => message,
Err(e) => {
println!("{}", e);
continue;
}
};
match message.opcode {
Type::Text => {
let data = std::str::from_utf8(&*message.payload).unwrap();
let cmdpacket: serde_json::Value = match serde_json::from_slice(&*message.payload) {
Ok(packet) => packet,
Err(e) => {
println!("{}", e);
continue;
}
};
match cmdpacket.get("cmd").unwrap_or(&serde_json::Value::Null).as_str() {
Some("chat") => {
let decodedpacket: ChatPacket = json::decode(&data).unwrap();
if decodedpacket.nick!= self.nick {
return Some(ChatEvent::Message (
decodedpacket.nick,
decodedpacket.text,
decodedpacket.trip.unwrap_or("".to_string())
));
}else {
continue;
}
},
Some("info") => {
let decodedpacket: InfoWarnPacket = json::decode(&data).unwrap();
return Some(ChatEvent::Info (
decodedpacket.text
));
},
Some("onlineAdd") => {
let decodedpacket: OnlineChangePacket = json::decode(&data).unwrap();
return Some(ChatEvent::JoinRoom (
decodedpacket.nick
));
},
Some("onlineRemove") => {
let decodedpacket: OnlineChangePacket = json::decode(&data).unwrap();
return Some(ChatEvent::LeaveRoom (
decodedpacket.nick
));
},
_ => {
println!("Unsupported message type");
continue;
}
}
},
Type::Ping => {
self.sender.lock().unwrap().send_message(&Message::pong(message.payload)).unwrap();
},
_ => {
return None;
}
};
return None;
}
}
}
/// Various Hack.chat events
pub enum ChatEvent {
/// Raised when there is a new message from the channel
///
/// The format is ChatEvent::Message(nick, text, trip_code)
Message (String, String, String),
/// Raised when someone joins the channel
///
/// The format is ChatEvent::JoinRoom(nick)
JoinRoom (String),
/// Raised when someone leaves the channel
///
/// The format is ChatEvent::LeaveRoom(nick)
LeaveRoom (String),
/// Raised when there is an event from the channel itself.
/// Some examples include:
///
/// * The result of the stats requests
/// * A user being banned.
Info (String)
}
#[derive(RustcEncodable, RustcDecodable)]
struct GenericPacket {
cmd: String
}
#[derive(RustcDecodable)]
struct ChatPacket {
nick: String,
text: String,
trip: Option<String>
}
#[derive(RustcDecodable)]
struct OnlineChangePacket {
nick: String
}
#[derive(RustcDecodable)]
struct InfoWarnPacket {
text: String
}
| send_stats_request | identifier_name |
extern-stress.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This creates a bunch of yielding tasks that run concurrently
// while holding onto C stacks
mod rustrt {
pub extern {
pub fn rust_dbg_call(cb: *u8, data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
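// Called back from C via rust_dbg_call: for data > 1 it yields, then recurses through count(), so count(n) == 2^(n-1).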
if data == 1u {
data
} else {
task::yield();
count(data - 1u) + count(data - 1u)
}
}
fn count(n: uint) -> uint {
unsafe {
rustrt::rust_dbg_call(cb, n)
}
}
pub fn main() {
for old_iter::repeat(100u) { | do task::spawn {
assert!(count(5u) == 16u);
};
}
} | random_line_split |
|
extern-stress.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This creates a bunch of yielding tasks that run concurrently
// while holding onto C stacks
mod rustrt {
pub extern {
pub fn rust_dbg_call(cb: *u8, data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1u {
data
} else |
}
fn count(n: uint) -> uint {
unsafe {
rustrt::rust_dbg_call(cb, n)
}
}
pub fn main() {
for old_iter::repeat(100u) {
do task::spawn {
assert!(count(5u) == 16u);
};
}
}
| {
task::yield();
count(data - 1u) + count(data - 1u)
} | conditional_block |
extern-stress.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This creates a bunch of yielding tasks that run concurrently
// while holding onto C stacks
mod rustrt {
pub extern {
pub fn rust_dbg_call(cb: *u8, data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1u {
data
} else {
task::yield();
count(data - 1u) + count(data - 1u)
}
}
fn count(n: uint) -> uint {
unsafe {
rustrt::rust_dbg_call(cb, n)
}
}
pub fn main() | {
for old_iter::repeat(100u) {
do task::spawn {
assert!(count(5u) == 16u);
};
}
} | identifier_body |
|
extern-stress.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This creates a bunch of yielding tasks that run concurrently
// while holding onto C stacks
mod rustrt {
pub extern {
pub fn rust_dbg_call(cb: *u8, data: libc::uintptr_t)
-> libc::uintptr_t;
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
if data == 1u {
data
} else {
task::yield();
count(data - 1u) + count(data - 1u)
}
}
fn | (n: uint) -> uint {
unsafe {
rustrt::rust_dbg_call(cb, n)
}
}
pub fn main() {
for old_iter::repeat(100u) {
do task::spawn {
assert!(count(5u) == 16u);
};
}
}
| count | identifier_name |
main.rs | use std::env;
use std::io::prelude::*;
use std::io::BufReader;
use std::fs::File;
fn | (c: char) -> bool
{
match c {
        ' ' => true,
'x' => true,
'y' => true,
'=' => true,
_ => false,
}
}
const WIDTH: usize = 50;
const HEIGHT: usize = 6;
fn main() {
let args: Vec<String> = env::args().collect();
let f = File::open(&args[1]).expect("Could not open file");
let reader = BufReader::new(f);
let mut lights = [[false; HEIGHT]; WIDTH];
for line in reader.lines() {
let contents = line.unwrap();
let parts: Vec<&str> = contents.split(|c| is_splitpoint(c)).collect();
match parts[0] {
"rect" => {
let width: usize = parts[1].parse().unwrap();
let height: usize = parts[2].parse().unwrap();
for x in 0..width {
for y in 0..height {
lights[x][y] = true;
}
}
},
"rotate" => {
let index: usize = parts[4].parse().expect("Invalid index");
let amount: usize = parts[7].parse().expect("Invalid row");
match parts[1] {
"row" => {
let mut copy = [false; WIDTH];
for x in 0..WIDTH {
copy[x] = lights[x][index];
}
for x in 0..WIDTH {
lights[(x + amount) % WIDTH][index] = copy[x];
}
},
"column" => {
let mut copy = [false; HEIGHT];
for y in 0..HEIGHT {
copy[y] = lights[index][y];
}
for y in 0..HEIGHT {
lights[index][(y + amount) % HEIGHT] = copy[y];
}
}
_ => panic!("{} is not a supported rotation", parts[1]),
}
},
_ => panic!("{} is not a supported operation", parts[0]),
}
}
let mut count = 0;
for y in 0..HEIGHT {
for x in 0..WIDTH {
            let mut c = ' ';
if lights[x][y] {
count += 1;
c = '★';
}
print!("{}", c);
// spacing between letters
if x % 5 == 4 {
print!(" ");
}
}
println!("");
}
println!("{} lights active.", count);
}
| is_splitpoint | identifier_name |
main.rs | use std::env;
use std::io::prelude::*;
use std::io::BufReader;
use std::fs::File;
fn is_splitpoint(c: char) -> bool
{
match c {
        ' ' => true,
'x' => true,
'y' => true,
'=' => true,
_ => false,
}
}
const WIDTH: usize = 50;
const HEIGHT: usize = 6;
fn main() {
let args: Vec<String> = env::args().collect();
let f = File::open(&args[1]).expect("Could not open file");
let reader = BufReader::new(f);
let mut lights = [[false; HEIGHT]; WIDTH];
for line in reader.lines() {
let contents = line.unwrap();
let parts: Vec<&str> = contents.split(|c| is_splitpoint(c)).collect();
match parts[0] {
"rect" => {
let width: usize = parts[1].parse().unwrap();
let height: usize = parts[2].parse().unwrap();
for x in 0..width {
for y in 0..height {
lights[x][y] = true;
}
}
},
"rotate" => {
let index: usize = parts[4].parse().expect("Invalid index");
let amount: usize = parts[7].parse().expect("Invalid row");
match parts[1] {
"row" => {
let mut copy = [false; WIDTH];
for x in 0..WIDTH {
copy[x] = lights[x][index];
}
for x in 0..WIDTH {
lights[(x + amount) % WIDTH][index] = copy[x];
}
},
"column" => {
let mut copy = [false; HEIGHT];
for y in 0..HEIGHT {
copy[y] = lights[index][y];
}
for y in 0..HEIGHT {
lights[index][(y + amount) % HEIGHT] = copy[y];
}
}
_ => panic!("{} is not a supported rotation", parts[1]),
}
},
_ => panic!("{} is not a supported operation", parts[0]),
}
}
let mut count = 0;
for y in 0..HEIGHT {
for x in 0..WIDTH {
            let mut c = ' ';
if lights[x][y] {
count += 1;
c = '★';
}
print!("{}", c);
// spacing between letters
if x % 5 == 4 { | println!("");
}
println!("{} lights active.", count);
} | print!(" ");
}
} | random_line_split |
main.rs | use std::env;
use std::io::prelude::*;
use std::io::BufReader;
use std::fs::File;
fn is_splitpoint(c: char) -> bool
|
const WIDTH: usize = 50;
const HEIGHT: usize = 6;
fn main() {
let args: Vec<String> = env::args().collect();
let f = File::open(&args[1]).expect("Could not open file");
let reader = BufReader::new(f);
let mut lights = [[false; HEIGHT]; WIDTH];
for line in reader.lines() {
let contents = line.unwrap();
let parts: Vec<&str> = contents.split(|c| is_splitpoint(c)).collect();
match parts[0] {
"rect" => {
let width: usize = parts[1].parse().unwrap();
let height: usize = parts[2].parse().unwrap();
for x in 0..width {
for y in 0..height {
lights[x][y] = true;
}
}
},
"rotate" => {
let index: usize = parts[4].parse().expect("Invalid index");
let amount: usize = parts[7].parse().expect("Invalid row");
match parts[1] {
"row" => {
let mut copy = [false; WIDTH];
for x in 0..WIDTH {
copy[x] = lights[x][index];
}
for x in 0..WIDTH {
lights[(x + amount) % WIDTH][index] = copy[x];
}
},
"column" => {
let mut copy = [false; HEIGHT];
for y in 0..HEIGHT {
copy[y] = lights[index][y];
}
for y in 0..HEIGHT {
lights[index][(y + amount) % HEIGHT] = copy[y];
}
}
_ => panic!("{} is not a supported rotation", parts[1]),
}
},
_ => panic!("{} is not a supported operation", parts[0]),
}
}
let mut count = 0;
for y in 0..HEIGHT {
for x in 0..WIDTH {
            let mut c = ' ';
if lights[x][y] {
count += 1;
c = '★';
}
print!("{}", c);
// spacing between letters
if x % 5 == 4 {
print!(" ");
}
}
println!("");
}
println!("{} lights active.", count);
}
| {
match c {
' ' => true,
'x' => true,
'y' => true,
'=' => true,
_ => false,
}
} | identifier_body |
mod.rs | pub mod md5;
pub mod sha1;
pub trait Hasher
{
/**
* Reset the hasher's state.
*/
fn reset(&mut self);
/**
* Provide input data.
*/
fn update(&mut self, data: &[u8]);
/**
* Retrieve digest result. The output must be large enough to contains result
* size (from output_size method).
*/
fn output(&self, out: &mut [u8]);
/**
* Get the output size in bits.
*/
fn output_size_bits(&self) -> uint;
/**
* Get the block size in bits.
*/
fn block_size_bits(&self) -> uint;
/**
* Get the output size in bytes.
*/
fn output_size(&self) -> uint
{
(self.output_size_bits() + 7) / 8
}
/**
* Get the block size in bytes.
*/
fn block_size(&self) -> uint
{
(self.block_size_bits() + 7) / 8
}
fn | (&self) -> Vec<u8>
{
let size = self.output_size();
let mut buf = Vec::from_elem(size, 0u8);
self.output(buf.as_mut_slice());
buf
}
}
pub trait Hashable {
/**
* Feed the value to the hasher passed in parameter.
*/
fn feed<H: Hasher>(&self, h: &mut H);
/**
* Hash the value to ~[u8].
*
* Reset the hasher passed in parameter, because we want
* an empty hasher to get only the value's hash.
*/
fn to_hash<H: Hasher>(&self, h: &mut H) -> Vec<u8>
{
h.reset();
self.feed(h);
h.digest()
}
}
impl<'a> Hashable for &'a [u8] {
fn feed<H: Hasher>(&self, h: &mut H)
{
h.update(*self)
}
}
| digest | identifier_name |
mod.rs | pub mod md5;
pub mod sha1;
pub trait Hasher
{
/**
* Reset the hasher's state.
*/
fn reset(&mut self);
/**
* Provide input data.
*/
fn update(&mut self, data: &[u8]);
/**
* Retrieve digest result. The output must be large enough to contains result
* size (from output_size method).
*/
fn output(&self, out: &mut [u8]);
/**
* Get the output size in bits.
*/
fn output_size_bits(&self) -> uint;
/**
* Get the block size in bits.
*/
fn block_size_bits(&self) -> uint;
/**
* Get the output size in bytes.
*/
fn output_size(&self) -> uint
{
(self.output_size_bits() + 7) / 8
}
/**
* Get the block size in bytes.
*/
fn block_size(&self) -> uint
{
(self.block_size_bits() + 7) / 8
}
fn digest(&self) -> Vec<u8>
{
let size = self.output_size();
let mut buf = Vec::from_elem(size, 0u8);
self.output(buf.as_mut_slice());
buf
}
}
pub trait Hashable {
/**
* Feed the value to the hasher passed in parameter.
*/
fn feed<H: Hasher>(&self, h: &mut H);
/**
* Hash the value to ~[u8].
*
* Reset the hasher passed in parameter, because we want
* an empty hasher to get only the value's hash.
*/
fn to_hash<H: Hasher>(&self, h: &mut H) -> Vec<u8>
{
h.reset();
self.feed(h);
h.digest()
}
}
impl<'a> Hashable for &'a [u8] {
fn feed<H: Hasher>(&self, h: &mut H)
|
}
| {
h.update(*self)
} | identifier_body |
mod.rs | pub mod md5;
pub mod sha1;
pub trait Hasher
{
/**
* Reset the hasher's state.
*/
fn reset(&mut self);
/**
* Provide input data.
*/
fn update(&mut self, data: &[u8]);
/**
* Retrieve digest result. The output must be large enough to contains result
* size (from output_size method).
*/
fn output(&self, out: &mut [u8]);
/**
* Get the output size in bits.
*/
fn output_size_bits(&self) -> uint;
/**
* Get the block size in bits.
*/
fn block_size_bits(&self) -> uint;
/**
* Get the output size in bytes.
*/
fn output_size(&self) -> uint
{ | * Get the block size in bytes.
*/
fn block_size(&self) -> uint
{
(self.block_size_bits() + 7) / 8
}
fn digest(&self) -> Vec<u8>
{
let size = self.output_size();
let mut buf = Vec::from_elem(size, 0u8);
self.output(buf.as_mut_slice());
buf
}
}
pub trait Hashable {
/**
* Feed the value to the hasher passed in parameter.
*/
fn feed<H: Hasher>(&self, h: &mut H);
/**
* Hash the value to ~[u8].
*
* Reset the hasher passed in parameter, because we want
* an empty hasher to get only the value's hash.
*/
fn to_hash<H: Hasher>(&self, h: &mut H) -> Vec<u8>
{
h.reset();
self.feed(h);
h.digest()
}
}
impl<'a> Hashable for &'a [u8] {
fn feed<H: Hasher>(&self, h: &mut H)
{
h.update(*self)
}
} | (self.output_size_bits() + 7) / 8
}
/** | random_line_split |
usage.rs | use crate as utils;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_hir::intravisit;
use rustc_hir::intravisit::{NestedVisitorMap, Visitor};
use rustc_hir::HirIdSet;
use rustc_hir::{Expr, ExprKind, HirId, Path};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::LateContext;
use rustc_middle::hir::map::Map;
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty;
use rustc_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
/// Returns a set of mutated local variable IDs, or `None` if mutations could not be determined.
pub fn mutated_variables<'tcx>(expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) -> Option<HirIdSet> {
let mut delegate = MutVarsDelegate {
used_mutably: HirIdSet::default(),
skip: false,
};
cx.tcx.infer_ctxt().enter(|infcx| {
ExprUseVisitor::new(
&mut delegate,
&infcx,
expr.hir_id.owner,
cx.param_env,
cx.typeck_results(),
)
.walk_expr(expr);
});
if delegate.skip {
return None;
}
Some(delegate.used_mutably)
}
pub fn is_potentially_mutated<'tcx>(variable: &'tcx Path<'_>, expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) -> bool {
if let Res::Local(id) = variable.res {
mutated_variables(expr, cx).map_or(true, |mutated| mutated.contains(&id))
} else {
true
}
}
struct MutVarsDelegate {
used_mutably: HirIdSet,
skip: bool,
}
impl<'tcx> MutVarsDelegate {
#[allow(clippy::similar_names)]
fn update(&mut self, cat: &PlaceWithHirId<'tcx>) {
match cat.place.base {
PlaceBase::Local(id) => {
self.used_mutably.insert(id);
},
PlaceBase::Upvar(_) => {
//FIXME: This causes false negatives. We can't get the `NodeId` from
//`Categorization::Upvar(_)`. So we search for any `Upvar`s in the
//`while`-body, not just the ones in the condition.
self.skip = true;
},
_ => {},
}
}
}
impl<'tcx> Delegate<'tcx> for MutVarsDelegate {
fn consume(&mut self, _: &PlaceWithHirId<'tcx>, _: HirId) {}
fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId, bk: ty::BorrowKind) {
if let ty::BorrowKind::MutBorrow = bk {
self.update(cmt);
}
}
fn mutate(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) {
self.update(cmt);
}
fn fake_read(&mut self, _: rustc_typeck::expr_use_visitor::Place<'tcx>, _: FakeReadCause, _: HirId) {}
}
pub struct ParamBindingIdCollector {
binding_hir_ids: Vec<hir::HirId>,
}
impl<'tcx> ParamBindingIdCollector {
fn collect_binding_hir_ids(body: &'tcx hir::Body<'tcx>) -> Vec<hir::HirId> {
let mut hir_ids: Vec<hir::HirId> = Vec::new();
for param in body.params.iter() {
let mut finder = ParamBindingIdCollector {
binding_hir_ids: Vec::new(),
};
finder.visit_param(param);
for hir_id in &finder.binding_hir_ids {
hir_ids.push(*hir_id);
}
}
hir_ids
}
}
impl<'tcx> intravisit::Visitor<'tcx> for ParamBindingIdCollector {
type Map = Map<'tcx>;
fn visit_pat(&mut self, pat: &'tcx hir::Pat<'tcx>) {
        if let hir::PatKind::Binding(_, hir_id, ..) = pat.kind {
self.binding_hir_ids.push(hir_id);
}
intravisit::walk_pat(self, pat);
}
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
intravisit::NestedVisitorMap::None
}
}
pub struct BindingUsageFinder<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
binding_ids: Vec<hir::HirId>,
usage_found: bool,
}
impl<'a, 'tcx> BindingUsageFinder<'a, 'tcx> {
pub fn are_params_used(cx: &'a LateContext<'tcx>, body: &'tcx hir::Body<'tcx>) -> bool {
let mut finder = BindingUsageFinder {
cx,
binding_ids: ParamBindingIdCollector::collect_binding_hir_ids(body),
usage_found: false,
};
finder.visit_body(body);
finder.usage_found
}
}
impl<'a, 'tcx> intravisit::Visitor<'tcx> for BindingUsageFinder<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
        if !self.usage_found {
intravisit::walk_expr(self, expr);
}
}
fn visit_path(&mut self, path: &'tcx hir::Path<'tcx>, _: hir::HirId) {
if let hir::def::Res::Local(id) = path.res {
if self.binding_ids.contains(&id) {
self.usage_found = true;
}
}
}
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
intravisit::NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
}
}
struct ReturnBreakContinueMacroVisitor {
seen_return_break_continue: bool,
}
impl ReturnBreakContinueMacroVisitor {
fn new() -> ReturnBreakContinueMacroVisitor {
ReturnBreakContinueMacroVisitor {
seen_return_break_continue: false,
}
}
}
impl<'tcx> Visitor<'tcx> for ReturnBreakContinueMacroVisitor {
type Map = Map<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
if self.seen_return_break_continue {
// No need to look farther if we've already seen one of them
return;
}
match &ex.kind {
ExprKind::Ret(..) | ExprKind::Break(..) | ExprKind::Continue(..) => {
self.seen_return_break_continue = true;
},
// Something special could be done here to handle while or for loop
// desugaring, as this will detect a break if there's a while loop
// or a for loop inside the expression.
_ => {
if utils::in_macro(ex.span) {
self.seen_return_break_continue = true;
} else {
rustc_hir::intravisit::walk_expr(self, ex);
}
},
}
}
}
pub fn contains_return_break_continue_macro(expression: &Expr<'_>) -> bool {
let mut recursive_visitor = ReturnBreakContinueMacroVisitor::new();
recursive_visitor.visit_expr(expression);
recursive_visitor.seen_return_break_continue
}
pub struct UsedAfterExprVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
expr: &'tcx Expr<'tcx>,
definition: HirId,
past_expr: bool,
used_after_expr: bool,
}
impl<'a, 'tcx> UsedAfterExprVisitor<'a, 'tcx> {
pub fn is_found(cx: &'a LateContext<'tcx>, expr: &'tcx Expr<'_>) -> bool {
utils::path_to_local(expr).map_or(false, |definition| {
let mut visitor = UsedAfterExprVisitor {
cx,
expr,
definition,
past_expr: false,
used_after_expr: false,
};
utils::get_enclosing_block(cx, definition).map_or(false, |block| {
visitor.visit_block(block);
visitor.used_after_expr
})
})
}
}
impl<'a, 'tcx> intravisit::Visitor<'tcx> for UsedAfterExprVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
}
fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
if self.used_after_expr {
return;
}
if expr.hir_id == self.expr.hir_id {
self.past_expr = true;
} else if self.past_expr && utils::path_to_local_id(expr, self.definition) {
self.used_after_expr = true;
} else |
}
}
| {
intravisit::walk_expr(self, expr);
} | conditional_block |
usage.rs | use crate as utils;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_hir::intravisit;
use rustc_hir::intravisit::{NestedVisitorMap, Visitor};
use rustc_hir::HirIdSet;
use rustc_hir::{Expr, ExprKind, HirId, Path};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::LateContext;
use rustc_middle::hir::map::Map;
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty;
use rustc_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
/// Returns a set of mutated local variable IDs, or `None` if mutations could not be determined.
pub fn mutated_variables<'tcx>(expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) -> Option<HirIdSet> {
let mut delegate = MutVarsDelegate {
used_mutably: HirIdSet::default(),
skip: false,
};
cx.tcx.infer_ctxt().enter(|infcx| {
ExprUseVisitor::new(
&mut delegate,
&infcx,
expr.hir_id.owner,
cx.param_env,
cx.typeck_results(),
)
.walk_expr(expr);
});
if delegate.skip {
return None;
}
Some(delegate.used_mutably)
}
pub fn is_potentially_mutated<'tcx>(variable: &'tcx Path<'_>, expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) -> bool {
if let Res::Local(id) = variable.res {
mutated_variables(expr, cx).map_or(true, |mutated| mutated.contains(&id))
} else {
true
}
}
struct MutVarsDelegate {
used_mutably: HirIdSet,
skip: bool,
}
impl<'tcx> MutVarsDelegate {
#[allow(clippy::similar_names)]
fn update(&mut self, cat: &PlaceWithHirId<'tcx>) {
match cat.place.base {
PlaceBase::Local(id) => {
self.used_mutably.insert(id);
},
PlaceBase::Upvar(_) => {
//FIXME: This causes false negatives. We can't get the `NodeId` from
//`Categorization::Upvar(_)`. So we search for any `Upvar`s in the
//`while`-body, not just the ones in the condition.
self.skip = true;
},
_ => {},
}
}
}
impl<'tcx> Delegate<'tcx> for MutVarsDelegate {
fn consume(&mut self, _: &PlaceWithHirId<'tcx>, _: HirId) {}
fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId, bk: ty::BorrowKind) {
if let ty::BorrowKind::MutBorrow = bk {
self.update(cmt);
}
}
fn mutate(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) {
self.update(cmt);
}
fn fake_read(&mut self, _: rustc_typeck::expr_use_visitor::Place<'tcx>, _: FakeReadCause, _: HirId) {}
}
pub struct ParamBindingIdCollector {
binding_hir_ids: Vec<hir::HirId>,
}
impl<'tcx> ParamBindingIdCollector {
fn collect_binding_hir_ids(body: &'tcx hir::Body<'tcx>) -> Vec<hir::HirId> {
let mut hir_ids: Vec<hir::HirId> = Vec::new();
for param in body.params.iter() { | let mut finder = ParamBindingIdCollector {
binding_hir_ids: Vec::new(),
};
finder.visit_param(param);
for hir_id in &finder.binding_hir_ids {
hir_ids.push(*hir_id);
}
}
hir_ids
}
}
impl<'tcx> intravisit::Visitor<'tcx> for ParamBindingIdCollector {
type Map = Map<'tcx>;
fn visit_pat(&mut self, pat: &'tcx hir::Pat<'tcx>) {
        if let hir::PatKind::Binding(_, hir_id, ..) = pat.kind {
self.binding_hir_ids.push(hir_id);
}
intravisit::walk_pat(self, pat);
}
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
intravisit::NestedVisitorMap::None
}
}
pub struct BindingUsageFinder<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
binding_ids: Vec<hir::HirId>,
usage_found: bool,
}
impl<'a, 'tcx> BindingUsageFinder<'a, 'tcx> {
pub fn are_params_used(cx: &'a LateContext<'tcx>, body: &'tcx hir::Body<'tcx>) -> bool {
let mut finder = BindingUsageFinder {
cx,
binding_ids: ParamBindingIdCollector::collect_binding_hir_ids(body),
usage_found: false,
};
finder.visit_body(body);
finder.usage_found
}
}
impl<'a, 'tcx> intravisit::Visitor<'tcx> for BindingUsageFinder<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
        if !self.usage_found {
intravisit::walk_expr(self, expr);
}
}
fn visit_path(&mut self, path: &'tcx hir::Path<'tcx>, _: hir::HirId) {
if let hir::def::Res::Local(id) = path.res {
if self.binding_ids.contains(&id) {
self.usage_found = true;
}
}
}
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
intravisit::NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
}
}
struct ReturnBreakContinueMacroVisitor {
seen_return_break_continue: bool,
}
impl ReturnBreakContinueMacroVisitor {
fn new() -> ReturnBreakContinueMacroVisitor {
ReturnBreakContinueMacroVisitor {
seen_return_break_continue: false,
}
}
}
impl<'tcx> Visitor<'tcx> for ReturnBreakContinueMacroVisitor {
type Map = Map<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
if self.seen_return_break_continue {
// No need to look farther if we've already seen one of them
return;
}
match &ex.kind {
ExprKind::Ret(..) | ExprKind::Break(..) | ExprKind::Continue(..) => {
self.seen_return_break_continue = true;
},
// Something special could be done here to handle while or for loop
// desugaring, as this will detect a break if there's a while loop
// or a for loop inside the expression.
_ => {
if utils::in_macro(ex.span) {
self.seen_return_break_continue = true;
} else {
rustc_hir::intravisit::walk_expr(self, ex);
}
},
}
}
}
pub fn contains_return_break_continue_macro(expression: &Expr<'_>) -> bool {
let mut recursive_visitor = ReturnBreakContinueMacroVisitor::new();
recursive_visitor.visit_expr(expression);
recursive_visitor.seen_return_break_continue
}
pub struct UsedAfterExprVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
expr: &'tcx Expr<'tcx>,
definition: HirId,
past_expr: bool,
used_after_expr: bool,
}
impl<'a, 'tcx> UsedAfterExprVisitor<'a, 'tcx> {
pub fn is_found(cx: &'a LateContext<'tcx>, expr: &'tcx Expr<'_>) -> bool {
utils::path_to_local(expr).map_or(false, |definition| {
let mut visitor = UsedAfterExprVisitor {
cx,
expr,
definition,
past_expr: false,
used_after_expr: false,
};
utils::get_enclosing_block(cx, definition).map_or(false, |block| {
visitor.visit_block(block);
visitor.used_after_expr
})
})
}
}
impl<'a, 'tcx> intravisit::Visitor<'tcx> for UsedAfterExprVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
}
fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
if self.used_after_expr {
return;
}
if expr.hir_id == self.expr.hir_id {
self.past_expr = true;
} else if self.past_expr && utils::path_to_local_id(expr, self.definition) {
self.used_after_expr = true;
} else {
intravisit::walk_expr(self, expr);
}
}
} | random_line_split |
|
usage.rs | use crate as utils;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_hir::intravisit;
use rustc_hir::intravisit::{NestedVisitorMap, Visitor};
use rustc_hir::HirIdSet;
use rustc_hir::{Expr, ExprKind, HirId, Path};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::LateContext;
use rustc_middle::hir::map::Map;
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty;
use rustc_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
/// Returns a set of mutated local variable IDs, or `None` if mutations could not be determined.
pub fn mutated_variables<'tcx>(expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) -> Option<HirIdSet> {
let mut delegate = MutVarsDelegate {
used_mutably: HirIdSet::default(),
skip: false,
};
cx.tcx.infer_ctxt().enter(|infcx| {
ExprUseVisitor::new(
&mut delegate,
&infcx,
expr.hir_id.owner,
cx.param_env,
cx.typeck_results(),
)
.walk_expr(expr);
});
if delegate.skip {
return None;
}
Some(delegate.used_mutably)
}
pub fn is_potentially_mutated<'tcx>(variable: &'tcx Path<'_>, expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) -> bool {
if let Res::Local(id) = variable.res {
mutated_variables(expr, cx).map_or(true, |mutated| mutated.contains(&id))
} else {
true
}
}
struct MutVarsDelegate {
used_mutably: HirIdSet,
skip: bool,
}
impl<'tcx> MutVarsDelegate {
#[allow(clippy::similar_names)]
fn update(&mut self, cat: &PlaceWithHirId<'tcx>) {
match cat.place.base {
PlaceBase::Local(id) => {
self.used_mutably.insert(id);
},
PlaceBase::Upvar(_) => {
//FIXME: This causes false negatives. We can't get the `NodeId` from
//`Categorization::Upvar(_)`. So we search for any `Upvar`s in the
//`while`-body, not just the ones in the condition.
self.skip = true;
},
_ => {},
}
}
}
impl<'tcx> Delegate<'tcx> for MutVarsDelegate {
fn consume(&mut self, _: &PlaceWithHirId<'tcx>, _: HirId) {}
fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId, bk: ty::BorrowKind) {
if let ty::BorrowKind::MutBorrow = bk {
self.update(cmt);
}
}
fn mutate(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) {
self.update(cmt);
}
fn fake_read(&mut self, _: rustc_typeck::expr_use_visitor::Place<'tcx>, _: FakeReadCause, _: HirId) {}
}
pub struct ParamBindingIdCollector {
binding_hir_ids: Vec<hir::HirId>,
}
impl<'tcx> ParamBindingIdCollector {
fn collect_binding_hir_ids(body: &'tcx hir::Body<'tcx>) -> Vec<hir::HirId> {
let mut hir_ids: Vec<hir::HirId> = Vec::new();
for param in body.params.iter() {
let mut finder = ParamBindingIdCollector {
binding_hir_ids: Vec::new(),
};
finder.visit_param(param);
for hir_id in &finder.binding_hir_ids {
hir_ids.push(*hir_id);
}
}
hir_ids
}
}
impl<'tcx> intravisit::Visitor<'tcx> for ParamBindingIdCollector {
type Map = Map<'tcx>;
fn visit_pat(&mut self, pat: &'tcx hir::Pat<'tcx>) {
        if let hir::PatKind::Binding(_, hir_id, ..) = pat.kind {
self.binding_hir_ids.push(hir_id);
}
intravisit::walk_pat(self, pat);
}
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
intravisit::NestedVisitorMap::None
}
}
pub struct BindingUsageFinder<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
binding_ids: Vec<hir::HirId>,
usage_found: bool,
}
impl<'a, 'tcx> BindingUsageFinder<'a, 'tcx> {
pub fn are_params_used(cx: &'a LateContext<'tcx>, body: &'tcx hir::Body<'tcx>) -> bool {
let mut finder = BindingUsageFinder {
cx,
binding_ids: ParamBindingIdCollector::collect_binding_hir_ids(body),
usage_found: false,
};
finder.visit_body(body);
finder.usage_found
}
}
impl<'a, 'tcx> intravisit::Visitor<'tcx> for BindingUsageFinder<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
        if !self.usage_found {
intravisit::walk_expr(self, expr);
}
}
fn visit_path(&mut self, path: &'tcx hir::Path<'tcx>, _: hir::HirId) {
if let hir::def::Res::Local(id) = path.res {
if self.binding_ids.contains(&id) {
self.usage_found = true;
}
}
}
fn | (&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
intravisit::NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
}
}
struct ReturnBreakContinueMacroVisitor {
seen_return_break_continue: bool,
}
impl ReturnBreakContinueMacroVisitor {
fn new() -> ReturnBreakContinueMacroVisitor {
ReturnBreakContinueMacroVisitor {
seen_return_break_continue: false,
}
}
}
impl<'tcx> Visitor<'tcx> for ReturnBreakContinueMacroVisitor {
type Map = Map<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
if self.seen_return_break_continue {
// No need to look farther if we've already seen one of them
return;
}
match &ex.kind {
ExprKind::Ret(..) | ExprKind::Break(..) | ExprKind::Continue(..) => {
self.seen_return_break_continue = true;
},
// Something special could be done here to handle while or for loop
// desugaring, as this will detect a break if there's a while loop
// or a for loop inside the expression.
_ => {
if utils::in_macro(ex.span) {
self.seen_return_break_continue = true;
} else {
rustc_hir::intravisit::walk_expr(self, ex);
}
},
}
}
}
pub fn contains_return_break_continue_macro(expression: &Expr<'_>) -> bool {
let mut recursive_visitor = ReturnBreakContinueMacroVisitor::new();
recursive_visitor.visit_expr(expression);
recursive_visitor.seen_return_break_continue
}
pub struct UsedAfterExprVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
expr: &'tcx Expr<'tcx>,
definition: HirId,
past_expr: bool,
used_after_expr: bool,
}
impl<'a, 'tcx> UsedAfterExprVisitor<'a, 'tcx> {
pub fn is_found(cx: &'a LateContext<'tcx>, expr: &'tcx Expr<'_>) -> bool {
utils::path_to_local(expr).map_or(false, |definition| {
let mut visitor = UsedAfterExprVisitor {
cx,
expr,
definition,
past_expr: false,
used_after_expr: false,
};
utils::get_enclosing_block(cx, definition).map_or(false, |block| {
visitor.visit_block(block);
visitor.used_after_expr
})
})
}
}
impl<'a, 'tcx> intravisit::Visitor<'tcx> for UsedAfterExprVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
}
fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
if self.used_after_expr {
return;
}
if expr.hir_id == self.expr.hir_id {
self.past_expr = true;
} else if self.past_expr && utils::path_to_local_id(expr, self.definition) {
self.used_after_expr = true;
} else {
intravisit::walk_expr(self, expr);
}
}
}
| nested_visit_map | identifier_name |
usage.rs | use crate as utils;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_hir::intravisit;
use rustc_hir::intravisit::{NestedVisitorMap, Visitor};
use rustc_hir::HirIdSet;
use rustc_hir::{Expr, ExprKind, HirId, Path};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::LateContext;
use rustc_middle::hir::map::Map;
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty;
use rustc_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
/// Returns a set of mutated local variable IDs, or `None` if mutations could not be determined.
pub fn mutated_variables<'tcx>(expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) -> Option<HirIdSet> {
let mut delegate = MutVarsDelegate {
used_mutably: HirIdSet::default(),
skip: false,
};
cx.tcx.infer_ctxt().enter(|infcx| {
ExprUseVisitor::new(
&mut delegate,
&infcx,
expr.hir_id.owner,
cx.param_env,
cx.typeck_results(),
)
.walk_expr(expr);
});
if delegate.skip {
return None;
}
Some(delegate.used_mutably)
}
pub fn is_potentially_mutated<'tcx>(variable: &'tcx Path<'_>, expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) -> bool {
if let Res::Local(id) = variable.res {
mutated_variables(expr, cx).map_or(true, |mutated| mutated.contains(&id))
} else {
true
}
}
struct MutVarsDelegate {
used_mutably: HirIdSet,
skip: bool,
}
impl<'tcx> MutVarsDelegate {
#[allow(clippy::similar_names)]
fn update(&mut self, cat: &PlaceWithHirId<'tcx>) {
match cat.place.base {
PlaceBase::Local(id) => {
self.used_mutably.insert(id);
},
PlaceBase::Upvar(_) => {
//FIXME: This causes false negatives. We can't get the `NodeId` from
//`Categorization::Upvar(_)`. So we search for any `Upvar`s in the
//`while`-body, not just the ones in the condition.
self.skip = true;
},
_ => {},
}
}
}
impl<'tcx> Delegate<'tcx> for MutVarsDelegate {
fn consume(&mut self, _: &PlaceWithHirId<'tcx>, _: HirId) {}
fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId, bk: ty::BorrowKind) {
if let ty::BorrowKind::MutBorrow = bk {
self.update(cmt);
}
}
fn mutate(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId) {
self.update(cmt);
}
fn fake_read(&mut self, _: rustc_typeck::expr_use_visitor::Place<'tcx>, _: FakeReadCause, _: HirId) {}
}
pub struct ParamBindingIdCollector {
binding_hir_ids: Vec<hir::HirId>,
}
impl<'tcx> ParamBindingIdCollector {
fn collect_binding_hir_ids(body: &'tcx hir::Body<'tcx>) -> Vec<hir::HirId> |
}
impl<'tcx> intravisit::Visitor<'tcx> for ParamBindingIdCollector {
type Map = Map<'tcx>;
fn visit_pat(&mut self, pat: &'tcx hir::Pat<'tcx>) {
        if let hir::PatKind::Binding(_, hir_id, ..) = pat.kind {
self.binding_hir_ids.push(hir_id);
}
intravisit::walk_pat(self, pat);
}
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
intravisit::NestedVisitorMap::None
}
}
pub struct BindingUsageFinder<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
binding_ids: Vec<hir::HirId>,
usage_found: bool,
}
impl<'a, 'tcx> BindingUsageFinder<'a, 'tcx> {
pub fn are_params_used(cx: &'a LateContext<'tcx>, body: &'tcx hir::Body<'tcx>) -> bool {
let mut finder = BindingUsageFinder {
cx,
binding_ids: ParamBindingIdCollector::collect_binding_hir_ids(body),
usage_found: false,
};
finder.visit_body(body);
finder.usage_found
}
}
impl<'a, 'tcx> intravisit::Visitor<'tcx> for BindingUsageFinder<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
        if !self.usage_found {
intravisit::walk_expr(self, expr);
}
}
fn visit_path(&mut self, path: &'tcx hir::Path<'tcx>, _: hir::HirId) {
if let hir::def::Res::Local(id) = path.res {
if self.binding_ids.contains(&id) {
self.usage_found = true;
}
}
}
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
intravisit::NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
}
}
struct ReturnBreakContinueMacroVisitor {
seen_return_break_continue: bool,
}
impl ReturnBreakContinueMacroVisitor {
fn new() -> ReturnBreakContinueMacroVisitor {
ReturnBreakContinueMacroVisitor {
seen_return_break_continue: false,
}
}
}
impl<'tcx> Visitor<'tcx> for ReturnBreakContinueMacroVisitor {
type Map = Map<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
if self.seen_return_break_continue {
// No need to look farther if we've already seen one of them
return;
}
match &ex.kind {
ExprKind::Ret(..) | ExprKind::Break(..) | ExprKind::Continue(..) => {
self.seen_return_break_continue = true;
},
// Something special could be done here to handle while or for loop
// desugaring, as this will detect a break if there's a while loop
// or a for loop inside the expression.
_ => {
if utils::in_macro(ex.span) {
self.seen_return_break_continue = true;
} else {
rustc_hir::intravisit::walk_expr(self, ex);
}
},
}
}
}
pub fn contains_return_break_continue_macro(expression: &Expr<'_>) -> bool {
let mut recursive_visitor = ReturnBreakContinueMacroVisitor::new();
recursive_visitor.visit_expr(expression);
recursive_visitor.seen_return_break_continue
}
pub struct UsedAfterExprVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
expr: &'tcx Expr<'tcx>,
definition: HirId,
past_expr: bool,
used_after_expr: bool,
}
impl<'a, 'tcx> UsedAfterExprVisitor<'a, 'tcx> {
pub fn is_found(cx: &'a LateContext<'tcx>, expr: &'tcx Expr<'_>) -> bool {
utils::path_to_local(expr).map_or(false, |definition| {
let mut visitor = UsedAfterExprVisitor {
cx,
expr,
definition,
past_expr: false,
used_after_expr: false,
};
utils::get_enclosing_block(cx, definition).map_or(false, |block| {
visitor.visit_block(block);
visitor.used_after_expr
})
})
}
}
impl<'a, 'tcx> intravisit::Visitor<'tcx> for UsedAfterExprVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
}
fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
if self.used_after_expr {
return;
}
if expr.hir_id == self.expr.hir_id {
self.past_expr = true;
} else if self.past_expr && utils::path_to_local_id(expr, self.definition) {
self.used_after_expr = true;
} else {
intravisit::walk_expr(self, expr);
}
}
}
| {
let mut hir_ids: Vec<hir::HirId> = Vec::new();
for param in body.params.iter() {
let mut finder = ParamBindingIdCollector {
binding_hir_ids: Vec::new(),
};
finder.visit_param(param);
for hir_id in &finder.binding_hir_ids {
hir_ids.push(*hir_id);
}
}
hir_ids
} | identifier_body |
markdown.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::io;
use core;
use getopts;
use testing;
use rustc::session::search_paths::SearchPaths;
use externalfiles::ExternalHtml;
use html::escape::Escape;
use html::markdown;
use html::markdown::{Markdown, MarkdownWithToc, find_testable_code, reset_headers};
use test::Collector;
/// Separate any lines at the start of the file that begin with `%`.
fn extract_leading_metadata<'a>(s: &'a str) -> (Vec<&'a str>, &'a str) {
let mut metadata = Vec::new();
for line in s.lines() {
if line.starts_with("%") {
// remove %<whitespace>
metadata.push(line[1..].trim_left())
} else {
let line_start_byte = s.subslice_offset(line);
return (metadata, &s[line_start_byte..]);
}
}
// if we're here, then all lines were metadata % lines.
(metadata, "")
}
/// Render `input` (e.g. "foo.md") into an HTML file in `output`
/// (e.g. output = "bar" => "bar/foo.html").
pub fn render(input: &str, mut output: Path, matches: &getopts::Matches,
external_html: &ExternalHtml, include_toc: bool) -> int {
let input_p = Path::new(input);
output.push(input_p.filestem().unwrap());
output.set_extension("html");
let mut css = String::new();
for name in matches.opt_strs("markdown-css").iter() {
let s = format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n", name);
css.push_str(s.as_slice())
}
let input_str = load_or_return!(input, 1, 2);
let playground = matches.opt_str("markdown-playground-url");
if playground.is_some() {
markdown::PLAYGROUND_KRATE.with(|s| { *s.borrow_mut() = None; });
}
let playground = playground.unwrap_or("".to_string());
let mut out = match io::File::create(&output) {
Err(e) => {
let _ = writeln!(&mut io::stderr(),
"error opening `{}` for writing: {}",
output.display(), e);
return 4;
}
Ok(f) => f
};
let (metadata, text) = extract_leading_metadata(input_str.as_slice());
if metadata.len() == 0 {
let _ = writeln!(&mut io::stderr(),
"invalid markdown file: expecting initial line with `%...TITLE...`");
return 5;
}
let title = metadata[0].as_slice();
reset_headers();
let rendered = if include_toc {
format!("{}", MarkdownWithToc(text))
} else {
format!("{}", Markdown(text))
};
let err = write!(
&mut out,
r#"<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="generator" content="rustdoc">
<title>{title}</title>
{css}
{in_header}
</head>
<body class="rustdoc">
<!--[if lte IE 8]>
<div class="warning">
This old browser is unsupported and will most likely display funky
things.
</div>
<![endif]-->
{before_content}
<h1 class="title">{title}</h1>
{text}
<script type="text/javascript">
window.playgroundUrl = "{playground}";
</script>
{after_content}
</body>
</html>"#,
title = Escape(title),
css = css,
in_header = external_html.in_header,
before_content = external_html.before_content,
text = rendered,
after_content = external_html.after_content,
playground = playground,
);
match err {
Err(e) => {
let _ = writeln!(&mut io::stderr(),
"error writing to `{}`: {}",
output.display(), e);
6
}
Ok(_) => 0
}
}
/// Run any tests/code examples in the markdown file `input`.
pub fn test(input: &str, libs: SearchPaths, externs: core::Externs,
mut test_args: Vec<String>) -> int | {
let input_str = load_or_return!(input, 1, 2);
let mut collector = Collector::new(input.to_string(), libs, externs, true);
find_testable_code(input_str.as_slice(), &mut collector);
test_args.insert(0, "rustdoctest".to_string());
testing::test_main(test_args.as_slice(), collector.tests);
0
} | identifier_body |
|
markdown.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::io;
use core;
use getopts;
use testing;
use rustc::session::search_paths::SearchPaths;
use externalfiles::ExternalHtml;
use html::escape::Escape;
use html::markdown;
use html::markdown::{Markdown, MarkdownWithToc, find_testable_code, reset_headers};
use test::Collector;
/// Separate any lines at the start of the file that begin with `%`.
fn | <'a>(s: &'a str) -> (Vec<&'a str>, &'a str) {
let mut metadata = Vec::new();
for line in s.lines() {
if line.starts_with("%") {
// remove %<whitespace>
metadata.push(line[1..].trim_left())
} else {
let line_start_byte = s.subslice_offset(line);
return (metadata, &s[line_start_byte..]);
}
}
// if we're here, then all lines were metadata % lines.
(metadata, "")
}
/// Render `input` (e.g. "foo.md") into an HTML file in `output`
/// (e.g. output = "bar" => "bar/foo.html").
pub fn render(input: &str, mut output: Path, matches: &getopts::Matches,
external_html: &ExternalHtml, include_toc: bool) -> int {
let input_p = Path::new(input);
output.push(input_p.filestem().unwrap());
output.set_extension("html");
let mut css = String::new();
for name in matches.opt_strs("markdown-css").iter() {
let s = format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n", name);
css.push_str(s.as_slice())
}
let input_str = load_or_return!(input, 1, 2);
let playground = matches.opt_str("markdown-playground-url");
if playground.is_some() {
markdown::PLAYGROUND_KRATE.with(|s| { *s.borrow_mut() = None; });
}
let playground = playground.unwrap_or("".to_string());
let mut out = match io::File::create(&output) {
Err(e) => {
let _ = writeln!(&mut io::stderr(),
"error opening `{}` for writing: {}",
output.display(), e);
return 4;
}
Ok(f) => f
};
let (metadata, text) = extract_leading_metadata(input_str.as_slice());
if metadata.len() == 0 {
let _ = writeln!(&mut io::stderr(),
"invalid markdown file: expecting initial line with `%...TITLE...`");
return 5;
}
let title = metadata[0].as_slice();
reset_headers();
let rendered = if include_toc {
format!("{}", MarkdownWithToc(text))
} else {
format!("{}", Markdown(text))
};
let err = write!(
&mut out,
r#"<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="generator" content="rustdoc">
<title>{title}</title>
{css}
{in_header}
</head>
<body class="rustdoc">
<!--[if lte IE 8]>
<div class="warning">
This old browser is unsupported and will most likely display funky
things.
</div>
<![endif]-->
{before_content}
<h1 class="title">{title}</h1>
{text}
<script type="text/javascript">
window.playgroundUrl = "{playground}";
</script>
{after_content}
</body>
</html>"#,
title = Escape(title),
css = css,
in_header = external_html.in_header,
before_content = external_html.before_content,
text = rendered,
after_content = external_html.after_content,
playground = playground,
);
match err {
Err(e) => {
let _ = writeln!(&mut io::stderr(),
"error writing to `{}`: {}",
output.display(), e);
6
}
Ok(_) => 0
}
}
/// Run any tests/code examples in the markdown file `input`.
pub fn test(input: &str, libs: SearchPaths, externs: core::Externs,
mut test_args: Vec<String>) -> int {
let input_str = load_or_return!(input, 1, 2);
let mut collector = Collector::new(input.to_string(), libs, externs, true);
find_testable_code(input_str.as_slice(), &mut collector);
test_args.insert(0, "rustdoctest".to_string());
testing::test_main(test_args.as_slice(), collector.tests);
0
}
| extract_leading_metadata | identifier_name |
markdown.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::io;
use core;
use getopts;
use testing;
use rustc::session::search_paths::SearchPaths;
use externalfiles::ExternalHtml;
use html::escape::Escape;
use html::markdown;
use html::markdown::{Markdown, MarkdownWithToc, find_testable_code, reset_headers};
use test::Collector;
/// Separate any lines at the start of the file that begin with `%`.
fn extract_leading_metadata<'a>(s: &'a str) -> (Vec<&'a str>, &'a str) {
let mut metadata = Vec::new();
for line in s.lines() {
if line.starts_with("%") {
// remove %<whitespace>
metadata.push(line[1..].trim_left())
} else {
let line_start_byte = s.subslice_offset(line);
return (metadata, &s[line_start_byte..]);
}
}
// if we're here, then all lines were metadata % lines.
(metadata, "")
}
/// Render `input` (e.g. "foo.md") into an HTML file in `output`
/// (e.g. output = "bar" => "bar/foo.html").
pub fn render(input: &str, mut output: Path, matches: &getopts::Matches,
external_html: &ExternalHtml, include_toc: bool) -> int {
let input_p = Path::new(input);
output.push(input_p.filestem().unwrap());
output.set_extension("html");
let mut css = String::new();
for name in matches.opt_strs("markdown-css").iter() {
let s = format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n", name);
css.push_str(s.as_slice())
}
let input_str = load_or_return!(input, 1, 2);
let playground = matches.opt_str("markdown-playground-url");
if playground.is_some() {
markdown::PLAYGROUND_KRATE.with(|s| { *s.borrow_mut() = None; });
}
let playground = playground.unwrap_or("".to_string());
let mut out = match io::File::create(&output) {
Err(e) => {
let _ = writeln!(&mut io::stderr(),
"error opening `{}` for writing: {}",
output.display(), e);
return 4;
}
Ok(f) => f
};
let (metadata, text) = extract_leading_metadata(input_str.as_slice());
if metadata.len() == 0 {
let _ = writeln!(&mut io::stderr(),
"invalid markdown file: expecting initial line with `%...TITLE...`");
return 5;
}
let title = metadata[0].as_slice();
reset_headers();
let rendered = if include_toc {
format!("{}", MarkdownWithToc(text))
} else {
format!("{}", Markdown(text))
};
let err = write!(
&mut out,
r#"<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8"> | <title>{title}</title>
{css}
{in_header}
</head>
<body class="rustdoc">
<!--[if lte IE 8]>
<div class="warning">
This old browser is unsupported and will most likely display funky
things.
</div>
<![endif]-->
{before_content}
<h1 class="title">{title}</h1>
{text}
<script type="text/javascript">
window.playgroundUrl = "{playground}";
</script>
{after_content}
</body>
</html>"#,
title = Escape(title),
css = css,
in_header = external_html.in_header,
before_content = external_html.before_content,
text = rendered,
after_content = external_html.after_content,
playground = playground,
);
match err {
Err(e) => {
let _ = writeln!(&mut io::stderr(),
"error writing to `{}`: {}",
output.display(), e);
6
}
Ok(_) => 0
}
}
/// Run any tests/code examples in the markdown file `input`.
pub fn test(input: &str, libs: SearchPaths, externs: core::Externs,
mut test_args: Vec<String>) -> int {
let input_str = load_or_return!(input, 1, 2);
let mut collector = Collector::new(input.to_string(), libs, externs, true);
find_testable_code(input_str.as_slice(), &mut collector);
test_args.insert(0, "rustdoctest".to_string());
testing::test_main(test_args.as_slice(), collector.tests);
0
} | <meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="generator" content="rustdoc"> | random_line_split |
lib.rs | #[derive(Debug, PartialEq)]
pub struct Clock {
hours: i16,
minutes: i16,
}
impl Clock {
pub fn new(hours: i16, minutes: i16) -> Self {
Clock { hours, minutes }.normalize()
}
pub fn add_minutes(mut self, n: i16) -> Self {
self.minutes += n;
self.normalize()
}
pub fn to_string(&self) -> String |
fn normalize(mut self) -> Self {
self.hours += self.minutes / 60;
self.minutes %= 60;
self.hours %= 24;
if self.minutes < 0 {
self.hours -= 1;
self.minutes += 60;
}
if self.hours < 0 {
self.hours += 24;
}
self
}
}
| {
format!("{:02}:{:02}", self.hours, self.minutes)
} | identifier_body |
lib.rs | #[derive(Debug, PartialEq)]
pub struct Clock {
hours: i16,
minutes: i16,
}
impl Clock {
pub fn new(hours: i16, minutes: i16) -> Self {
Clock { hours, minutes }.normalize()
}
pub fn add_minutes(mut self, n: i16) -> Self {
self.minutes += n;
self.normalize()
}
pub fn to_string(&self) -> String {
format!("{:02}:{:02}", self.hours, self.minutes)
}
fn normalize(mut self) -> Self {
self.hours += self.minutes / 60;
self.minutes %= 60;
self.hours %= 24;
if self.minutes < 0 {
self.hours -= 1;
self.minutes += 60;
}
if self.hours < 0 |
self
}
}
| {
self.hours += 24;
} | conditional_block |
lib.rs | #[derive(Debug, PartialEq)]
pub struct Clock {
hours: i16,
minutes: i16,
}
impl Clock {
pub fn new(hours: i16, minutes: i16) -> Self {
Clock { hours, minutes }.normalize()
}
pub fn add_minutes(mut self, n: i16) -> Self {
self.minutes += n;
self.normalize()
}
pub fn to_string(&self) -> String {
format!("{:02}:{:02}", self.hours, self.minutes)
}
fn | (mut self) -> Self {
self.hours += self.minutes / 60;
self.minutes %= 60;
self.hours %= 24;
if self.minutes < 0 {
self.hours -= 1;
self.minutes += 60;
}
if self.hours < 0 {
self.hours += 24;
}
self
}
}
| normalize | identifier_name |
lib.rs | #[derive(Debug, PartialEq)]
pub struct Clock { | minutes: i16,
}
impl Clock {
pub fn new(hours: i16, minutes: i16) -> Self {
Clock { hours, minutes }.normalize()
}
pub fn add_minutes(mut self, n: i16) -> Self {
self.minutes += n;
self.normalize()
}
pub fn to_string(&self) -> String {
format!("{:02}:{:02}", self.hours, self.minutes)
}
fn normalize(mut self) -> Self {
self.hours += self.minutes / 60;
self.minutes %= 60;
self.hours %= 24;
if self.minutes < 0 {
self.hours -= 1;
self.minutes += 60;
}
if self.hours < 0 {
self.hours += 24;
}
self
}
} | hours: i16, | random_line_split |
step6_file.rs | use std::rc::Rc;
//use std::collections::HashMap;
use fnv::FnvHashMap;
use itertools::Itertools;
#[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate itertools;
extern crate fnv;
extern crate rustyline;
use rustyline::error::ReadlineError;
use rustyline::Editor;
#[macro_use]
mod types;
use types::{MalVal,MalArgs,MalRet,MalErr,error,format_error};
use types::MalVal::{Nil,Bool,Str,Sym,List,Vector,Hash,Func,MalFunc};
mod reader;
mod printer;
mod env;
use env::{Env,env_new,env_bind,env_get,env_set,env_sets};
#[macro_use]
mod core;
// read
fn read(str: &str) -> MalRet {
reader::read_str(str.to_string())
}
// eval
fn eval_ast(ast: &MalVal, env: &Env) -> MalRet {
match ast {
Sym(_) => Ok(env_get(&env, &ast)?),
List(v,_) => {
let mut lst: MalArgs = vec![];
for a in v.iter() { lst.push(eval(a.clone(), env.clone())?) }
Ok(list!(lst))
},
Vector(v,_) => {
let mut lst: MalArgs = vec![];
for a in v.iter() { lst.push(eval(a.clone(), env.clone())?) }
Ok(vector!(lst))
},
Hash(hm,_) => {
let mut new_hm: FnvHashMap<String,MalVal> = FnvHashMap::default();
for (k,v) in hm.iter() {
new_hm.insert(k.to_string(), eval(v.clone(), env.clone())?);
}
Ok(Hash(Rc::new(new_hm),Rc::new(Nil)))
},
_ => Ok(ast.clone()),
}
}
fn eval(mut ast: MalVal, mut env: Env) -> MalRet {
let ret: MalRet;
'tco: loop {
ret = match ast.clone() {
List(l,_) => {
if l.len() == 0 { return Ok(ast); }
let a0 = &l[0];
match a0 {
Sym(ref a0sym) if a0sym == "def!" => {
env_set(&env, l[1].clone(), eval(l[2].clone(), env.clone())?)
},
Sym(ref a0sym) if a0sym == "let*" => {
env = env_new(Some(env.clone()));
let (a1, a2) = (l[1].clone(), l[2].clone());
match a1 {
List(ref binds,_) | Vector(ref binds,_) => {
for (b, e) in binds.iter().tuples() {
match b {
Sym(_) => {
let _ = env_set(&env, b.clone(),
eval(e.clone(), env.clone())?);
},
_ => {
return error("let* with non-Sym binding");
}
}
}
},
_ => {
return error("let* with non-List bindings");
}
};
ast = a2;
continue 'tco;
},
Sym(ref a0sym) if a0sym == "do" => {
match eval_ast(&list!(l[1..l.len()-1].to_vec()), &env)? {
List(_,_) => {
ast = l.last().unwrap_or(&Nil).clone();
continue 'tco;
},
_ => error("invalid do form"),
}
},
Sym(ref a0sym) if a0sym == "if" => {
let cond = eval(l[1].clone(), env.clone())?;
match cond {
Bool(false) | Nil if l.len() >= 4 => {
ast = l[3].clone();
continue 'tco;
},
Bool(false) | Nil => Ok(Nil),
_ if l.len() >= 3 => {
ast = l[2].clone();
continue 'tco;
},
_ => Ok(Nil)
}
},
Sym(ref a0sym) if a0sym == "fn*" => {
let (a1, a2) = (l[1].clone(), l[2].clone());
Ok(MalFunc{eval: eval, ast: Rc::new(a2), env: env,
params: Rc::new(a1), is_macro: false,
meta: Rc::new(Nil)})
},
Sym(ref a0sym) if a0sym == "eval" => {
ast = eval(l[1].clone(), env.clone())?;
while let Some(ref e) = env.clone().outer {
env = e.clone();
}
continue 'tco;
},
_ => {
match eval_ast(&ast, &env)? {
List(ref el,_) => {
let ref f = el[0].clone();
let args = el[1..].to_vec();
match f {
Func(_,_) => f.apply(args),
MalFunc{ast: mast, env: menv, params,..} => {
let a = &**mast;
let p = &**params;
env = env_bind(Some(menv.clone()), p.clone(), args)?;
ast = a.clone();
continue 'tco;
},
_ => error("attempt to call non-function"),
}
},
_ => {
error("expected a list")
}
}
}
}
},
_ => eval_ast(&ast, &env),
};
break;
} // end 'tco loop
ret
}
// print
fn print(ast: &MalVal) -> String |
fn rep(str: &str, env: &Env) -> Result<String,MalErr> {
let ast = read(str)?;
let exp = eval(ast, env.clone())?;
Ok(print(&exp))
}
fn main() {
let mut args = std::env::args();
let arg1 = args.nth(1);
// `()` can be used when no completer is required
let mut rl = Editor::<()>::new();
if rl.load_history(".mal-history").is_err() {
println!("No previous history.");
}
// core.rs: defined using rust
let repl_env = env_new(None);
for (k, v) in core::ns() {
env_sets(&repl_env, k, v);
}
env_sets(&repl_env, "*ARGV*", list!(args.map(Str).collect()));
// core.mal: defined using the language itself
let _ = rep("(def! not (fn* (a) (if a false true)))", &repl_env);
let _ = rep("(def! load-file (fn* (f) (eval (read-string (str \"(do \" (slurp f) \")\")))))", &repl_env);
// Invoked with arguments
if let Some(f) = arg1 {
match rep(&format!("(load-file \"{}\")",f), &repl_env) {
Ok(_) => std::process::exit(0),
Err(e) => {
println!("Error: {}", format_error(e));
std::process::exit(1);
}
}
}
// main repl loop
loop {
let readline = rl.readline("user> ");
match readline {
Ok(line) => {
rl.add_history_entry(&line);
rl.save_history(".mal-history").unwrap();
if line.len() > 0 {
match rep(&line, &repl_env) {
Ok(out) => println!("{}", out),
Err(e) => println!("Error: {}", format_error(e)),
}
}
},
Err(ReadlineError::Interrupted) => continue,
Err(ReadlineError::Eof) => break,
Err(err) => {
println!("Error: {:?}", err);
break
}
}
}
}
// vim: ts=2:sw=2:expandtab
| {
ast.pr_str(true)
} | identifier_body |
step6_file.rs | use std::rc::Rc;
//use std::collections::HashMap;
use fnv::FnvHashMap;
use itertools::Itertools;
#[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate itertools;
extern crate fnv;
extern crate rustyline;
use rustyline::error::ReadlineError;
use rustyline::Editor;
#[macro_use]
mod types;
use types::{MalVal,MalArgs,MalRet,MalErr,error,format_error};
use types::MalVal::{Nil,Bool,Str,Sym,List,Vector,Hash,Func,MalFunc};
mod reader;
mod printer;
mod env;
use env::{Env,env_new,env_bind,env_get,env_set,env_sets};
#[macro_use]
mod core;
// read
fn read(str: &str) -> MalRet {
reader::read_str(str.to_string())
}
// eval
fn eval_ast(ast: &MalVal, env: &Env) -> MalRet {
match ast {
Sym(_) => Ok(env_get(&env, &ast)?),
List(v,_) => {
let mut lst: MalArgs = vec![];
for a in v.iter() { lst.push(eval(a.clone(), env.clone())?) }
Ok(list!(lst))
},
Vector(v,_) => {
let mut lst: MalArgs = vec![];
for a in v.iter() { lst.push(eval(a.clone(), env.clone())?) }
Ok(vector!(lst))
},
Hash(hm,_) => {
let mut new_hm: FnvHashMap<String,MalVal> = FnvHashMap::default();
for (k,v) in hm.iter() {
new_hm.insert(k.to_string(), eval(v.clone(), env.clone())?);
}
Ok(Hash(Rc::new(new_hm),Rc::new(Nil)))
},
_ => Ok(ast.clone()),
}
}
fn eval(mut ast: MalVal, mut env: Env) -> MalRet {
let ret: MalRet;
'tco: loop {
ret = match ast.clone() {
List(l,_) => {
if l.len() == 0 { return Ok(ast); }
let a0 = &l[0];
match a0 {
Sym(ref a0sym) if a0sym == "def!" => {
env_set(&env, l[1].clone(), eval(l[2].clone(), env.clone())?)
},
Sym(ref a0sym) if a0sym == "let*" => {
env = env_new(Some(env.clone()));
let (a1, a2) = (l[1].clone(), l[2].clone());
match a1 {
List(ref binds,_) | Vector(ref binds,_) => {
for (b, e) in binds.iter().tuples() {
match b {
Sym(_) => {
let _ = env_set(&env, b.clone(),
eval(e.clone(), env.clone())?);
},
_ => {
return error("let* with non-Sym binding");
}
}
}
},
_ => {
return error("let* with non-List bindings");
}
};
ast = a2;
continue 'tco;
},
Sym(ref a0sym) if a0sym == "do" => {
match eval_ast(&list!(l[1..l.len()-1].to_vec()), &env)? {
List(_,_) => {
ast = l.last().unwrap_or(&Nil).clone();
continue 'tco;
},
_ => error("invalid do form"),
}
},
Sym(ref a0sym) if a0sym == "if" => {
let cond = eval(l[1].clone(), env.clone())?;
match cond {
Bool(false) | Nil if l.len() >= 4 => {
ast = l[3].clone();
continue 'tco;
},
Bool(false) | Nil => Ok(Nil),
_ if l.len() >= 3 => | ,
_ => Ok(Nil)
}
},
Sym(ref a0sym) if a0sym == "fn*" => {
let (a1, a2) = (l[1].clone(), l[2].clone());
Ok(MalFunc{eval: eval, ast: Rc::new(a2), env: env,
params: Rc::new(a1), is_macro: false,
meta: Rc::new(Nil)})
},
Sym(ref a0sym) if a0sym == "eval" => {
ast = eval(l[1].clone(), env.clone())?;
while let Some(ref e) = env.clone().outer {
env = e.clone();
}
continue 'tco;
},
_ => {
match eval_ast(&ast, &env)? {
List(ref el,_) => {
let ref f = el[0].clone();
let args = el[1..].to_vec();
match f {
Func(_,_) => f.apply(args),
MalFunc{ast: mast, env: menv, params,..} => {
let a = &**mast;
let p = &**params;
env = env_bind(Some(menv.clone()), p.clone(), args)?;
ast = a.clone();
continue 'tco;
},
_ => error("attempt to call non-function"),
}
},
_ => {
error("expected a list")
}
}
}
}
},
_ => eval_ast(&ast, &env),
};
break;
} // end 'tco loop
ret
}
// print
fn print(ast: &MalVal) -> String {
ast.pr_str(true)
}
fn rep(str: &str, env: &Env) -> Result<String,MalErr> {
let ast = read(str)?;
let exp = eval(ast, env.clone())?;
Ok(print(&exp))
}
fn main() {
let mut args = std::env::args();
let arg1 = args.nth(1);
// `()` can be used when no completer is required
let mut rl = Editor::<()>::new();
if rl.load_history(".mal-history").is_err() {
println!("No previous history.");
}
// core.rs: defined using rust
let repl_env = env_new(None);
for (k, v) in core::ns() {
env_sets(&repl_env, k, v);
}
env_sets(&repl_env, "*ARGV*", list!(args.map(Str).collect()));
// core.mal: defined using the language itself
let _ = rep("(def! not (fn* (a) (if a false true)))", &repl_env);
let _ = rep("(def! load-file (fn* (f) (eval (read-string (str \"(do \" (slurp f) \")\")))))", &repl_env);
// Invoked with arguments
if let Some(f) = arg1 {
match rep(&format!("(load-file \"{}\")",f), &repl_env) {
Ok(_) => std::process::exit(0),
Err(e) => {
println!("Error: {}", format_error(e));
std::process::exit(1);
}
}
}
// main repl loop
loop {
let readline = rl.readline("user> ");
match readline {
Ok(line) => {
rl.add_history_entry(&line);
rl.save_history(".mal-history").unwrap();
if line.len() > 0 {
match rep(&line, &repl_env) {
Ok(out) => println!("{}", out),
Err(e) => println!("Error: {}", format_error(e)),
}
}
},
Err(ReadlineError::Interrupted) => continue,
Err(ReadlineError::Eof) => break,
Err(err) => {
println!("Error: {:?}", err);
break
}
}
}
}
// vim: ts=2:sw=2:expandtab
| {
ast = l[2].clone();
continue 'tco;
} | conditional_block |
step6_file.rs | use std::rc::Rc;
//use std::collections::HashMap;
use fnv::FnvHashMap;
use itertools::Itertools;
#[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate itertools;
extern crate fnv;
extern crate rustyline;
use rustyline::error::ReadlineError;
use rustyline::Editor;
#[macro_use]
mod types;
use types::{MalVal,MalArgs,MalRet,MalErr,error,format_error};
use types::MalVal::{Nil,Bool,Str,Sym,List,Vector,Hash,Func,MalFunc};
mod reader;
mod printer;
mod env;
use env::{Env,env_new,env_bind,env_get,env_set,env_sets};
#[macro_use]
mod core;
// read
fn read(str: &str) -> MalRet {
reader::read_str(str.to_string())
}
// eval
fn eval_ast(ast: &MalVal, env: &Env) -> MalRet {
match ast {
Sym(_) => Ok(env_get(&env, &ast)?),
List(v,_) => {
let mut lst: MalArgs = vec![];
for a in v.iter() { lst.push(eval(a.clone(), env.clone())?) }
Ok(list!(lst))
},
Vector(v,_) => {
let mut lst: MalArgs = vec![];
for a in v.iter() { lst.push(eval(a.clone(), env.clone())?) }
Ok(vector!(lst))
},
Hash(hm,_) => {
let mut new_hm: FnvHashMap<String,MalVal> = FnvHashMap::default();
for (k,v) in hm.iter() {
new_hm.insert(k.to_string(), eval(v.clone(), env.clone())?);
}
Ok(Hash(Rc::new(new_hm),Rc::new(Nil)))
},
_ => Ok(ast.clone()),
}
}
fn eval(mut ast: MalVal, mut env: Env) -> MalRet {
let ret: MalRet;
'tco: loop {
ret = match ast.clone() {
List(l,_) => {
if l.len() == 0 { return Ok(ast); }
let a0 = &l[0];
match a0 {
Sym(ref a0sym) if a0sym == "def!" => {
env_set(&env, l[1].clone(), eval(l[2].clone(), env.clone())?)
},
Sym(ref a0sym) if a0sym == "let*" => {
env = env_new(Some(env.clone()));
let (a1, a2) = (l[1].clone(), l[2].clone());
match a1 {
List(ref binds,_) | Vector(ref binds,_) => {
for (b, e) in binds.iter().tuples() {
match b {
Sym(_) => {
let _ = env_set(&env, b.clone(),
eval(e.clone(), env.clone())?);
},
_ => {
return error("let* with non-Sym binding");
}
}
}
},
_ => {
return error("let* with non-List bindings");
}
};
ast = a2;
continue 'tco;
},
Sym(ref a0sym) if a0sym == "do" => {
match eval_ast(&list!(l[1..l.len()-1].to_vec()), &env)? {
List(_,_) => {
ast = l.last().unwrap_or(&Nil).clone();
continue 'tco;
},
_ => error("invalid do form"),
}
},
Sym(ref a0sym) if a0sym == "if" => {
let cond = eval(l[1].clone(), env.clone())?;
match cond {
Bool(false) | Nil if l.len() >= 4 => {
ast = l[3].clone();
continue 'tco;
},
Bool(false) | Nil => Ok(Nil),
_ if l.len() >= 3 => {
ast = l[2].clone();
continue 'tco;
},
_ => Ok(Nil)
}
},
Sym(ref a0sym) if a0sym == "fn*" => {
let (a1, a2) = (l[1].clone(), l[2].clone());
Ok(MalFunc{eval: eval, ast: Rc::new(a2), env: env,
params: Rc::new(a1), is_macro: false,
meta: Rc::new(Nil)})
},
Sym(ref a0sym) if a0sym == "eval" => {
ast = eval(l[1].clone(), env.clone())?;
while let Some(ref e) = env.clone().outer {
env = e.clone();
}
continue 'tco;
},
_ => {
match eval_ast(&ast, &env)? {
List(ref el,_) => {
let ref f = el[0].clone();
let args = el[1..].to_vec();
match f {
Func(_,_) => f.apply(args),
MalFunc{ast: mast, env: menv, params,..} => {
let a = &**mast;
let p = &**params;
env = env_bind(Some(menv.clone()), p.clone(), args)?;
ast = a.clone();
continue 'tco;
},
_ => error("attempt to call non-function"), | }
}
}
},
_ => eval_ast(&ast, &env),
};
break;
} // end 'tco loop
ret
}
// print
fn print(ast: &MalVal) -> String {
ast.pr_str(true)
}
fn rep(str: &str, env: &Env) -> Result<String,MalErr> {
let ast = read(str)?;
let exp = eval(ast, env.clone())?;
Ok(print(&exp))
}
fn main() {
let mut args = std::env::args();
let arg1 = args.nth(1);
// `()` can be used when no completer is required
let mut rl = Editor::<()>::new();
if rl.load_history(".mal-history").is_err() {
println!("No previous history.");
}
// core.rs: defined using rust
let repl_env = env_new(None);
for (k, v) in core::ns() {
env_sets(&repl_env, k, v);
}
env_sets(&repl_env, "*ARGV*", list!(args.map(Str).collect()));
// core.mal: defined using the language itself
let _ = rep("(def! not (fn* (a) (if a false true)))", &repl_env);
let _ = rep("(def! load-file (fn* (f) (eval (read-string (str \"(do \" (slurp f) \")\")))))", &repl_env);
// Invoked with arguments
if let Some(f) = arg1 {
match rep(&format!("(load-file \"{}\")",f), &repl_env) {
Ok(_) => std::process::exit(0),
Err(e) => {
println!("Error: {}", format_error(e));
std::process::exit(1);
}
}
}
// main repl loop
loop {
let readline = rl.readline("user> ");
match readline {
Ok(line) => {
rl.add_history_entry(&line);
rl.save_history(".mal-history").unwrap();
if line.len() > 0 {
match rep(&line, &repl_env) {
Ok(out) => println!("{}", out),
Err(e) => println!("Error: {}", format_error(e)),
}
}
},
Err(ReadlineError::Interrupted) => continue,
Err(ReadlineError::Eof) => break,
Err(err) => {
println!("Error: {:?}", err);
break
}
}
}
}
// vim: ts=2:sw=2:expandtab | }
},
_ => {
error("expected a list")
} | random_line_split |
step6_file.rs | use std::rc::Rc;
//use std::collections::HashMap;
use fnv::FnvHashMap;
use itertools::Itertools;
#[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate itertools;
extern crate fnv;
extern crate rustyline;
use rustyline::error::ReadlineError;
use rustyline::Editor;
#[macro_use]
mod types;
use types::{MalVal,MalArgs,MalRet,MalErr,error,format_error};
use types::MalVal::{Nil,Bool,Str,Sym,List,Vector,Hash,Func,MalFunc};
mod reader;
mod printer;
mod env;
use env::{Env,env_new,env_bind,env_get,env_set,env_sets};
#[macro_use]
mod core;
// read
fn read(str: &str) -> MalRet {
reader::read_str(str.to_string())
}
// eval
fn eval_ast(ast: &MalVal, env: &Env) -> MalRet {
match ast {
Sym(_) => Ok(env_get(&env, &ast)?),
List(v,_) => {
let mut lst: MalArgs = vec![];
for a in v.iter() { lst.push(eval(a.clone(), env.clone())?) }
Ok(list!(lst))
},
Vector(v,_) => {
let mut lst: MalArgs = vec![];
for a in v.iter() { lst.push(eval(a.clone(), env.clone())?) }
Ok(vector!(lst))
},
Hash(hm,_) => {
let mut new_hm: FnvHashMap<String,MalVal> = FnvHashMap::default();
for (k,v) in hm.iter() {
new_hm.insert(k.to_string(), eval(v.clone(), env.clone())?);
}
Ok(Hash(Rc::new(new_hm),Rc::new(Nil)))
},
_ => Ok(ast.clone()),
}
}
fn eval(mut ast: MalVal, mut env: Env) -> MalRet {
let ret: MalRet;
'tco: loop {
ret = match ast.clone() {
List(l,_) => {
if l.len() == 0 { return Ok(ast); }
let a0 = &l[0];
match a0 {
Sym(ref a0sym) if a0sym == "def!" => {
env_set(&env, l[1].clone(), eval(l[2].clone(), env.clone())?)
},
Sym(ref a0sym) if a0sym == "let*" => {
env = env_new(Some(env.clone()));
let (a1, a2) = (l[1].clone(), l[2].clone());
match a1 {
List(ref binds,_) | Vector(ref binds,_) => {
for (b, e) in binds.iter().tuples() {
match b {
Sym(_) => {
let _ = env_set(&env, b.clone(),
eval(e.clone(), env.clone())?);
},
_ => {
return error("let* with non-Sym binding");
}
}
}
},
_ => {
return error("let* with non-List bindings");
}
};
ast = a2;
continue 'tco;
},
Sym(ref a0sym) if a0sym == "do" => {
match eval_ast(&list!(l[1..l.len()-1].to_vec()), &env)? {
List(_,_) => {
ast = l.last().unwrap_or(&Nil).clone();
continue 'tco;
},
_ => error("invalid do form"),
}
},
Sym(ref a0sym) if a0sym == "if" => {
let cond = eval(l[1].clone(), env.clone())?;
match cond {
Bool(false) | Nil if l.len() >= 4 => {
ast = l[3].clone();
continue 'tco;
},
Bool(false) | Nil => Ok(Nil),
_ if l.len() >= 3 => {
ast = l[2].clone();
continue 'tco;
},
_ => Ok(Nil)
}
},
Sym(ref a0sym) if a0sym == "fn*" => {
let (a1, a2) = (l[1].clone(), l[2].clone());
Ok(MalFunc{eval: eval, ast: Rc::new(a2), env: env,
params: Rc::new(a1), is_macro: false,
meta: Rc::new(Nil)})
},
Sym(ref a0sym) if a0sym == "eval" => {
ast = eval(l[1].clone(), env.clone())?;
while let Some(ref e) = env.clone().outer {
env = e.clone();
}
continue 'tco;
},
_ => {
match eval_ast(&ast, &env)? {
List(ref el,_) => {
let ref f = el[0].clone();
let args = el[1..].to_vec();
match f {
Func(_,_) => f.apply(args),
MalFunc{ast: mast, env: menv, params,..} => {
let a = &**mast;
let p = &**params;
env = env_bind(Some(menv.clone()), p.clone(), args)?;
ast = a.clone();
continue 'tco;
},
_ => error("attempt to call non-function"),
}
},
_ => {
error("expected a list")
}
}
}
}
},
_ => eval_ast(&ast, &env),
};
break;
} // end 'tco loop
ret
}
// print
fn print(ast: &MalVal) -> String {
ast.pr_str(true)
}
fn | (str: &str, env: &Env) -> Result<String,MalErr> {
let ast = read(str)?;
let exp = eval(ast, env.clone())?;
Ok(print(&exp))
}
fn main() {
let mut args = std::env::args();
let arg1 = args.nth(1);
// `()` can be used when no completer is required
let mut rl = Editor::<()>::new();
if rl.load_history(".mal-history").is_err() {
println!("No previous history.");
}
// core.rs: defined using rust
let repl_env = env_new(None);
for (k, v) in core::ns() {
env_sets(&repl_env, k, v);
}
env_sets(&repl_env, "*ARGV*", list!(args.map(Str).collect()));
// core.mal: defined using the language itself
let _ = rep("(def! not (fn* (a) (if a false true)))", &repl_env);
let _ = rep("(def! load-file (fn* (f) (eval (read-string (str \"(do \" (slurp f) \")\")))))", &repl_env);
// Invoked with arguments
if let Some(f) = arg1 {
match rep(&format!("(load-file \"{}\")",f), &repl_env) {
Ok(_) => std::process::exit(0),
Err(e) => {
println!("Error: {}", format_error(e));
std::process::exit(1);
}
}
}
// main repl loop
loop {
let readline = rl.readline("user> ");
match readline {
Ok(line) => {
rl.add_history_entry(&line);
rl.save_history(".mal-history").unwrap();
if line.len() > 0 {
match rep(&line, &repl_env) {
Ok(out) => println!("{}", out),
Err(e) => println!("Error: {}", format_error(e)),
}
}
},
Err(ReadlineError::Interrupted) => continue,
Err(ReadlineError::Eof) => break,
Err(err) => {
println!("Error: {:?}", err);
break
}
}
}
}
// vim: ts=2:sw=2:expandtab
| rep | identifier_name |
mod.rs | //! Provides functions for maintaining database schema.
//!
//! A database migration always provides procedures to update the schema, as well as to revert
//! itself. Diesel's migrations are versioned, and run in order. Diesel also takes care of tracking
//! which migrations have already been run automatically. Your migrations don't need to be
//! idempotent, as Diesel will ensure no migration is run twice unless it has been reverted.
//!
//! Migrations should be placed in a `/migrations` directory at the root of your project (the same
//! directory as `Cargo.toml`). When any of these functions are run, Diesel will search for the
//! migrations directory in the current directory and its parents, stopping when it finds the
//! directory containing `Cargo.toml`.
//! | //! ## Example
//!
//! ```text
//! # Directory Structure
//! - 20151219180527_create_users
//! - up.sql
//! - down.sql
//! - 20160107082941_create_posts
//! - up.sql
//! - down.sql
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/up.sql
//! CREATE TABLE users (
//! id SERIAL PRIMARY KEY,
//! name VARCHAR NOT NULL,
//! hair_color VARCHAR
//! );
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/down.sql
//! DROP TABLE users;
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/up.sql
//! CREATE TABLE posts (
//! id SERIAL PRIMARY KEY,
//! user_id INTEGER NOT NULL,
//! title VARCHAR NOT NULL,
//! body TEXT
//! );
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/down.sql
//! DROP TABLE posts;
//! ```
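//!
//! ## Running migrations from code
//!
//! A minimal sketch of driving these functions from application code. Establishing the
//! `Connection` is elided here and assumed to happen elsewhere in your application:
//!
//! ```rust,ignore
//! // Runs every migration under `migrations/` that has not been recorded yet.
//! run_pending_migrations(&connection).unwrap();
//!
//! // Rolls back the most recently run migration and returns its version.
//! let reverted = revert_latest_migration(&connection).unwrap();
//! println!("reverted migration {}", reverted);
//! ```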
mod migration;
mod migration_error;
mod schema;
pub use self::migration_error::*;
use ::expression::expression_methods::*;
use ::query_dsl::*;
use self::migration::*;
use self::migration_error::MigrationError::*;
use self::schema::NewMigration;
use self::schema::__diesel_schema_migrations::dsl::*;
use {Connection, QueryResult};
use std::collections::HashSet;
use std::env;
use std::path::{PathBuf, Path};
/// Runs all migrations that have not yet been run. This function will print all progress to
/// stdout. This function will return an `Err` if some error occurs reading the migrations, or if
/// any migration fails to run. Each migration is run in its own transaction, so some migrations
/// may be committed, even if a later migration fails to run.
///
/// It should be noted that this runs all migrations that have not already been run, regardless of
/// whether or not their version is later than the latest run migration. This is generally not a
/// problem, and eases the more common case of two developers generating independent migrations on
/// a branch. Whoever created the second one will eventually need to run the first when both
/// branches are merged.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn run_pending_migrations(conn: &Connection) -> Result<(), RunMigrationsError> {
try!(create_schema_migrations_table_if_needed(conn));
let already_run = try!(previously_run_migration_versions(conn));
let migrations_dir = try!(find_migrations_directory());
let all_migrations = try!(migrations_in_directory(&migrations_dir));
let pending_migrations = all_migrations.into_iter().filter(|m| {
!already_run.contains(m.version())
});
run_migrations(conn, pending_migrations)
}
/// Reverts the last migration that was run. Returns the version that was reverted. Returns an
/// `Err` if no migrations have ever been run.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn revert_latest_migration(conn: &Connection) -> Result<String, RunMigrationsError> {
try!(create_schema_migrations_table_if_needed(conn));
let latest_migration_version = try!(latest_run_migration_version(conn));
revert_migration_with_version(conn, &latest_migration_version)
.map(|_| latest_migration_version)
}
#[doc(hidden)]
pub fn revert_migration_with_version(conn: &Connection, ver: &str) -> Result<(), RunMigrationsError> {
migration_with_version(ver)
.map_err(|e| e.into())
.and_then(|m| revert_migration(conn, m))
}
#[doc(hidden)]
pub fn run_migration_with_version(conn: &Connection, ver: &str) -> Result<(), RunMigrationsError> {
migration_with_version(ver)
.map_err(|e| e.into())
.and_then(|m| run_migration(conn, m))
}
fn migration_with_version(ver: &str) -> Result<Box<Migration>, MigrationError> {
let migrations_dir = try!(find_migrations_directory());
let all_migrations = try!(migrations_in_directory(&migrations_dir));
let migration = all_migrations.into_iter().find(|m| {
m.version() == ver
});
match migration {
Some(m) => Ok(m),
None => Err(UnknownMigrationVersion(ver.into())),
}
}
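// Lazily creates the `__diesel_schema_migrations` tracking table. Every migration that
// runs successfully records its version here, which is how already-run migrations are
// skipped on later invocations.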
fn create_schema_migrations_table_if_needed(conn: &Connection) -> QueryResult<usize> {
conn.silence_notices(|| {
conn.execute("CREATE TABLE IF NOT EXISTS __diesel_schema_migrations (
version VARCHAR PRIMARY KEY NOT NULL,
run_on TIMESTAMP NOT NULL DEFAULT NOW()
)")
})
}
fn previously_run_migration_versions(conn: &Connection) -> QueryResult<HashSet<String>> {
__diesel_schema_migrations.select(version)
.load(&conn)
.map(|r| r.collect())
}
fn latest_run_migration_version(conn: &Connection) -> QueryResult<String> {
use ::expression::dsl::max;
__diesel_schema_migrations.select(max(version))
.first(&conn)
}
fn migrations_in_directory(path: &Path) -> Result<Vec<Box<Migration>>, MigrationError> {
use self::migration::migration_from;
try!(path.read_dir())
.filter_map(|entry| {
let entry = match entry {
Ok(e) => e,
Err(e) => return Some(Err(e.into())),
};
            if !entry.file_name().to_string_lossy().starts_with(".") {
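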
Some(migration_from(entry.path()))
} else {
None
}
}).collect()
}
fn run_migrations<T>(conn: &Connection, migrations: T)
-> Result<(), RunMigrationsError> where
T: Iterator<Item=Box<Migration>>
{
for migration in migrations {
try!(run_migration(conn, migration));
}
Ok(())
}
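// Runs a single migration and records its version in the tracking table inside the same
// transaction, so a migration that fails is never marked as having run.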
fn run_migration(conn: &Connection, migration: Box<Migration>)
-> Result<(), RunMigrationsError>
{
conn.transaction(|| {
println!("Running migration {}", migration.version());
try!(migration.run(conn));
try!(::insert(&NewMigration(migration.version()))
.into(__diesel_schema_migrations)
.execute(&conn));
Ok(())
}).map_err(|e| e.into())
}
fn revert_migration(conn: &Connection, migration: Box<Migration>)
-> Result<(), RunMigrationsError>
{
try!(conn.transaction(|| {
println!("Rolling back migration {}", migration.version());
try!(migration.revert(conn));
let target = __diesel_schema_migrations.filter(version.eq(migration.version()));
try!(::delete(target).execute(&conn));
Ok(())
}));
Ok(())
}
/// Returns the directory containing migrations. Will look for
/// $PWD/migrations. If it is not found, it will search the parents of the
/// current directory, until it reaches the root directory. Returns
/// `MigrationError::MigrationDirectoryNotFound` if no directory is found.
pub fn find_migrations_directory() -> Result<PathBuf, MigrationError> {
search_for_migrations_directory(&try!(env::current_dir()))
}
fn search_for_migrations_directory(path: &Path) -> Result<PathBuf, MigrationError> {
let migration_path = path.join("migrations");
if migration_path.is_dir() {
Ok(migration_path)
} else {
path.parent().map(search_for_migrations_directory)
.unwrap_or(Err(MigrationError::MigrationDirectoryNotFound))
}
}
#[cfg(test)]
mod tests {
extern crate tempdir;
use super::*;
use super::search_for_migrations_directory;
use self::tempdir::TempDir;
use std::fs;
#[test]
fn migration_directory_not_found_if_no_migration_dir_exists() {
let dir = TempDir::new("diesel").unwrap();
assert_eq!(Err(MigrationError::MigrationDirectoryNotFound),
search_for_migrations_directory(dir.path()));
}
#[test]
fn migration_directory_defaults_to_pwd_slash_migrations() {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
fs::create_dir(&migrations_path).unwrap();
assert_eq!(Ok(migrations_path), search_for_migrations_directory(&temp_path));
}
#[test]
fn migration_directory_checks_parents() {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
let child_path = temp_path.join("child");
fs::create_dir(&child_path).unwrap();
fs::create_dir(&migrations_path).unwrap();
assert_eq!(Ok(migrations_path), search_for_migrations_directory(&child_path));
}
} | //! Individual migrations should be a folder containing exactly two files, `up.sql` and `down.sql`.
//! `up.sql` will be used to run the migration, while `down.sql` will be used for reverting it. The
//! folder itself should have the structure `{version}_{migration_name}`. It is recommended that
//! you use the timestamp of creation for the version.
//! | random_line_split |
mod.rs | //! Provides functions for maintaining database schema.
//!
//! A database migration always provides procedures to update the schema, as well as to revert
//! itself. Diesel's migrations are versioned, and run in order. Diesel also takes care of tracking
//! which migrations have already been run automatically. Your migrations don't need to be
//! idempotent, as Diesel will ensure no migration is run twice unless it has been reverted.
//!
//! Migrations should be placed in a `/migrations` directory at the root of your project (the same
//! directory as `Cargo.toml`). When any of these functions are run, Diesel will search for the
//! migrations directory in the current directory and its parents, stopping when it finds the
//! directory containing `Cargo.toml`.
//!
//! Individual migrations should be a folder containing exactly two files, `up.sql` and `down.sql`.
//! `up.sql` will be used to run the migration, while `down.sql` will be used for reverting it. The
//! folder itself should have the structure `{version}_{migration_name}`. It is recommended that
//! you use the timestamp of creation for the version.
//!
//! ## Example
//!
//! ```text
//! # Directory Structure
//! - 20151219180527_create_users
//! - up.sql
//! - down.sql
//! - 20160107082941_create_posts
//! - up.sql
//! - down.sql
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/up.sql
//! CREATE TABLE users (
//! id SERIAL PRIMARY KEY,
//! name VARCHAR NOT NULL,
//! hair_color VARCHAR
//! );
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/down.sql
//! DROP TABLE users;
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/up.sql
//! CREATE TABLE posts (
//! id SERIAL PRIMARY KEY,
//! user_id INTEGER NOT NULL,
//! title VARCHAR NOT NULL,
//! body TEXT
//! );
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/down.sql
//! DROP TABLE posts;
//! ```
mod migration;
mod migration_error;
mod schema;
pub use self::migration_error::*;
use ::expression::expression_methods::*;
use ::query_dsl::*;
use self::migration::*;
use self::migration_error::MigrationError::*;
use self::schema::NewMigration;
use self::schema::__diesel_schema_migrations::dsl::*;
use {Connection, QueryResult};
use std::collections::HashSet;
use std::env;
use std::path::{PathBuf, Path};
/// Runs all migrations that have not yet been run. This function will print all progress to
/// stdout. This function will return an `Err` if some error occurs reading the migrations, or if
/// any migration fails to run. Each migration is run in its own transaction, so some migrations
/// may be committed, even if a later migration fails to run.
///
/// It should be noted that this runs all migrations that have not already been run, regardless of
/// whether or not their version is later than the latest run migration. This is generally not a
/// problem, and eases the more common case of two developers generating independent migrations on
/// a branch. Whoever created the second one will eventually need to run the first when both
/// branches are merged.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn run_pending_migrations(conn: &Connection) -> Result<(), RunMigrationsError> {
try!(create_schema_migrations_table_if_needed(conn));
let already_run = try!(previously_run_migration_versions(conn));
let migrations_dir = try!(find_migrations_directory());
let all_migrations = try!(migrations_in_directory(&migrations_dir));
let pending_migrations = all_migrations.into_iter().filter(|m| {
!already_run.contains(m.version())
});
run_migrations(conn, pending_migrations)
}
/// Reverts the last migration that was run. Returns the version that was reverted. Returns an
/// `Err` if no migrations have ever been run.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn revert_latest_migration(conn: &Connection) -> Result<String, RunMigrationsError> {
try!(create_schema_migrations_table_if_needed(conn));
let latest_migration_version = try!(latest_run_migration_version(conn));
revert_migration_with_version(conn, &latest_migration_version)
.map(|_| latest_migration_version)
}
#[doc(hidden)]
pub fn revert_migration_with_version(conn: &Connection, ver: &str) -> Result<(), RunMigrationsError> {
migration_with_version(ver)
.map_err(|e| e.into())
.and_then(|m| revert_migration(conn, m))
}
#[doc(hidden)]
pub fn run_migration_with_version(conn: &Connection, ver: &str) -> Result<(), RunMigrationsError> {
migration_with_version(ver)
.map_err(|e| e.into())
.and_then(|m| run_migration(conn, m))
}
fn migration_with_version(ver: &str) -> Result<Box<Migration>, MigrationError> {
let migrations_dir = try!(find_migrations_directory());
let all_migrations = try!(migrations_in_directory(&migrations_dir));
let migration = all_migrations.into_iter().find(|m| {
m.version() == ver
});
match migration {
Some(m) => Ok(m),
None => Err(UnknownMigrationVersion(ver.into())),
}
}
fn create_schema_migrations_table_if_needed(conn: &Connection) -> QueryResult<usize> {
conn.silence_notices(|| {
conn.execute("CREATE TABLE IF NOT EXISTS __diesel_schema_migrations (
version VARCHAR PRIMARY KEY NOT NULL,
run_on TIMESTAMP NOT NULL DEFAULT NOW()
)")
})
}
fn previously_run_migration_versions(conn: &Connection) -> QueryResult<HashSet<String>> {
__diesel_schema_migrations.select(version)
.load(&conn)
.map(|r| r.collect())
}
fn latest_run_migration_version(conn: &Connection) -> QueryResult<String> {
use ::expression::dsl::max;
__diesel_schema_migrations.select(max(version))
.first(&conn)
}
fn migrations_in_directory(path: &Path) -> Result<Vec<Box<Migration>>, MigrationError> {
use self::migration::migration_from;
try!(path.read_dir())
.filter_map(|entry| {
let entry = match entry {
Ok(e) => e,
Err(e) => return Some(Err(e.into())),
};
            if !entry.file_name().to_string_lossy().starts_with(".") {
Some(migration_from(entry.path()))
} else {
None
}
}).collect()
}
fn run_migrations<T>(conn: &Connection, migrations: T)
-> Result<(), RunMigrationsError> where
T: Iterator<Item=Box<Migration>>
{
for migration in migrations {
try!(run_migration(conn, migration));
}
Ok(())
}
fn run_migration(conn: &Connection, migration: Box<Migration>)
-> Result<(), RunMigrationsError>
{
conn.transaction(|| {
println!("Running migration {}", migration.version());
try!(migration.run(conn));
try!(::insert(&NewMigration(migration.version()))
.into(__diesel_schema_migrations)
.execute(&conn));
Ok(())
}).map_err(|e| e.into())
}
fn revert_migration(conn: &Connection, migration: Box<Migration>)
-> Result<(), RunMigrationsError>
{
try!(conn.transaction(|| {
println!("Rolling back migration {}", migration.version());
try!(migration.revert(conn));
let target = __diesel_schema_migrations.filter(version.eq(migration.version()));
try!(::delete(target).execute(&conn));
Ok(())
}));
Ok(())
}
/// Returns the directory containing migrations. Will look for
/// $PWD/migrations. If it is not found, it will search the parents of the
/// current directory, until it reaches the root directory. Returns
/// `MigrationError::MigrationDirectoryNotFound` if no directory is found.
pub fn find_migrations_directory() -> Result<PathBuf, MigrationError> {
search_for_migrations_directory(&try!(env::current_dir()))
}
fn search_for_migrations_directory(path: &Path) -> Result<PathBuf, MigrationError> {
let migration_path = path.join("migrations");
if migration_path.is_dir() {
Ok(migration_path)
} else {
path.parent().map(search_for_migrations_directory)
.unwrap_or(Err(MigrationError::MigrationDirectoryNotFound))
}
}
#[cfg(test)]
mod tests {
extern crate tempdir;
use super::*;
use super::search_for_migrations_directory;
use self::tempdir::TempDir;
use std::fs;
#[test]
fn migration_directory_not_found_if_no_migration_dir_exists() {
let dir = TempDir::new("diesel").unwrap();
assert_eq!(Err(MigrationError::MigrationDirectoryNotFound),
search_for_migrations_directory(dir.path()));
}
#[test]
fn migration_directory_defaults_to_pwd_slash_migrations() {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
fs::create_dir(&migrations_path).unwrap();
assert_eq!(Ok(migrations_path), search_for_migrations_directory(&temp_path));
}
#[test]
fn | () {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
let child_path = temp_path.join("child");
fs::create_dir(&child_path).unwrap();
fs::create_dir(&migrations_path).unwrap();
assert_eq!(Ok(migrations_path), search_for_migrations_directory(&child_path));
}
}
| migration_directory_checks_parents | identifier_name |
mod.rs | //! Provides functions for maintaining database schema.
//!
//! A database migration always provides procedures to update the schema, as well as to revert
//! itself. Diesel's migrations are versioned, and run in order. Diesel also takes care of tracking
//! which migrations have already been run automatically. Your migrations don't need to be
//! idempotent, as Diesel will ensure no migration is run twice unless it has been reverted.
//!
//! Migrations should be placed in a `/migrations` directory at the root of your project (the same
//! directory as `Cargo.toml`). When any of these functions are run, Diesel will search for the
//! migrations directory in the current directory and its parents, stopping when it finds the
//! directory containing `Cargo.toml`.
//!
//! Individual migrations should be a folder containing exactly two files, `up.sql` and `down.sql`.
//! `up.sql` will be used to run the migration, while `down.sql` will be used for reverting it. The
//! folder itself should have the structure `{version}_{migration_name}`. It is recommended that
//! you use the timestamp of creation for the version.
//!
//! ## Example
//!
//! ```text
//! # Directory Structure
//! - 20151219180527_create_users
//! - up.sql
//! - down.sql
//! - 20160107082941_create_posts
//! - up.sql
//! - down.sql
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/up.sql
//! CREATE TABLE users (
//! id SERIAL PRIMARY KEY,
//! name VARCHAR NOT NULL,
//! hair_color VARCHAR
//! );
//! ```
//!
//! ```sql
//! -- 20151219180527_create_users/down.sql
//! DROP TABLE users;
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/up.sql
//! CREATE TABLE posts (
//! id SERIAL PRIMARY KEY,
//! user_id INTEGER NOT NULL,
//! title VARCHAR NOT NULL,
//! body TEXT
//! );
//! ```
//!
//! ```sql
//! -- 20160107082941_create_posts/down.sql
//! DROP TABLE posts;
//! ```
mod migration;
mod migration_error;
mod schema;
pub use self::migration_error::*;
use ::expression::expression_methods::*;
use ::query_dsl::*;
use self::migration::*;
use self::migration_error::MigrationError::*;
use self::schema::NewMigration;
use self::schema::__diesel_schema_migrations::dsl::*;
use {Connection, QueryResult};
use std::collections::HashSet;
use std::env;
use std::path::{PathBuf, Path};
/// Runs all migrations that have not yet been run. This function will print all progress to
/// stdout. This function will return an `Err` if some error occurs reading the migrations, or if
/// any migration fails to run. Each migration is run in its own transaction, so some migrations
/// may be committed, even if a later migration fails to run.
///
/// It should be noted that this runs all migrations that have not already been run, regardless of
/// whether or not their version is later than the latest run migration. This is generally not a
/// problem, and eases the more common case of two developers generating independent migrations on
/// a branch. Whoever created the second one will eventually need to run the first when both
/// branches are merged.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn run_pending_migrations(conn: &Connection) -> Result<(), RunMigrationsError> {
try!(create_schema_migrations_table_if_needed(conn));
let already_run = try!(previously_run_migration_versions(conn));
let migrations_dir = try!(find_migrations_directory());
let all_migrations = try!(migrations_in_directory(&migrations_dir));
let pending_migrations = all_migrations.into_iter().filter(|m| {
!already_run.contains(m.version())
});
run_migrations(conn, pending_migrations)
}
/// Reverts the last migration that was run. Returns the version that was reverted. Returns an
/// `Err` if no migrations have ever been run.
///
/// See the [module level documentation](index.html) for information on how migrations should be
/// structured, and where Diesel will look for them by default.
pub fn revert_latest_migration(conn: &Connection) -> Result<String, RunMigrationsError> |
#[doc(hidden)]
pub fn revert_migration_with_version(conn: &Connection, ver: &str) -> Result<(), RunMigrationsError> {
migration_with_version(ver)
.map_err(|e| e.into())
.and_then(|m| revert_migration(conn, m))
}
#[doc(hidden)]
pub fn run_migration_with_version(conn: &Connection, ver: &str) -> Result<(), RunMigrationsError> {
migration_with_version(ver)
.map_err(|e| e.into())
.and_then(|m| run_migration(conn, m))
}
fn migration_with_version(ver: &str) -> Result<Box<Migration>, MigrationError> {
let migrations_dir = try!(find_migrations_directory());
let all_migrations = try!(migrations_in_directory(&migrations_dir));
let migration = all_migrations.into_iter().find(|m| {
m.version() == ver
});
match migration {
Some(m) => Ok(m),
None => Err(UnknownMigrationVersion(ver.into())),
}
}
fn create_schema_migrations_table_if_needed(conn: &Connection) -> QueryResult<usize> {
conn.silence_notices(|| {
conn.execute("CREATE TABLE IF NOT EXISTS __diesel_schema_migrations (
version VARCHAR PRIMARY KEY NOT NULL,
run_on TIMESTAMP NOT NULL DEFAULT NOW()
)")
})
}
fn previously_run_migration_versions(conn: &Connection) -> QueryResult<HashSet<String>> {
__diesel_schema_migrations.select(version)
.load(&conn)
.map(|r| r.collect())
}
fn latest_run_migration_version(conn: &Connection) -> QueryResult<String> {
use ::expression::dsl::max;
__diesel_schema_migrations.select(max(version))
.first(&conn)
}
fn migrations_in_directory(path: &Path) -> Result<Vec<Box<Migration>>, MigrationError> {
use self::migration::migration_from;
try!(path.read_dir())
.filter_map(|entry| {
let entry = match entry {
Ok(e) => e,
Err(e) => return Some(Err(e.into())),
};
            if !entry.file_name().to_string_lossy().starts_with(".") {
Some(migration_from(entry.path()))
} else {
None
}
}).collect()
}
fn run_migrations<T>(conn: &Connection, migrations: T)
-> Result<(), RunMigrationsError> where
T: Iterator<Item=Box<Migration>>
{
for migration in migrations {
try!(run_migration(conn, migration));
}
Ok(())
}
fn run_migration(conn: &Connection, migration: Box<Migration>)
-> Result<(), RunMigrationsError>
{
conn.transaction(|| {
println!("Running migration {}", migration.version());
try!(migration.run(conn));
try!(::insert(&NewMigration(migration.version()))
.into(__diesel_schema_migrations)
.execute(&conn));
Ok(())
}).map_err(|e| e.into())
}
fn revert_migration(conn: &Connection, migration: Box<Migration>)
-> Result<(), RunMigrationsError>
{
try!(conn.transaction(|| {
println!("Rolling back migration {}", migration.version());
try!(migration.revert(conn));
let target = __diesel_schema_migrations.filter(version.eq(migration.version()));
try!(::delete(target).execute(&conn));
Ok(())
}));
Ok(())
}
/// Returns the directory containing migrations. Will look for
/// $PWD/migrations. If it is not found, it will search the parents of the
/// current directory, until it reaches the root directory. Returns
/// `MigrationError::MigrationDirectoryNotFound` if no directory is found.
pub fn find_migrations_directory() -> Result<PathBuf, MigrationError> {
search_for_migrations_directory(&try!(env::current_dir()))
}
fn search_for_migrations_directory(path: &Path) -> Result<PathBuf, MigrationError> {
let migration_path = path.join("migrations");
if migration_path.is_dir() {
Ok(migration_path)
} else {
path.parent().map(search_for_migrations_directory)
.unwrap_or(Err(MigrationError::MigrationDirectoryNotFound))
}
}
#[cfg(test)]
mod tests {
extern crate tempdir;
use super::*;
use super::search_for_migrations_directory;
use self::tempdir::TempDir;
use std::fs;
#[test]
fn migration_directory_not_found_if_no_migration_dir_exists() {
let dir = TempDir::new("diesel").unwrap();
assert_eq!(Err(MigrationError::MigrationDirectoryNotFound),
search_for_migrations_directory(dir.path()));
}
#[test]
fn migration_directory_defaults_to_pwd_slash_migrations() {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
fs::create_dir(&migrations_path).unwrap();
assert_eq!(Ok(migrations_path), search_for_migrations_directory(&temp_path));
}
#[test]
fn migration_directory_checks_parents() {
let dir = TempDir::new("diesel").unwrap();
let temp_path = dir.path().canonicalize().unwrap();
let migrations_path = temp_path.join("migrations");
let child_path = temp_path.join("child");
fs::create_dir(&child_path).unwrap();
fs::create_dir(&migrations_path).unwrap();
assert_eq!(Ok(migrations_path), search_for_migrations_directory(&child_path));
}
}
| {
try!(create_schema_migrations_table_if_needed(conn));
let latest_migration_version = try!(latest_run_migration_version(conn));
revert_migration_with_version(conn, &latest_migration_version)
.map(|_| latest_migration_version)
} | identifier_body |
life.rs | #![cfg(test)]
use traits::Cell;
use traits::Coord;
use traits::Engine;
use traits::Consumer;
use traits::Grid;
use engine::Sequential;
use grid::twodim::TwodimGrid;
use grid::nhood::MooreNhood;
use grid::EmptyState;
use utils::find_cell;
/// Implementation of Conway's Game of Life.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
enum LifeState {
Dead,
Alive,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
struct Life {
state: LifeState,
coord: (i32, i32),
}
impl Life {
fn alive_count<'a, I>(&self, neighbors: I) -> u32
where I: Iterator<Item = Option<&'a Self>>,
{
neighbors.filter(|n| match *n {
Some(n) => n.state == LifeState::Alive,
None => false,
})
.count() as u32
}
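    // Conway's rules (B3/S23): a dead cell becomes alive with exactly three live
    // neighbors, a live cell survives with two or three, and any other neighbor
    // count produces a dead cell.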
#[inline]
fn | (&self, alive: u32) -> LifeState {
match alive {
3 => LifeState::Alive,
_ => LifeState::Dead,
}
}
#[inline]
fn alive_state(&self, alive: u32) -> LifeState {
match alive {
2 | 3 => LifeState::Alive,
_ => LifeState::Dead,
}
}
}
impl Cell for Life {
type Coord = (i32, i32);
type State = EmptyState;
fn update<'a, I>(&'a mut self, old: &'a Self, neighbors: I, _: &Self::State)
where I: Iterator<Item = Option<&'a Self>>,
{
let alive_count = self.alive_count(neighbors);
let new_state = match old.state {
LifeState::Alive => self.alive_state(alive_count),
LifeState::Dead => self.dead_state(alive_count),
};
self.state = new_state;
}
fn with_coord<C: Coord>(coord: C) -> Self {
Life {
state: LifeState::Dead,
coord: (coord.x(), coord.y()),
}
}
fn coord(&self) -> &Self::Coord { &self.coord }
fn set_coord<C: Coord>(&mut self, coord: &C) { self.coord = (coord.x(), coord.y()); }
}
fn pretty_print<G: Grid<Cell = Life>>(grid: &G) {
let dims = grid.size();
println!("");
for y in 0..dims.y() {
for x in 0..dims.x() {
let cell = find_cell(grid.cells(), x, y);
match cell.state {
LifeState::Dead => print!("D |"),
LifeState::Alive => print!("A |"),
};
}
println!("");
}
println!("");
}
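// Consumer that asserts the classic "blinker" oscillator: each generation the three live
// cells flip between a vertical column (all x == 1) and a horizontal row (all y == 1).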
struct SpinnerTestConsumer {
vertical: bool,
}
impl SpinnerTestConsumer {
pub fn new() -> Self { SpinnerTestConsumer { vertical: true } }
}
impl Consumer for SpinnerTestConsumer {
type Cell = Life;
fn consume<G: Grid<Cell = Self::Cell>>(&mut self, grid: &mut G) {
assert_eq!(grid.cells().len(), 9);
pretty_print(grid);
let dead_cells_count = grid.cells()
.iter()
.filter(|c| c.state == LifeState::Dead)
.count();
assert_eq!(dead_cells_count, 6);
let alive_cells = || {
grid.cells()
.iter()
.filter(|c| c.state == LifeState::Alive)
};
assert_eq!(alive_cells().count(), 3);
        self.vertical = !self.vertical;
// if spinner is in vertical state
if alive_cells().all(|c| c.coord.x() == 1) {
assert!(self.vertical);
}
// if spinner is in horizontal state
if alive_cells().all(|c| c.coord.y() == 1) {
assert!(!self.vertical);
}
}
}
#[test]
fn test_game_of_life() {
let nhood = MooreNhood::new();
let mut grid: TwodimGrid<Life, _, _> = TwodimGrid::new(3, 3, nhood, EmptyState, 1);
// Should be in default state
let default_state = LifeState::Dead;
assert!(grid.cells()
.iter()
.all(|c| c.state == default_state));
// Vertical spinner
// D | A | D
// D | A | D
// D | A | D
let cells = vec![Life {
state: LifeState::Alive,
coord: (1, 0),
},
Life {
state: LifeState::Alive,
coord: (1, 1),
},
Life {
state: LifeState::Alive,
coord: (1, 2),
}];
grid.set_cells(cells);
pretty_print(&grid);
let consumer = SpinnerTestConsumer::new();
let mut engine = Sequential::new(grid, consumer);
engine.run_times(2);
}
| dead_state | identifier_name |
life.rs | #![cfg(test)]
use traits::Cell;
use traits::Coord;
use traits::Engine;
use traits::Consumer;
use traits::Grid;
use engine::Sequential;
use grid::twodim::TwodimGrid;
use grid::nhood::MooreNhood;
use grid::EmptyState;
use utils::find_cell;
/// Implementation of Conway's Game of Life.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
enum LifeState {
Dead,
Alive,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
struct Life {
state: LifeState,
coord: (i32, i32),
}
impl Life {
fn alive_count<'a, I>(&self, neighbors: I) -> u32
where I: Iterator<Item = Option<&'a Self>>,
{
neighbors.filter(|n| match *n {
Some(n) => n.state == LifeState::Alive,
None => false,
})
.count() as u32
}
#[inline]
fn dead_state(&self, alive: u32) -> LifeState {
match alive {
3 => LifeState::Alive,
_ => LifeState::Dead,
}
}
#[inline]
fn alive_state(&self, alive: u32) -> LifeState {
match alive {
2 | 3 => LifeState::Alive,
_ => LifeState::Dead,
}
}
}
impl Cell for Life {
type Coord = (i32, i32);
type State = EmptyState;
fn update<'a, I>(&'a mut self, old: &'a Self, neighbors: I, _: &Self::State)
where I: Iterator<Item = Option<&'a Self>>,
{
let alive_count = self.alive_count(neighbors);
let new_state = match old.state {
LifeState::Alive => self.alive_state(alive_count),
LifeState::Dead => self.dead_state(alive_count),
};
self.state = new_state;
}
fn with_coord<C: Coord>(coord: C) -> Self {
Life {
state: LifeState::Dead,
coord: (coord.x(), coord.y()),
}
}
fn coord(&self) -> &Self::Coord { &self.coord }
fn set_coord<C: Coord>(&mut self, coord: &C) { self.coord = (coord.x(), coord.y()); }
}
fn pretty_print<G: Grid<Cell = Life>>(grid: &G) {
let dims = grid.size();
println!("");
for y in 0..dims.y() {
for x in 0..dims.x() {
let cell = find_cell(grid.cells(), x, y);
match cell.state {
LifeState::Dead => print!("D |"),
LifeState::Alive => print!("A |"),
};
}
println!("");
}
println!("");
}
struct SpinnerTestConsumer {
vertical: bool,
}
impl SpinnerTestConsumer {
pub fn new() -> Self { SpinnerTestConsumer { vertical: true } }
}
impl Consumer for SpinnerTestConsumer {
type Cell = Life;
fn consume<G: Grid<Cell = Self::Cell>>(&mut self, grid: &mut G) {
assert_eq!(grid.cells().len(), 9);
pretty_print(grid);
let dead_cells_count = grid.cells()
.iter()
.filter(|c| c.state == LifeState::Dead)
.count();
assert_eq!(dead_cells_count, 6);
let alive_cells = || {
grid.cells()
.iter()
.filter(|c| c.state == LifeState::Alive)
};
assert_eq!(alive_cells().count(), 3);
        self.vertical = !self.vertical;
// if spinner is in vertical state
if alive_cells().all(|c| c.coord.x() == 1) {
assert!(self.vertical);
}
// if spinner is in horizontal state
if alive_cells().all(|c| c.coord.y() == 1) {
assert!(!self.vertical);
}
}
}
#[test]
fn test_game_of_life() {
let nhood = MooreNhood::new(); |
// Should be in default state
let default_state = LifeState::Dead;
assert!(grid.cells()
.iter()
.all(|c| c.state == default_state));
// Vertical spinner
// D | A | D
// D | A | D
// D | A | D
let cells = vec![Life {
state: LifeState::Alive,
coord: (1, 0),
},
Life {
state: LifeState::Alive,
coord: (1, 1),
},
Life {
state: LifeState::Alive,
coord: (1, 2),
}];
grid.set_cells(cells);
pretty_print(&grid);
let consumer = SpinnerTestConsumer::new();
let mut engine = Sequential::new(grid, consumer);
engine.run_times(2);
} | let mut grid: TwodimGrid<Life, _, _> = TwodimGrid::new(3, 3, nhood, EmptyState, 1); | random_line_split |
lib.rs | Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
fn derive_unaligned(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_unaligned_struct(&s, strct),
Data::Enum(enm) => derive_unaligned_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
// Unwrap a Result<_, Vec<Error>>, converting any Err value into a TokenStream
// and returning it.
macro_rules! try_or_print {
($e:expr) => {
match $e {
Ok(x) => x,
Err(errors) => return print_all_errors(errors),
}
};
}
// A struct is FromBytes if:
// - all fields are FromBytes
fn derive_from_bytes_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
impl_block(s.ast(), strct, "FromBytes", true, false)
}
// An enum is FromBytes if:
// - Every possible bit pattern must be valid, which means that every bit
// pattern must correspond to a different enum variant. Thus, for an enum
// whose layout takes up N bytes, there must be 2^N variants.
// - Since we must know N, only representations which guarantee the layout's
// size are allowed. These are repr(uN) and repr(iN) (repr(C) implies an
// implementation-defined size). size and isize technically guarantee the
// layout's size, but would require us to know how large those are on the
// target platform. This isn't terribly difficult - we could emit a const
// expression that could call core::mem::size_of in order to determine the
// size and check against the number of enum variants, but a) this would be
// platform-specific and, b) even on Rust's smallest bit width platform (32),
// this would require ~4 billion enum variants, which obviously isn't a thing.
fn derive_from_bytes_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
    if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement FromBytes")
.to_compile_error();
}
let reprs = try_or_print!(ENUM_FROM_BYTES_CFG.validate_reprs(s.ast()));
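    // An N-bit discriminant has 2^N possible bit patterns, so every pattern corresponds
    // to a valid value only if the enum defines exactly 2^N variants.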
let variants_required = match reprs.as_slice() {
[EnumRepr::U8] | [EnumRepr::I8] => 1usize << 8,
[EnumRepr::U16] | [EnumRepr::I16] => 1usize << 16,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
    if enm.variants.len() != variants_required {
return Error::new_spanned(
s.ast(),
format!(
"FromBytes only supported on {} enum with {} variants",
reprs[0], variants_required
),
)
.to_compile_error();
}
impl_block(s.ast(), enm, "FromBytes", true, false)
}
#[rustfmt::skip]
const ENUM_FROM_BYTES_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
allowed_combinations_message: r#"FromBytes requires repr of "u8", "u16", "i8", or "i16""#,
derive_unaligned: false,
allowed_combinations: &[
&[U8],
&[U16],
&[I8],
&[I16],
],
disallowed_but_legal_combinations: &[
&[C],
&[U32],
&[I32],
&[U64],
&[I64],
&[Usize],
&[Isize],
],
}
};
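// These tables drive `validate_reprs` (defined elsewhere in the crate, not shown in this
// excerpt): repr combinations listed under `allowed_combinations` are accepted, while
// `disallowed_but_legal_combinations` are reprs that are legal Rust but that the derive
// rejects with `allowed_combinations_message`.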
// A struct is AsBytes if:
// - all fields are AsBytes
// - repr(C) or repr(transparent) and
// - no padding (size of struct equals sum of size of field types)
// - repr(packed)
fn derive_as_bytes_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
// TODO(joshlf): Support type parameters.
    if !s.ast().generics.params.is_empty() {
return Error::new(Span::call_site(), "unsupported on types with type parameters")
.to_compile_error();
}
let reprs = try_or_print!(STRUCT_AS_BYTES_CFG.validate_reprs(s.ast()));
let require_size_check = match reprs.as_slice() {
[StructRepr::C] | [StructRepr::Transparent] => true,
[StructRepr::Packed] | [StructRepr::C, StructRepr::Packed] => false,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
impl_block(s.ast(), strct, "AsBytes", true, require_size_check)
}
#[rustfmt::skip]
const STRUCT_AS_BYTES_CFG: Config<StructRepr> = {
use StructRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message: r#"AsBytes requires repr of "C", "transparent", or "packed""#,
derive_unaligned: false,
allowed_combinations: &[
&[C],
&[Transparent],
&[C, Packed],
&[Packed],
],
disallowed_but_legal_combinations: &[],
}
};
// An enum is AsBytes if it is C-like and has a defined repr
fn derive_as_bytes_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
    if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement AsBytes")
.to_compile_error();
}
// We don't care what the repr is; we only care that it is one of the
// allowed ones.
try_or_print!(ENUM_AS_BYTES_CFG.validate_reprs(s.ast()));
impl_block(s.ast(), enm, "AsBytes", false, false)
}
#[rustfmt::skip]
const ENUM_AS_BYTES_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message: r#"AsBytes requires repr of "C", "u8", "u16", "u32", "u64", "usize", "i8", "i16", "i32", "i64", or "isize""#,
derive_unaligned: false,
allowed_combinations: &[
&[C],
&[U8],
&[U16],
&[I8],
&[I16],
&[U32],
&[I32],
&[U64],
&[I64],
&[Usize],
&[Isize],
],
disallowed_but_legal_combinations: &[],
}
};
// A struct is Unaligned if:
// - repr(align) is no more than 1 and either
// - repr(C) or repr(transparent) and
// - all fields Unaligned
// - repr(packed)
fn derive_unaligned_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
let reprs = try_or_print!(STRUCT_UNALIGNED_CFG.validate_reprs(s.ast()));
let require_trait_bound = match reprs.as_slice() {
[StructRepr::C] | [StructRepr::Transparent] => true,
[StructRepr::Packed] | [StructRepr::C, StructRepr::Packed] => false,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
impl_block(s.ast(), strct, "Unaligned", require_trait_bound, false)
}
#[rustfmt::skip]
const STRUCT_UNALIGNED_CFG: Config<StructRepr> = {
use StructRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message:
r#"Unaligned requires either a) repr "C" or "transparent" with all fields implementing Unaligned or, b) repr "packed""#,
derive_unaligned: true,
allowed_combinations: &[
&[C],
&[Transparent],
&[Packed],
&[C, Packed],
],
disallowed_but_legal_combinations: &[],
}
};
// An enum is Unaligned if:
// - No repr(align(N > 1))
// - repr(u8) or repr(i8)
fn derive_unaligned_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
    if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement Unaligned")
.to_compile_error();
}
// The only valid reprs are u8 and i8, and optionally align(1). We don't
// actually care what the reprs are so long as they satisfy that
// requirement.
try_or_print!(ENUM_UNALIGNED_CFG.validate_reprs(s.ast()));
// NOTE: C-like enums cannot currently have type parameters, so this value
// of true for require_trait_bounds doesn't really do anything. But it's
// marginally more future-proof in case that restriction is lifted in the
// future.
impl_block(s.ast(), enm, "Unaligned", true, false)
}
#[rustfmt::skip]
const ENUM_UNALIGNED_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
allowed_combinations_message:
r#"Unaligned requires repr of "u8" or "i8", and no alignment (i.e., repr(align(N > 1)))"#,
derive_unaligned: true,
allowed_combinations: &[
&[U8],
&[I8],
],
disallowed_but_legal_combinations: &[
&[C],
&[U16],
&[U32],
&[U64],
&[Usize],
&[I16],
&[I32],
&[I64],
&[Isize],
],
}
};
fn impl_block<D: DataExt>(
input: &DeriveInput,
data: &D,
trait_name: &str,
require_trait_bound: bool,
require_size_check: bool,
) -> proc_macro2::TokenStream {
// In this documentation, we will refer to this hypothetical struct:
//
// #[derive(FromBytes)]
// struct Foo<T, I: Iterator>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// {
// a: u8,
// b: T,
// c: I::Item,
// }
//
// First, we extract the field types, which in this case are u8, T, and
// I::Item. We use the names of the type parameters to split the field types
// into two sets - a set of types which are based on the type parameters,
// and a set of types which are not. First, we re-use the existing
// parameters and where clauses, generating an impl block like:
//
// impl<T, I: Iterator> FromBytes for Foo<T, I>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// {
// }
//
// Then, we use the list of types which are based on the type parameters to
// generate new entries in the where clause:
//
// impl<T, I: Iterator> FromBytes for Foo<T, I>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// T: FromBytes,
// I::Item: FromBytes,
// {
// }
//
// Finally, we use a different technique to generate the bounds for the types
// which are not based on type parameters:
//
//
    // fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {
    //     struct ImplementsFromBytes<F: ?Sized + FromBytes>(PhantomData<F>);
    //     let _: ImplementsFromBytes<u8>;
    // }
//
// It would be easier to put all types in the where clause, but that won't
// work until the trivial_bounds feature is stabilized (#48214).
//
// NOTE: It is standard practice to only emit bounds for the type parameters
// themselves, not for field types based on those parameters (e.g., `T` vs
// `T::Foo`). For a discussion of why this is standard practice, see
// https://github.com/rust-lang/rust/issues/26925.
//
// The reason we diverge from this standard is that doing it that way for us
// would be unsound. E.g., consider a type, `T` where `T: FromBytes` but
    // `T::Foo: !FromBytes`. It would not be sound for us to accept a type with
// a `T::Foo` field as `FromBytes` simply because `T: FromBytes`.
//
// While there's no getting around this requirement for us, it does have
// some pretty serious downsides that are worth calling out:
//
// 1. You lose the ability to have fields of generic type with reduced visibility.
//
// #[derive(Unaligned)]
// #[repr(C)]
// pub struct Public<T>(Private<T>);
//
// #[derive(Unaligned)]
// #[repr(C)]
// struct Private<T>(T);
//
//
// warning: private type `Private<T>` in public interface (error E0446)
// --> src/main.rs:6:10
// |
// 6 | #[derive(Unaligned)]
// | ^^^^^^^^^
// |
// = note: #[warn(private_in_public)] on by default
// = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
// = note: for more information, see issue #34537 <https://github.com/rust-lang/rust/issues/34537>
//
// 2. When lifetimes are involved, the trait solver ties itself in knots.
//
// #[derive(Unaligned)]
// #[repr(C)]
// struct Dup<'a, 'b> {
// a: PhantomData<&'a u8>,
// b: PhantomData<&'b u8>,
// }
//
//
// error[E0283]: type annotations required: cannot resolve `core::marker::PhantomData<&'a u8>: zerocopy::Unaligned`
// --> src/main.rs:6:10
// |
// 6 | #[derive(Unaligned)]
// | ^^^^^^^^^
// |
// = note: required by `zerocopy::Unaligned`
// A visitor which is used to walk a field's type and determine whether any
// of its definition is based on the type or lifetime parameters on a type.
struct FromTypeParamVisit<'a, 'b>(&'a Punctuated<GenericParam, Comma>, &'b mut bool);
impl<'a, 'b> Visit<'a> for FromTypeParamVisit<'a, 'b> {
fn visit_type_path(&mut self, i: &'a TypePath) {
visit::visit_type_path(self, i);
if self.0.iter().any(|param| {
if let GenericParam::Type(param) = param {
i.path.segments.first().unwrap().ident == param.ident
} else {
false
}
}) {
*self.1 = true;
}
}
fn visit_lifetime(&mut self, i: &'a Lifetime) {
visit::visit_lifetime(self, i);
if self.0.iter().any(|param| {
if let GenericParam::Lifetime(param) = param {
param.lifetime.ident == i.ident
} else {
false
}
}) {
*self.1 = true;
}
}
}
// Whether this type is based on one of the type parameters. E.g., given the
// type parameters `<T>`, `T`, `T::Foo`, and `(T::Foo, String)` are all
// based on the type parameters, while `String` and `(String, Box<()>)` are
// not.
let is_from_type_param = |ty: &Type| {
let mut ret = false;
FromTypeParamVisit(&input.generics.params, &mut ret).visit_type(ty);
ret
};
let trait_ident = Ident::new(trait_name, Span::call_site());
let field_types = data.nested_types();
let type_param_field_types = field_types.iter().filter(|ty| is_from_type_param(ty));
    let non_type_param_field_types = field_types.iter().filter(|ty| !is_from_type_param(ty));
// Add a new set of where clause predicates of the form `T: Trait` for each
// of the types of the struct's fields (but only the ones whose types are
// based on one of the type parameters).
let mut generics = input.generics.clone();
let where_clause = generics.make_where_clause();
if require_trait_bound {
for ty in type_param_field_types {
let bound = parse_quote!(#ty: zerocopy::#trait_ident);
where_clause.predicates.push(bound);
}
}
let type_ident = &input.ident;
// The parameters with trait bounds, but without type defaults.
let params = input.generics.params.clone().into_iter().map(|mut param| {
match &mut param {
GenericParam::Type(ty) => ty.default = None,
GenericParam::Const(cnst) => cnst.default = None,
GenericParam::Lifetime(_) => {}
}
quote!(#param)
});
// The identifiers of the parameters without trait bounds or type defaults.
let param_idents = input.generics.params.iter().map(|param| match param {
GenericParam::Type(ty) => {
let ident = &ty.ident;
quote!(#ident)
}
GenericParam::Lifetime(l) => quote!(#l),
GenericParam::Const(cnst) => quote!(#cnst),
});
let trait_bound_body = if require_trait_bound {
let implements_type_ident =
Ident::new(format!("Implements{}", trait_ident).as_str(), Span::call_site());
let implements_type_tokens = quote!(#implements_type_ident);
let types = non_type_param_field_types.map(|ty| quote!(#implements_type_tokens<#ty>));
quote!(
// A type with a type parameter that must implement #trait_ident
struct #implements_type_ident<F: ?Sized + zerocopy::#trait_ident>(::core::marker::PhantomData<F>);
// For each field type, an instantiation that won't type check if
// that type doesn't implement #trait_ident
#(let _: #types;)*
)
} else {
quote!()
};
let size_check_body = if require_size_check && !field_types.is_empty() {
quote!(
const HAS_PADDING: bool = core::mem::size_of::<#type_ident>() != #(core::mem::size_of::<#field_types>())+*;
let _: [(); 1/(1 - HAS_PADDING as usize)];
)
} else {
quote!()
};
quote! {
unsafe impl < #(#params),* > zerocopy::#trait_ident for #type_ident < #(#param_idents),* > #where_clause {
fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {
#trait_bound_body
#size_check_body
}
}
}
}
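// To make the padding check above concrete, here is a sketch of what the
// generated `size_check_body` boils down to for a hypothetical `#[repr(C)]`
// struct (illustrative only, not part of this crate):
//
//     #[repr(C)]
//     struct Padded { a: u8, b: u16 } // size_of::<Padded>() == 4, field sizes sum to 3
//
//     const HAS_PADDING: bool = core::mem::size_of::<Padded>()
//         != core::mem::size_of::<u8>() + core::mem::size_of::<u16>(); // true here
//     let _: [(); 1 / (1 - HAS_PADDING as usize)]; // division by zero in a const => compile error
//
// When the field sizes sum to the struct size, `HAS_PADDING` is false, the
// array length evaluates to 1, and the check compiles away to nothing.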
fn print_all_errors(errors: Vec<Error>) -> proc_macro2::TokenStream {
errors.iter().map(Error::to_compile_error).collect()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_config_repr_orderings() {
// Validate that the repr lists in the various configs are in the
// canonical order. If they aren't, then our algorithm to look up in
// those lists won't work.
// TODO(joshlf): Remove once the is_sorted method is stabilized
// (issue #53485).
fn is_sorted_and_deduped<T: Clone + Ord>(ts: &[T]) -> bool {
let mut sorted = ts.to_vec();
sorted.sort();
sorted.dedup();
ts == sorted.as_slice()
}
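// For example (illustrative values only): `is_sorted_and_deduped(&[1, 2, 3])`
// is true, while `is_sorted_and_deduped(&[2, 1, 3])` and
// `is_sorted_and_deduped(&[1, 1, 2])` are both false.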
fn elements_are_sorted_and_deduped<T: Clone + Ord>(lists: &[&[T]]) -> bool {
lists.iter().all(|list| is_sorted_and_deduped(*list))
}
fn config_is_sorted<T: KindRepr + Clone>(config: &Config<T>) -> bool {
elements_are_sorted_and_deduped(&config.allowed_combinations)
&& elements_are_sorted_and_deduped(&config.disallowed_but_legal_combinations)
}
assert!(config_is_sorted(&STRUCT_UNALIGNED_CFG));
assert!(config_is_sorted(&ENUM_FROM_BYTES_CFG));
assert!(config_is_sorted(&ENUM_UNALIGNED_CFG));
}
#[test]
fn test_config_repr_no_overlap() {
// Validate that no set of reprs appears in both the allowed_combinations
// and disallowed_but_legal_combinations lists.
fn overlap<T: Eq>(a: &[T], b: &[T]) -> bool {
a.iter().any(|elem| b.contains(elem))
}
fn | config_overlaps | identifier_name |
|
lib.rs | |
// help: required by the derive of FromBytes
//
// Instead, we have more verbose error messages like "unsupported representation
// for deriving FromBytes, AsBytes, or Unaligned on an enum"
//
// This will probably require Span::error
// (https://doc.rust-lang.org/nightly/proc_macro/struct.Span.html#method.error),
// which is currently unstable. Revisit this once it's stable.
decl_derive!([FromBytes] => derive_from_bytes);
decl_derive!([AsBytes] => derive_as_bytes);
decl_derive!([Unaligned] => derive_unaligned);
fn derive_from_bytes(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_from_bytes_struct(&s, strct),
Data::Enum(enm) => derive_from_bytes_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
fn derive_as_bytes(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_as_bytes_struct(&s, strct),
Data::Enum(enm) => derive_as_bytes_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
fn derive_unaligned(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_unaligned_struct(&s, strct),
Data::Enum(enm) => derive_unaligned_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
// Unwrap a Result<_, Vec<Error>>, converting any Err value into a TokenStream
// and returning it.
macro_rules! try_or_print {
($e:expr) => {
match $e {
Ok(x) => x,
Err(errors) => return print_all_errors(errors),
}
};
}
// A struct is FromBytes if:
// - all fields are FromBytes
fn derive_from_bytes_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
impl_block(s.ast(), strct, "FromBytes", true, false)
}
// An enum is FromBytes if:
// - Every possible bit pattern must be valid, which means that every bit
// pattern must correspond to a different enum variant. Thus, for an enum
// whose layout takes up N bytes, there must be 2^N variants.
// - Since we must know N, only representations which guarantee the layout's
// size are allowed. These are repr(uN) and repr(iN) (repr(C) implies an
// implementation-defined size). usize and isize technically guarantee the
// layout's size, but would require us to know how large those are on the
// target platform. This isn't terribly difficult - we could emit a const
// expression that could call core::mem::size_of in order to determine the
// size and check against the number of enum variants, but a) this would be
// platform-specific and, b) even on Rust's smallest bit width platform (32),
// this would require ~4 billion enum variants, which obviously isn't a thing.
fn derive_from_bytes_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement FromBytes")
.to_compile_error();
}
let reprs = try_or_print!(ENUM_FROM_BYTES_CFG.validate_reprs(s.ast()));
let variants_required = match reprs.as_slice() {
[EnumRepr::U8] | [EnumRepr::I8] => 1usize << 8,
[EnumRepr::U16] | [EnumRepr::I16] => 1usize << 16,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
if enm.variants.len() != variants_required {
return Error::new_spanned(
s.ast(),
format!(
"FromBytes only supported on {} enum with {} variants",
reprs[0], variants_required
),
)
.to_compile_error();
}
impl_block(s.ast(), enm, "FromBytes", true, false)
}
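// As a concrete illustration of the rule above (hypothetical enum, not part
// of this crate): a `#[repr(u8)]` enum is only FromBytes if it covers every
// one of the 2^8 bit patterns, e.g.
//
//     #[derive(FromBytes)]
//     #[repr(u8)]
//     enum Byte { V0 = 0, V1 = 1, /* ... all 256 variants ... */ V255 = 255 }
//
// whereas a `#[repr(u32)]` enum is rejected outright (it would need 2^32
// variants), even though it is otherwise legal Rust - hence it appears in
// `disallowed_but_legal_combinations` below.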
#[rustfmt::skip]
const ENUM_FROM_BYTES_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
allowed_combinations_message: r#"FromBytes requires repr of "u8", "u16", "i8", or "i16""#,
derive_unaligned: false,
allowed_combinations: &[
&[U8],
&[U16],
&[I8],
&[I16],
],
disallowed_but_legal_combinations: &[
&[C],
&[U32],
&[I32],
&[U64],
&[I64],
&[Usize],
&[Isize],
],
}
};
// A struct is AsBytes if:
// - all fields are AsBytes
// - repr(C) or repr(transparent) and
// - no padding (size of struct equals sum of size of field types)
// - repr(packed)
fn derive_as_bytes_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream |
#[rustfmt::skip]
const STRUCT_AS_BYTES_CFG: Config<StructRepr> = {
use StructRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message: r#"AsBytes requires repr of "C", "transparent", or "packed""#,
derive_unaligned: false,
allowed_combinations: &[
&[C],
&[Transparent],
&[C, Packed],
&[Packed],
],
disallowed_but_legal_combinations: &[],
}
};
// An enum is AsBytes if it is C-like and has a defined repr
fn derive_as_bytes_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement AsBytes")
.to_compile_error();
}
// We don't care what the repr is; we only care that it is one of the
// allowed ones.
try_or_print!(ENUM_AS_BYTES_CFG.validate_reprs(s.ast()));
impl_block(s.ast(), enm, "AsBytes", false, false)
}
#[rustfmt::skip]
const ENUM_AS_BYTES_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message: r#"AsBytes requires repr of "C", "u8", "u16", "u32", "u64", "usize", "i8", "i16", "i32", "i64", or "isize""#,
derive_unaligned: false,
allowed_combinations: &[
&[C],
&[U8],
&[U16],
&[I8],
&[I16],
&[U32],
&[I32],
&[U64],
&[I64],
&[Usize],
&[Isize],
],
disallowed_but_legal_combinations: &[],
}
};
// A struct is Unaligned if:
// - repr(align) is no more than 1 and either
// - repr(C) or repr(transparent) and
// - all fields Unaligned
// - repr(packed)
fn derive_unaligned_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
let reprs = try_or_print!(STRUCT_UNALIGNED_CFG.validate_reprs(s.ast()));
let require_trait_bound = match reprs.as_slice() {
[StructRepr::C] | [StructRepr::Transparent] => true,
[StructRepr::Packed] | [StructRepr::C, StructRepr::Packed] => false,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
impl_block(s.ast(), strct, "Unaligned", require_trait_bound, false)
}
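// Illustrative examples for the two branches above (hypothetical types, not
// part of this crate):
//
//     #[derive(Unaligned)]
//     #[repr(C)]
//     struct A { a: u8, b: u8 } // repr(C): every field must itself be Unaligned
//
//     #[derive(Unaligned)]
//     #[repr(packed)]
//     struct B { a: u8, b: u32 } // repr(packed): no per-field bounds required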
#[rustfmt::skip]
const STRUCT_UNALIGNED_CFG: Config<StructRepr> = {
use StructRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message:
r#"Unaligned requires either a) repr "C" or "transparent" with all fields implementing Unaligned or, b) repr "packed""#,
derive_unaligned: true,
allowed_combinations: &[
&[C],
&[Transparent],
&[Packed],
&[C, Packed],
],
disallowed_but_legal_combinations: &[],
}
};
// An enum is Unaligned if:
// - No repr(align(N > 1))
// - repr(u8) or repr(i8)
fn derive_unaligned_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement Unaligned")
.to_compile_error();
}
// The only valid reprs are u8 and i8, and optionally align(1). We don't
// actually care what the reprs are so long as they satisfy that
// requirement.
try_or_print!(ENUM_UNALIGNED_CFG.validate_reprs(s.ast()));
// NOTE: C-like enums cannot currently have type parameters, so this value
// of true for require_trait_bounds doesn't really do anything. But it's
// marginally more future-proof in case that restriction is lifted in the
// future.
impl_block(s.ast(), enm, "Unaligned", true, false)
}
#[rustfmt::skip]
const ENUM_UNALIGNED_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
allowed_combinations_message:
r#"Unaligned requires repr of "u8" or "i8", and no alignment (i.e., repr(align(N > 1)))"#,
derive_unaligned: true,
allowed_combinations: &[
&[U8],
&[I8],
],
disallowed_but_legal_combinations: &[
&[C],
&[U16],
&[U32],
&[U64],
&[Usize],
&[I16],
&[I32],
&[I64],
&[Isize],
],
}
};
fn impl_block<D: DataExt>(
input: &DeriveInput,
data: &D,
trait_name: &str,
require_trait_bound: bool,
require_size_check: bool,
) -> proc_macro2::TokenStream {
// In this documentation, we will refer to this hypothetical struct:
//
// #[derive(FromBytes)]
// struct Foo<T, I: Iterator>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// {
// a: u8,
// b: T,
// c: I::Item,
// }
//
// First, we extract the field types, which in this case are u8, T, and
// I::Item. We use the names of the type parameters to split the field types
// into two sets - a set of types which are based on the type parameters,
// and a set of types which are not. First, we re-use the existing
// parameters and where clauses, generating an impl block like:
//
// impl<T, I: Iterator> FromBytes for Foo<T, I>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// {
// }
//
// Then, we use the list of types which are based on the type parameters to
// generate new entries in the where clause:
//
// impl<T, I: Iterator> FromBytes for Foo<T, I>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// T: FromBytes,
// I::Item: FromBytes,
// {
// }
//
// Finally, we use a different technique to generate the bounds for the types
// which are not based on type parameters:
//
//
// fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {
// struct ImplementsFromBytes<F: ?Sized + FromBytes>(PhantomData<F>);
// let _: ImplementsFromBytes<u8>;
// }
//
// It would be easier to put all types in the where clause, but that won't
// work until the trivial_bounds feature is stabilized (#48214).
//
// NOTE: It is standard practice to only emit bounds for the type parameters
// themselves, not for field types based on those parameters (e.g., `T` vs
// `T::Foo`). For a discussion of why this is standard practice, see
// https://github.com/rust-lang/rust/issues/26925.
//
// The reason we diverge from this standard is that doing it that way for us
// would be unsound. E.g., consider a type, `T` where `T: FromBytes` but
// `T::Foo: !FromBytes`. It would not be sound for us to accept a type with
// a `T::Foo` field as `FromBytes` simply because `T: FromBytes`.
//
// While there's no getting around this requirement for us, it does have
// some pretty serious downsides that are worth calling out:
//
// 1. You lose the ability to have fields of generic type with reduced visibility.
//
// #[derive(Unaligned)]
// #[repr(C)]
// pub struct Public<T>(Private<T>);
//
// #[derive(Unaligned)]
// #[repr(C)]
// struct Private<T>(T);
//
//
// warning: private type `Private<T>` in public interface (error E0446)
// --> src/main.rs:6:10
// |
// 6 | #[derive(Unaligned)]
// | ^^^^^^^^^
// |
// = note: #[warn(private_in_public)] on by default
// = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
// = note: for more information, see issue #34537 <https://github.com/rust-lang/rust/issues/34537>
//
// 2. When lifetimes are involved, the trait solver ties itself in knots.
//
// #[derive(Unaligned)]
// #[repr(C)]
// struct Dup<'a, 'b> {
// a: PhantomData<&'a u8>,
// b: PhantomData<&'b u8>,
// }
//
//
// error[E0283]: type annotations required: cannot resolve `core::marker::PhantomData<&'a u8>: zerocopy::Unaligned`
// --> src/main.rs:6:10
// |
// 6 | #[derive(Unaligned)]
// | ^^^^^^^^^
// |
// = note: required by `zerocopy::Unaligned`
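//
// Putting the pieces together, the derive for the hypothetical `Foo<T, I>`
// above expands to roughly the following (sketch only, not the literal
// macro output):
//
//     unsafe impl<T, I: Iterator> FromBytes for Foo<T, I>
//     where
//         T: Copy, I: Clone, I::Item: Clone,
//         T: FromBytes, I::Item: FromBytes,
//     {
//         fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {
//             struct ImplementsFromBytes<F: ?Sized + FromBytes>(PhantomData<F>);
//             let _: ImplementsFromBytes<u8>; // covers the non-type-parameter field `a: u8`
//         }
//     }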
// A visitor which is used to walk a field's type and determine whether any
// of its definition is based on the type or lifetime parameters on a type.
struct FromTypeParamVisit<'a, 'b>(&'a Punctuated<GenericParam, Comma>, &'b mut bool);
impl<'a, 'b> Visit<'a> for FromTypeParamVisit<'a, 'b> {
fn visit_type_path(&mut self, i: &'a TypePath) {
visit::visit_type_path(self, i);
if self.0.iter().any(|param| {
if let GenericParam::Type(param) = param {
i.path.segments.first().unwrap().ident == param.ident
} else {
false
}
}) {
*self.1 = true;
}
}
fn visit_lifetime(&mut self, i: &'a Lifetime) {
visit::visit_lifetime(self, i);
if self.0.iter().any(|param| {
if let GenericParam::Lifetime(param) = param {
param.lifetime.ident == i.ident
} else {
false
}
}) {
*self.1 = true;
}
}
}
// Whether this type is based on one of the type parameters. E.g., given the
// type parameters `<T>`, `T`, `T::Foo`, and `(T::Foo, String)` are all
// based on the type parameters, while `String` and `(String, Box<()>)` are
// not.
let is_from_type_param = |ty: &Type| {
let mut ret = false;
FromTypeParamVisit(&input.generics.params, &mut ret).visit_type(ty);
ret
};
let trait_ident = Ident::new(trait_name, Span::call_site());
let field_types = data.nested_types();
let type_param_field_types = field_types.iter().filter(|ty| is_from_type_param(ty));
let non_type_param_field_types = field_types.iter().filter(|ty| !is_from_type_param(ty));
// Add a new set of where clause predicates of the form `T: Trait` for each
// of the types of the struct's fields (but only the ones whose types are
// based on one of the type parameters).
let mut generics = input.generics.clone();
let where_clause = generics.make_where_clause();
if require_trait_bound {
for ty in type_param_field_types {
let bound = parse_quote!(#ty: zerocopy::#trait_ident);
where_clause.predicates.push(bound);
}
}
let type_ident = &input.ident;
// The parameters with trait bounds, but without type defaults.
let params = input.generics.params.clone().into_iter().map(|mut param| {
match &mut param {
GenericParam::Type(ty) => ty.default = None,
GenericParam::Const(cnst) => cnst.default = None,
GenericParam::Lifetime(_) => {}
}
quote!(#param)
});
// The identifiers of the parameters without trait bounds or type defaults.
let param_idents = input.generics.params.iter().map(|param| match param {
GenericParam::Type(ty) => {
let ident = &ty.ident;
quote!(#ident)
}
GenericParam::Lifetime(l) => quote!(#l),
GenericParam::Const(cnst) => quote!(#cnst),
});
let trait_bound_body = if require_trait_bound {
let implements_type_ident =
Ident::new(format!("Implements{}", trait_ident).as_str(), Span::call_site());
let implements_type_tokens = quote!(#implements_type_ident);
let types = non_type_param_field_types.map(|ty| quote!(#implements_type_tokens<#ty>));
quote!(
// A type with a type parameter that must implement #trait_ident
struct #implements_type_ident<F: ?Sized + zerocopy::#trait_ident>(::core::marker::PhantomData<F>);
// For each field type, an instantiation that won't type check if
// that type doesn't implement #trait_ident
#(let _: #types;)*
)
} else {
quote!()
};
let size_check_body = if require_size_check && !field_types.is_empty() {
quote!(
const HAS_PADDING: bool = core::mem::size_of::<#type_ident>() != #(core::mem::size_of::<#field_types>())+*;
let _: [(); 1/(1 - HAS_PADDING as usize)];
)
} else {
quote!()
};
quote! {
unsafe impl < #(#params),* > zerocopy::#trait_ident for #type_ident < #(#param_idents),* > #where_clause {
fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {
#trait_bound_body
#size_check_body
}
}
}
}
fn print_all_errors(errors: Vec<Error>) -> proc_macro2::TokenStream {
errors.iter().map(Error::to_compile_error).collect()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_config_repr_orderings() {
// Validate that the repr lists in the various configs are in the
// canonical order. If they aren't, then our algorithm to look up in
// those lists won't work.
// TODO(joshlf): Remove once the is_sorted method is stabilized
// (issue #53485).
fn is_sorted_and_deduped<T: Clone + Ord>(ts: &[T]) -> bool {
let mut sorted = ts.to_vec();
sorted.sort();
sorted.dedup();
ts == sorted.as_slice()
}
fn elements_are_sorted_and_deduped<T: Clone + Ord>(lists: &[&[T]]) -> bool {
| {
// TODO(joshlf): Support type parameters.
if !s.ast().generics.params.is_empty() {
return Error::new(Span::call_site(), "unsupported on types with type parameters")
.to_compile_error();
}
let reprs = try_or_print!(STRUCT_AS_BYTES_CFG.validate_reprs(s.ast()));
let require_size_check = match reprs.as_slice() {
[StructRepr::C] | [StructRepr::Transparent] => true,
[StructRepr::Packed] | [StructRepr::C, StructRepr::Packed] => false,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
impl_block(s.ast(), strct, "AsBytes", true, require_size_check)
} | identifier_body |
lib.rs | |
// help: required by the derive of FromBytes
//
// Instead, we have more verbose error messages like "unsupported representation
// for deriving FromBytes, AsBytes, or Unaligned on an enum"
//
// This will probably require Span::error
// (https://doc.rust-lang.org/nightly/proc_macro/struct.Span.html#method.error),
// which is currently unstable. Revisit this once it's stable.
decl_derive!([FromBytes] => derive_from_bytes);
decl_derive!([AsBytes] => derive_as_bytes);
decl_derive!([Unaligned] => derive_unaligned);
fn derive_from_bytes(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_from_bytes_struct(&s, strct),
Data::Enum(enm) => derive_from_bytes_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
fn derive_as_bytes(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_as_bytes_struct(&s, strct),
Data::Enum(enm) => derive_as_bytes_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
fn derive_unaligned(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_unaligned_struct(&s, strct),
Data::Enum(enm) => derive_unaligned_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
// Unwrap a Result<_, Vec<Error>>, converting any Err value into a TokenStream
// and returning it.
macro_rules! try_or_print {
($e:expr) => {
match $e {
Ok(x) => x,
Err(errors) => return print_all_errors(errors),
}
};
}
// A struct is FromBytes if:
// - all fields are FromBytes
fn derive_from_bytes_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
impl_block(s.ast(), strct, "FromBytes", true, false)
}
// An enum is FromBytes if:
// - Every possible bit pattern must be valid, which means that every bit
// pattern must correspond to a different enum variant. Thus, for an enum
// whose layout takes up N bytes, there must be 2^N variants.
// - Since we must know N, only representations which guarantee the layout's
// size are allowed. These are repr(uN) and repr(iN) (repr(C) implies an
// implementation-defined size). usize and isize technically guarantee the
// layout's size, but would require us to know how large those are on the
// target platform. This isn't terribly difficult - we could emit a const
// expression that could call core::mem::size_of in order to determine the
// size and check against the number of enum variants, but a) this would be
// platform-specific and, b) even on Rust's smallest bit width platform (32),
// this would require ~4 billion enum variants, which obviously isn't a thing.
fn derive_from_bytes_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement FromBytes")
.to_compile_error();
}
let reprs = try_or_print!(ENUM_FROM_BYTES_CFG.validate_reprs(s.ast()));
let variants_required = match reprs.as_slice() {
[EnumRepr::U8] | [EnumRepr::I8] => 1usize << 8,
[EnumRepr::U16] | [EnumRepr::I16] => 1usize << 16,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
if enm.variants.len() != variants_required {
return Error::new_spanned(
s.ast(),
format!(
"FromBytes only supported on {} enum with {} variants",
reprs[0], variants_required
),
)
.to_compile_error();
}
impl_block(s.ast(), enm, "FromBytes", true, false)
}
#[rustfmt::skip]
const ENUM_FROM_BYTES_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
allowed_combinations_message: r#"FromBytes requires repr of "u8", "u16", "i8", or "i16""#,
derive_unaligned: false,
allowed_combinations: &[
&[U8],
&[U16],
&[I8],
&[I16],
],
disallowed_but_legal_combinations: &[
&[C],
&[U32],
&[I32],
&[U64],
&[I64],
&[Usize],
&[Isize],
],
}
};
// A struct is AsBytes if:
// - all fields are AsBytes
// - repr(C) or repr(transparent) and
// - no padding (size of struct equals sum of size of field types)
// - repr(packed)
fn derive_as_bytes_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
// TODO(joshlf): Support type parameters.
if !s.ast().generics.params.is_empty() {
return Error::new(Span::call_site(), "unsupported on types with type parameters")
.to_compile_error();
}
let reprs = try_or_print!(STRUCT_AS_BYTES_CFG.validate_reprs(s.ast()));
let require_size_check = match reprs.as_slice() {
[StructRepr::C] | [StructRepr::Transparent] => true,
[StructRepr::Packed] | [StructRepr::C, StructRepr::Packed] => false,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
impl_block(s.ast(), strct, "AsBytes", true, require_size_check)
}
#[rustfmt::skip]
const STRUCT_AS_BYTES_CFG: Config<StructRepr> = {
use StructRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message: r#"AsBytes requires repr of "C", "transparent", or "packed""#,
derive_unaligned: false,
allowed_combinations: &[
&[C],
&[Transparent],
&[C, Packed],
&[Packed],
],
disallowed_but_legal_combinations: &[],
}
};
// An enum is AsBytes if it is C-like and has a defined repr
fn derive_as_bytes_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement AsBytes")
.to_compile_error();
}
// We don't care what the repr is; we only care that it is one of the
// allowed ones.
try_or_print!(ENUM_AS_BYTES_CFG.validate_reprs(s.ast()));
impl_block(s.ast(), enm, "AsBytes", false, false)
}
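// For illustration (hypothetical enum, not part of this crate), this is the
// kind of type the rule above admits:
//
//     #[derive(AsBytes)]
//     #[repr(u8)]
//     enum Mode { Off = 0, On = 1 } // C-like, with a defined repr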
#[rustfmt::skip]
const ENUM_AS_BYTES_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message: r#"AsBytes requires repr of "C", "u8", "u16", "u32", "u64", "usize", "i8", "i16", "i32", "i64", or "isize""#,
derive_unaligned: false,
allowed_combinations: &[
&[C],
&[U8],
&[U16],
&[I8],
&[I16],
&[U32],
&[I32],
&[U64],
&[I64],
&[Usize],
&[Isize],
],
disallowed_but_legal_combinations: &[],
}
};
// A struct is Unaligned if:
// - repr(align) is no more than 1 and either
// - repr(C) or repr(transparent) and
// - all fields Unaligned
// - repr(packed)
fn derive_unaligned_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
let reprs = try_or_print!(STRUCT_UNALIGNED_CFG.validate_reprs(s.ast()));
let require_trait_bound = match reprs.as_slice() {
[StructRepr::C] | [StructRepr::Transparent] => true,
[StructRepr::Packed] | [StructRepr::C, StructRepr::Packed] => false,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
impl_block(s.ast(), strct, "Unaligned", require_trait_bound, false)
}
#[rustfmt::skip]
const STRUCT_UNALIGNED_CFG: Config<StructRepr> = {
use StructRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message:
r#"Unaligned requires either a) repr "C" or "transparent" with all fields implementing Unaligned or, b) repr "packed""#,
derive_unaligned: true,
allowed_combinations: &[
&[C],
&[Transparent],
&[Packed],
&[C, Packed],
],
disallowed_but_legal_combinations: &[],
}
};
// An enum is Unaligned if:
// - No repr(align(N > 1))
// - repr(u8) or repr(i8)
fn derive_unaligned_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement Unaligned")
.to_compile_error();
}
// The only valid reprs are u8 and i8, and optionally align(1). We don't
// actually care what the reprs are so long as they satisfy that
// requirement.
try_or_print!(ENUM_UNALIGNED_CFG.validate_reprs(s.ast()));
// NOTE: C-like enums cannot currently have type parameters, so this value
// of true for require_trait_bounds doesn't really do anything. But it's
// marginally more future-proof in case that restriction is lifted in the
// future.
impl_block(s.ast(), enm, "Unaligned", true, false)
}
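// For illustration (hypothetical enum, not part of this crate):
//
//     #[derive(Unaligned)]
//     #[repr(u8)]
//     enum Flag { A, B } // repr(u8) guarantees an alignment of 1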
#[rustfmt::skip]
const ENUM_UNALIGNED_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
allowed_combinations_message:
r#"Unaligned requires repr of "u8" or "i8", and no alignment (i.e., repr(align(N > 1)))"#,
derive_unaligned: true,
allowed_combinations: &[
&[U8],
&[I8],
],
disallowed_but_legal_combinations: &[
&[C],
&[U16],
&[U32],
&[U64],
&[Usize],
&[I16],
&[I32],
&[I64],
&[Isize],
],
}
};
fn impl_block<D: DataExt>(
input: &DeriveInput,
data: &D,
trait_name: &str,
require_trait_bound: bool,
require_size_check: bool,
) -> proc_macro2::TokenStream {
// In this documentation, we will refer to this hypothetical struct:
//
// #[derive(FromBytes)]
// struct Foo<T, I: Iterator>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// {
// a: u8,
// b: T,
// c: I::Item,
// }
//
// First, we extract the field types, which in this case are u8, T, and
// I::Item. We use the names of the type parameters to split the field types
// into two sets - a set of types which are based on the type parameters,
// and a set of types which are not. First, we re-use the existing
// parameters and where clauses, generating an impl block like:
//
// impl<T, I: Iterator> FromBytes for Foo<T, I>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// {
// }
//
// Then, we use the list of types which are based on the type parameters to
// generate new entries in the where clause:
//
// impl<T, I: Iterator> FromBytes for Foo<T, I>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// T: FromBytes,
// I::Item: FromBytes,
// {
// }
//
// Finally, we use a different technique to generate the bounds for the types
// which are not based on type parameters:
//
//
// fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {
// struct ImplementsFromBytes<F: ?Sized + FromBytes>(PhantomData<F>);
// let _: ImplementsFromBytes<u8>;
// }
//
// It would be easier to put all types in the where clause, but that won't
// work until the trivial_bounds feature is stabilized (#48214).
//
// NOTE: It is standard practice to only emit bounds for the type parameters
// themselves, not for field types based on those parameters (e.g., `T` vs
// `T::Foo`). For a discussion of why this is standard practice, see
// https://github.com/rust-lang/rust/issues/26925.
//
// The reason we diverge from this standard is that doing it that way for us
// would be unsound. E.g., consider a type, `T` where `T: FromBytes` but
// `T::Foo: !FromBytes`. It would not be sound for us to accept a type with
// a `T::Foo` field as `FromBytes` simply because `T: FromBytes`.
//
// While there's no getting around this requirement for us, it does have
// some pretty serious downsides that are worth calling out:
//
// 1. You lose the ability to have fields of generic type with reduced visibility.
//
// #[derive(Unaligned)]
// #[repr(C)]
// pub struct Public<T>(Private<T>);
//
// #[derive(Unaligned)]
// #[repr(C)]
// struct Private<T>(T);
//
//
// warning: private type `Private<T>` in public interface (error E0446)
// --> src/main.rs:6:10
// |
// 6 | #[derive(Unaligned)]
// | ^^^^^^^^^
// |
// = note: #[warn(private_in_public)] on by default
// = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
// = note: for more information, see issue #34537 <https://github.com/rust-lang/rust/issues/34537>
//
// 2. When lifetimes are involved, the trait solver ties itself in knots.
//
// #[derive(Unaligned)]
// #[repr(C)]
// struct Dup<'a, 'b> {
// a: PhantomData<&'a u8>,
// b: PhantomData<&'b u8>,
// }
//
//
// error[E0283]: type annotations required: cannot resolve `core::marker::PhantomData<&'a u8>: zerocopy::Unaligned`
// --> src/main.rs:6:10
// |
// 6 | #[derive(Unaligned)]
// | ^^^^^^^^^
// |
// = note: required by `zerocopy::Unaligned`
// A visitor which is used to walk a field's type and determine whether any
// of its definition is based on the type or lifetime parameters on a type.
struct FromTypeParamVisit<'a, 'b>(&'a Punctuated<GenericParam, Comma>, &'b mut bool);
impl<'a, 'b> Visit<'a> for FromTypeParamVisit<'a, 'b> {
fn visit_type_path(&mut self, i: &'a TypePath) {
visit::visit_type_path(self, i);
if self.0.iter().any(|param| {
if let GenericParam::Type(param) = param {
i.path.segments.first().unwrap().ident == param.ident
} else {
false
}
}) {
*self.1 = true;
}
}
fn visit_lifetime(&mut self, i: &'a Lifetime) {
visit::visit_lifetime(self, i);
if self.0.iter().any(|param| {
if let GenericParam::Lifetime(param) = param {
param.lifetime.ident == i.ident
} else {
false
}
}) {
*self.1 = true;
}
}
}
// Whether this type is based on one of the type parameters. E.g., given the
// type parameters `<T>`, `T`, `T::Foo`, and `(T::Foo, String)` are all
// based on the type parameters, while `String` and `(String, Box<()>)` are
// not.
let is_from_type_param = |ty: &Type| {
let mut ret = false;
FromTypeParamVisit(&input.generics.params, &mut ret).visit_type(ty);
ret
};
let trait_ident = Ident::new(trait_name, Span::call_site());
let field_types = data.nested_types();
let type_param_field_types = field_types.iter().filter(|ty| is_from_type_param(ty));
let non_type_param_field_types = field_types.iter().filter(|ty| !is_from_type_param(ty));
// Add a new set of where clause predicates of the form `T: Trait` for each
// of the types of the struct's fields (but only the ones whose types are
// based on one of the type parameters).
let mut generics = input.generics.clone();
let where_clause = generics.make_where_clause();
if require_trait_bound {
for ty in type_param_field_types {
let bound = parse_quote!(#ty: zerocopy::#trait_ident);
where_clause.predicates.push(bound);
}
}
let type_ident = &input.ident;
// The parameters with trait bounds, but without type defaults.
let params = input.generics.params.clone().into_iter().map(|mut param| {
match &mut param {
GenericParam::Type(ty) => ty.default = None,
GenericParam::Const(cnst) => cnst.default = None,
GenericParam::Lifetime(_) => {}
}
quote!(#param)
});
// The identifiers of the parameters without trait bounds or type defaults.
let param_idents = input.generics.params.iter().map(|param| match param {
GenericParam::Type(ty) => {
let ident = &ty.ident;
quote!(#ident)
}
GenericParam::Lifetime(l) => quote!(#l),
GenericParam::Const(cnst) => quote!(#cnst),
});
let trait_bound_body = if require_trait_bound {
let implements_type_ident =
Ident::new(format!("Implements{}", trait_ident).as_str(), Span::call_site());
let implements_type_tokens = quote!(#implements_type_ident);
let types = non_type_param_field_types.map(|ty| quote!(#implements_type_tokens<#ty>));
quote!(
// A type with a type parameter that must implement #trait_ident
struct #implements_type_ident<F: ?Sized + zerocopy::#trait_ident>(::core::marker::PhantomData<F>);
// For each field type, an instantiation that won't type check if
// that type doesn't implement #trait_ident
#(let _: #types;)*
)
} else {
quote!()
};
let size_check_body = if require_size_check && !field_types.is_empty() {
quote!(
const HAS_PADDING: bool = core::mem::size_of::<#type_ident>() != #(core::mem::size_of::<#field_types>())+*;
let _: [(); 1/(1 - HAS_PADDING as usize)];
)
} else {
quote!()
};
quote! {
unsafe impl < #(#params),* > zerocopy::#trait_ident for #type_ident < #(#param_idents),* > #where_clause {
fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {
#trait_bound_body
#size_check_body
}
}
}
}
fn print_all_errors(errors: Vec<Error>) -> proc_macro2::TokenStream {
errors.iter().map(Error::to_compile_error).collect() | }
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_config_repr_orderings() {
// Validate that the repr lists in the various configs are in the
// canonical order. If they aren't, then our algorithm to look up in
// those lists won't work.
// TODO(joshlf): Remove once the is_sorted method is stabilized
// (issue #53485).
fn is_sorted_and_deduped<T: Clone + Ord>(ts: &[T]) -> bool {
let mut sorted = ts.to_vec();
sorted.sort();
sorted.dedup();
ts == sorted.as_slice()
}
fn elements_are_sorted_and_deduped<T: Clone + Ord>(lists: &[&[T]]) -> bool {
| random_line_split |
|
restyle_hints.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Restyle hints: an optimization to avoid unnecessarily matching selectors.
#[cfg(feature = "gecko")]
use crate::gecko_bindings::structs::nsRestyleHint;
use crate::traversal_flags::TraversalFlags;
bitflags! {
/// The kind of restyle we need to do for a given element.
pub struct RestyleHint: u8 {
/// Do a selector match of the element.
const RESTYLE_SELF = 1 << 0;
/// Do a selector match of the element's descendants.
const RESTYLE_DESCENDANTS = 1 << 1;
/// Recascade the current element.
const RECASCADE_SELF = 1 << 2;
/// Recascade all descendant elements.
const RECASCADE_DESCENDANTS = 1 << 3;
/// Replace the style data coming from CSS transitions without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_CSS_TRANSITIONS = 1 << 4;
/// Replace the style data coming from CSS animations without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_CSS_ANIMATIONS = 1 << 5;
/// Don't re-run selector-matching on the element, only the style
/// attribute has changed, and this change didn't have any other
/// dependencies.
const RESTYLE_STYLE_ATTRIBUTE = 1 << 6;
/// Replace the style data coming from SMIL animations without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_SMIL = 1 << 7;
}
}
impl RestyleHint {
/// Creates a new `RestyleHint` indicating that the current element and all
/// its descendants must be fully restyled.
pub fn restyle_subtree() -> Self {
RestyleHint::RESTYLE_SELF | RestyleHint::RESTYLE_DESCENDANTS
}
/// Creates a new `RestyleHint` indicating that the current element and all
/// its descendants must be recascaded.
pub fn recascade_subtree() -> Self {
RestyleHint::RECASCADE_SELF | RestyleHint::RECASCADE_DESCENDANTS
}
/// Returns whether this hint invalidates the element and all its
/// descendants.
pub fn contains_subtree(&self) -> bool {
self.contains(RestyleHint::RESTYLE_SELF | RestyleHint::RESTYLE_DESCENDANTS)
}
/// Returns whether we need to restyle this element.
pub fn has_non_animation_invalidations(&self) -> bool {
self.intersects(
RestyleHint::RESTYLE_SELF |
RestyleHint::RECASCADE_SELF |
(Self::replacements() & !Self::for_animations()),
)
}
/// Propagates this restyle hint to a child element.
pub fn propagate(&mut self, traversal_flags: &TraversalFlags) -> Self {
use std::mem;
// In the middle of an animation only restyle, we don't need to
// propagate any restyle hints, and we need to remove ourselves.
if traversal_flags.for_animation_only() {
self.remove_animation_hints();
return Self::empty();
}
debug_assert!(
!self.has_animation_hint(),
"There should not be any animation restyle hints \
during normal traversal"
);
// Else we should clear ourselves, and return the propagated hint.
mem::replace(self, Self::empty()).propagate_for_non_animation_restyle()
}
/// Returns a new `CascadeHint` appropriate for children of the current
/// element.
fn propagate_for_non_animation_restyle(&self) -> Self {
if self.contains(RestyleHint::RESTYLE_DESCENDANTS) {
return Self::restyle_subtree();
}
if self.contains(RestyleHint::RECASCADE_DESCENDANTS) {
return Self::recascade_subtree();
}
Self::empty()
}
/// Creates a new `RestyleHint` that indicates the element must be
/// recascaded.
pub fn recascade_self() -> Self {
RestyleHint::RECASCADE_SELF
}
/// Returns a hint that contains all the replacement hints.
pub fn replacements() -> Self {
RestyleHint::RESTYLE_STYLE_ATTRIBUTE | Self::for_animations()
}
/// The replacements for the animation cascade levels.
#[inline]
pub fn for_animations() -> Self {
RestyleHint::RESTYLE_SMIL |
RestyleHint::RESTYLE_CSS_ANIMATIONS |
RestyleHint::RESTYLE_CSS_TRANSITIONS
}
/// Returns whether the hint specifies that the currently element must be
/// recascaded.
pub fn has_recascade_self(&self) -> bool {
self.contains(RestyleHint::RECASCADE_SELF)
}
/// Returns whether the hint specifies that an animation cascade level must
/// be replaced.
#[inline]
pub fn has_animation_hint(&self) -> bool {
self.intersects(Self::for_animations())
}
/// Returns whether the hint specifies that an animation cascade level must
/// be replaced.
#[inline]
pub fn has_animation_hint_or_recascade(&self) -> bool {
self.intersects(Self::for_animations() | RestyleHint::RECASCADE_SELF)
}
/// Returns whether the hint specifies some restyle work other than an
/// animation cascade level replacement.
#[inline]
pub fn has_non_animation_hint(&self) -> bool |
/// Returns whether the hint specifies that selector matching must be re-run
/// for the element.
#[inline]
pub fn match_self(&self) -> bool {
self.intersects(RestyleHint::RESTYLE_SELF)
}
/// Returns whether the hint specifies that some cascade levels must be
/// replaced.
#[inline]
pub fn has_replacements(&self) -> bool {
self.intersects(Self::replacements())
}
/// Removes all of the animation-related hints.
#[inline]
pub fn remove_animation_hints(&mut self) {
self.remove(Self::for_animations());
// While RECASCADE_SELF is not animation-specific, we only ever add and
// process it during traversal. If we are here, removing animation
// hints, then we are in an animation-only traversal, and we know that
// any RECASCADE_SELF flag must have been set due to changes in
// inherited values after restyling for animations, and thus we want to
// remove it so that we don't later try to restyle the element during a
// normal restyle. (We could have separate RECASCADE_SELF_NORMAL and
// RECASCADE_SELF_ANIMATIONS flags to make it clear, but this isn't
// currently necessary.)
self.remove(RestyleHint::RECASCADE_SELF);
}
}
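// Illustrative usage of the API above (sketch; the real call sites live in
// the restyle machinery, not in this module):
//
//     let hint = RestyleHint::restyle_subtree();
//     assert!(hint.contains_subtree());
//     assert!(hint.has_non_animation_invalidations());
//     assert!(!hint.has_animation_hint());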
impl Default for RestyleHint {
fn default() -> Self {
Self::empty()
}
}
#[cfg(feature = "gecko")]
impl From<nsRestyleHint> for RestyleHint {
fn from(mut raw: nsRestyleHint) -> Self {
let mut hint = RestyleHint::empty();
debug_assert!(
raw.0 & nsRestyleHint::eRestyle_LaterSiblings.0 == 0,
"Handle later siblings manually if necessary plz."
);
if (raw.0 & (nsRestyleHint::eRestyle_Self.0 | nsRestyleHint::eRestyle_Subtree.0)) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_Self.0;
hint.insert(RestyleHint::RESTYLE_SELF);
}
if (raw.0 & (nsRestyleHint::eRestyle_Subtree.0 | nsRestyleHint::eRestyle_SomeDescendants.0)) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_Subtree.0;
raw.0 &= !nsRestyleHint::eRestyle_SomeDescendants.0;
hint.insert(RestyleHint::RESTYLE_DESCENDANTS);
}
if (raw.0 & (nsRestyleHint::eRestyle_ForceDescendants.0 | nsRestyleHint::eRestyle_Force.0)) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_Force.0;
hint.insert(RestyleHint::RECASCADE_SELF);
}
if (raw.0 & nsRestyleHint::eRestyle_ForceDescendants.0) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_ForceDescendants.0;
hint.insert(RestyleHint::RECASCADE_DESCENDANTS);
}
hint.insert(RestyleHint::from_bits_truncate(raw.0 as u8));
hint
}
}
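// As a worked example of the mapping above: a raw hint of `eRestyle_Subtree`
// matches both the first and the second branch, so it converts to
// `RestyleHint::RESTYLE_SELF | RestyleHint::RESTYLE_DESCENDANTS`, i.e.
// `RestyleHint::restyle_subtree()`.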
#[cfg(feature = "servo")]
malloc_size_of_is_0!(RestyleHint);
/// Asserts that all replacement hints have a matching nsRestyleHint value.
#[cfg(feature = "gecko")]
#[inline]
pub fn assert_restyle_hints_match() {
use crate::gecko_bindings::structs;
macro_rules! check_restyle_hints {
( $( $a:ident => $b:path),*, ) => {
if cfg!(debug_assertions) {
let mut replacements = RestyleHint::replacements();
$(
assert_eq!(structs::nsRestyleHint::$a.0 as usize, $b.bits() as usize, stringify!($b));
replacements.remove($b);
)*
assert_eq!(replacements, RestyleHint::empty(),
"all RestyleHint replacement bits should have an \
assertion");
}
}
}
check_restyle_hints! {
eRestyle_CSSTransitions => RestyleHint::RESTYLE_CSS_TRANSITIONS,
eRestyle_CSSAnimations => RestyleHint::RESTYLE_CSS_ANIMATIONS,
eRestyle_StyleAttribute => RestyleHint::RESTYLE_STYLE_ATTRIBUTE,
eRestyle_StyleAttribute_Animations => RestyleHint::RESTYLE_SMIL,
}
}
| {
!(*self & !Self::for_animations()).is_empty()
} | identifier_body |
restyle_hints.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Restyle hints: an optimization to avoid unnecessarily matching selectors.
#[cfg(feature = "gecko")]
use crate::gecko_bindings::structs::nsRestyleHint;
use crate::traversal_flags::TraversalFlags;
bitflags! {
/// The kind of restyle we need to do for a given element.
pub struct RestyleHint: u8 {
/// Do a selector match of the element.
const RESTYLE_SELF = 1 << 0;
/// Do a selector match of the element's descendants.
const RESTYLE_DESCENDANTS = 1 << 1;
/// Recascade the current element.
const RECASCADE_SELF = 1 << 2;
/// Recascade all descendant elements.
const RECASCADE_DESCENDANTS = 1 << 3;
/// Replace the style data coming from CSS transitions without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_CSS_TRANSITIONS = 1 << 4;
/// Replace the style data coming from CSS animations without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_CSS_ANIMATIONS = 1 << 5;
/// Don't re-run selector-matching on the element, only the style
/// attribute has changed, and this change didn't have any other
/// dependencies.
const RESTYLE_STYLE_ATTRIBUTE = 1 << 6;
/// Replace the style data coming from SMIL animations without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_SMIL = 1 << 7;
}
}
impl RestyleHint {
/// Creates a new `RestyleHint` indicating that the current element and all
/// its descendants must be fully restyled.
pub fn restyle_subtree() -> Self {
RestyleHint::RESTYLE_SELF | RestyleHint::RESTYLE_DESCENDANTS
}
/// Creates a new `RestyleHint` indicating that the current element and all
/// its descendants must be recascaded.
pub fn recascade_subtree() -> Self {
RestyleHint::RECASCADE_SELF | RestyleHint::RECASCADE_DESCENDANTS
}
| /// descendants.
pub fn contains_subtree(&self) -> bool {
self.contains(RestyleHint::RESTYLE_SELF | RestyleHint::RESTYLE_DESCENDANTS)
}
/// Returns whether we need to restyle this element.
pub fn has_non_animation_invalidations(&self) -> bool {
self.intersects(
RestyleHint::RESTYLE_SELF |
RestyleHint::RECASCADE_SELF |
(Self::replacements() & !Self::for_animations()),
)
}
/// Propagates this restyle hint to a child element.
pub fn propagate(&mut self, traversal_flags: &TraversalFlags) -> Self {
use std::mem;
// In the middle of an animation only restyle, we don't need to
// propagate any restyle hints, and we need to remove ourselves.
if traversal_flags.for_animation_only() {
self.remove_animation_hints();
return Self::empty();
}
debug_assert!(
!self.has_animation_hint(),
"There should not be any animation restyle hints \
during normal traversal"
);
// Else we should clear ourselves, and return the propagated hint.
mem::replace(self, Self::empty()).propagate_for_non_animation_restyle()
}
/// Returns a new `CascadeHint` appropriate for children of the current
/// element.
fn propagate_for_non_animation_restyle(&self) -> Self {
if self.contains(RestyleHint::RESTYLE_DESCENDANTS) {
return Self::restyle_subtree();
}
if self.contains(RestyleHint::RECASCADE_DESCENDANTS) {
return Self::recascade_subtree();
}
Self::empty()
}
/// Creates a new `RestyleHint` that indicates the element must be
/// recascaded.
pub fn recascade_self() -> Self {
RestyleHint::RECASCADE_SELF
}
/// Returns a hint that contains all the replacement hints.
pub fn replacements() -> Self {
RestyleHint::RESTYLE_STYLE_ATTRIBUTE | Self::for_animations()
}
/// The replacements for the animation cascade levels.
#[inline]
pub fn for_animations() -> Self {
RestyleHint::RESTYLE_SMIL |
RestyleHint::RESTYLE_CSS_ANIMATIONS |
RestyleHint::RESTYLE_CSS_TRANSITIONS
}
/// Returns whether the hint specifies that the currently element must be
/// recascaded.
pub fn has_recascade_self(&self) -> bool {
self.contains(RestyleHint::RECASCADE_SELF)
}
/// Returns whether the hint specifies that an animation cascade level must
/// be replaced.
#[inline]
pub fn has_animation_hint(&self) -> bool {
self.intersects(Self::for_animations())
}
/// Returns whether the hint specifies that an animation cascade level must
/// be replaced.
#[inline]
pub fn has_animation_hint_or_recascade(&self) -> bool {
self.intersects(Self::for_animations() | RestyleHint::RECASCADE_SELF)
}
/// Returns whether the hint specifies some restyle work other than an
/// animation cascade level replacement.
#[inline]
pub fn has_non_animation_hint(&self) -> bool {
!(*self & !Self::for_animations()).is_empty()
}
/// Returns whether the hint specifies that selector matching must be re-run
/// for the element.
#[inline]
pub fn match_self(&self) -> bool {
self.intersects(RestyleHint::RESTYLE_SELF)
}
/// Returns whether the hint specifies that some cascade levels must be
/// replaced.
#[inline]
pub fn has_replacements(&self) -> bool {
self.intersects(Self::replacements())
}
/// Removes all of the animation-related hints.
#[inline]
pub fn remove_animation_hints(&mut self) {
self.remove(Self::for_animations());
// While RECASCADE_SELF is not animation-specific, we only ever add and
// process it during traversal. If we are here, removing animation
// hints, then we are in an animation-only traversal, and we know that
// any RECASCADE_SELF flag must have been set due to changes in
// inherited values after restyling for animations, and thus we want to
// remove it so that we don't later try to restyle the element during a
// normal restyle. (We could have separate RECASCADE_SELF_NORMAL and
// RECASCADE_SELF_ANIMATIONS flags to make it clear, but this isn't
// currently necessary.)
self.remove(RestyleHint::RECASCADE_SELF);
}
}
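// Sketch of how `propagate` behaves for a non-animation-only traversal
// (illustrative; `flags` stands for whatever TraversalFlags the caller holds):
//
//     let mut hint = RestyleHint::restyle_subtree();
//     let child_hint = hint.propagate(&flags); // assuming !flags.for_animation_only()
//     // `hint` is now empty, and `child_hint` is again restyle_subtree(),
//     // because RESTYLE_DESCENDANTS keeps propagating the full subtree restyle.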
impl Default for RestyleHint {
fn default() -> Self {
Self::empty()
}
}
#[cfg(feature = "gecko")]
impl From<nsRestyleHint> for RestyleHint {
fn from(mut raw: nsRestyleHint) -> Self {
let mut hint = RestyleHint::empty();
debug_assert!(
raw.0 & nsRestyleHint::eRestyle_LaterSiblings.0 == 0,
"Handle later siblings manually if necessary plz."
);
if (raw.0 & (nsRestyleHint::eRestyle_Self.0 | nsRestyleHint::eRestyle_Subtree.0)) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_Self.0;
hint.insert(RestyleHint::RESTYLE_SELF);
}
if (raw.0 & (nsRestyleHint::eRestyle_Subtree.0 | nsRestyleHint::eRestyle_SomeDescendants.0)) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_Subtree.0;
raw.0 &= !nsRestyleHint::eRestyle_SomeDescendants.0;
hint.insert(RestyleHint::RESTYLE_DESCENDANTS);
}
if (raw.0 & (nsRestyleHint::eRestyle_ForceDescendants.0 | nsRestyleHint::eRestyle_Force.0)) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_Force.0;
hint.insert(RestyleHint::RECASCADE_SELF);
}
if (raw.0 & nsRestyleHint::eRestyle_ForceDescendants.0) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_ForceDescendants.0;
hint.insert(RestyleHint::RECASCADE_DESCENDANTS);
}
hint.insert(RestyleHint::from_bits_truncate(raw.0 as u8));
hint
}
}
#[cfg(feature = "servo")]
malloc_size_of_is_0!(RestyleHint);
/// Asserts that all replacement hints have a matching nsRestyleHint value.
#[cfg(feature = "gecko")]
#[inline]
pub fn assert_restyle_hints_match() {
use crate::gecko_bindings::structs;
macro_rules! check_restyle_hints {
( $( $a:ident => $b:path),*, ) => {
if cfg!(debug_assertions) {
let mut replacements = RestyleHint::replacements();
$(
assert_eq!(structs::nsRestyleHint::$a.0 as usize, $b.bits() as usize, stringify!($b));
replacements.remove($b);
)*
assert_eq!(replacements, RestyleHint::empty(),
"all RestyleHint replacement bits should have an \
assertion");
}
}
}
check_restyle_hints! {
eRestyle_CSSTransitions => RestyleHint::RESTYLE_CSS_TRANSITIONS,
eRestyle_CSSAnimations => RestyleHint::RESTYLE_CSS_ANIMATIONS,
eRestyle_StyleAttribute => RestyleHint::RESTYLE_STYLE_ATTRIBUTE,
eRestyle_StyleAttribute_Animations => RestyleHint::RESTYLE_SMIL,
}
} | /// Returns whether this hint invalidates the element and all its | random_line_split |