file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
winsock2.rs
|
// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md>
//! This file contains the core definitions for the Winsock2 specification that can be used by
//! both user-mode and kernel mode modules.
use ctypes::{c_int, c_void, c_char, __int64, c_short};
use shared::guiddef::{LPGUID};
use shared::inaddr::{IN_ADDR};
use shared::minwindef::{INT, ULONG, DWORD, USHORT};
use um::winnt::{PWSTR, CHAR, PROCESSOR_NUMBER};
use vc::vcruntime::{size_t};
pub type ADDRESS_FAMILY = USHORT;
pub const AF_UNSPEC: c_int = 0;
pub const AF_UNIX: c_int = 1;
pub const AF_INET: c_int = 2;
pub const AF_IMPLINK: c_int = 3;
pub const AF_PUP: c_int = 4;
pub const AF_CHAOS: c_int = 5;
pub const AF_NS: c_int = 6;
pub const AF_IPX: c_int = AF_NS;
pub const AF_ISO: c_int = 7;
pub const AF_OSI: c_int = AF_ISO;
pub const AF_ECMA: c_int = 8;
pub const AF_DATAKIT: c_int = 9;
pub const AF_CCITT: c_int = 10;
pub const AF_SNA: c_int = 11;
pub const AF_DECnet: c_int = 12;
pub const AF_DLI: c_int = 13;
pub const AF_LAT: c_int = 14;
pub const AF_HYLINK: c_int = 15;
pub const AF_APPLETALK: c_int = 16;
pub const AF_NETBIOS: c_int = 17;
pub const AF_VOICEVIEW: c_int = 18;
pub const AF_FIREFOX: c_int = 19;
pub const AF_UNKNOWN1: c_int = 20;
pub const AF_BAN: c_int = 21;
pub const AF_ATM: c_int = 22;
pub const AF_INET6: c_int = 23;
pub const AF_CLUSTER: c_int = 24;
pub const AF_12844: c_int = 25;
pub const AF_IRDA: c_int = 26;
pub const AF_NETDES: c_int = 28;
pub const AF_TCNPROCESS: c_int = 29;
pub const AF_TCNMESSAGE: c_int = 30;
pub const AF_ICLFXBM: c_int = 31;
pub const AF_BTH: c_int = 32;
pub const AF_LINK: c_int = 33;
pub const AF_MAX: c_int = 34;
pub const SOCK_STREAM: c_int = 1;
pub const SOCK_DGRAM: c_int = 2;
pub const SOCK_RAW: c_int = 3;
pub const SOCK_RDM: c_int = 4;
pub const SOCK_SEQPACKET: c_int = 5;
pub const SOL_SOCKET: c_int = 0xffff;
pub const SO_DEBUG: c_int = 0x0001;
pub const SO_ACCEPTCONN: c_int = 0x0002;
pub const SO_REUSEADDR: c_int = 0x0004;
pub const SO_KEEPALIVE: c_int = 0x0008;
pub const SO_DONTROUTE: c_int = 0x0010;
pub const SO_BROADCAST: c_int = 0x0020;
pub const SO_USELOOPBACK: c_int = 0x0040;
pub const SO_LINGER: c_int = 0x0080;
pub const SO_OOBINLINE: c_int = 0x0100;
pub const SO_DONTLINGER: c_int =!SO_LINGER;
pub const SO_EXCLUSIVEADDRUSE: c_int =!SO_REUSEADDR;
pub const SO_SNDBUF: c_int = 0x1001;
pub const SO_RCVBUF: c_int = 0x1002;
pub const SO_SNDLOWAT: c_int = 0x1003;
pub const SO_RCVLOWAT: c_int = 0x1004;
pub const SO_SNDTIMEO: c_int = 0x1005;
pub const SO_RCVTIMEO: c_int = 0x1006;
pub const SO_ERROR: c_int = 0x1007;
pub const SO_TYPE: c_int = 0x1008;
pub const SO_BSP_STATE: c_int = 0x1009;
pub const SO_GROUP_ID: c_int = 0x2001;
pub const SO_GROUP_PRIORITY: c_int = 0x2002;
pub const SO_MAX_MSG_SIZE: c_int = 0x2003;
pub const SO_CONDITIONAL_ACCEPT: c_int = 0x3002;
pub const SO_PAUSE_ACCEPT: c_int = 0x3003;
pub const SO_COMPARTMENT_ID: c_int = 0x3004;
pub const SO_RANDOMIZE_PORT: c_int = 0x3005;
pub const SO_PORT_SCALABILITY: c_int = 0x3006;
pub const WSK_SO_BASE: c_int = 0x4000;
pub const TCP_NODELAY: c_int = 0x0001;
STRUCT!{struct SOCKADDR {
sa_family: ADDRESS_FAMILY,
sa_data: [CHAR; 14],
}}
pub type PSOCKADDR = *mut SOCKADDR;
pub type LPSOCKADDR = *mut SOCKADDR;
STRUCT!{struct SOCKET_ADDRESS {
lpSockaddr: LPSOCKADDR,
iSockaddrLength: INT,
}}
pub type PSOCKET_ADDRESS = *mut SOCKET_ADDRESS;
pub type LPSOCKET_ADDRESS = *mut SOCKET_ADDRESS;
STRUCT!{struct SOCKET_ADDRESS_LIST {
iAddressCount: INT,
Address: [SOCKET_ADDRESS; 0],
}}
pub type PSOCKET_ADDRESS_LIST = *mut SOCKET_ADDRESS_LIST;
pub type LPSOCKET_ADDRESS_LIST = *mut SOCKET_ADDRESS_LIST;
STRUCT!{struct CSADDR_INFO {
LocalAddr: SOCKET_ADDRESS,
RemoteAddr: SOCKET_ADDRESS,
iSocketType: INT,
iProtocol: INT,
}}
pub type PCSADDR_INFO = *mut CSADDR_INFO;
pub type LPCSADDR_INFO = *mut CSADDR_INFO;
STRUCT!{struct SOCKADDR_STORAGE_LH {
ss_family: ADDRESS_FAMILY,
__ss_pad1: [CHAR; 6],
__ss_align: __int64,
__ss_pad2: [CHAR; 112],
}}
pub type PSOCKADDR_STORAGE_LH = *mut SOCKADDR_STORAGE_LH;
pub type LPSOCKADDR_STORAGE_LH = *mut SOCKADDR_STORAGE_LH;
STRUCT!{struct SOCKADDR_STORAGE_XP {
ss_family: c_short,
__ss_pad1: [CHAR; 6],
__ss_align: __int64,
__ss_pad2: [CHAR; 112],
}}
pub type PSOCKADDR_STORAGE_XP = *mut SOCKADDR_STORAGE_XP;
pub type LPSOCKADDR_STORAGE_XP = *mut SOCKADDR_STORAGE_XP;
pub type SOCKADDR_STORAGE = SOCKADDR_STORAGE_LH;
pub type PSOCKADDR_STORAGE = *mut SOCKADDR_STORAGE;
pub type LPSOCKADDR_STORAGE = *mut SOCKADDR_STORAGE;
STRUCT!{struct SOCKET_PROCESSOR_AFFINITY {
Processor: PROCESSOR_NUMBER,
NumaNodeId: USHORT,
Reserved: USHORT,
}}
pub type PSOCKET_PROCESSOR_AFFINITY = *mut SOCKET_PROCESSOR_AFFINITY;
pub const IOC_UNIX: DWORD = 0x00000000;
pub const IOC_WS2: DWORD = 0x08000000;
pub const IOC_PROTOCOL: DWORD = 0x10000000;
pub const IOC_VENDOR: DWORD = 0x18000000;
pub const IOC_WSK: DWORD = IOC_WS2 | 0x07000000;
macro_rules! _WSAIO { ($x:expr, $y:expr) => { IOC_VOID | $x | $y } }
macro_rules! _WSAIOR { ($x:expr, $y:expr) => { IOC_OUT | $x | $y } }
macro_rules! _WSAIOW { ($x:expr, $y:expr) => { IOC_IN | $x | $y } }
macro_rules! _WSAIORW { ($x:expr, $y:expr) => { IOC_INOUT | $x | $y } }
pub const SIO_ASSOCIATE_HANDLE: DWORD = _WSAIOW!(IOC_WS2, 1);
pub const SIO_ENABLE_CIRCULAR_QUEUEING: DWORD = _WSAIO!(IOC_WS2, 2);
pub const SIO_FIND_ROUTE: DWORD = _WSAIOR!(IOC_WS2, 3);
pub const SIO_FLUSH: DWORD = _WSAIO!(IOC_WS2, 4);
pub const SIO_GET_BROADCAST_ADDRESS: DWORD = _WSAIOR!(IOC_WS2, 5);
pub const SIO_GET_EXTENSION_FUNCTION_POINTER: DWORD = _WSAIORW!(IOC_WS2, 6);
pub const SIO_GET_QOS: DWORD = _WSAIORW!(IOC_WS2, 7);
pub const SIO_GET_GROUP_QOS: DWORD = _WSAIORW!(IOC_WS2, 8);
pub const SIO_MULTIPOINT_LOOPBACK: DWORD = _WSAIOW!(IOC_WS2, 9);
pub const SIO_MULTICAST_SCOPE: DWORD = _WSAIOW!(IOC_WS2, 10);
pub const SIO_SET_QOS: DWORD = _WSAIOW!(IOC_WS2, 11);
pub const SIO_SET_GROUP_QOS: DWORD = _WSAIOW!(IOC_WS2, 12);
pub const SIO_TRANSLATE_HANDLE: DWORD = _WSAIORW!(IOC_WS2, 13);
pub const SIO_ROUTING_INTERFACE_QUERY: DWORD = _WSAIORW!(IOC_WS2, 20);
pub const SIO_ROUTING_INTERFACE_CHANGE: DWORD = _WSAIOW!(IOC_WS2, 21);
pub const SIO_ADDRESS_LIST_QUERY: DWORD = _WSAIOR!(IOC_WS2, 22);
pub const SIO_ADDRESS_LIST_CHANGE: DWORD = _WSAIO!(IOC_WS2, 23);
pub const SIO_QUERY_TARGET_PNP_HANDLE: DWORD = _WSAIOR!(IOC_WS2, 24);
pub const SIO_QUERY_RSS_PROCESSOR_INFO: DWORD = _WSAIOR!(IOC_WS2, 37);
pub const SIO_ADDRESS_LIST_SORT: DWORD = _WSAIORW!(IOC_WS2, 25);
pub const SIO_RESERVED_1: DWORD = _WSAIOW!(IOC_WS2, 26);
pub const SIO_RESERVED_2: DWORD = _WSAIOW!(IOC_WS2, 33);
pub const SIO_GET_MULTIPLE_EXTENSION_FUNCTION_POINTER: DWORD = _WSAIORW!(IOC_WS2, 36);
pub const IPPROTO_IP: c_int = 0;
ENUM!{enum IPPROTO {
IPPROTO_HOPOPTS = 0, // IPv6 Hop-by-Hop options
IPPROTO_ICMP = 1,
IPPROTO_IGMP = 2,
IPPROTO_GGP = 3,
IPPROTO_IPV4 = 4,
IPPROTO_ST = 5,
IPPROTO_TCP = 6,
IPPROTO_CBT = 7,
IPPROTO_EGP = 8,
IPPROTO_IGP = 9,
IPPROTO_PUP = 12,
IPPROTO_UDP = 17,
IPPROTO_IDP = 22,
IPPROTO_RDP = 27,
IPPROTO_IPV6 = 41, // IPv6 header
IPPROTO_ROUTING = 43, // IPv6 Routing header
IPPROTO_FRAGMENT = 44, // IPv6 fragmentation header
IPPROTO_ESP = 50, // encapsulating security payload
IPPROTO_AH = 51, // authentication header
IPPROTO_ICMPV6 = 58, // ICMPv6
IPPROTO_NONE = 59, // IPv6 no next header
IPPROTO_DSTOPTS = 60, // IPv6 Destination options
IPPROTO_ND = 77,
IPPROTO_ICLFXBM = 78,
IPPROTO_PIM = 103,
IPPROTO_PGM = 113,
IPPROTO_L2TP = 115,
IPPROTO_SCTP = 132,
IPPROTO_RAW = 255,
IPPROTO_MAX = 256,
IPPROTO_RESERVED_RAW = 257,
IPPROTO_RESERVED_IPSEC = 258,
IPPROTO_RESERVED_IPSECOFFLOAD = 259,
IPPROTO_RESERVED_WNV = 260,
IPPROTO_RESERVED_MAX = 261,
}}
pub type PIPPROTO = *mut IPPROTO;
STRUCT!{struct SOCKADDR_IN {
sin_family: ADDRESS_FAMILY,
sin_port: USHORT,
sin_addr: IN_ADDR,
sin_zero: [CHAR; 8],
}}
pub type PSOCKADDR_IN = *mut SOCKADDR_IN;
//645
pub const IOCPARM_MASK: DWORD = 0x7f;
pub const IOC_VOID: DWORD = 0x20000000;
pub const IOC_OUT: DWORD = 0x40000000;
pub const IOC_IN: DWORD = 0x80000000;
pub const IOC_INOUT: DWORD = IOC_IN | IOC_OUT;
STRUCT!{struct WSABUF {
len: ULONG,
buf: *mut CHAR,
}}
pub type LPWSABUF = *mut WSABUF;
STRUCT!{struct WSAMSG {
name: LPSOCKADDR,
namelen: INT,
lpBuffers: LPWSABUF,
dwBufferCount: ULONG,
Control: WSABUF,
dwFlags: ULONG,
}}
pub type PWSAMSG = *mut WSAMSG;
pub type LPWSAMSG = *mut WSAMSG;
STRUCT!{struct ADDRINFOA {
ai_flags: c_int,
ai_family: c_int,
ai_socktype: c_int,
ai_protocol: c_int,
ai_addrlen: size_t,
ai_canonname: *mut c_char,
ai_addr: *mut SOCKADDR,
ai_next: *mut ADDRINFOA,
}}
pub type PADDRINFOA = *mut ADDRINFOA;
STRUCT!{struct ADDRINFOW {
ai_flags: c_int,
ai_family: c_int,
ai_socktype: c_int,
ai_protocol: c_int,
ai_addrlen: size_t,
ai_canonname: PWSTR,
ai_addr: *mut SOCKADDR,
ai_next: *mut ADDRINFOW,
}}
pub type PADDRINFOW = *mut ADDRINFOW;
STRUCT!{struct ADDRINFOEXA {
ai_flags: c_int,
ai_family: c_int,
ai_socktype: c_int,
ai_protocol: c_int,
ai_addrlen: size_t,
ai_canonname: *mut c_char,
ai_addr: *mut SOCKADDR,
ai_blob: *mut c_void,
ai_bloblen: size_t,
ai_provider: LPGUID,
ai_next: *mut ADDRINFOEXW,
|
}}
pub type PADDRINFOEXA = *mut ADDRINFOEXA;
pub type LPADDRINFOEXA = *mut ADDRINFOEXA;
STRUCT!{struct ADDRINFOEXW {
ai_flags: c_int,
ai_family: c_int,
ai_socktype: c_int,
ai_protocol: c_int,
ai_addrlen: size_t,
ai_canonname: PWSTR,
ai_addr: *mut SOCKADDR,
ai_blob: *mut c_void,
ai_bloblen: size_t,
ai_provider: LPGUID,
ai_next: *mut ADDRINFOEXW,
}}
pub type PADDRINFOEXW = *mut ADDRINFOEXW;
pub type LPADDRINFOEXW = *mut ADDRINFOEXW;
|
random_line_split
|
|
solver041.rs
|
// COPYRIGHT (C) 2017 barreiro. All Rights Reserved.
// Rust solvers for Project Euler problems
use euler::algorithm::combinatorics::permutations_with;
use euler::algorithm::long::{from_digits_index, is_even};
use euler::algorithm::prime::miller_rabin;
use euler::Solver;
// We shall say that an n-digit number is pandigital if it makes use of all the digits 1 to n exactly once.
// For example, 2143 is a 4-digit pandigital and is also prime.
// What is the largest n-digit pandigital prime that exists?
pub struct Solver041 {
pub n: isize,
}
|
impl Default for Solver041 {
fn default() -> Self {
Solver041 { n: 9 }
}
}
impl Solver for Solver041 {
fn solve(&self) -> isize {
// Assume the largest prime also start with the biggest digit
let predicate = |d: &[_]| if *d.last().unwrap()!= d.len() as _ || is_even(*d.first().unwrap()) { None } else {
let candidate = from_digits_index(d, 0, d.len());
if miller_rabin(candidate) { Some(candidate) } else { None }
};
// If the sum of the digits of the permutation is multiple of three, all permutations are multiple of three as well
(1..=self.n).rev().filter(|&n| n % 3!= 0).find_map(|n| permutations_with(1, n, predicate).max()).unwrap()
}
}
|
random_line_split
|
|
solver041.rs
|
// COPYRIGHT (C) 2017 barreiro. All Rights Reserved.
// Rust solvers for Project Euler problems
use euler::algorithm::combinatorics::permutations_with;
use euler::algorithm::long::{from_digits_index, is_even};
use euler::algorithm::prime::miller_rabin;
use euler::Solver;
// We shall say that an n-digit number is pandigital if it makes use of all the digits 1 to n exactly once.
// For example, 2143 is a 4-digit pandigital and is also prime.
// What is the largest n-digit pandigital prime that exists?
pub struct Solver041 {
pub n: isize,
}
impl Default for Solver041 {
fn default() -> Self
|
}
impl Solver for Solver041 {
fn solve(&self) -> isize {
// Assume the largest prime also start with the biggest digit
let predicate = |d: &[_]| if *d.last().unwrap()!= d.len() as _ || is_even(*d.first().unwrap()) { None } else {
let candidate = from_digits_index(d, 0, d.len());
if miller_rabin(candidate) { Some(candidate) } else { None }
};
// If the sum of the digits of the permutation is multiple of three, all permutations are multiple of three as well
(1..=self.n).rev().filter(|&n| n % 3!= 0).find_map(|n| permutations_with(1, n, predicate).max()).unwrap()
}
}
|
{
Solver041 { n: 9 }
}
|
identifier_body
|
solver041.rs
|
// COPYRIGHT (C) 2017 barreiro. All Rights Reserved.
// Rust solvers for Project Euler problems
use euler::algorithm::combinatorics::permutations_with;
use euler::algorithm::long::{from_digits_index, is_even};
use euler::algorithm::prime::miller_rabin;
use euler::Solver;
// We shall say that an n-digit number is pandigital if it makes use of all the digits 1 to n exactly once.
// For example, 2143 is a 4-digit pandigital and is also prime.
// What is the largest n-digit pandigital prime that exists?
pub struct Solver041 {
pub n: isize,
}
impl Default for Solver041 {
fn
|
() -> Self {
Solver041 { n: 9 }
}
}
impl Solver for Solver041 {
fn solve(&self) -> isize {
// Assume the largest prime also start with the biggest digit
let predicate = |d: &[_]| if *d.last().unwrap()!= d.len() as _ || is_even(*d.first().unwrap()) { None } else {
let candidate = from_digits_index(d, 0, d.len());
if miller_rabin(candidate) { Some(candidate) } else { None }
};
// If the sum of the digits of the permutation is multiple of three, all permutations are multiple of three as well
(1..=self.n).rev().filter(|&n| n % 3!= 0).find_map(|n| permutations_with(1, n, predicate).max()).unwrap()
}
}
|
default
|
identifier_name
|
solver041.rs
|
// COPYRIGHT (C) 2017 barreiro. All Rights Reserved.
// Rust solvers for Project Euler problems
use euler::algorithm::combinatorics::permutations_with;
use euler::algorithm::long::{from_digits_index, is_even};
use euler::algorithm::prime::miller_rabin;
use euler::Solver;
// We shall say that an n-digit number is pandigital if it makes use of all the digits 1 to n exactly once.
// For example, 2143 is a 4-digit pandigital and is also prime.
// What is the largest n-digit pandigital prime that exists?
pub struct Solver041 {
pub n: isize,
}
impl Default for Solver041 {
fn default() -> Self {
Solver041 { n: 9 }
}
}
impl Solver for Solver041 {
fn solve(&self) -> isize {
// Assume the largest prime also start with the biggest digit
let predicate = |d: &[_]| if *d.last().unwrap()!= d.len() as _ || is_even(*d.first().unwrap()) { None } else {
let candidate = from_digits_index(d, 0, d.len());
if miller_rabin(candidate) { Some(candidate) } else
|
};
// If the sum of the digits of the permutation is multiple of three, all permutations are multiple of three as well
(1..=self.n).rev().filter(|&n| n % 3!= 0).find_map(|n| permutations_with(1, n, predicate).max()).unwrap()
}
}
|
{ None }
|
conditional_block
|
cast_possible_truncation.rs
|
use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::span_lint;
use clippy_utils::expr_or_init;
use clippy_utils::ty::is_isize_or_usize;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, FloatTy, Ty};
use super::{utils, CAST_POSSIBLE_TRUNCATION};
fn constant_int(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<u128> {
if let Some((Constant::Int(c), _)) = constant(cx, cx.typeck_results(), expr) {
Some(c)
} else {
None
}
}
fn get_constant_bits(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<u64> {
constant_int(cx, expr).map(|c| u64::from(128 - c.leading_zeros()))
}
fn apply_reductions(cx: &LateContext<'_>, nbits: u64, expr: &Expr<'_>, signed: bool) -> u64 {
match expr_or_init(cx, expr).kind {
ExprKind::Cast(inner, _) => apply_reductions(cx, nbits, inner, signed),
ExprKind::Block(block, _) => block.expr.map_or(nbits, |e| apply_reductions(cx, nbits, e, signed)),
ExprKind::Binary(op, left, right) => match op.node {
BinOpKind::Div => {
apply_reductions(cx, nbits, left, signed)
- (if signed {
0 // let's be conservative here
} else {
// by dividing by 1, we remove 0 bits, etc.
get_constant_bits(cx, right).map_or(0, |b| b.saturating_sub(1))
})
},
BinOpKind::Rem | BinOpKind::BitAnd => get_constant_bits(cx, right)
.unwrap_or(u64::max_value())
.min(apply_reductions(cx, nbits, left, signed)),
BinOpKind::Shr => {
apply_reductions(cx, nbits, left, signed)
- constant_int(cx, right).map_or(0, |s| u64::try_from(s).expect("shift too high"))
},
_ => nbits,
},
ExprKind::MethodCall(method, _, [left, right], _) => {
if signed {
return nbits;
}
let max_bits = if method.ident.as_str() == "min" {
get_constant_bits(cx, right)
} else {
None
};
apply_reductions(cx, nbits, left, signed).min(max_bits.unwrap_or(u64::max_value()))
},
ExprKind::MethodCall(method, _, [_, lo, hi], _) => {
if method.ident.as_str() == "clamp" {
//FIXME: make this a diagnostic item
if let (Some(lo_bits), Some(hi_bits)) = (get_constant_bits(cx, lo), get_constant_bits(cx, hi)) {
return lo_bits.max(hi_bits);
}
}
nbits
},
ExprKind::MethodCall(method, _, [_value], _) => {
if method.ident.name.as_str() == "signum" {
0 // do not lint if cast comes from a `signum` function
} else {
nbits
}
},
_ => nbits,
}
}
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>)
|
),
(false, true) => (from_nbits == 64, " on targets with 32-bit wide pointers"),
};
if!should_lint {
return;
}
format!(
"casting `{}` to `{}` may truncate the value{}",
cast_from, cast_to, suffix,
)
},
(false, true) => {
format!("casting `{}` to `{}` may truncate the value", cast_from, cast_to)
},
(_, _) => {
if matches!(cast_from.kind(), &ty::Float(FloatTy::F64))
&& matches!(cast_to.kind(), &ty::Float(FloatTy::F32))
{
"casting `f64` to `f32` may truncate the value".to_string()
} else {
return;
}
},
};
span_lint(cx, CAST_POSSIBLE_TRUNCATION, expr.span, &msg);
}
|
{
let msg = match (cast_from.is_integral(), cast_to.is_integral()) {
(true, true) => {
let from_nbits = apply_reductions(
cx,
utils::int_ty_to_nbits(cast_from, cx.tcx),
cast_expr,
cast_from.is_signed(),
);
let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
let (should_lint, suffix) = match (is_isize_or_usize(cast_from), is_isize_or_usize(cast_to)) {
(true, true) | (false, false) => (to_nbits < from_nbits, ""),
(true, false) => (
to_nbits <= 32,
if to_nbits == 32 {
" on targets with 64-bit wide pointers"
} else {
""
},
|
identifier_body
|
cast_possible_truncation.rs
|
use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::span_lint;
use clippy_utils::expr_or_init;
use clippy_utils::ty::is_isize_or_usize;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, FloatTy, Ty};
use super::{utils, CAST_POSSIBLE_TRUNCATION};
fn constant_int(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<u128> {
if let Some((Constant::Int(c), _)) = constant(cx, cx.typeck_results(), expr) {
Some(c)
} else {
None
}
}
fn get_constant_bits(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<u64> {
constant_int(cx, expr).map(|c| u64::from(128 - c.leading_zeros()))
}
|
fn apply_reductions(cx: &LateContext<'_>, nbits: u64, expr: &Expr<'_>, signed: bool) -> u64 {
match expr_or_init(cx, expr).kind {
ExprKind::Cast(inner, _) => apply_reductions(cx, nbits, inner, signed),
ExprKind::Block(block, _) => block.expr.map_or(nbits, |e| apply_reductions(cx, nbits, e, signed)),
ExprKind::Binary(op, left, right) => match op.node {
BinOpKind::Div => {
apply_reductions(cx, nbits, left, signed)
- (if signed {
0 // let's be conservative here
} else {
// by dividing by 1, we remove 0 bits, etc.
get_constant_bits(cx, right).map_or(0, |b| b.saturating_sub(1))
})
},
BinOpKind::Rem | BinOpKind::BitAnd => get_constant_bits(cx, right)
.unwrap_or(u64::max_value())
.min(apply_reductions(cx, nbits, left, signed)),
BinOpKind::Shr => {
apply_reductions(cx, nbits, left, signed)
- constant_int(cx, right).map_or(0, |s| u64::try_from(s).expect("shift too high"))
},
_ => nbits,
},
ExprKind::MethodCall(method, _, [left, right], _) => {
if signed {
return nbits;
}
let max_bits = if method.ident.as_str() == "min" {
get_constant_bits(cx, right)
} else {
None
};
apply_reductions(cx, nbits, left, signed).min(max_bits.unwrap_or(u64::max_value()))
},
ExprKind::MethodCall(method, _, [_, lo, hi], _) => {
if method.ident.as_str() == "clamp" {
//FIXME: make this a diagnostic item
if let (Some(lo_bits), Some(hi_bits)) = (get_constant_bits(cx, lo), get_constant_bits(cx, hi)) {
return lo_bits.max(hi_bits);
}
}
nbits
},
ExprKind::MethodCall(method, _, [_value], _) => {
if method.ident.name.as_str() == "signum" {
0 // do not lint if cast comes from a `signum` function
} else {
nbits
}
},
_ => nbits,
}
}
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
let msg = match (cast_from.is_integral(), cast_to.is_integral()) {
(true, true) => {
let from_nbits = apply_reductions(
cx,
utils::int_ty_to_nbits(cast_from, cx.tcx),
cast_expr,
cast_from.is_signed(),
);
let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
let (should_lint, suffix) = match (is_isize_or_usize(cast_from), is_isize_or_usize(cast_to)) {
(true, true) | (false, false) => (to_nbits < from_nbits, ""),
(true, false) => (
to_nbits <= 32,
if to_nbits == 32 {
" on targets with 64-bit wide pointers"
} else {
""
},
),
(false, true) => (from_nbits == 64, " on targets with 32-bit wide pointers"),
};
if!should_lint {
return;
}
format!(
"casting `{}` to `{}` may truncate the value{}",
cast_from, cast_to, suffix,
)
},
(false, true) => {
format!("casting `{}` to `{}` may truncate the value", cast_from, cast_to)
},
(_, _) => {
if matches!(cast_from.kind(), &ty::Float(FloatTy::F64))
&& matches!(cast_to.kind(), &ty::Float(FloatTy::F32))
{
"casting `f64` to `f32` may truncate the value".to_string()
} else {
return;
}
},
};
span_lint(cx, CAST_POSSIBLE_TRUNCATION, expr.span, &msg);
}
|
random_line_split
|
|
cast_possible_truncation.rs
|
use clippy_utils::consts::{constant, Constant};
use clippy_utils::diagnostics::span_lint;
use clippy_utils::expr_or_init;
use clippy_utils::ty::is_isize_or_usize;
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::LateContext;
use rustc_middle::ty::{self, FloatTy, Ty};
use super::{utils, CAST_POSSIBLE_TRUNCATION};
fn constant_int(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<u128> {
if let Some((Constant::Int(c), _)) = constant(cx, cx.typeck_results(), expr) {
Some(c)
} else {
None
}
}
fn
|
(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<u64> {
constant_int(cx, expr).map(|c| u64::from(128 - c.leading_zeros()))
}
fn apply_reductions(cx: &LateContext<'_>, nbits: u64, expr: &Expr<'_>, signed: bool) -> u64 {
match expr_or_init(cx, expr).kind {
ExprKind::Cast(inner, _) => apply_reductions(cx, nbits, inner, signed),
ExprKind::Block(block, _) => block.expr.map_or(nbits, |e| apply_reductions(cx, nbits, e, signed)),
ExprKind::Binary(op, left, right) => match op.node {
BinOpKind::Div => {
apply_reductions(cx, nbits, left, signed)
- (if signed {
0 // let's be conservative here
} else {
// by dividing by 1, we remove 0 bits, etc.
get_constant_bits(cx, right).map_or(0, |b| b.saturating_sub(1))
})
},
BinOpKind::Rem | BinOpKind::BitAnd => get_constant_bits(cx, right)
.unwrap_or(u64::max_value())
.min(apply_reductions(cx, nbits, left, signed)),
BinOpKind::Shr => {
apply_reductions(cx, nbits, left, signed)
- constant_int(cx, right).map_or(0, |s| u64::try_from(s).expect("shift too high"))
},
_ => nbits,
},
ExprKind::MethodCall(method, _, [left, right], _) => {
if signed {
return nbits;
}
let max_bits = if method.ident.as_str() == "min" {
get_constant_bits(cx, right)
} else {
None
};
apply_reductions(cx, nbits, left, signed).min(max_bits.unwrap_or(u64::max_value()))
},
ExprKind::MethodCall(method, _, [_, lo, hi], _) => {
if method.ident.as_str() == "clamp" {
//FIXME: make this a diagnostic item
if let (Some(lo_bits), Some(hi_bits)) = (get_constant_bits(cx, lo), get_constant_bits(cx, hi)) {
return lo_bits.max(hi_bits);
}
}
nbits
},
ExprKind::MethodCall(method, _, [_value], _) => {
if method.ident.name.as_str() == "signum" {
0 // do not lint if cast comes from a `signum` function
} else {
nbits
}
},
_ => nbits,
}
}
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
let msg = match (cast_from.is_integral(), cast_to.is_integral()) {
(true, true) => {
let from_nbits = apply_reductions(
cx,
utils::int_ty_to_nbits(cast_from, cx.tcx),
cast_expr,
cast_from.is_signed(),
);
let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx);
let (should_lint, suffix) = match (is_isize_or_usize(cast_from), is_isize_or_usize(cast_to)) {
(true, true) | (false, false) => (to_nbits < from_nbits, ""),
(true, false) => (
to_nbits <= 32,
if to_nbits == 32 {
" on targets with 64-bit wide pointers"
} else {
""
},
),
(false, true) => (from_nbits == 64, " on targets with 32-bit wide pointers"),
};
if!should_lint {
return;
}
format!(
"casting `{}` to `{}` may truncate the value{}",
cast_from, cast_to, suffix,
)
},
(false, true) => {
format!("casting `{}` to `{}` may truncate the value", cast_from, cast_to)
},
(_, _) => {
if matches!(cast_from.kind(), &ty::Float(FloatTy::F64))
&& matches!(cast_to.kind(), &ty::Float(FloatTy::F32))
{
"casting `f64` to `f32` may truncate the value".to_string()
} else {
return;
}
},
};
span_lint(cx, CAST_POSSIBLE_TRUNCATION, expr.span, &msg);
}
|
get_constant_bits
|
identifier_name
|
deriving-span-Zero-struct.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
// except according to those terms.
// This file was auto-generated using'src/etc/generate-deriving-span-tests.py'
#![feature(struct_variant)]
extern crate rand;
struct Error;
#[deriving(Zero)] //~ ERROR failed to find an implementation
struct Struct {
x: Error //~ ERROR failed to find an implementation
//~^ ERROR failed to find an implementation
//~^^ ERROR type `Error` does not implement any method in scope
}
fn main() {}
|
// option. This file may not be copied, modified, or distributed
|
random_line_split
|
deriving-span-Zero-struct.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This file was auto-generated using'src/etc/generate-deriving-span-tests.py'
#![feature(struct_variant)]
extern crate rand;
struct Error;
#[deriving(Zero)] //~ ERROR failed to find an implementation
struct Struct {
x: Error //~ ERROR failed to find an implementation
//~^ ERROR failed to find an implementation
//~^^ ERROR type `Error` does not implement any method in scope
}
fn
|
() {}
|
main
|
identifier_name
|
console.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::ConsoleBinding;
use dom::bindings::codegen::Bindings::ConsoleBinding::ConsoleMethods;
use dom::bindings::global::{GlobalRef, GlobalField};
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use devtools_traits::{DevtoolsControlMsg, ConsoleMessage};
use util::str::DOMString;
#[dom_struct]
pub struct Console {
reflector_: Reflector,
global: GlobalField,
}
impl Console {
fn new_inherited(global: GlobalRef) -> Console {
Console {
reflector_: Reflector::new(),
global: GlobalField::from_rooted(&global),
}
}
pub fn new(global: GlobalRef) -> Temporary<Console> {
reflect_dom_object(box Console::new_inherited(global), global, ConsoleBinding::Wrap)
}
}
impl<'a> ConsoleMethods for JSRef<'a, Console> {
fn Log(self, message: DOMString) {
println!("{}", message);
propagate_console_msg(&self, ConsoleMessage::LogMessage(message));
}
fn Debug(self, message: DOMString) {
println!("{}", message);
}
fn Info(self, message: DOMString) {
println!("{}", message);
}
fn Warn(self, message: DOMString) {
println!("{}", message);
}
fn Error(self, message: DOMString) {
println!("{}", message);
}
fn Assert(self, condition: bool, message: Option<DOMString>) {
if!condition {
let message = match message {
Some(ref message) => message.as_slice(),
None => "no message",
};
println!("Assertion failed: {}", message);
}
}
}
fn propagate_console_msg(console: &JSRef<Console>, console_message: ConsoleMessage)
|
{
let global = console.global.root();
match global.r() {
GlobalRef::Window(window_ref) => {
let pipelineId = window_ref.page().id;
console.global.root().r().as_window().page().devtools_chan.as_ref().map(|chan| {
chan.send(DevtoolsControlMsg::SendConsoleMessage(
pipelineId, console_message.clone())).unwrap();
});
},
GlobalRef::Worker(_) => {
// TODO: support worker console logs
}
}
}
|
identifier_body
|
|
console.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::ConsoleBinding;
use dom::bindings::codegen::Bindings::ConsoleBinding::ConsoleMethods;
use dom::bindings::global::{GlobalRef, GlobalField};
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use devtools_traits::{DevtoolsControlMsg, ConsoleMessage};
use util::str::DOMString;
#[dom_struct]
pub struct Console {
reflector_: Reflector,
global: GlobalField,
}
impl Console {
fn new_inherited(global: GlobalRef) -> Console {
Console {
reflector_: Reflector::new(),
global: GlobalField::from_rooted(&global),
}
}
pub fn new(global: GlobalRef) -> Temporary<Console> {
reflect_dom_object(box Console::new_inherited(global), global, ConsoleBinding::Wrap)
}
}
impl<'a> ConsoleMethods for JSRef<'a, Console> {
fn Log(self, message: DOMString) {
println!("{}", message);
propagate_console_msg(&self, ConsoleMessage::LogMessage(message));
}
fn
|
(self, message: DOMString) {
println!("{}", message);
}
fn Info(self, message: DOMString) {
println!("{}", message);
}
fn Warn(self, message: DOMString) {
println!("{}", message);
}
fn Error(self, message: DOMString) {
println!("{}", message);
}
fn Assert(self, condition: bool, message: Option<DOMString>) {
if!condition {
let message = match message {
Some(ref message) => message.as_slice(),
None => "no message",
};
println!("Assertion failed: {}", message);
}
}
}
fn propagate_console_msg(console: &JSRef<Console>, console_message: ConsoleMessage) {
let global = console.global.root();
match global.r() {
GlobalRef::Window(window_ref) => {
let pipelineId = window_ref.page().id;
console.global.root().r().as_window().page().devtools_chan.as_ref().map(|chan| {
chan.send(DevtoolsControlMsg::SendConsoleMessage(
pipelineId, console_message.clone())).unwrap();
});
},
GlobalRef::Worker(_) => {
// TODO: support worker console logs
}
}
}
|
Debug
|
identifier_name
|
console.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::ConsoleBinding;
use dom::bindings::codegen::Bindings::ConsoleBinding::ConsoleMethods;
use dom::bindings::global::{GlobalRef, GlobalField};
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use devtools_traits::{DevtoolsControlMsg, ConsoleMessage};
use util::str::DOMString;
#[dom_struct]
pub struct Console {
reflector_: Reflector,
global: GlobalField,
}
impl Console {
fn new_inherited(global: GlobalRef) -> Console {
Console {
reflector_: Reflector::new(),
global: GlobalField::from_rooted(&global),
}
}
pub fn new(global: GlobalRef) -> Temporary<Console> {
reflect_dom_object(box Console::new_inherited(global), global, ConsoleBinding::Wrap)
}
}
impl<'a> ConsoleMethods for JSRef<'a, Console> {
fn Log(self, message: DOMString) {
println!("{}", message);
propagate_console_msg(&self, ConsoleMessage::LogMessage(message));
}
fn Debug(self, message: DOMString) {
println!("{}", message);
}
fn Info(self, message: DOMString) {
println!("{}", message);
}
fn Warn(self, message: DOMString) {
println!("{}", message);
}
fn Error(self, message: DOMString) {
println!("{}", message);
}
fn Assert(self, condition: bool, message: Option<DOMString>) {
if!condition {
let message = match message {
Some(ref message) => message.as_slice(),
None => "no message",
};
println!("Assertion failed: {}", message);
}
}
}
fn propagate_console_msg(console: &JSRef<Console>, console_message: ConsoleMessage) {
let global = console.global.root();
match global.r() {
GlobalRef::Window(window_ref) => {
let pipelineId = window_ref.page().id;
console.global.root().r().as_window().page().devtools_chan.as_ref().map(|chan| {
chan.send(DevtoolsControlMsg::SendConsoleMessage(
pipelineId, console_message.clone())).unwrap();
|
});
},
GlobalRef::Worker(_) => {
// TODO: support worker console logs
}
}
}
|
random_line_split
|
|
task-comm-12.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern mod extra;
use std::task;
pub fn main() { test00(); }
fn start(_task_number: int)
|
fn test00() {
let i: int = 0;
let mut builder = task::task();
let mut result = builder.future_result();
do builder.spawn {
start(i)
}
// Sleep long enough for the task to finish.
let mut i = 0;
while i < 10000 {
task::deschedule();
i += 1;
}
// Try joining tasks that have already finished.
result.recv();
info!("Joined task.");
}
|
{ info!("Started / Finished task."); }
|
identifier_body
|
task-comm-12.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern mod extra;
use std::task;
pub fn
|
() { test00(); }
fn start(_task_number: int) { info!("Started / Finished task."); }
fn test00() {
let i: int = 0;
let mut builder = task::task();
let mut result = builder.future_result();
do builder.spawn {
start(i)
}
// Sleep long enough for the task to finish.
let mut i = 0;
while i < 10000 {
task::deschedule();
i += 1;
}
// Try joining tasks that have already finished.
result.recv();
info!("Joined task.");
}
|
main
|
identifier_name
|
task-comm-12.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern mod extra;
use std::task;
pub fn main() { test00(); }
fn start(_task_number: int) { info!("Started / Finished task."); }
fn test00() {
let i: int = 0;
let mut builder = task::task();
let mut result = builder.future_result();
do builder.spawn {
start(i)
}
// Sleep long enough for the task to finish.
let mut i = 0;
while i < 10000 {
task::deschedule();
i += 1;
}
// Try joining tasks that have already finished.
result.recv();
|
info!("Joined task.");
}
|
random_line_split
|
|
deck.rs
|
use std::fmt::Write;
|
use etg::card;
use super::card_name;
use crate::etgutil::{decode_code, parse_digit32};
pub fn css_class(byte: u8) -> &'static str {
match byte {
b'A' => "stroke:#da2;stroke-width:.5",
b'B' => "stroke:#000;stroke-width:.5",
b'a' => "fill:#986",
b'b' => "fill:#a59",
b'c' => "fill:#768",
b'd' => "fill:#963",
b'e' => "fill:#654",
b'f' => "fill:#480",
b'g' => "fill:#a31",
b'h' => "fill:#248",
b'i' => "fill:#776",
b'j' => "fill:#38d",
b'k' => "fill:#a80",
b'l' => "fill:#333",
b'm' => "fill:#49b",
b'n' => "fill:#dcb",
b'o' => "fill:#dbc",
b'p' => "fill:#bac",
b'q' => "fill:#ca9",
b'r' => "fill:#ba9",
b's' => "fill:#ac7",
b't' => "fill:#da8",
b'u' => "fill:#8ac",
b'v' => "fill:#ccb",
b'w' => "fill:#9be",
b'x' => "fill:#ed8",
b'y' => "fill:#999",
b'z' => "fill:#ade",
_ => "",
}
}
pub fn deck(deck: &str) -> String {
let mut classes = [false; 256];
let mut paths: FxHashMap<(u8, u8), String> = Default::default();
let mut textml = String::new();
let mut y = 0;
let mut x = 16;
let mut mark: i32 = -1;
for chunk in deck.as_bytes().chunks_exact(5) {
let code = decode_code(&chunk[2..]);
let set = if code < 5000 {
card::OrigSet
} else {
card::OpenSet
};
if let Some(card) = set.try_get(code) {
let count = parse_digit32(chunk[0]) * 32 + parse_digit32(chunk[1]);
let upped = card::Upped(code);
let shiny = card::Shiny(code);
let elech = 97u8 + card.element as u8 + if upped { 13 } else { 0 };
let elecls = (if shiny { b'A' } else { b'B' }, elech);
classes[elech as usize] = true;
classes[if shiny { b'A' } else { b'B' } as usize] = true;
let path = paths.entry(elecls).or_insert(String::new());
for _ in 0..count {
write!(path, "M {} {}h100v16h-100", x, y).ok();
textml.push_str("<text clip-path='polygon(0 0,96px 0,96px 14px,0 14px)' ");
write!(textml, "x='{}' y='{}'", x + 2, y + 13).ok();
if!upped {
textml.push_str(" fill='#fff'");
}
textml.push('>');
textml.push_str(card_name(card));
textml.push_str("</text>");
y += 16;
if y == 160 {
y = 0;
x += 100;
}
}
} else if code >= 9010 && code <= 9022 {
mark = code - 9010;
}
}
if mark!= -1 {
classes[b'a' as usize + mark as usize] = true;
}
let mut result = String::from("<svg xmlns='http://www.w3.org/2000/svg' height='160' width='");
write!(result, "{}", if y == 0 { x } else { x + 100 }).ok();
result.push_str("'><style type='text/css'><![CDATA[text{font:12px sans-serif}");
for (idx, &k) in classes.iter().enumerate() {
if k {
let ch = idx as u8;
result.push('.');
result.push(ch as char);
result.push('{');
result.push_str(css_class(ch));
result.push('}');
}
}
result.push_str("]]></style>");
for (&(c1, c2), v) in paths.iter() {
result.push_str("<path class='");
result.push(c1 as char);
result.push(' ');
result.push(c2 as char);
result.push_str("' d='");
result.push_str(v);
result.push_str("'/>");
}
result.push_str(&textml);
if mark!= -1 {
result.push_str("<path class='");
result.push((b'a' + mark as u8) as char);
result.push_str("' d='M0 0h16v160H0'/>");
}
result.push_str("</svg>");
result
}
|
use fxhash::FxHashMap;
|
random_line_split
|
deck.rs
|
use std::fmt::Write;
use fxhash::FxHashMap;
use etg::card;
use super::card_name;
use crate::etgutil::{decode_code, parse_digit32};
pub fn
|
(byte: u8) -> &'static str {
match byte {
b'A' => "stroke:#da2;stroke-width:.5",
b'B' => "stroke:#000;stroke-width:.5",
b'a' => "fill:#986",
b'b' => "fill:#a59",
b'c' => "fill:#768",
b'd' => "fill:#963",
b'e' => "fill:#654",
b'f' => "fill:#480",
b'g' => "fill:#a31",
b'h' => "fill:#248",
b'i' => "fill:#776",
b'j' => "fill:#38d",
b'k' => "fill:#a80",
b'l' => "fill:#333",
b'm' => "fill:#49b",
b'n' => "fill:#dcb",
b'o' => "fill:#dbc",
b'p' => "fill:#bac",
b'q' => "fill:#ca9",
b'r' => "fill:#ba9",
b's' => "fill:#ac7",
b't' => "fill:#da8",
b'u' => "fill:#8ac",
b'v' => "fill:#ccb",
b'w' => "fill:#9be",
b'x' => "fill:#ed8",
b'y' => "fill:#999",
b'z' => "fill:#ade",
_ => "",
}
}
pub fn deck(deck: &str) -> String {
let mut classes = [false; 256];
let mut paths: FxHashMap<(u8, u8), String> = Default::default();
let mut textml = String::new();
let mut y = 0;
let mut x = 16;
let mut mark: i32 = -1;
for chunk in deck.as_bytes().chunks_exact(5) {
let code = decode_code(&chunk[2..]);
let set = if code < 5000 {
card::OrigSet
} else {
card::OpenSet
};
if let Some(card) = set.try_get(code) {
let count = parse_digit32(chunk[0]) * 32 + parse_digit32(chunk[1]);
let upped = card::Upped(code);
let shiny = card::Shiny(code);
let elech = 97u8 + card.element as u8 + if upped { 13 } else { 0 };
let elecls = (if shiny { b'A' } else { b'B' }, elech);
classes[elech as usize] = true;
classes[if shiny { b'A' } else { b'B' } as usize] = true;
let path = paths.entry(elecls).or_insert(String::new());
for _ in 0..count {
write!(path, "M {} {}h100v16h-100", x, y).ok();
textml.push_str("<text clip-path='polygon(0 0,96px 0,96px 14px,0 14px)' ");
write!(textml, "x='{}' y='{}'", x + 2, y + 13).ok();
if!upped {
textml.push_str(" fill='#fff'");
}
textml.push('>');
textml.push_str(card_name(card));
textml.push_str("</text>");
y += 16;
if y == 160 {
y = 0;
x += 100;
}
}
} else if code >= 9010 && code <= 9022 {
mark = code - 9010;
}
}
if mark!= -1 {
classes[b'a' as usize + mark as usize] = true;
}
let mut result = String::from("<svg xmlns='http://www.w3.org/2000/svg' height='160' width='");
write!(result, "{}", if y == 0 { x } else { x + 100 }).ok();
result.push_str("'><style type='text/css'><![CDATA[text{font:12px sans-serif}");
for (idx, &k) in classes.iter().enumerate() {
if k {
let ch = idx as u8;
result.push('.');
result.push(ch as char);
result.push('{');
result.push_str(css_class(ch));
result.push('}');
}
}
result.push_str("]]></style>");
for (&(c1, c2), v) in paths.iter() {
result.push_str("<path class='");
result.push(c1 as char);
result.push(' ');
result.push(c2 as char);
result.push_str("' d='");
result.push_str(v);
result.push_str("'/>");
}
result.push_str(&textml);
if mark!= -1 {
result.push_str("<path class='");
result.push((b'a' + mark as u8) as char);
result.push_str("' d='M0 0h16v160H0'/>");
}
result.push_str("</svg>");
result
}
|
css_class
|
identifier_name
|
deck.rs
|
use std::fmt::Write;
use fxhash::FxHashMap;
use etg::card;
use super::card_name;
use crate::etgutil::{decode_code, parse_digit32};
pub fn css_class(byte: u8) -> &'static str
|
b'q' => "fill:#ca9",
b'r' => "fill:#ba9",
b's' => "fill:#ac7",
b't' => "fill:#da8",
b'u' => "fill:#8ac",
b'v' => "fill:#ccb",
b'w' => "fill:#9be",
b'x' => "fill:#ed8",
b'y' => "fill:#999",
b'z' => "fill:#ade",
_ => "",
}
}
pub fn deck(deck: &str) -> String {
let mut classes = [false; 256];
let mut paths: FxHashMap<(u8, u8), String> = Default::default();
let mut textml = String::new();
let mut y = 0;
let mut x = 16;
let mut mark: i32 = -1;
for chunk in deck.as_bytes().chunks_exact(5) {
let code = decode_code(&chunk[2..]);
let set = if code < 5000 {
card::OrigSet
} else {
card::OpenSet
};
if let Some(card) = set.try_get(code) {
let count = parse_digit32(chunk[0]) * 32 + parse_digit32(chunk[1]);
let upped = card::Upped(code);
let shiny = card::Shiny(code);
let elech = 97u8 + card.element as u8 + if upped { 13 } else { 0 };
let elecls = (if shiny { b'A' } else { b'B' }, elech);
classes[elech as usize] = true;
classes[if shiny { b'A' } else { b'B' } as usize] = true;
let path = paths.entry(elecls).or_insert(String::new());
for _ in 0..count {
write!(path, "M {} {}h100v16h-100", x, y).ok();
textml.push_str("<text clip-path='polygon(0 0,96px 0,96px 14px,0 14px)' ");
write!(textml, "x='{}' y='{}'", x + 2, y + 13).ok();
if!upped {
textml.push_str(" fill='#fff'");
}
textml.push('>');
textml.push_str(card_name(card));
textml.push_str("</text>");
y += 16;
if y == 160 {
y = 0;
x += 100;
}
}
} else if code >= 9010 && code <= 9022 {
mark = code - 9010;
}
}
if mark!= -1 {
classes[b'a' as usize + mark as usize] = true;
}
let mut result = String::from("<svg xmlns='http://www.w3.org/2000/svg' height='160' width='");
write!(result, "{}", if y == 0 { x } else { x + 100 }).ok();
result.push_str("'><style type='text/css'><![CDATA[text{font:12px sans-serif}");
for (idx, &k) in classes.iter().enumerate() {
if k {
let ch = idx as u8;
result.push('.');
result.push(ch as char);
result.push('{');
result.push_str(css_class(ch));
result.push('}');
}
}
result.push_str("]]></style>");
for (&(c1, c2), v) in paths.iter() {
result.push_str("<path class='");
result.push(c1 as char);
result.push(' ');
result.push(c2 as char);
result.push_str("' d='");
result.push_str(v);
result.push_str("'/>");
}
result.push_str(&textml);
if mark!= -1 {
result.push_str("<path class='");
result.push((b'a' + mark as u8) as char);
result.push_str("' d='M0 0h16v160H0'/>");
}
result.push_str("</svg>");
result
}
|
{
match byte {
b'A' => "stroke:#da2;stroke-width:.5",
b'B' => "stroke:#000;stroke-width:.5",
b'a' => "fill:#986",
b'b' => "fill:#a59",
b'c' => "fill:#768",
b'd' => "fill:#963",
b'e' => "fill:#654",
b'f' => "fill:#480",
b'g' => "fill:#a31",
b'h' => "fill:#248",
b'i' => "fill:#776",
b'j' => "fill:#38d",
b'k' => "fill:#a80",
b'l' => "fill:#333",
b'm' => "fill:#49b",
b'n' => "fill:#dcb",
b'o' => "fill:#dbc",
b'p' => "fill:#bac",
|
identifier_body
|
texture.rs
|
//! Texture handler types.
use std::rc::Rc;
use std::cmp::{ PartialEq, Eq };
use std::ops::Deref;
use image::{ DynamicImage, GenericImage };
use glium::Display;
use glium::texture::{ Texture2dDataSource, RawImage2d };
use glium::uniforms::{ AsUniformValue, UniformValue };
use id::Id;
pub type TextureData = ::glium::texture::CompressedSrgbTexture2d;
/// 2D Texture Object.
pub struct Texture
{
pub id: Id,
pub height: u32,
pub width: u32,
pub data: TextureData,
}
impl Texture {
pub fn new<'a, T>(display: &Display, source: T) -> Texture
where T: Texture2dDataSource<'a>
|
pub fn with_id<'a, T>(display: &Display, source: T, id: Id) -> Texture
where T: Texture2dDataSource<'a>
{
let tex = TextureData::new(display, source).unwrap();
Texture
{
id: id,
width: tex.get_width(),
height: tex.get_height().unwrap(),
data: tex,
}
}
pub fn from_image(display: &Display, image: &DynamicImage) -> Texture
{
Texture::new(display, RawImage2d::from_raw_rgba_reversed(image.raw_pixels(), image.dimensions()))
}
}
impl Eq for Texture {}
impl PartialEq<Texture> for Texture
{
fn eq(&self, other: &Texture) -> bool { self.id == other.id }
}
/// Texture reference.
#[derive(Clone)]
pub struct TextureRef(pub Rc<Texture>);
impl AsUniformValue for TextureRef
{
fn as_uniform_value(&self) -> UniformValue
{
UniformValue::CompressedSrgbTexture2d(&self.0.data, None)
}
}
impl Deref for TextureRef
{
type Target = Texture;
fn deref(&self) -> &Texture { self.0.deref() }
}
|
{
Texture::with_id(display, source, Id::new())
}
|
identifier_body
|
texture.rs
|
//! Texture handler types.
use std::rc::Rc;
use std::cmp::{ PartialEq, Eq };
use std::ops::Deref;
use image::{ DynamicImage, GenericImage };
use glium::Display;
use glium::texture::{ Texture2dDataSource, RawImage2d };
use glium::uniforms::{ AsUniformValue, UniformValue };
use id::Id;
pub type TextureData = ::glium::texture::CompressedSrgbTexture2d;
/// 2D Texture Object.
pub struct Texture
{
pub id: Id,
pub height: u32,
pub width: u32,
pub data: TextureData,
}
impl Texture {
pub fn new<'a, T>(display: &Display, source: T) -> Texture
where T: Texture2dDataSource<'a>
{
Texture::with_id(display, source, Id::new())
}
pub fn with_id<'a, T>(display: &Display, source: T, id: Id) -> Texture
where T: Texture2dDataSource<'a>
{
let tex = TextureData::new(display, source).unwrap();
Texture
{
id: id,
width: tex.get_width(),
height: tex.get_height().unwrap(),
data: tex,
}
}
pub fn
|
(display: &Display, image: &DynamicImage) -> Texture
{
Texture::new(display, RawImage2d::from_raw_rgba_reversed(image.raw_pixels(), image.dimensions()))
}
}
impl Eq for Texture {}
impl PartialEq<Texture> for Texture
{
fn eq(&self, other: &Texture) -> bool { self.id == other.id }
}
/// Texture reference.
#[derive(Clone)]
pub struct TextureRef(pub Rc<Texture>);
impl AsUniformValue for TextureRef
{
fn as_uniform_value(&self) -> UniformValue
{
UniformValue::CompressedSrgbTexture2d(&self.0.data, None)
}
}
impl Deref for TextureRef
{
type Target = Texture;
fn deref(&self) -> &Texture { self.0.deref() }
}
|
from_image
|
identifier_name
|
texture.rs
|
//! Texture handler types.
use std::rc::Rc;
use std::cmp::{ PartialEq, Eq };
use std::ops::Deref;
use image::{ DynamicImage, GenericImage };
use glium::Display;
use glium::texture::{ Texture2dDataSource, RawImage2d };
use glium::uniforms::{ AsUniformValue, UniformValue };
use id::Id;
pub type TextureData = ::glium::texture::CompressedSrgbTexture2d;
/// 2D Texture Object.
pub struct Texture
{
pub id: Id,
pub height: u32,
pub width: u32,
pub data: TextureData,
}
impl Texture {
pub fn new<'a, T>(display: &Display, source: T) -> Texture
where T: Texture2dDataSource<'a>
{
Texture::with_id(display, source, Id::new())
}
pub fn with_id<'a, T>(display: &Display, source: T, id: Id) -> Texture
where T: Texture2dDataSource<'a>
{
let tex = TextureData::new(display, source).unwrap();
Texture
|
width: tex.get_width(),
height: tex.get_height().unwrap(),
data: tex,
}
}
pub fn from_image(display: &Display, image: &DynamicImage) -> Texture
{
Texture::new(display, RawImage2d::from_raw_rgba_reversed(image.raw_pixels(), image.dimensions()))
}
}
impl Eq for Texture {}
impl PartialEq<Texture> for Texture
{
fn eq(&self, other: &Texture) -> bool { self.id == other.id }
}
/// Texture reference.
#[derive(Clone)]
pub struct TextureRef(pub Rc<Texture>);
impl AsUniformValue for TextureRef
{
fn as_uniform_value(&self) -> UniformValue
{
UniformValue::CompressedSrgbTexture2d(&self.0.data, None)
}
}
impl Deref for TextureRef
{
type Target = Texture;
fn deref(&self) -> &Texture { self.0.deref() }
}
|
{
id: id,
|
random_line_split
|
example.rs
|
use anyhow::Error;
use std::{fs, path::Path};
#[derive(Debug, thiserror::Error)]
enum FileAccessError {
#[error("File not found: {}", file_name)]
FileNotFoundError { file_name: String },
#[error("Error reading file: {}", file_name)]
FileReadError { file_name: String },
}
pub struct Flags {
print_line_number: bool,
print_file_name: bool,
use_caseinsensitive_comparison: bool,
use_inverted_comparison: bool,
match_entire_line: bool,
}
impl Flags {
pub fn new(flags: &[&str]) -> Self {
Flags {
print_line_number: flags.contains(&"-n"),
print_file_name: flags.contains(&"-l"),
use_caseinsensitive_comparison: flags.contains(&"-i"),
use_inverted_comparison: flags.contains(&"-v"),
match_entire_line: flags.contains(&"-x"),
}
}
}
fn get_file_lines(file_name: &str) -> Result<Vec<String>, FileAccessError> {
let file_path = Path::new(file_name);
if!file_path.exists() {
return Err(FileAccessError::FileNotFoundError {
file_name: String::from(file_name),
});
}
if let Ok(content) = fs::read_to_string(file_path) {
|
Ok(content.split('\n').map(|line| line.to_string()).collect())
} else {
Err(FileAccessError::FileReadError {
file_name: String::from(file_name),
})
}
}
pub fn grep(pattern: &str, flags: &Flags, files: &[&str]) -> Result<Vec<String>, Error> {
let mut grep_result = vec![];
let is_multiple_file_search = files.len() > 1;
for file_name in files {
let file_lines = get_file_lines(file_name)?;
grep_result.extend(
file_lines
.iter()
.enumerate()
.filter(|&(_, line)| {
let mut inner_line = line.clone();
let mut inner_pattern = String::from(pattern);
if flags.use_caseinsensitive_comparison {
inner_line = inner_line.to_lowercase();
inner_pattern = inner_pattern.to_lowercase();
}
if flags.use_inverted_comparison {
!inner_line.contains(&inner_pattern)
} else if flags.match_entire_line {
inner_line == inner_pattern
} else {
inner_line.contains(&inner_pattern)
}
})
.filter(|(_, line)|!line.is_empty())
.map(|(line_number, line)| {
let mut result = line.to_owned();
if flags.print_line_number {
result.insert_str(0, &format!("{}:", line_number + 1));
}
if is_multiple_file_search {
result.insert_str(0, &format!("{}:", file_name))
}
if flags.print_file_name {
result = file_name.to_owned().to_owned();
}
result
}),
);
}
grep_result.dedup_by(|a, b| (*a).eq(b));
Ok(grep_result)
}
|
random_line_split
|
|
example.rs
|
use anyhow::Error;
use std::{fs, path::Path};
#[derive(Debug, thiserror::Error)]
enum
|
{
#[error("File not found: {}", file_name)]
FileNotFoundError { file_name: String },
#[error("Error reading file: {}", file_name)]
FileReadError { file_name: String },
}
pub struct Flags {
print_line_number: bool,
print_file_name: bool,
use_caseinsensitive_comparison: bool,
use_inverted_comparison: bool,
match_entire_line: bool,
}
impl Flags {
pub fn new(flags: &[&str]) -> Self {
Flags {
print_line_number: flags.contains(&"-n"),
print_file_name: flags.contains(&"-l"),
use_caseinsensitive_comparison: flags.contains(&"-i"),
use_inverted_comparison: flags.contains(&"-v"),
match_entire_line: flags.contains(&"-x"),
}
}
}
fn get_file_lines(file_name: &str) -> Result<Vec<String>, FileAccessError> {
let file_path = Path::new(file_name);
if!file_path.exists() {
return Err(FileAccessError::FileNotFoundError {
file_name: String::from(file_name),
});
}
if let Ok(content) = fs::read_to_string(file_path) {
Ok(content.split('\n').map(|line| line.to_string()).collect())
} else {
Err(FileAccessError::FileReadError {
file_name: String::from(file_name),
})
}
}
pub fn grep(pattern: &str, flags: &Flags, files: &[&str]) -> Result<Vec<String>, Error> {
let mut grep_result = vec![];
let is_multiple_file_search = files.len() > 1;
for file_name in files {
let file_lines = get_file_lines(file_name)?;
grep_result.extend(
file_lines
.iter()
.enumerate()
.filter(|&(_, line)| {
let mut inner_line = line.clone();
let mut inner_pattern = String::from(pattern);
if flags.use_caseinsensitive_comparison {
inner_line = inner_line.to_lowercase();
inner_pattern = inner_pattern.to_lowercase();
}
if flags.use_inverted_comparison {
!inner_line.contains(&inner_pattern)
} else if flags.match_entire_line {
inner_line == inner_pattern
} else {
inner_line.contains(&inner_pattern)
}
})
.filter(|(_, line)|!line.is_empty())
.map(|(line_number, line)| {
let mut result = line.to_owned();
if flags.print_line_number {
result.insert_str(0, &format!("{}:", line_number + 1));
}
if is_multiple_file_search {
result.insert_str(0, &format!("{}:", file_name))
}
if flags.print_file_name {
result = file_name.to_owned().to_owned();
}
result
}),
);
}
grep_result.dedup_by(|a, b| (*a).eq(b));
Ok(grep_result)
}
|
FileAccessError
|
identifier_name
|
example.rs
|
use anyhow::Error;
use std::{fs, path::Path};
#[derive(Debug, thiserror::Error)]
enum FileAccessError {
#[error("File not found: {}", file_name)]
FileNotFoundError { file_name: String },
#[error("Error reading file: {}", file_name)]
FileReadError { file_name: String },
}
pub struct Flags {
print_line_number: bool,
print_file_name: bool,
use_caseinsensitive_comparison: bool,
use_inverted_comparison: bool,
match_entire_line: bool,
}
impl Flags {
pub fn new(flags: &[&str]) -> Self {
Flags {
print_line_number: flags.contains(&"-n"),
print_file_name: flags.contains(&"-l"),
use_caseinsensitive_comparison: flags.contains(&"-i"),
use_inverted_comparison: flags.contains(&"-v"),
match_entire_line: flags.contains(&"-x"),
}
}
}
fn get_file_lines(file_name: &str) -> Result<Vec<String>, FileAccessError> {
let file_path = Path::new(file_name);
if!file_path.exists() {
return Err(FileAccessError::FileNotFoundError {
file_name: String::from(file_name),
});
}
if let Ok(content) = fs::read_to_string(file_path) {
Ok(content.split('\n').map(|line| line.to_string()).collect())
} else {
Err(FileAccessError::FileReadError {
file_name: String::from(file_name),
})
}
}
pub fn grep(pattern: &str, flags: &Flags, files: &[&str]) -> Result<Vec<String>, Error> {
let mut grep_result = vec![];
let is_multiple_file_search = files.len() > 1;
for file_name in files {
let file_lines = get_file_lines(file_name)?;
grep_result.extend(
file_lines
.iter()
.enumerate()
.filter(|&(_, line)| {
let mut inner_line = line.clone();
let mut inner_pattern = String::from(pattern);
if flags.use_caseinsensitive_comparison {
inner_line = inner_line.to_lowercase();
inner_pattern = inner_pattern.to_lowercase();
}
if flags.use_inverted_comparison {
!inner_line.contains(&inner_pattern)
} else if flags.match_entire_line
|
else {
inner_line.contains(&inner_pattern)
}
})
.filter(|(_, line)|!line.is_empty())
.map(|(line_number, line)| {
let mut result = line.to_owned();
if flags.print_line_number {
result.insert_str(0, &format!("{}:", line_number + 1));
}
if is_multiple_file_search {
result.insert_str(0, &format!("{}:", file_name))
}
if flags.print_file_name {
result = file_name.to_owned().to_owned();
}
result
}),
);
}
grep_result.dedup_by(|a, b| (*a).eq(b));
Ok(grep_result)
}
|
{
inner_line == inner_pattern
}
|
conditional_block
|
cell.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Types that provide interior mutability.
use clone::Clone;
use cmp::Eq;
use kinds::{marker, Copy};
use ops::{Deref, DerefMut, Drop};
use option::{None, Option, Some};
use ty::Unsafe;
/// A mutable memory location that admits only `Copy` data.
pub struct Cell<T> {
value: Unsafe<T>,
noshare: marker::NoShare,
}
impl<T:Copy> Cell<T> {
/// Creates a new `Cell` containing the given value.
pub fn new(value: T) -> Cell<T> {
Cell {
value: Unsafe::new(value),
noshare: marker::NoShare,
}
}
/// Returns a copy of the contained value.
#[inline]
pub fn get(&self) -> T {
unsafe{ *self.value.get() }
}
/// Sets the contained value.
#[inline]
pub fn set(&self, value: T) {
unsafe {
*self.value.get() = value;
}
}
}
impl<T:Copy> Clone for Cell<T> {
fn clone(&self) -> Cell<T> {
Cell::new(self.get())
}
}
impl<T:Eq + Copy> Eq for Cell<T> {
fn eq(&self, other: &Cell<T>) -> bool {
self.get() == other.get()
}
}
/// A mutable memory location with dynamically checked borrow rules
pub struct RefCell<T> {
value: Unsafe<T>,
borrow: Cell<BorrowFlag>,
nocopy: marker::NoCopy,
noshare: marker::NoShare,
}
// Values [1, MAX-1] represent the number of `Ref` active
// (will not outgrow its range since `uint` is the size of the address space)
type BorrowFlag = uint;
static UNUSED: BorrowFlag = 0;
static WRITING: BorrowFlag = -1;
impl<T> RefCell<T> {
/// Create a new `RefCell` containing `value`
pub fn new(value: T) -> RefCell<T> {
RefCell {
value: Unsafe::new(value),
borrow: Cell::new(UNUSED),
nocopy: marker::NoCopy,
noshare: marker::NoShare,
}
}
/// Consumes the `RefCell`, returning the wrapped value.
pub fn unwrap(self) -> T {
debug_assert!(self.borrow.get() == UNUSED);
unsafe{self.value.unwrap()}
}
/// Attempts to immutably borrow the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// Returns `None` if the value is currently mutably borrowed.
pub fn try_borrow<'a>(&'a self) -> Option<Ref<'a, T>> {
match self.borrow.get() {
WRITING => None,
borrow => {
self.borrow.set(borrow + 1);
Some(Ref { parent: self })
}
}
}
/// Immutably borrows the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// # Failure
///
/// Fails if the value is currently mutably borrowed.
pub fn borrow<'a>(&'a self) -> Ref<'a, T> {
match self.try_borrow() {
Some(ptr) => ptr,
None => fail!("RefCell<T> already mutably borrowed")
}
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// Returns `None` if the value is currently borrowed.
pub fn try_borrow_mut<'a>(&'a self) -> Option<RefMut<'a, T>> {
match self.borrow.get() {
UNUSED => {
self.borrow.set(WRITING);
Some(RefMut { parent: self })
},
_ => None
}
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// # Failure
///
/// Fails if the value is currently borrowed.
pub fn borrow_mut<'a>(&'a self) -> RefMut<'a, T> {
match self.try_borrow_mut() {
Some(ptr) => ptr,
None => fail!("RefCell<T> already borrowed")
}
}
}
impl<T: Clone> Clone for RefCell<T> {
fn clone(&self) -> RefCell<T> {
RefCell::new(self.borrow().clone())
}
}
impl<T: Eq> Eq for RefCell<T> {
fn eq(&self, other: &RefCell<T>) -> bool {
*self.borrow() == *other.borrow()
}
}
/// Wraps a borrowed reference to a value in a `RefCell` box.
pub struct Ref<'b, T> {
parent: &'b RefCell<T>
}
#[unsafe_destructor]
impl<'b, T> Drop for Ref<'b, T> {
fn drop(&mut self) {
let borrow = self.parent.borrow.get();
debug_assert!(borrow!= WRITING && borrow!= UNUSED);
self.parent.borrow.set(borrow - 1);
}
}
impl<'b, T> Deref<T> for Ref<'b, T> {
#[inline]
fn deref<'a>(&'a self) -> &'a T {
unsafe { &*self.parent.value.get() }
}
}
/// Copy a `Ref`.
///
/// The `RefCell` is already immutably borrowed, so this cannot fail.
///
/// A `Clone` implementation would interfere with the widespread
/// use of `r.borrow().clone()` to clone the contents of a `RefCell`.
#[experimental]
pub fn clone_ref<'b, T>(orig: &Ref<'b, T>) -> Ref<'b, T> {
// Since this Ref exists, we know the borrow flag
// is not set to WRITING.
let borrow = orig.parent.borrow.get();
debug_assert!(borrow!= WRITING && borrow!= UNUSED);
orig.parent.borrow.set(borrow + 1);
Ref {
parent: orig.parent,
}
}
/// Wraps a mutable borrowed reference to a value in a `RefCell` box.
pub struct RefMut<'b, T> {
parent: &'b RefCell<T>
}
#[unsafe_destructor]
impl<'b, T> Drop for RefMut<'b, T> {
fn drop(&mut self) {
let borrow = self.parent.borrow.get();
debug_assert!(borrow == WRITING);
self.parent.borrow.set(UNUSED);
}
}
impl<'b, T> Deref<T> for RefMut<'b, T> {
#[inline]
fn deref<'a>(&'a self) -> &'a T {
unsafe { &*self.parent.value.get() }
}
}
impl<'b, T> DerefMut<T> for RefMut<'b, T> {
#[inline]
fn deref_mut<'a>(&'a mut self) -> &'a mut T {
unsafe { &mut *self.parent.value.get() }
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn smoketest_cell() {
let x = Cell::new(10);
assert!(x == Cell::new(10));
assert!(x.get() == 10);
x.set(20);
assert!(x == Cell::new(20));
assert!(x.get() == 20);
let y = Cell::new((30, 40));
assert!(y == Cell::new((30, 40)));
assert!(y.get() == (30, 40));
}
#[test]
fn cell_has_sensible_show() {
use str::StrSlice;
let x = Cell::new("foo bar");
assert!(format!("{}", x).contains(x.get()));
x.set("baz qux");
assert!(format!("{}", x).contains(x.get()));
}
#[test]
fn double_imm_borrow() {
let x = RefCell::new(0);
let _b1 = x.borrow();
x.borrow();
}
#[test]
fn no_mut_then_imm_borrow() {
let x = RefCell::new(0);
let _b1 = x.borrow_mut();
assert!(x.try_borrow().is_none());
}
#[test]
fn no_imm_then_borrow_mut() {
let x = RefCell::new(0);
let _b1 = x.borrow();
assert!(x.try_borrow_mut().is_none());
}
#[test]
fn no_double_borrow_mut() {
let x = RefCell::new(0);
let _b1 = x.borrow_mut();
assert!(x.try_borrow_mut().is_none());
}
#[test]
fn imm_release_borrow_mut() {
let x = RefCell::new(0);
{
let _b1 = x.borrow();
}
x.borrow_mut();
}
#[test]
fn mut_release_borrow_mut() {
let x = RefCell::new(0);
{
let _b1 = x.borrow_mut();
}
x.borrow();
}
#[test]
fn double_borrow_single_release_no_borrow_mut() {
let x = RefCell::new(0);
let _b1 = x.borrow();
{
let _b2 = x.borrow();
}
assert!(x.try_borrow_mut().is_none());
}
#[test]
#[should_fail]
fn
|
() {
let x = RefCell::new(0);
let _b = x.borrow();
let _ = _b;
let _b = x.borrow_mut();
}
#[test]
fn clone_ref_updates_flag() {
let x = RefCell::new(0);
{
let b1 = x.borrow();
assert!(x.try_borrow_mut().is_none());
{
let _b2 = clone_ref(&b1);
assert!(x.try_borrow_mut().is_none());
}
assert!(x.try_borrow_mut().is_none());
}
assert!(x.try_borrow_mut().is_some());
}
}
|
discard_doesnt_unborrow
|
identifier_name
|
cell.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Types that provide interior mutability.
use clone::Clone;
use cmp::Eq;
use kinds::{marker, Copy};
use ops::{Deref, DerefMut, Drop};
use option::{None, Option, Some};
use ty::Unsafe;
/// A mutable memory location that admits only `Copy` data.
pub struct Cell<T> {
value: Unsafe<T>,
noshare: marker::NoShare,
}
impl<T:Copy> Cell<T> {
/// Creates a new `Cell` containing the given value.
pub fn new(value: T) -> Cell<T> {
Cell {
value: Unsafe::new(value),
noshare: marker::NoShare,
}
}
/// Returns a copy of the contained value.
#[inline]
pub fn get(&self) -> T {
unsafe{ *self.value.get() }
}
/// Sets the contained value.
#[inline]
pub fn set(&self, value: T) {
unsafe {
*self.value.get() = value;
}
}
}
impl<T:Copy> Clone for Cell<T> {
fn clone(&self) -> Cell<T> {
Cell::new(self.get())
}
}
impl<T:Eq + Copy> Eq for Cell<T> {
fn eq(&self, other: &Cell<T>) -> bool {
self.get() == other.get()
}
}
/// A mutable memory location with dynamically checked borrow rules
pub struct RefCell<T> {
value: Unsafe<T>,
borrow: Cell<BorrowFlag>,
nocopy: marker::NoCopy,
noshare: marker::NoShare,
}
// Values [1, MAX-1] represent the number of `Ref` active
// (will not outgrow its range since `uint` is the size of the address space)
type BorrowFlag = uint;
static UNUSED: BorrowFlag = 0;
static WRITING: BorrowFlag = -1;
impl<T> RefCell<T> {
/// Create a new `RefCell` containing `value`
pub fn new(value: T) -> RefCell<T> {
RefCell {
value: Unsafe::new(value),
borrow: Cell::new(UNUSED),
nocopy: marker::NoCopy,
noshare: marker::NoShare,
}
}
/// Consumes the `RefCell`, returning the wrapped value.
pub fn unwrap(self) -> T {
debug_assert!(self.borrow.get() == UNUSED);
unsafe{self.value.unwrap()}
}
/// Attempts to immutably borrow the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// Returns `None` if the value is currently mutably borrowed.
pub fn try_borrow<'a>(&'a self) -> Option<Ref<'a, T>> {
match self.borrow.get() {
WRITING => None,
borrow => {
self.borrow.set(borrow + 1);
Some(Ref { parent: self })
}
}
}
/// Immutably borrows the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// # Failure
///
/// Fails if the value is currently mutably borrowed.
pub fn borrow<'a>(&'a self) -> Ref<'a, T> {
match self.try_borrow() {
Some(ptr) => ptr,
None => fail!("RefCell<T> already mutably borrowed")
}
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// Returns `None` if the value is currently borrowed.
pub fn try_borrow_mut<'a>(&'a self) -> Option<RefMut<'a, T>> {
match self.borrow.get() {
UNUSED =>
|
,
_ => None
}
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// # Failure
///
/// Fails if the value is currently borrowed.
pub fn borrow_mut<'a>(&'a self) -> RefMut<'a, T> {
match self.try_borrow_mut() {
Some(ptr) => ptr,
None => fail!("RefCell<T> already borrowed")
}
}
}
impl<T: Clone> Clone for RefCell<T> {
fn clone(&self) -> RefCell<T> {
RefCell::new(self.borrow().clone())
}
}
impl<T: Eq> Eq for RefCell<T> {
fn eq(&self, other: &RefCell<T>) -> bool {
*self.borrow() == *other.borrow()
}
}
/// Wraps a borrowed reference to a value in a `RefCell` box.
pub struct Ref<'b, T> {
parent: &'b RefCell<T>
}
#[unsafe_destructor]
impl<'b, T> Drop for Ref<'b, T> {
fn drop(&mut self) {
let borrow = self.parent.borrow.get();
debug_assert!(borrow!= WRITING && borrow!= UNUSED);
self.parent.borrow.set(borrow - 1);
}
}
impl<'b, T> Deref<T> for Ref<'b, T> {
#[inline]
fn deref<'a>(&'a self) -> &'a T {
unsafe { &*self.parent.value.get() }
}
}
/// Copy a `Ref`.
///
/// The `RefCell` is already immutably borrowed, so this cannot fail.
///
/// A `Clone` implementation would interfere with the widespread
/// use of `r.borrow().clone()` to clone the contents of a `RefCell`.
#[experimental]
pub fn clone_ref<'b, T>(orig: &Ref<'b, T>) -> Ref<'b, T> {
// Since this Ref exists, we know the borrow flag
// is not set to WRITING.
let borrow = orig.parent.borrow.get();
debug_assert!(borrow!= WRITING && borrow!= UNUSED);
orig.parent.borrow.set(borrow + 1);
Ref {
parent: orig.parent,
}
}
/// Wraps a mutable borrowed reference to a value in a `RefCell` box.
pub struct RefMut<'b, T> {
parent: &'b RefCell<T>
}
#[unsafe_destructor]
impl<'b, T> Drop for RefMut<'b, T> {
fn drop(&mut self) {
let borrow = self.parent.borrow.get();
debug_assert!(borrow == WRITING);
self.parent.borrow.set(UNUSED);
}
}
impl<'b, T> Deref<T> for RefMut<'b, T> {
#[inline]
fn deref<'a>(&'a self) -> &'a T {
unsafe { &*self.parent.value.get() }
}
}
impl<'b, T> DerefMut<T> for RefMut<'b, T> {
#[inline]
fn deref_mut<'a>(&'a mut self) -> &'a mut T {
unsafe { &mut *self.parent.value.get() }
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn smoketest_cell() {
let x = Cell::new(10);
assert!(x == Cell::new(10));
assert!(x.get() == 10);
x.set(20);
assert!(x == Cell::new(20));
assert!(x.get() == 20);
let y = Cell::new((30, 40));
assert!(y == Cell::new((30, 40)));
assert!(y.get() == (30, 40));
}
#[test]
fn cell_has_sensible_show() {
use str::StrSlice;
let x = Cell::new("foo bar");
assert!(format!("{}", x).contains(x.get()));
x.set("baz qux");
assert!(format!("{}", x).contains(x.get()));
}
#[test]
fn double_imm_borrow() {
let x = RefCell::new(0);
let _b1 = x.borrow();
x.borrow();
}
#[test]
fn no_mut_then_imm_borrow() {
let x = RefCell::new(0);
let _b1 = x.borrow_mut();
assert!(x.try_borrow().is_none());
}
#[test]
fn no_imm_then_borrow_mut() {
let x = RefCell::new(0);
let _b1 = x.borrow();
assert!(x.try_borrow_mut().is_none());
}
#[test]
fn no_double_borrow_mut() {
let x = RefCell::new(0);
let _b1 = x.borrow_mut();
assert!(x.try_borrow_mut().is_none());
}
#[test]
fn imm_release_borrow_mut() {
let x = RefCell::new(0);
{
let _b1 = x.borrow();
}
x.borrow_mut();
}
#[test]
fn mut_release_borrow_mut() {
let x = RefCell::new(0);
{
let _b1 = x.borrow_mut();
}
x.borrow();
}
#[test]
fn double_borrow_single_release_no_borrow_mut() {
let x = RefCell::new(0);
let _b1 = x.borrow();
{
let _b2 = x.borrow();
}
assert!(x.try_borrow_mut().is_none());
}
#[test]
#[should_fail]
fn discard_doesnt_unborrow() {
let x = RefCell::new(0);
let _b = x.borrow();
let _ = _b;
let _b = x.borrow_mut();
}
#[test]
fn clone_ref_updates_flag() {
let x = RefCell::new(0);
{
let b1 = x.borrow();
assert!(x.try_borrow_mut().is_none());
{
let _b2 = clone_ref(&b1);
assert!(x.try_borrow_mut().is_none());
}
assert!(x.try_borrow_mut().is_none());
}
assert!(x.try_borrow_mut().is_some());
}
}
|
{
self.borrow.set(WRITING);
Some(RefMut { parent: self })
}
|
conditional_block
|
cell.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Types that provide interior mutability.
use clone::Clone;
use cmp::Eq;
use kinds::{marker, Copy};
use ops::{Deref, DerefMut, Drop};
use option::{None, Option, Some};
use ty::Unsafe;
/// A mutable memory location that admits only `Copy` data.
pub struct Cell<T> {
value: Unsafe<T>,
noshare: marker::NoShare,
}
impl<T:Copy> Cell<T> {
/// Creates a new `Cell` containing the given value.
pub fn new(value: T) -> Cell<T> {
Cell {
value: Unsafe::new(value),
noshare: marker::NoShare,
}
}
/// Returns a copy of the contained value.
#[inline]
pub fn get(&self) -> T {
unsafe{ *self.value.get() }
}
/// Sets the contained value.
#[inline]
pub fn set(&self, value: T) {
unsafe {
|
*self.value.get() = value;
}
}
}
impl<T:Copy> Clone for Cell<T> {
fn clone(&self) -> Cell<T> {
Cell::new(self.get())
}
}
impl<T:Eq + Copy> Eq for Cell<T> {
fn eq(&self, other: &Cell<T>) -> bool {
self.get() == other.get()
}
}
/// A mutable memory location with dynamically checked borrow rules
pub struct RefCell<T> {
value: Unsafe<T>,
borrow: Cell<BorrowFlag>,
nocopy: marker::NoCopy,
noshare: marker::NoShare,
}
// Values [1, MAX-1] represent the number of `Ref` active
// (will not outgrow its range since `uint` is the size of the address space)
type BorrowFlag = uint;
static UNUSED: BorrowFlag = 0;
static WRITING: BorrowFlag = -1;
impl<T> RefCell<T> {
/// Create a new `RefCell` containing `value`
pub fn new(value: T) -> RefCell<T> {
RefCell {
value: Unsafe::new(value),
borrow: Cell::new(UNUSED),
nocopy: marker::NoCopy,
noshare: marker::NoShare,
}
}
/// Consumes the `RefCell`, returning the wrapped value.
pub fn unwrap(self) -> T {
debug_assert!(self.borrow.get() == UNUSED);
unsafe{self.value.unwrap()}
}
/// Attempts to immutably borrow the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// Returns `None` if the value is currently mutably borrowed.
pub fn try_borrow<'a>(&'a self) -> Option<Ref<'a, T>> {
match self.borrow.get() {
WRITING => None,
borrow => {
self.borrow.set(borrow + 1);
Some(Ref { parent: self })
}
}
}
/// Immutably borrows the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// # Failure
///
/// Fails if the value is currently mutably borrowed.
pub fn borrow<'a>(&'a self) -> Ref<'a, T> {
match self.try_borrow() {
Some(ptr) => ptr,
None => fail!("RefCell<T> already mutably borrowed")
}
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// Returns `None` if the value is currently borrowed.
pub fn try_borrow_mut<'a>(&'a self) -> Option<RefMut<'a, T>> {
match self.borrow.get() {
UNUSED => {
self.borrow.set(WRITING);
Some(RefMut { parent: self })
},
_ => None
}
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// # Failure
///
/// Fails if the value is currently borrowed.
pub fn borrow_mut<'a>(&'a self) -> RefMut<'a, T> {
match self.try_borrow_mut() {
Some(ptr) => ptr,
None => fail!("RefCell<T> already borrowed")
}
}
}
impl<T: Clone> Clone for RefCell<T> {
fn clone(&self) -> RefCell<T> {
RefCell::new(self.borrow().clone())
}
}
impl<T: Eq> Eq for RefCell<T> {
fn eq(&self, other: &RefCell<T>) -> bool {
*self.borrow() == *other.borrow()
}
}
/// Wraps a borrowed reference to a value in a `RefCell` box.
pub struct Ref<'b, T> {
parent: &'b RefCell<T>
}
#[unsafe_destructor]
impl<'b, T> Drop for Ref<'b, T> {
fn drop(&mut self) {
let borrow = self.parent.borrow.get();
debug_assert!(borrow!= WRITING && borrow!= UNUSED);
self.parent.borrow.set(borrow - 1);
}
}
impl<'b, T> Deref<T> for Ref<'b, T> {
#[inline]
fn deref<'a>(&'a self) -> &'a T {
unsafe { &*self.parent.value.get() }
}
}
/// Copy a `Ref`.
///
/// The `RefCell` is already immutably borrowed, so this cannot fail.
///
/// A `Clone` implementation would interfere with the widespread
/// use of `r.borrow().clone()` to clone the contents of a `RefCell`.
#[experimental]
pub fn clone_ref<'b, T>(orig: &Ref<'b, T>) -> Ref<'b, T> {
// Since this Ref exists, we know the borrow flag
// is not set to WRITING.
let borrow = orig.parent.borrow.get();
debug_assert!(borrow!= WRITING && borrow!= UNUSED);
orig.parent.borrow.set(borrow + 1);
Ref {
parent: orig.parent,
}
}
/// Wraps a mutable borrowed reference to a value in a `RefCell` box.
pub struct RefMut<'b, T> {
parent: &'b RefCell<T>
}
#[unsafe_destructor]
impl<'b, T> Drop for RefMut<'b, T> {
fn drop(&mut self) {
let borrow = self.parent.borrow.get();
debug_assert!(borrow == WRITING);
self.parent.borrow.set(UNUSED);
}
}
impl<'b, T> Deref<T> for RefMut<'b, T> {
#[inline]
fn deref<'a>(&'a self) -> &'a T {
unsafe { &*self.parent.value.get() }
}
}
impl<'b, T> DerefMut<T> for RefMut<'b, T> {
#[inline]
fn deref_mut<'a>(&'a mut self) -> &'a mut T {
unsafe { &mut *self.parent.value.get() }
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn smoketest_cell() {
let x = Cell::new(10);
assert!(x == Cell::new(10));
assert!(x.get() == 10);
x.set(20);
assert!(x == Cell::new(20));
assert!(x.get() == 20);
let y = Cell::new((30, 40));
assert!(y == Cell::new((30, 40)));
assert!(y.get() == (30, 40));
}
#[test]
fn cell_has_sensible_show() {
use str::StrSlice;
let x = Cell::new("foo bar");
assert!(format!("{}", x).contains(x.get()));
x.set("baz qux");
assert!(format!("{}", x).contains(x.get()));
}
#[test]
fn double_imm_borrow() {
let x = RefCell::new(0);
let _b1 = x.borrow();
x.borrow();
}
#[test]
fn no_mut_then_imm_borrow() {
let x = RefCell::new(0);
let _b1 = x.borrow_mut();
assert!(x.try_borrow().is_none());
}
#[test]
fn no_imm_then_borrow_mut() {
let x = RefCell::new(0);
let _b1 = x.borrow();
assert!(x.try_borrow_mut().is_none());
}
#[test]
fn no_double_borrow_mut() {
let x = RefCell::new(0);
let _b1 = x.borrow_mut();
assert!(x.try_borrow_mut().is_none());
}
#[test]
fn imm_release_borrow_mut() {
let x = RefCell::new(0);
{
let _b1 = x.borrow();
}
x.borrow_mut();
}
#[test]
fn mut_release_borrow_mut() {
let x = RefCell::new(0);
{
let _b1 = x.borrow_mut();
}
x.borrow();
}
#[test]
fn double_borrow_single_release_no_borrow_mut() {
let x = RefCell::new(0);
let _b1 = x.borrow();
{
let _b2 = x.borrow();
}
assert!(x.try_borrow_mut().is_none());
}
#[test]
#[should_fail]
fn discard_doesnt_unborrow() {
let x = RefCell::new(0);
let _b = x.borrow();
let _ = _b;
let _b = x.borrow_mut();
}
#[test]
fn clone_ref_updates_flag() {
let x = RefCell::new(0);
{
let b1 = x.borrow();
assert!(x.try_borrow_mut().is_none());
{
let _b2 = clone_ref(&b1);
assert!(x.try_borrow_mut().is_none());
}
assert!(x.try_borrow_mut().is_none());
}
assert!(x.try_borrow_mut().is_some());
}
}
|
random_line_split
|
|
cell.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Types that provide interior mutability.
use clone::Clone;
use cmp::Eq;
use kinds::{marker, Copy};
use ops::{Deref, DerefMut, Drop};
use option::{None, Option, Some};
use ty::Unsafe;
/// A mutable memory location that admits only `Copy` data.
pub struct Cell<T> {
value: Unsafe<T>,
noshare: marker::NoShare,
}
impl<T:Copy> Cell<T> {
/// Creates a new `Cell` containing the given value.
pub fn new(value: T) -> Cell<T> {
Cell {
value: Unsafe::new(value),
noshare: marker::NoShare,
}
}
/// Returns a copy of the contained value.
#[inline]
pub fn get(&self) -> T {
unsafe{ *self.value.get() }
}
/// Sets the contained value.
#[inline]
pub fn set(&self, value: T) {
unsafe {
*self.value.get() = value;
}
}
}
impl<T:Copy> Clone for Cell<T> {
fn clone(&self) -> Cell<T> {
Cell::new(self.get())
}
}
impl<T:Eq + Copy> Eq for Cell<T> {
fn eq(&self, other: &Cell<T>) -> bool {
self.get() == other.get()
}
}
/// A mutable memory location with dynamically checked borrow rules
pub struct RefCell<T> {
value: Unsafe<T>,
borrow: Cell<BorrowFlag>,
nocopy: marker::NoCopy,
noshare: marker::NoShare,
}
// Values [1, MAX-1] represent the number of `Ref` active
// (will not outgrow its range since `uint` is the size of the address space)
type BorrowFlag = uint;
static UNUSED: BorrowFlag = 0;
static WRITING: BorrowFlag = -1;
impl<T> RefCell<T> {
/// Create a new `RefCell` containing `value`
pub fn new(value: T) -> RefCell<T> {
RefCell {
value: Unsafe::new(value),
borrow: Cell::new(UNUSED),
nocopy: marker::NoCopy,
noshare: marker::NoShare,
}
}
/// Consumes the `RefCell`, returning the wrapped value.
pub fn unwrap(self) -> T {
debug_assert!(self.borrow.get() == UNUSED);
unsafe{self.value.unwrap()}
}
/// Attempts to immutably borrow the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// Returns `None` if the value is currently mutably borrowed.
pub fn try_borrow<'a>(&'a self) -> Option<Ref<'a, T>> {
match self.borrow.get() {
WRITING => None,
borrow => {
self.borrow.set(borrow + 1);
Some(Ref { parent: self })
}
}
}
/// Immutably borrows the wrapped value.
///
/// The borrow lasts until the returned `Ref` exits scope. Multiple
/// immutable borrows can be taken out at the same time.
///
/// # Failure
///
/// Fails if the value is currently mutably borrowed.
pub fn borrow<'a>(&'a self) -> Ref<'a, T> {
match self.try_borrow() {
Some(ptr) => ptr,
None => fail!("RefCell<T> already mutably borrowed")
}
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// Returns `None` if the value is currently borrowed.
pub fn try_borrow_mut<'a>(&'a self) -> Option<RefMut<'a, T>> {
match self.borrow.get() {
UNUSED => {
self.borrow.set(WRITING);
Some(RefMut { parent: self })
},
_ => None
}
}
/// Mutably borrows the wrapped value.
///
/// The borrow lasts until the returned `RefMut` exits scope. The value
/// cannot be borrowed while this borrow is active.
///
/// # Failure
///
/// Fails if the value is currently borrowed.
pub fn borrow_mut<'a>(&'a self) -> RefMut<'a, T> {
match self.try_borrow_mut() {
Some(ptr) => ptr,
None => fail!("RefCell<T> already borrowed")
}
}
}
impl<T: Clone> Clone for RefCell<T> {
fn clone(&self) -> RefCell<T> {
RefCell::new(self.borrow().clone())
}
}
impl<T: Eq> Eq for RefCell<T> {
fn eq(&self, other: &RefCell<T>) -> bool {
*self.borrow() == *other.borrow()
}
}
/// Wraps a borrowed reference to a value in a `RefCell` box.
pub struct Ref<'b, T> {
parent: &'b RefCell<T>
}
#[unsafe_destructor]
impl<'b, T> Drop for Ref<'b, T> {
fn drop(&mut self) {
let borrow = self.parent.borrow.get();
debug_assert!(borrow!= WRITING && borrow!= UNUSED);
self.parent.borrow.set(borrow - 1);
}
}
impl<'b, T> Deref<T> for Ref<'b, T> {
#[inline]
fn deref<'a>(&'a self) -> &'a T {
unsafe { &*self.parent.value.get() }
}
}
/// Copy a `Ref`.
///
/// The `RefCell` is already immutably borrowed, so this cannot fail.
///
/// A `Clone` implementation would interfere with the widespread
/// use of `r.borrow().clone()` to clone the contents of a `RefCell`.
#[experimental]
pub fn clone_ref<'b, T>(orig: &Ref<'b, T>) -> Ref<'b, T> {
// Since this Ref exists, we know the borrow flag
// is not set to WRITING.
let borrow = orig.parent.borrow.get();
debug_assert!(borrow!= WRITING && borrow!= UNUSED);
orig.parent.borrow.set(borrow + 1);
Ref {
parent: orig.parent,
}
}
/// Wraps a mutable borrowed reference to a value in a `RefCell` box.
pub struct RefMut<'b, T> {
parent: &'b RefCell<T>
}
#[unsafe_destructor]
impl<'b, T> Drop for RefMut<'b, T> {
fn drop(&mut self) {
let borrow = self.parent.borrow.get();
debug_assert!(borrow == WRITING);
self.parent.borrow.set(UNUSED);
}
}
impl<'b, T> Deref<T> for RefMut<'b, T> {
#[inline]
fn deref<'a>(&'a self) -> &'a T {
unsafe { &*self.parent.value.get() }
}
}
impl<'b, T> DerefMut<T> for RefMut<'b, T> {
#[inline]
fn deref_mut<'a>(&'a mut self) -> &'a mut T {
unsafe { &mut *self.parent.value.get() }
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn smoketest_cell() {
let x = Cell::new(10);
assert!(x == Cell::new(10));
assert!(x.get() == 10);
x.set(20);
assert!(x == Cell::new(20));
assert!(x.get() == 20);
let y = Cell::new((30, 40));
assert!(y == Cell::new((30, 40)));
assert!(y.get() == (30, 40));
}
#[test]
fn cell_has_sensible_show() {
use str::StrSlice;
let x = Cell::new("foo bar");
assert!(format!("{}", x).contains(x.get()));
x.set("baz qux");
assert!(format!("{}", x).contains(x.get()));
}
#[test]
fn double_imm_borrow() {
let x = RefCell::new(0);
let _b1 = x.borrow();
x.borrow();
}
#[test]
fn no_mut_then_imm_borrow() {
let x = RefCell::new(0);
let _b1 = x.borrow_mut();
assert!(x.try_borrow().is_none());
}
#[test]
fn no_imm_then_borrow_mut() {
let x = RefCell::new(0);
let _b1 = x.borrow();
assert!(x.try_borrow_mut().is_none());
}
#[test]
fn no_double_borrow_mut()
|
#[test]
fn imm_release_borrow_mut() {
let x = RefCell::new(0);
{
let _b1 = x.borrow();
}
x.borrow_mut();
}
#[test]
fn mut_release_borrow_mut() {
let x = RefCell::new(0);
{
let _b1 = x.borrow_mut();
}
x.borrow();
}
#[test]
fn double_borrow_single_release_no_borrow_mut() {
let x = RefCell::new(0);
let _b1 = x.borrow();
{
let _b2 = x.borrow();
}
assert!(x.try_borrow_mut().is_none());
}
#[test]
#[should_fail]
fn discard_doesnt_unborrow() {
let x = RefCell::new(0);
let _b = x.borrow();
let _ = _b;
let _b = x.borrow_mut();
}
#[test]
fn clone_ref_updates_flag() {
let x = RefCell::new(0);
{
let b1 = x.borrow();
assert!(x.try_borrow_mut().is_none());
{
let _b2 = clone_ref(&b1);
assert!(x.try_borrow_mut().is_none());
}
assert!(x.try_borrow_mut().is_none());
}
assert!(x.try_borrow_mut().is_some());
}
}
|
{
let x = RefCell::new(0);
let _b1 = x.borrow_mut();
assert!(x.try_borrow_mut().is_none());
}
|
identifier_body
|
gpucommandbuffer.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::{DomRefCell, Ref};
use crate::dom::bindings::codegen::Bindings::GPUCommandBufferBinding::GPUCommandBufferMethods;
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::Dom;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::globalscope::GlobalScope;
use crate::dom::gpubuffer::GPUBuffer;
use dom_struct::dom_struct;
use std::collections::HashSet;
use std::hash::{Hash, Hasher};
use webgpu::{WebGPU, WebGPUCommandBuffer};
impl Eq for DomRoot<GPUBuffer> {}
impl Hash for DomRoot<GPUBuffer> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.id().hash(state);
}
}
#[dom_struct]
pub struct GPUCommandBuffer {
reflector_: Reflector,
#[ignore_malloc_size_of = "defined in webgpu"]
channel: WebGPU,
label: DomRefCell<Option<DOMString>>,
command_buffer: WebGPUCommandBuffer,
buffers: DomRefCell<HashSet<Dom<GPUBuffer>>>,
}
impl GPUCommandBuffer {
fn new_inherited(
channel: WebGPU,
command_buffer: WebGPUCommandBuffer,
buffers: HashSet<DomRoot<GPUBuffer>>,
) -> Self {
Self {
channel,
reflector_: Reflector::new(),
label: DomRefCell::new(None),
command_buffer,
buffers: DomRefCell::new(buffers.into_iter().map(|b| Dom::from_ref(&*b)).collect()),
}
}
pub fn new(
global: &GlobalScope,
channel: WebGPU,
command_buffer: WebGPUCommandBuffer,
buffers: HashSet<DomRoot<GPUBuffer>>,
) -> DomRoot<Self> {
reflect_dom_object(
Box::new(GPUCommandBuffer::new_inherited(
channel,
command_buffer,
buffers,
)),
global,
)
}
}
impl GPUCommandBuffer {
pub fn id(&self) -> WebGPUCommandBuffer
|
pub fn buffers(&self) -> Ref<HashSet<Dom<GPUBuffer>>> {
self.buffers.borrow()
}
}
impl GPUCommandBufferMethods for GPUCommandBuffer {
/// https://gpuweb.github.io/gpuweb/#dom-gpuobjectbase-label
fn GetLabel(&self) -> Option<DOMString> {
self.label.borrow().clone()
}
/// https://gpuweb.github.io/gpuweb/#dom-gpuobjectbase-label
fn SetLabel(&self, value: Option<DOMString>) {
*self.label.borrow_mut() = value;
}
}
|
{
self.command_buffer
}
|
identifier_body
|
gpucommandbuffer.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::{DomRefCell, Ref};
use crate::dom::bindings::codegen::Bindings::GPUCommandBufferBinding::GPUCommandBufferMethods;
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::Dom;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::globalscope::GlobalScope;
use crate::dom::gpubuffer::GPUBuffer;
use dom_struct::dom_struct;
use std::collections::HashSet;
use std::hash::{Hash, Hasher};
use webgpu::{WebGPU, WebGPUCommandBuffer};
impl Eq for DomRoot<GPUBuffer> {}
impl Hash for DomRoot<GPUBuffer> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.id().hash(state);
}
}
#[dom_struct]
pub struct GPUCommandBuffer {
reflector_: Reflector,
#[ignore_malloc_size_of = "defined in webgpu"]
channel: WebGPU,
label: DomRefCell<Option<DOMString>>,
command_buffer: WebGPUCommandBuffer,
buffers: DomRefCell<HashSet<Dom<GPUBuffer>>>,
}
impl GPUCommandBuffer {
fn new_inherited(
channel: WebGPU,
command_buffer: WebGPUCommandBuffer,
buffers: HashSet<DomRoot<GPUBuffer>>,
) -> Self {
Self {
channel,
reflector_: Reflector::new(),
label: DomRefCell::new(None),
command_buffer,
buffers: DomRefCell::new(buffers.into_iter().map(|b| Dom::from_ref(&*b)).collect()),
}
}
pub fn new(
global: &GlobalScope,
channel: WebGPU,
command_buffer: WebGPUCommandBuffer,
buffers: HashSet<DomRoot<GPUBuffer>>,
) -> DomRoot<Self> {
reflect_dom_object(
Box::new(GPUCommandBuffer::new_inherited(
channel,
command_buffer,
buffers,
)),
global,
)
}
}
impl GPUCommandBuffer {
pub fn id(&self) -> WebGPUCommandBuffer {
self.command_buffer
}
pub fn buffers(&self) -> Ref<HashSet<Dom<GPUBuffer>>> {
self.buffers.borrow()
}
}
impl GPUCommandBufferMethods for GPUCommandBuffer {
/// https://gpuweb.github.io/gpuweb/#dom-gpuobjectbase-label
fn GetLabel(&self) -> Option<DOMString> {
self.label.borrow().clone()
}
/// https://gpuweb.github.io/gpuweb/#dom-gpuobjectbase-label
fn
|
(&self, value: Option<DOMString>) {
*self.label.borrow_mut() = value;
}
}
|
SetLabel
|
identifier_name
|
gpucommandbuffer.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::{DomRefCell, Ref};
use crate::dom::bindings::codegen::Bindings::GPUCommandBufferBinding::GPUCommandBufferMethods;
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::Dom;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::globalscope::GlobalScope;
use crate::dom::gpubuffer::GPUBuffer;
use dom_struct::dom_struct;
use std::collections::HashSet;
use std::hash::{Hash, Hasher};
use webgpu::{WebGPU, WebGPUCommandBuffer};
impl Eq for DomRoot<GPUBuffer> {}
impl Hash for DomRoot<GPUBuffer> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.id().hash(state);
}
}
#[dom_struct]
pub struct GPUCommandBuffer {
reflector_: Reflector,
#[ignore_malloc_size_of = "defined in webgpu"]
channel: WebGPU,
label: DomRefCell<Option<DOMString>>,
command_buffer: WebGPUCommandBuffer,
buffers: DomRefCell<HashSet<Dom<GPUBuffer>>>,
}
impl GPUCommandBuffer {
fn new_inherited(
channel: WebGPU,
command_buffer: WebGPUCommandBuffer,
buffers: HashSet<DomRoot<GPUBuffer>>,
|
Self {
channel,
reflector_: Reflector::new(),
label: DomRefCell::new(None),
command_buffer,
buffers: DomRefCell::new(buffers.into_iter().map(|b| Dom::from_ref(&*b)).collect()),
}
}
pub fn new(
global: &GlobalScope,
channel: WebGPU,
command_buffer: WebGPUCommandBuffer,
buffers: HashSet<DomRoot<GPUBuffer>>,
) -> DomRoot<Self> {
reflect_dom_object(
Box::new(GPUCommandBuffer::new_inherited(
channel,
command_buffer,
buffers,
)),
global,
)
}
}
impl GPUCommandBuffer {
pub fn id(&self) -> WebGPUCommandBuffer {
self.command_buffer
}
pub fn buffers(&self) -> Ref<HashSet<Dom<GPUBuffer>>> {
self.buffers.borrow()
}
}
impl GPUCommandBufferMethods for GPUCommandBuffer {
/// https://gpuweb.github.io/gpuweb/#dom-gpuobjectbase-label
fn GetLabel(&self) -> Option<DOMString> {
self.label.borrow().clone()
}
/// https://gpuweb.github.io/gpuweb/#dom-gpuobjectbase-label
fn SetLabel(&self, value: Option<DOMString>) {
*self.label.borrow_mut() = value;
}
}
|
) -> Self {
|
random_line_split
|
cargo_compile.rs
|
> {
pub config: &'a Config,
/// Number of concurrent jobs to use.
pub jobs: Option<u32>,
/// The target platform to compile for (example: `i686-unknown-linux-gnu`).
pub target: Option<&'a str>,
/// Extra features to build for the root package
pub features: &'a [String],
/// Flag if the default feature should be built for the root package
pub no_default_features: bool,
/// Root package to build (if None it's the current one)
pub spec: &'a [String],
/// Filter to apply to the root package to select which targets will be
/// built.
pub filter: CompileFilter<'a>,
/// Engine which drives compilation
pub exec_engine: Option<Arc<Box<ExecEngine>>>,
/// Whether this is a release build or not
pub release: bool,
/// Mode for this compile.
pub mode: CompileMode,
/// The specified target will be compiled with all the available arguments,
/// note that this only accounts for the *final* invocation of rustc
pub target_rustc_args: Option<&'a [String]>,
}
#[derive(Clone, Copy, PartialEq)]
pub enum CompileMode {
Test,
Build,
Bench,
Doc { deps: bool },
}
pub enum CompileFilter<'a> {
Everything,
Only {
lib: bool,
bins: &'a [String],
examples: &'a [String],
tests: &'a [String],
benches: &'a [String],
}
}
pub fn compile<'a>(manifest_path: &Path,
options: &CompileOptions<'a>)
-> CargoResult<ops::Compilation<'a>> {
debug!("compile; manifest-path={}", manifest_path.display());
let package = try!(Package::for_path(manifest_path, options.config));
debug!("loaded package; package={}", package);
for key in package.manifest().warnings().iter() {
try!(options.config.shell().warn(key))
}
compile_pkg(&package, options)
}
#[allow(deprecated)] // connect => join in 1.3
pub fn compile_pkg<'a>(root_package: &Package,
options: &CompileOptions<'a>)
-> CargoResult<ops::Compilation<'a>> {
let CompileOptions { config, jobs, target, spec, features,
no_default_features, release, mode,
ref filter, ref exec_engine,
ref target_rustc_args } = *options;
let target = target.map(|s| s.to_string());
let features = features.iter().flat_map(|s| {
s.split(' ')
}).map(|s| s.to_string()).collect::<Vec<String>>();
let ex_filter = {
let summary = root_package.manifest().summary();
let all_features = summary.features().get("default")
.map_or(features.clone(), |x| {
if!no_default_features {
features.iter().chain(x.iter()).cloned().collect()
} else {
features.clone()
}
});
summary.examples().iter().filter_map(|(k,v)| {
if v.iter().any(|x|!all_features.contains(x)) { Some(k) } else { None }
}).cloned().collect()
};
if spec.len() > 0 && (no_default_features || features.len() > 0) {
return Err(human("features cannot be modified when the main package \
is not being built"))
}
if jobs == Some(0) {
return Err(human("jobs must be at least 1"))
}
let override_ids = try!(source_ids_from_config(options.config, root_package.root()));
let (packages, resolve_with_overrides, sources) = {
let mut registry = PackageRegistry::new(options.config);
// First, resolve the root_package's *listed* dependencies, as well as
// downloading and updating all remotes and such.
let resolve = try!(ops::resolve_pkg(&mut registry, root_package));
// Second, resolve with precisely what we're doing. Filter out
// transitive dependencies if necessary, specify features, handle
// overrides, etc.
let _p = profile::start("resolving w/ overrides...");
try!(registry.add_overrides(override_ids));
let method = Method::Required{
dev_deps: true, // TODO: remove this option?
features: &features,
uses_default_features:!no_default_features,
};
let resolved_with_overrides =
try!(ops::resolve_with_previous(&mut registry, root_package, method,
Some(&resolve), None));
let packages = try!(ops::get_resolved_packages(&resolved_with_overrides, &mut registry));
(packages, resolved_with_overrides, registry.move_sources())
};
let mut invalid_spec = vec![];
let pkgids = if spec.len() > 0 {
spec.iter().filter_map(|p| {
match resolve_with_overrides.query(&p) {
Ok(p) => Some(p),
Err(..) => { invalid_spec.push(p.to_string()); None }
}
}).collect::<Vec<_>>()
} else {
vec![root_package.package_id()]
};
if spec.len() > 0 && invalid_spec.len() > 0 {
return Err(human(format!("could not find package matching spec `{}`",
invalid_spec.connect(", "))));
}
let to_builds = packages.iter().filter(|p| pkgids.contains(&p.package_id()))
.collect::<Vec<_>>();
let mut general_targets = Vec::new();
let mut package_targets = Vec::new();
match *target_rustc_args {
Some(args) => {
if to_builds.len() == 1 {
let targets = try!(generate_targets(to_builds[0], mode, filter, &ex_filter, release));
if targets.len() == 1 {
let (target, profile) = targets[0];
let mut profile = profile.clone();
profile.rustc_args = Some(args.to_vec());
general_targets.push((target, profile));
} else {
return Err(human("extra arguments to `rustc` can only be \
passed to one target, consider \
filtering\nthe package by passing e.g. \
`--lib` or `--bin NAME` to specify \
a single target"))
}
} else {
panic!("`rustc` should not accept multiple `-p` flags")
}
}
None => {
for &to_build in to_builds.iter() {
let targets = try!(generate_targets(to_build, mode, filter, &ex_filter, release));
package_targets.push((to_build, targets));
}
}
};
for &(target, ref profile) in &general_targets {
for &to_build in to_builds.iter() {
package_targets.push((to_build, vec![(target, profile)]));
}
}
let mut ret = {
let _p = profile::start("compiling");
let mut build_config = try!(scrape_build_config(config, jobs, target));
build_config.exec_engine = exec_engine.clone();
build_config.release = release;
if let CompileMode::Doc { deps } = mode {
build_config.doc_all = deps;
}
try!(ops::compile_targets(&package_targets,
&PackageSet::new(&packages),
&resolve_with_overrides,
&sources,
config,
build_config,
root_package.manifest().profiles(),
))
};
ret.to_doc_test = to_builds.iter().map(|&p| p.clone()).collect();
return Ok(ret);
}
impl<'a> CompileFilter<'a> {
pub fn new(lib_only: bool,
bins: &'a [String],
tests: &'a [String],
examples: &'a [String],
benches: &'a [String]) -> CompileFilter<'a> {
if lib_only ||!bins.is_empty() ||!tests.is_empty() ||
!examples.is_empty() ||!benches.is_empty() {
CompileFilter::Only {
lib: lib_only, bins: bins, examples: examples, benches: benches,
tests: tests,
}
} else {
CompileFilter::Everything
}
}
pub fn matches(&self, target: &Target) -> bool {
match *self {
CompileFilter::Everything => true,
CompileFilter::Only { lib, bins, examples, tests, benches } => {
let list = match *target.kind() {
TargetKind::Bin => bins,
TargetKind::Test => tests,
TargetKind::Bench => benches,
TargetKind::Example => examples,
TargetKind::Lib(..) => return lib,
TargetKind::CustomBuild => return false,
};
list.iter().any(|x| *x == target.name())
}
}
}
}
/// Given the configuration for a build, this function will generate all
/// target/profile combinations needed to be built.
fn generate_targets<'a>(pkg: &'a Package,
mode: CompileMode,
filter: &CompileFilter,
ex_filter: &Vec<String>,
release: bool)
-> CargoResult<Vec<(&'a Target, &'a Profile)>> {
let profiles = pkg.manifest().profiles();
let build = if release {&profiles.release} else {&profiles.dev};
let test = if release {&profiles.bench} else {&profiles.test};
let profile = match mode {
CompileMode::Test => test,
CompileMode::Bench => &profiles.bench,
CompileMode::Build => build,
CompileMode::Doc {.. } => &profiles.doc,
};
return match *filter {
CompileFilter::Everything => {
match mode {
CompileMode::Bench => {
Ok(pkg.targets().iter().filter(|t| t.benched()).map(|t| {
(t, profile)
}).collect::<Vec<_>>())
}
CompileMode::Test => {
let mut base = pkg.targets().iter().filter(|t| {
t.tested() &&!(t.is_example() && ex_filter.iter().any(|x| &x[..] == t.name()))
}).map(|t| {
(t, if t.is_example() {build} else {profile})
}).collect::<Vec<_>>();
// Always compile the library if we're testing everything as
// it'll be needed for doctests
if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
if t.doctested() {
base.push((t, build));
}
}
Ok(base)
}
CompileMode::Build => {
Ok(pkg.targets().iter().filter(|t| {
t.is_bin() || t.is_lib()
}).map(|t| (t, profile)).collect())
}
CompileMode::Doc {.. } => {
Ok(pkg.targets().iter().filter(|t| t.documented())
.map(|t| (t, profile)).collect())
}
}
}
CompileFilter::Only { lib, bins, examples, tests, benches } => {
let mut targets = Vec::new();
if lib {
if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
targets.push((t, profile));
} else {
return Err(human(format!("no library targets found")))
}
}
{
let mut find = |names: &[String], desc, kind, profile| {
for name in names {
let target = pkg.targets().iter().find(|t| {
t.name() == *name && *t.kind() == kind
});
let t = match target {
Some(t) => t,
None => return Err(human(format!("no {} target \
named `{}`",
desc, name))),
};
if kind == TargetKind::Example && ex_filter.iter().any(|x| &x[..] == name) {
continue;
}
debug!("found {} `{}`", desc, name);
targets.push((t, profile));
}
Ok(())
};
try!(find(bins, "bin", TargetKind::Bin, profile));
try!(find(examples, "example", TargetKind::Example, build));
try!(find(tests, "test", TargetKind::Test, test));
try!(find(benches, "bench", TargetKind::Bench, &profiles.bench));
}
Ok(targets)
}
};
}
/// Read the `paths` configuration variable to discover all path overrides that
/// have been configured.
fn source_ids_from_config(config: &Config, cur_path: &Path)
-> CargoResult<Vec<SourceId>> {
let configs = try!(config.values());
debug!("loaded config; configs={:?}", configs);
let config_paths = match configs.get("paths") {
Some(cfg) => cfg,
None => return Ok(Vec::new())
};
let paths = try!(config_paths.list().chain_error(|| {
internal("invalid configuration for the key `paths`")
}));
paths.iter().map(|&(ref s, ref p)| {
// The path listed next to the string is the config file in which the
// key was located, so we want to pop off the `.cargo/config` component
// to get the directory containing the `.cargo` folder.
p.parent().unwrap().parent().unwrap().join(s)
}).filter(|p| {
// Make sure we don't override the local package, even if it's in the
// list of override paths.
cur_path!= &**p
}).map(|p| SourceId::for_path(&p)).collect()
}
/// Parse all config files to learn about build configuration. Currently
/// configured options are:
///
/// * build.jobs
/// * target.$target.ar
/// * target.$target.linker
/// * target.$target.libfoo.metadata
fn scrape_build_config(config: &Config,
jobs: Option<u32>,
target: Option<String>)
-> CargoResult<ops::BuildConfig> {
let cfg_jobs = match try!(config.get_i64("build.jobs")) {
Some((n, p)) => {
if n <= 0 {
return Err(human(format!("build.jobs must be positive, \
but found {} in {:?}", n, p)));
} else if n >= u32::max_value() as i64 {
return Err(human(format!("build.jobs is too large: \
found {} in {:?}", n, p)));
} else {
Some(n as u32)
}
}
None => None,
};
let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32);
let mut base = ops::BuildConfig {
jobs: jobs,
requested_target: target.clone(),
..Default::default()
};
base.host = try!(scrape_target_config(config, &config.rustc_info().host));
base.target = match target.as_ref() {
Some(triple) => try!(scrape_target_config(config, &triple)),
None => base.host.clone(),
};
Ok(base)
}
fn
|
(config: &Config, triple: &str)
-> CargoResult<ops::TargetConfig> {
let key = format!("target.{}", triple);
let mut ret = ops::TargetConfig {
ar: try!(config.get_path(&format!("{}.ar", key))),
linker: try!(config.get_path(&format!("{}.linker", key))),
overrides: HashMap::new(),
};
let table = match try!(config.get_table(&key)) {
Some((table, _)) => table,
None => return Ok(ret),
};
for (lib_name, _) in table.into_iter() {
if lib_name == "ar" || lib_name == "linker" { continue }
let mut output = BuildOutput {
library_paths: Vec::new(),
library_links: Vec::new(),
cfgs: Vec::new(),
metadata: Vec::new(),
};
let key = format!("{}.{}", key, lib_name);
let table = try!(config.get_table(&key)).unwrap().0;
for (k, _) in table.into_iter() {
let key = format!("{}.{}", key, k);
match try!(config.get(&key)).unwrap() {
ConfigValue::String(v, path) => {
if k == "rustc-flags" {
let whence = format!("in `{}` (in {})", key,
path.display());
|
scrape_target_config
|
identifier_name
|
cargo_compile.rs
|
> {
pub config: &'a Config,
/// Number of concurrent jobs to use.
pub jobs: Option<u32>,
/// The target platform to compile for (example: `i686-unknown-linux-gnu`).
pub target: Option<&'a str>,
/// Extra features to build for the root package
pub features: &'a [String],
/// Flag if the default feature should be built for the root package
pub no_default_features: bool,
/// Root package to build (if None it's the current one)
pub spec: &'a [String],
/// Filter to apply to the root package to select which targets will be
/// built.
pub filter: CompileFilter<'a>,
/// Engine which drives compilation
pub exec_engine: Option<Arc<Box<ExecEngine>>>,
/// Whether this is a release build or not
pub release: bool,
/// Mode for this compile.
pub mode: CompileMode,
/// The specified target will be compiled with all the available arguments,
/// note that this only accounts for the *final* invocation of rustc
pub target_rustc_args: Option<&'a [String]>,
}
#[derive(Clone, Copy, PartialEq)]
pub enum CompileMode {
Test,
Build,
Bench,
Doc { deps: bool },
}
pub enum CompileFilter<'a> {
Everything,
Only {
lib: bool,
bins: &'a [String],
examples: &'a [String],
tests: &'a [String],
benches: &'a [String],
}
}
pub fn compile<'a>(manifest_path: &Path,
options: &CompileOptions<'a>)
-> CargoResult<ops::Compilation<'a>> {
debug!("compile; manifest-path={}", manifest_path.display());
let package = try!(Package::for_path(manifest_path, options.config));
debug!("loaded package; package={}", package);
for key in package.manifest().warnings().iter() {
try!(options.config.shell().warn(key))
}
compile_pkg(&package, options)
}
#[allow(deprecated)] // connect => join in 1.3
pub fn compile_pkg<'a>(root_package: &Package,
options: &CompileOptions<'a>)
-> CargoResult<ops::Compilation<'a>> {
let CompileOptions { config, jobs, target, spec, features,
no_default_features, release, mode,
ref filter, ref exec_engine,
ref target_rustc_args } = *options;
let target = target.map(|s| s.to_string());
let features = features.iter().flat_map(|s| {
s.split(' ')
}).map(|s| s.to_string()).collect::<Vec<String>>();
let ex_filter = {
let summary = root_package.manifest().summary();
let all_features = summary.features().get("default")
.map_or(features.clone(), |x| {
if!no_default_features {
features.iter().chain(x.iter()).cloned().collect()
} else {
features.clone()
}
});
summary.examples().iter().filter_map(|(k,v)| {
if v.iter().any(|x|!all_features.contains(x)) { Some(k) } else { None }
}).cloned().collect()
};
if spec.len() > 0 && (no_default_features || features.len() > 0) {
return Err(human("features cannot be modified when the main package \
is not being built"))
}
if jobs == Some(0) {
return Err(human("jobs must be at least 1"))
}
let override_ids = try!(source_ids_from_config(options.config, root_package.root()));
let (packages, resolve_with_overrides, sources) = {
let mut registry = PackageRegistry::new(options.config);
// First, resolve the root_package's *listed* dependencies, as well as
// downloading and updating all remotes and such.
let resolve = try!(ops::resolve_pkg(&mut registry, root_package));
// Second, resolve with precisely what we're doing. Filter out
// transitive dependencies if necessary, specify features, handle
// overrides, etc.
let _p = profile::start("resolving w/ overrides...");
try!(registry.add_overrides(override_ids));
let method = Method::Required{
dev_deps: true, // TODO: remove this option?
features: &features,
uses_default_features:!no_default_features,
};
let resolved_with_overrides =
try!(ops::resolve_with_previous(&mut registry, root_package, method,
Some(&resolve), None));
let packages = try!(ops::get_resolved_packages(&resolved_with_overrides, &mut registry));
(packages, resolved_with_overrides, registry.move_sources())
};
let mut invalid_spec = vec![];
let pkgids = if spec.len() > 0 {
spec.iter().filter_map(|p| {
match resolve_with_overrides.query(&p) {
Ok(p) => Some(p),
Err(..) => { invalid_spec.push(p.to_string()); None }
}
}).collect::<Vec<_>>()
} else {
vec![root_package.package_id()]
};
if spec.len() > 0 && invalid_spec.len() > 0 {
return Err(human(format!("could not find package matching spec `{}`",
invalid_spec.connect(", "))));
}
let to_builds = packages.iter().filter(|p| pkgids.contains(&p.package_id()))
.collect::<Vec<_>>();
let mut general_targets = Vec::new();
let mut package_targets = Vec::new();
match *target_rustc_args {
Some(args) => {
if to_builds.len() == 1 {
let targets = try!(generate_targets(to_builds[0], mode, filter, &ex_filter, release));
if targets.len() == 1 {
let (target, profile) = targets[0];
let mut profile = profile.clone();
profile.rustc_args = Some(args.to_vec());
general_targets.push((target, profile));
} else
|
} else {
panic!("`rustc` should not accept multiple `-p` flags")
}
}
None => {
for &to_build in to_builds.iter() {
let targets = try!(generate_targets(to_build, mode, filter, &ex_filter, release));
package_targets.push((to_build, targets));
}
}
};
for &(target, ref profile) in &general_targets {
for &to_build in to_builds.iter() {
package_targets.push((to_build, vec![(target, profile)]));
}
}
let mut ret = {
let _p = profile::start("compiling");
let mut build_config = try!(scrape_build_config(config, jobs, target));
build_config.exec_engine = exec_engine.clone();
build_config.release = release;
if let CompileMode::Doc { deps } = mode {
build_config.doc_all = deps;
}
try!(ops::compile_targets(&package_targets,
&PackageSet::new(&packages),
&resolve_with_overrides,
&sources,
config,
build_config,
root_package.manifest().profiles(),
))
};
ret.to_doc_test = to_builds.iter().map(|&p| p.clone()).collect();
return Ok(ret);
}
impl<'a> CompileFilter<'a> {
pub fn new(lib_only: bool,
bins: &'a [String],
tests: &'a [String],
examples: &'a [String],
benches: &'a [String]) -> CompileFilter<'a> {
if lib_only ||!bins.is_empty() ||!tests.is_empty() ||
!examples.is_empty() ||!benches.is_empty() {
CompileFilter::Only {
lib: lib_only, bins: bins, examples: examples, benches: benches,
tests: tests,
}
} else {
CompileFilter::Everything
}
}
pub fn matches(&self, target: &Target) -> bool {
match *self {
CompileFilter::Everything => true,
CompileFilter::Only { lib, bins, examples, tests, benches } => {
let list = match *target.kind() {
TargetKind::Bin => bins,
TargetKind::Test => tests,
TargetKind::Bench => benches,
TargetKind::Example => examples,
TargetKind::Lib(..) => return lib,
TargetKind::CustomBuild => return false,
};
list.iter().any(|x| *x == target.name())
}
}
}
}
/// Given the configuration for a build, this function will generate all
/// target/profile combinations needed to be built.
fn generate_targets<'a>(pkg: &'a Package,
mode: CompileMode,
filter: &CompileFilter,
ex_filter: &Vec<String>,
release: bool)
-> CargoResult<Vec<(&'a Target, &'a Profile)>> {
let profiles = pkg.manifest().profiles();
let build = if release {&profiles.release} else {&profiles.dev};
let test = if release {&profiles.bench} else {&profiles.test};
let profile = match mode {
CompileMode::Test => test,
CompileMode::Bench => &profiles.bench,
CompileMode::Build => build,
CompileMode::Doc {.. } => &profiles.doc,
};
return match *filter {
CompileFilter::Everything => {
match mode {
CompileMode::Bench => {
Ok(pkg.targets().iter().filter(|t| t.benched()).map(|t| {
(t, profile)
}).collect::<Vec<_>>())
}
CompileMode::Test => {
let mut base = pkg.targets().iter().filter(|t| {
t.tested() &&!(t.is_example() && ex_filter.iter().any(|x| &x[..] == t.name()))
}).map(|t| {
(t, if t.is_example() {build} else {profile})
}).collect::<Vec<_>>();
// Always compile the library if we're testing everything as
// it'll be needed for doctests
if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
if t.doctested() {
base.push((t, build));
}
}
Ok(base)
}
CompileMode::Build => {
Ok(pkg.targets().iter().filter(|t| {
t.is_bin() || t.is_lib()
}).map(|t| (t, profile)).collect())
}
CompileMode::Doc {.. } => {
Ok(pkg.targets().iter().filter(|t| t.documented())
.map(|t| (t, profile)).collect())
}
}
}
CompileFilter::Only { lib, bins, examples, tests, benches } => {
let mut targets = Vec::new();
if lib {
if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
targets.push((t, profile));
} else {
return Err(human(format!("no library targets found")))
}
}
{
let mut find = |names: &[String], desc, kind, profile| {
for name in names {
let target = pkg.targets().iter().find(|t| {
t.name() == *name && *t.kind() == kind
});
let t = match target {
Some(t) => t,
None => return Err(human(format!("no {} target \
named `{}`",
desc, name))),
};
if kind == TargetKind::Example && ex_filter.iter().any(|x| &x[..] == name) {
continue;
}
debug!("found {} `{}`", desc, name);
targets.push((t, profile));
}
Ok(())
};
try!(find(bins, "bin", TargetKind::Bin, profile));
try!(find(examples, "example", TargetKind::Example, build));
try!(find(tests, "test", TargetKind::Test, test));
try!(find(benches, "bench", TargetKind::Bench, &profiles.bench));
}
Ok(targets)
}
};
}
/// Read the `paths` configuration variable to discover all path overrides that
/// have been configured.
fn source_ids_from_config(config: &Config, cur_path: &Path)
-> CargoResult<Vec<SourceId>> {
let configs = try!(config.values());
debug!("loaded config; configs={:?}", configs);
let config_paths = match configs.get("paths") {
Some(cfg) => cfg,
None => return Ok(Vec::new())
};
let paths = try!(config_paths.list().chain_error(|| {
internal("invalid configuration for the key `paths`")
}));
paths.iter().map(|&(ref s, ref p)| {
// The path listed next to the string is the config file in which the
// key was located, so we want to pop off the `.cargo/config` component
// to get the directory containing the `.cargo` folder.
p.parent().unwrap().parent().unwrap().join(s)
}).filter(|p| {
// Make sure we don't override the local package, even if it's in the
// list of override paths.
cur_path!= &**p
}).map(|p| SourceId::for_path(&p)).collect()
}
/// Parse all config files to learn about build configuration. Currently
/// configured options are:
///
/// * build.jobs
/// * target.$target.ar
/// * target.$target.linker
/// * target.$target.libfoo.metadata
fn scrape_build_config(config: &Config,
jobs: Option<u32>,
target: Option<String>)
-> CargoResult<ops::BuildConfig> {
let cfg_jobs = match try!(config.get_i64("build.jobs")) {
Some((n, p)) => {
if n <= 0 {
return Err(human(format!("build.jobs must be positive, \
but found {} in {:?}", n, p)));
} else if n >= u32::max_value() as i64 {
return Err(human(format!("build.jobs is too large: \
found {} in {:?}", n, p)));
} else {
Some(n as u32)
}
}
None => None,
};
let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32);
let mut base = ops::BuildConfig {
jobs: jobs,
requested_target: target.clone(),
..Default::default()
};
base.host = try!(scrape_target_config(config, &config.rustc_info().host));
base.target = match target.as_ref() {
Some(triple) => try!(scrape_target_config(config, &triple)),
None => base.host.clone(),
};
Ok(base)
}
fn scrape_target_config(config: &Config, triple: &str)
-> CargoResult<ops::TargetConfig> {
let key = format!("target.{}", triple);
let mut ret = ops::TargetConfig {
ar: try!(config.get_path(&format!("{}.ar", key))),
linker: try!(config.get_path(&format!("{}.linker", key))),
overrides: HashMap::new(),
};
let table = match try!(config.get_table(&key)) {
Some((table, _)) => table,
None => return Ok(ret),
};
for (lib_name, _) in table.into_iter() {
if lib_name == "ar" || lib_name == "linker" { continue }
let mut output = BuildOutput {
library_paths: Vec::new(),
library_links: Vec::new(),
cfgs: Vec::new(),
metadata: Vec::new(),
};
let key = format!("{}.{}", key, lib_name);
let table = try!(config.get_table(&key)).unwrap().0;
for (k, _) in table.into_iter() {
let key = format!("{}.{}", key, k);
match try!(config.get(&key)).unwrap() {
ConfigValue::String(v, path) => {
if k == "rustc-flags" {
let whence = format!("in `{}` (in {})", key,
path.display());
|
{
return Err(human("extra arguments to `rustc` can only be \
passed to one target, consider \
filtering\nthe package by passing e.g. \
`--lib` or `--bin NAME` to specify \
a single target"))
}
|
conditional_block
|
cargo_compile.rs
|
a> {
pub config: &'a Config,
/// Number of concurrent jobs to use.
pub jobs: Option<u32>,
/// The target platform to compile for (example: `i686-unknown-linux-gnu`).
pub target: Option<&'a str>,
/// Extra features to build for the root package
pub features: &'a [String],
/// Flag if the default feature should be built for the root package
pub no_default_features: bool,
/// Root package to build (if None it's the current one)
pub spec: &'a [String],
/// Filter to apply to the root package to select which targets will be
/// built.
pub filter: CompileFilter<'a>,
/// Engine which drives compilation
pub exec_engine: Option<Arc<Box<ExecEngine>>>,
/// Whether this is a release build or not
pub release: bool,
/// Mode for this compile.
pub mode: CompileMode,
/// The specified target will be compiled with all the available arguments,
/// note that this only accounts for the *final* invocation of rustc
pub target_rustc_args: Option<&'a [String]>,
}
#[derive(Clone, Copy, PartialEq)]
pub enum CompileMode {
Test,
Build,
Bench,
Doc { deps: bool },
}
pub enum CompileFilter<'a> {
Everything,
Only {
lib: bool,
bins: &'a [String],
examples: &'a [String],
tests: &'a [String],
benches: &'a [String],
}
}
pub fn compile<'a>(manifest_path: &Path,
options: &CompileOptions<'a>)
-> CargoResult<ops::Compilation<'a>> {
debug!("compile; manifest-path={}", manifest_path.display());
let package = try!(Package::for_path(manifest_path, options.config));
debug!("loaded package; package={}", package);
for key in package.manifest().warnings().iter() {
try!(options.config.shell().warn(key))
}
compile_pkg(&package, options)
}
#[allow(deprecated)] // connect => join in 1.3
pub fn compile_pkg<'a>(root_package: &Package,
options: &CompileOptions<'a>)
-> CargoResult<ops::Compilation<'a>> {
let CompileOptions { config, jobs, target, spec, features,
no_default_features, release, mode,
ref filter, ref exec_engine,
ref target_rustc_args } = *options;
let target = target.map(|s| s.to_string());
let features = features.iter().flat_map(|s| {
s.split(' ')
}).map(|s| s.to_string()).collect::<Vec<String>>();
let ex_filter = {
let summary = root_package.manifest().summary();
let all_features = summary.features().get("default")
.map_or(features.clone(), |x| {
if!no_default_features {
features.iter().chain(x.iter()).cloned().collect()
} else {
features.clone()
}
});
summary.examples().iter().filter_map(|(k,v)| {
if v.iter().any(|x|!all_features.contains(x)) { Some(k) } else { None }
}).cloned().collect()
};
if spec.len() > 0 && (no_default_features || features.len() > 0) {
return Err(human("features cannot be modified when the main package \
is not being built"))
}
if jobs == Some(0) {
return Err(human("jobs must be at least 1"))
}
let override_ids = try!(source_ids_from_config(options.config, root_package.root()));
let (packages, resolve_with_overrides, sources) = {
let mut registry = PackageRegistry::new(options.config);
// First, resolve the root_package's *listed* dependencies, as well as
// downloading and updating all remotes and such.
let resolve = try!(ops::resolve_pkg(&mut registry, root_package));
// Second, resolve with precisely what we're doing. Filter out
// transitive dependencies if necessary, specify features, handle
// overrides, etc.
let _p = profile::start("resolving w/ overrides...");
try!(registry.add_overrides(override_ids));
let method = Method::Required{
dev_deps: true, // TODO: remove this option?
features: &features,
uses_default_features:!no_default_features,
};
let resolved_with_overrides =
try!(ops::resolve_with_previous(&mut registry, root_package, method,
Some(&resolve), None));
let packages = try!(ops::get_resolved_packages(&resolved_with_overrides, &mut registry));
(packages, resolved_with_overrides, registry.move_sources())
};
let mut invalid_spec = vec![];
let pkgids = if spec.len() > 0 {
spec.iter().filter_map(|p| {
match resolve_with_overrides.query(&p) {
Ok(p) => Some(p),
Err(..) => { invalid_spec.push(p.to_string()); None }
}
}).collect::<Vec<_>>()
} else {
vec![root_package.package_id()]
};
if spec.len() > 0 && invalid_spec.len() > 0 {
return Err(human(format!("could not find package matching spec `{}`",
invalid_spec.connect(", "))));
}
let to_builds = packages.iter().filter(|p| pkgids.contains(&p.package_id()))
.collect::<Vec<_>>();
let mut general_targets = Vec::new();
let mut package_targets = Vec::new();
match *target_rustc_args {
Some(args) => {
if to_builds.len() == 1 {
let targets = try!(generate_targets(to_builds[0], mode, filter, &ex_filter, release));
if targets.len() == 1 {
let (target, profile) = targets[0];
let mut profile = profile.clone();
profile.rustc_args = Some(args.to_vec());
general_targets.push((target, profile));
} else {
return Err(human("extra arguments to `rustc` can only be \
passed to one target, consider \
filtering\nthe package by passing e.g. \
`--lib` or `--bin NAME` to specify \
a single target"))
}
} else {
panic!("`rustc` should not accept multiple `-p` flags")
}
}
None => {
for &to_build in to_builds.iter() {
let targets = try!(generate_targets(to_build, mode, filter, &ex_filter, release));
package_targets.push((to_build, targets));
}
}
};
for &(target, ref profile) in &general_targets {
for &to_build in to_builds.iter() {
package_targets.push((to_build, vec![(target, profile)]));
}
}
let mut ret = {
let _p = profile::start("compiling");
let mut build_config = try!(scrape_build_config(config, jobs, target));
build_config.exec_engine = exec_engine.clone();
build_config.release = release;
if let CompileMode::Doc { deps } = mode {
build_config.doc_all = deps;
}
try!(ops::compile_targets(&package_targets,
&PackageSet::new(&packages),
&resolve_with_overrides,
&sources,
config,
build_config,
root_package.manifest().profiles(),
))
};
ret.to_doc_test = to_builds.iter().map(|&p| p.clone()).collect();
return Ok(ret);
}
impl<'a> CompileFilter<'a> {
pub fn new(lib_only: bool,
bins: &'a [String],
tests: &'a [String],
examples: &'a [String],
benches: &'a [String]) -> CompileFilter<'a> {
if lib_only ||!bins.is_empty() ||!tests.is_empty() ||
!examples.is_empty() ||!benches.is_empty() {
CompileFilter::Only {
lib: lib_only, bins: bins, examples: examples, benches: benches,
tests: tests,
}
} else {
CompileFilter::Everything
}
}
pub fn matches(&self, target: &Target) -> bool {
match *self {
CompileFilter::Everything => true,
CompileFilter::Only { lib, bins, examples, tests, benches } => {
let list = match *target.kind() {
TargetKind::Bin => bins,
TargetKind::Test => tests,
TargetKind::Bench => benches,
TargetKind::Example => examples,
TargetKind::Lib(..) => return lib,
TargetKind::CustomBuild => return false,
};
list.iter().any(|x| *x == target.name())
}
}
}
}
/// Given the configuration for a build, this function will generate all
/// target/profile combinations needed to be built.
fn generate_targets<'a>(pkg: &'a Package,
mode: CompileMode,
filter: &CompileFilter,
ex_filter: &Vec<String>,
release: bool)
-> CargoResult<Vec<(&'a Target, &'a Profile)>> {
let profiles = pkg.manifest().profiles();
let build = if release {&profiles.release} else {&profiles.dev};
let test = if release {&profiles.bench} else {&profiles.test};
let profile = match mode {
CompileMode::Test => test,
CompileMode::Bench => &profiles.bench,
CompileMode::Build => build,
CompileMode::Doc {.. } => &profiles.doc,
};
return match *filter {
CompileFilter::Everything => {
match mode {
CompileMode::Bench => {
Ok(pkg.targets().iter().filter(|t| t.benched()).map(|t| {
(t, profile)
}).collect::<Vec<_>>())
}
CompileMode::Test => {
let mut base = pkg.targets().iter().filter(|t| {
t.tested() &&!(t.is_example() && ex_filter.iter().any(|x| &x[..] == t.name()))
}).map(|t| {
(t, if t.is_example() {build} else {profile})
}).collect::<Vec<_>>();
// Always compile the library if we're testing everything as
// it'll be needed for doctests
if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
if t.doctested() {
base.push((t, build));
}
}
Ok(base)
}
CompileMode::Build => {
Ok(pkg.targets().iter().filter(|t| {
t.is_bin() || t.is_lib()
}).map(|t| (t, profile)).collect())
}
CompileMode::Doc {.. } => {
Ok(pkg.targets().iter().filter(|t| t.documented())
.map(|t| (t, profile)).collect())
}
}
}
CompileFilter::Only { lib, bins, examples, tests, benches } => {
let mut targets = Vec::new();
if lib {
if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
targets.push((t, profile));
} else {
return Err(human(format!("no library targets found")))
}
}
{
let mut find = |names: &[String], desc, kind, profile| {
for name in names {
let target = pkg.targets().iter().find(|t| {
t.name() == *name && *t.kind() == kind
});
let t = match target {
Some(t) => t,
None => return Err(human(format!("no {} target \
named `{}`",
desc, name))),
};
if kind == TargetKind::Example && ex_filter.iter().any(|x| &x[..] == name) {
continue;
}
debug!("found {} `{}`", desc, name);
targets.push((t, profile));
}
Ok(())
};
try!(find(bins, "bin", TargetKind::Bin, profile));
try!(find(examples, "example", TargetKind::Example, build));
try!(find(tests, "test", TargetKind::Test, test));
try!(find(benches, "bench", TargetKind::Bench, &profiles.bench));
}
Ok(targets)
}
};
}
/// Read the `paths` configuration variable to discover all path overrides that
/// have been configured.
fn source_ids_from_config(config: &Config, cur_path: &Path)
-> CargoResult<Vec<SourceId>> {
let configs = try!(config.values());
debug!("loaded config; configs={:?}", configs);
let config_paths = match configs.get("paths") {
Some(cfg) => cfg,
None => return Ok(Vec::new())
};
let paths = try!(config_paths.list().chain_error(|| {
internal("invalid configuration for the key `paths`")
}));
paths.iter().map(|&(ref s, ref p)| {
// The path listed next to the string is the config file in which the
|
// Make sure we don't override the local package, even if it's in the
// list of override paths.
cur_path!= &**p
}).map(|p| SourceId::for_path(&p)).collect()
}
/// Parse all config files to learn about build configuration. Currently
/// configured options are:
///
/// * build.jobs
/// * target.$target.ar
/// * target.$target.linker
/// * target.$target.libfoo.metadata
fn scrape_build_config(config: &Config,
jobs: Option<u32>,
target: Option<String>)
-> CargoResult<ops::BuildConfig> {
let cfg_jobs = match try!(config.get_i64("build.jobs")) {
Some((n, p)) => {
if n <= 0 {
return Err(human(format!("build.jobs must be positive, \
but found {} in {:?}", n, p)));
} else if n >= u32::max_value() as i64 {
return Err(human(format!("build.jobs is too large: \
found {} in {:?}", n, p)));
} else {
Some(n as u32)
}
}
None => None,
};
let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32);
let mut base = ops::BuildConfig {
jobs: jobs,
requested_target: target.clone(),
..Default::default()
};
base.host = try!(scrape_target_config(config, &config.rustc_info().host));
base.target = match target.as_ref() {
Some(triple) => try!(scrape_target_config(config, &triple)),
None => base.host.clone(),
};
Ok(base)
}
fn scrape_target_config(config: &Config, triple: &str)
-> CargoResult<ops::TargetConfig> {
let key = format!("target.{}", triple);
let mut ret = ops::TargetConfig {
ar: try!(config.get_path(&format!("{}.ar", key))),
linker: try!(config.get_path(&format!("{}.linker", key))),
overrides: HashMap::new(),
};
let table = match try!(config.get_table(&key)) {
Some((table, _)) => table,
None => return Ok(ret),
};
for (lib_name, _) in table.into_iter() {
if lib_name == "ar" || lib_name == "linker" { continue }
let mut output = BuildOutput {
library_paths: Vec::new(),
library_links: Vec::new(),
cfgs: Vec::new(),
metadata: Vec::new(),
};
let key = format!("{}.{}", key, lib_name);
let table = try!(config.get_table(&key)).unwrap().0;
for (k, _) in table.into_iter() {
let key = format!("{}.{}", key, k);
match try!(config.get(&key)).unwrap() {
ConfigValue::String(v, path) => {
if k == "rustc-flags" {
let whence = format!("in `{}` (in {})", key,
path.display());
|
// key was located, so we want to pop off the `.cargo/config` component
// to get the directory containing the `.cargo` folder.
p.parent().unwrap().parent().unwrap().join(s)
}).filter(|p| {
|
random_line_split
|
cargo_compile.rs
|
> {
pub config: &'a Config,
/// Number of concurrent jobs to use.
pub jobs: Option<u32>,
/// The target platform to compile for (example: `i686-unknown-linux-gnu`).
pub target: Option<&'a str>,
/// Extra features to build for the root package
pub features: &'a [String],
/// Flag if the default feature should be built for the root package
pub no_default_features: bool,
/// Root package to build (if None it's the current one)
pub spec: &'a [String],
/// Filter to apply to the root package to select which targets will be
/// built.
pub filter: CompileFilter<'a>,
/// Engine which drives compilation
pub exec_engine: Option<Arc<Box<ExecEngine>>>,
/// Whether this is a release build or not
pub release: bool,
/// Mode for this compile.
pub mode: CompileMode,
/// The specified target will be compiled with all the available arguments,
/// note that this only accounts for the *final* invocation of rustc
pub target_rustc_args: Option<&'a [String]>,
}
#[derive(Clone, Copy, PartialEq)]
pub enum CompileMode {
Test,
Build,
Bench,
Doc { deps: bool },
}
pub enum CompileFilter<'a> {
Everything,
Only {
lib: bool,
bins: &'a [String],
examples: &'a [String],
tests: &'a [String],
benches: &'a [String],
}
}
pub fn compile<'a>(manifest_path: &Path,
options: &CompileOptions<'a>)
-> CargoResult<ops::Compilation<'a>> {
debug!("compile; manifest-path={}", manifest_path.display());
let package = try!(Package::for_path(manifest_path, options.config));
debug!("loaded package; package={}", package);
for key in package.manifest().warnings().iter() {
try!(options.config.shell().warn(key))
}
compile_pkg(&package, options)
}
#[allow(deprecated)] // connect => join in 1.3
pub fn compile_pkg<'a>(root_package: &Package,
options: &CompileOptions<'a>)
-> CargoResult<ops::Compilation<'a>> {
let CompileOptions { config, jobs, target, spec, features,
no_default_features, release, mode,
ref filter, ref exec_engine,
ref target_rustc_args } = *options;
let target = target.map(|s| s.to_string());
let features = features.iter().flat_map(|s| {
s.split(' ')
}).map(|s| s.to_string()).collect::<Vec<String>>();
let ex_filter = {
let summary = root_package.manifest().summary();
let all_features = summary.features().get("default")
.map_or(features.clone(), |x| {
if!no_default_features {
features.iter().chain(x.iter()).cloned().collect()
} else {
features.clone()
}
});
summary.examples().iter().filter_map(|(k,v)| {
if v.iter().any(|x|!all_features.contains(x)) { Some(k) } else { None }
}).cloned().collect()
};
if spec.len() > 0 && (no_default_features || features.len() > 0) {
return Err(human("features cannot be modified when the main package \
is not being built"))
}
if jobs == Some(0) {
return Err(human("jobs must be at least 1"))
}
let override_ids = try!(source_ids_from_config(options.config, root_package.root()));
let (packages, resolve_with_overrides, sources) = {
let mut registry = PackageRegistry::new(options.config);
// First, resolve the root_package's *listed* dependencies, as well as
// downloading and updating all remotes and such.
let resolve = try!(ops::resolve_pkg(&mut registry, root_package));
// Second, resolve with precisely what we're doing. Filter out
// transitive dependencies if necessary, specify features, handle
// overrides, etc.
let _p = profile::start("resolving w/ overrides...");
try!(registry.add_overrides(override_ids));
let method = Method::Required{
dev_deps: true, // TODO: remove this option?
features: &features,
uses_default_features:!no_default_features,
};
let resolved_with_overrides =
try!(ops::resolve_with_previous(&mut registry, root_package, method,
Some(&resolve), None));
let packages = try!(ops::get_resolved_packages(&resolved_with_overrides, &mut registry));
(packages, resolved_with_overrides, registry.move_sources())
};
let mut invalid_spec = vec![];
let pkgids = if spec.len() > 0 {
spec.iter().filter_map(|p| {
match resolve_with_overrides.query(&p) {
Ok(p) => Some(p),
Err(..) => { invalid_spec.push(p.to_string()); None }
}
}).collect::<Vec<_>>()
} else {
vec![root_package.package_id()]
};
if spec.len() > 0 && invalid_spec.len() > 0 {
return Err(human(format!("could not find package matching spec `{}`",
invalid_spec.connect(", "))));
}
let to_builds = packages.iter().filter(|p| pkgids.contains(&p.package_id()))
.collect::<Vec<_>>();
let mut general_targets = Vec::new();
let mut package_targets = Vec::new();
match *target_rustc_args {
Some(args) => {
if to_builds.len() == 1 {
let targets = try!(generate_targets(to_builds[0], mode, filter, &ex_filter, release));
if targets.len() == 1 {
let (target, profile) = targets[0];
let mut profile = profile.clone();
profile.rustc_args = Some(args.to_vec());
general_targets.push((target, profile));
} else {
return Err(human("extra arguments to `rustc` can only be \
passed to one target, consider \
filtering\nthe package by passing e.g. \
`--lib` or `--bin NAME` to specify \
a single target"))
}
} else {
panic!("`rustc` should not accept multiple `-p` flags")
}
}
None => {
for &to_build in to_builds.iter() {
let targets = try!(generate_targets(to_build, mode, filter, &ex_filter, release));
package_targets.push((to_build, targets));
}
}
};
for &(target, ref profile) in &general_targets {
for &to_build in to_builds.iter() {
package_targets.push((to_build, vec![(target, profile)]));
}
}
let mut ret = {
let _p = profile::start("compiling");
let mut build_config = try!(scrape_build_config(config, jobs, target));
build_config.exec_engine = exec_engine.clone();
build_config.release = release;
if let CompileMode::Doc { deps } = mode {
build_config.doc_all = deps;
}
try!(ops::compile_targets(&package_targets,
&PackageSet::new(&packages),
&resolve_with_overrides,
&sources,
config,
build_config,
root_package.manifest().profiles(),
))
};
ret.to_doc_test = to_builds.iter().map(|&p| p.clone()).collect();
return Ok(ret);
}
impl<'a> CompileFilter<'a> {
pub fn new(lib_only: bool,
bins: &'a [String],
tests: &'a [String],
examples: &'a [String],
benches: &'a [String]) -> CompileFilter<'a> {
if lib_only ||!bins.is_empty() ||!tests.is_empty() ||
!examples.is_empty() ||!benches.is_empty() {
CompileFilter::Only {
lib: lib_only, bins: bins, examples: examples, benches: benches,
tests: tests,
}
} else {
CompileFilter::Everything
}
}
pub fn matches(&self, target: &Target) -> bool {
match *self {
CompileFilter::Everything => true,
CompileFilter::Only { lib, bins, examples, tests, benches } => {
let list = match *target.kind() {
TargetKind::Bin => bins,
TargetKind::Test => tests,
TargetKind::Bench => benches,
TargetKind::Example => examples,
TargetKind::Lib(..) => return lib,
TargetKind::CustomBuild => return false,
};
list.iter().any(|x| *x == target.name())
}
}
}
}
/// Given the configuration for a build, this function will generate all
/// target/profile combinations needed to be built.
fn generate_targets<'a>(pkg: &'a Package,
mode: CompileMode,
filter: &CompileFilter,
ex_filter: &Vec<String>,
release: bool)
-> CargoResult<Vec<(&'a Target, &'a Profile)>> {
let profiles = pkg.manifest().profiles();
let build = if release {&profiles.release} else {&profiles.dev};
let test = if release {&profiles.bench} else {&profiles.test};
let profile = match mode {
CompileMode::Test => test,
CompileMode::Bench => &profiles.bench,
CompileMode::Build => build,
CompileMode::Doc {.. } => &profiles.doc,
};
return match *filter {
CompileFilter::Everything => {
match mode {
CompileMode::Bench => {
Ok(pkg.targets().iter().filter(|t| t.benched()).map(|t| {
(t, profile)
}).collect::<Vec<_>>())
}
CompileMode::Test => {
let mut base = pkg.targets().iter().filter(|t| {
t.tested() &&!(t.is_example() && ex_filter.iter().any(|x| &x[..] == t.name()))
}).map(|t| {
(t, if t.is_example() {build} else {profile})
}).collect::<Vec<_>>();
// Always compile the library if we're testing everything as
// it'll be needed for doctests
if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
if t.doctested() {
base.push((t, build));
}
}
Ok(base)
}
CompileMode::Build => {
Ok(pkg.targets().iter().filter(|t| {
t.is_bin() || t.is_lib()
}).map(|t| (t, profile)).collect())
}
CompileMode::Doc {.. } => {
Ok(pkg.targets().iter().filter(|t| t.documented())
.map(|t| (t, profile)).collect())
}
}
}
CompileFilter::Only { lib, bins, examples, tests, benches } => {
let mut targets = Vec::new();
if lib {
if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
targets.push((t, profile));
} else {
return Err(human(format!("no library targets found")))
}
}
{
let mut find = |names: &[String], desc, kind, profile| {
for name in names {
let target = pkg.targets().iter().find(|t| {
t.name() == *name && *t.kind() == kind
});
let t = match target {
Some(t) => t,
None => return Err(human(format!("no {} target \
named `{}`",
desc, name))),
};
if kind == TargetKind::Example && ex_filter.iter().any(|x| &x[..] == name) {
continue;
}
debug!("found {} `{}`", desc, name);
targets.push((t, profile));
}
Ok(())
};
try!(find(bins, "bin", TargetKind::Bin, profile));
try!(find(examples, "example", TargetKind::Example, build));
try!(find(tests, "test", TargetKind::Test, test));
try!(find(benches, "bench", TargetKind::Bench, &profiles.bench));
}
Ok(targets)
}
};
}
/// Read the `paths` configuration variable to discover all path overrides that
/// have been configured.
fn source_ids_from_config(config: &Config, cur_path: &Path)
-> CargoResult<Vec<SourceId>>
|
cur_path!= &**p
}).map(|p| SourceId::for_path(&p)).collect()
}
/// Parse all config files to learn about build configuration. Currently
/// configured options are:
///
/// * build.jobs
/// * target.$target.ar
/// * target.$target.linker
/// * target.$target.libfoo.metadata
fn scrape_build_config(config: &Config,
jobs: Option<u32>,
target: Option<String>)
-> CargoResult<ops::BuildConfig> {
let cfg_jobs = match try!(config.get_i64("build.jobs")) {
Some((n, p)) => {
if n <= 0 {
return Err(human(format!("build.jobs must be positive, \
but found {} in {:?}", n, p)));
} else if n >= u32::max_value() as i64 {
return Err(human(format!("build.jobs is too large: \
found {} in {:?}", n, p)));
} else {
Some(n as u32)
}
}
None => None,
};
let jobs = jobs.or(cfg_jobs).unwrap_or(::num_cpus::get() as u32);
let mut base = ops::BuildConfig {
jobs: jobs,
requested_target: target.clone(),
..Default::default()
};
base.host = try!(scrape_target_config(config, &config.rustc_info().host));
base.target = match target.as_ref() {
Some(triple) => try!(scrape_target_config(config, &triple)),
None => base.host.clone(),
};
Ok(base)
}
fn scrape_target_config(config: &Config, triple: &str)
-> CargoResult<ops::TargetConfig> {
let key = format!("target.{}", triple);
let mut ret = ops::TargetConfig {
ar: try!(config.get_path(&format!("{}.ar", key))),
linker: try!(config.get_path(&format!("{}.linker", key))),
overrides: HashMap::new(),
};
let table = match try!(config.get_table(&key)) {
Some((table, _)) => table,
None => return Ok(ret),
};
for (lib_name, _) in table.into_iter() {
if lib_name == "ar" || lib_name == "linker" { continue }
let mut output = BuildOutput {
library_paths: Vec::new(),
library_links: Vec::new(),
cfgs: Vec::new(),
metadata: Vec::new(),
};
let key = format!("{}.{}", key, lib_name);
let table = try!(config.get_table(&key)).unwrap().0;
for (k, _) in table.into_iter() {
let key = format!("{}.{}", key, k);
match try!(config.get(&key)).unwrap() {
ConfigValue::String(v, path) => {
if k == "rustc-flags" {
let whence = format!("in `{}` (in {})", key,
path.display());
|
{
let configs = try!(config.values());
debug!("loaded config; configs={:?}", configs);
let config_paths = match configs.get("paths") {
Some(cfg) => cfg,
None => return Ok(Vec::new())
};
let paths = try!(config_paths.list().chain_error(|| {
internal("invalid configuration for the key `paths`")
}));
paths.iter().map(|&(ref s, ref p)| {
// The path listed next to the string is the config file in which the
// key was located, so we want to pop off the `.cargo/config` component
// to get the directory containing the `.cargo` folder.
p.parent().unwrap().parent().unwrap().join(s)
}).filter(|p| {
// Make sure we don't override the local package, even if it's in the
// list of override paths.
|
identifier_body
|
consumer.rs
|
//! Kafka Consumer
//!
//! A simple consumer based on KafkaClient. Accepts an instance of KafkaClient, a group and a
//! topic. Partitions can be specfied using builder pattern (Assumes all partitions if not
//! specfied).
//!
//! # Example
//!
//! ```no_run
//! let mut client = kafka::client::KafkaClient::new(vec!("localhost:9092".to_string()));
//! let res = client.load_metadata_all();
//! let con = kafka::consumer::Consumer::new(client, "test-group".to_string(), "my-topic".to_string())
//! .partition(0);
//! for msg in con {
//! println!("{:?}", msg);
//! }
//! ```
//!
//! Consumer auto-commits the offsets afer consuming COMMIT_INTERVAL messages (Currently set at
//! 100)
//!
//! Consumer implements Iterator.
use std::collections::HashMap;
use error::Result;
use utils::{TopicMessage, TopicPartitionOffset};
use client::KafkaClient;
const COMMIT_INTERVAL: i32 = 100; // Commit after every 100 message
#[derive(Default, Debug)]
pub struct Consumer {
client: KafkaClient,
group: String,
topic: String,
partitions: Vec<i32>,
initialized: bool,
messages: Vec<TopicMessage>,
index: usize,
offsets: HashMap<i32, i64>,
consumed: i32
}
impl Consumer {
/// Constructor
///
/// Create a new consumer. Expects a KafkaClient, group, and topic as arguments.
pub fn new(client: KafkaClient, group: String, topic: String) -> Consumer {
Consumer{
client: client,
group: group,
topic: topic,
initialized: false,
index: 0,
..Consumer::default()
}
}
/// Set the partitions of this consumer.
///
/// If this function is never called, all partitions are assumed.
/// This function call be called multiple times to add more than 1 partitions.
pub fn partition(mut self, partition: i32) -> Consumer {
self.partitions.push(partition);
self
}
fn
|
(&mut self) -> Result<()> {
let tpos = self.offsets.iter()
.map(|(p, o)| TopicPartitionOffset{
topic: self.topic.clone(),
partition: p.clone(),
offset: o.clone()
})
.collect();
self.client.commit_offsets(self.group.clone(), tpos)
}
fn fetch_offsets(&mut self) -> Result<()> {
let tpos = try!(self.client.fetch_group_topic_offset(self.group.clone(), self.topic.clone()));
if self.partitions.len() == 0 {
self.partitions = self.client.topic_partitions.get(&self.topic).unwrap_or(&vec!()).clone();
}
for tpo in tpos {
if self.partitions.contains(&tpo.partition) {
self.offsets.insert(tpo.partition, tpo.offset);
}
}
Ok(())
}
fn make_request(&mut self) -> Result<()>{
if! self.initialized {
try!(self.fetch_offsets());
}
let tpos = self.offsets.iter()
.map(|(p, o)| TopicPartitionOffset{
topic: self.topic.clone(),
partition: p.clone(),
offset: o.clone()
})
.collect();
self.messages = try!(self.client.fetch_messages_multi(tpos));
self.initialized = true;
self.index = 0;
Ok(())
}
}
impl Iterator for Consumer {
type Item = TopicMessage;
fn next(&mut self) -> Option<TopicMessage> {
if self.initialized {
self.index += 1;
self.consumed += 1;
if self.consumed % COMMIT_INTERVAL == 0 {
let _ = self.commit_offsets();
}
if self.index <= self.messages.len() {
if self.messages[self.index-1].error.is_none() {
let curr = self.offsets.entry(self.messages[self.index-1].partition).or_insert(0);
*curr = *curr+1;
return Some(self.messages[self.index-1].clone());
}
return None;
}
let _ = self.commit_offsets();
if self.messages.len() == 0 {
return None;
}
}
match self.make_request() {
Err(_) => None,
Ok(_) => self.next()
}
}
}
|
commit_offsets
|
identifier_name
|
consumer.rs
|
//! Kafka Consumer
//!
//! A simple consumer based on KafkaClient. Accepts an instance of KafkaClient, a group and a
//! topic. Partitions can be specfied using builder pattern (Assumes all partitions if not
//! specfied).
//!
//! # Example
//!
//! ```no_run
//! let mut client = kafka::client::KafkaClient::new(vec!("localhost:9092".to_string()));
//! let res = client.load_metadata_all();
//! let con = kafka::consumer::Consumer::new(client, "test-group".to_string(), "my-topic".to_string())
//! .partition(0);
//! for msg in con {
//! println!("{:?}", msg);
//! }
//! ```
//!
//! Consumer auto-commits the offsets afer consuming COMMIT_INTERVAL messages (Currently set at
//! 100)
//!
//! Consumer implements Iterator.
use std::collections::HashMap;
use error::Result;
use utils::{TopicMessage, TopicPartitionOffset};
use client::KafkaClient;
const COMMIT_INTERVAL: i32 = 100; // Commit after every 100 message
#[derive(Default, Debug)]
pub struct Consumer {
client: KafkaClient,
group: String,
topic: String,
partitions: Vec<i32>,
initialized: bool,
messages: Vec<TopicMessage>,
index: usize,
offsets: HashMap<i32, i64>,
consumed: i32
}
impl Consumer {
/// Constructor
///
/// Create a new consumer. Expects a KafkaClient, group, and topic as arguments.
pub fn new(client: KafkaClient, group: String, topic: String) -> Consumer {
Consumer{
client: client,
group: group,
topic: topic,
initialized: false,
index: 0,
..Consumer::default()
}
}
/// Set the partitions of this consumer.
///
/// If this function is never called, all partitions are assumed.
/// This function call be called multiple times to add more than 1 partitions.
pub fn partition(mut self, partition: i32) -> Consumer {
self.partitions.push(partition);
self
}
fn commit_offsets(&mut self) -> Result<()> {
let tpos = self.offsets.iter()
.map(|(p, o)| TopicPartitionOffset{
topic: self.topic.clone(),
partition: p.clone(),
offset: o.clone()
})
.collect();
self.client.commit_offsets(self.group.clone(), tpos)
}
fn fetch_offsets(&mut self) -> Result<()> {
let tpos = try!(self.client.fetch_group_topic_offset(self.group.clone(), self.topic.clone()));
if self.partitions.len() == 0 {
self.partitions = self.client.topic_partitions.get(&self.topic).unwrap_or(&vec!()).clone();
}
for tpo in tpos {
if self.partitions.contains(&tpo.partition) {
self.offsets.insert(tpo.partition, tpo.offset);
}
}
Ok(())
}
fn make_request(&mut self) -> Result<()>
|
}
impl Iterator for Consumer {
type Item = TopicMessage;
fn next(&mut self) -> Option<TopicMessage> {
if self.initialized {
self.index += 1;
self.consumed += 1;
if self.consumed % COMMIT_INTERVAL == 0 {
let _ = self.commit_offsets();
}
if self.index <= self.messages.len() {
if self.messages[self.index-1].error.is_none() {
let curr = self.offsets.entry(self.messages[self.index-1].partition).or_insert(0);
*curr = *curr+1;
return Some(self.messages[self.index-1].clone());
}
return None;
}
let _ = self.commit_offsets();
if self.messages.len() == 0 {
return None;
}
}
match self.make_request() {
Err(_) => None,
Ok(_) => self.next()
}
}
}
|
{
if ! self.initialized {
try!(self.fetch_offsets());
}
let tpos = self.offsets.iter()
.map(|(p, o)| TopicPartitionOffset{
topic: self.topic.clone(),
partition: p.clone(),
offset: o.clone()
})
.collect();
self.messages = try!(self.client.fetch_messages_multi(tpos));
self.initialized = true;
self.index = 0;
Ok(())
}
|
identifier_body
|
consumer.rs
|
//! Kafka Consumer
//!
//! A simple consumer based on KafkaClient. Accepts an instance of KafkaClient, a group and a
//! topic. Partitions can be specfied using builder pattern (Assumes all partitions if not
//! specfied).
//!
//! # Example
//!
//! ```no_run
//! let mut client = kafka::client::KafkaClient::new(vec!("localhost:9092".to_string()));
//! let res = client.load_metadata_all();
//! let con = kafka::consumer::Consumer::new(client, "test-group".to_string(), "my-topic".to_string())
//! .partition(0);
//! for msg in con {
//! println!("{:?}", msg);
//! }
//! ```
//!
//! Consumer auto-commits the offsets afer consuming COMMIT_INTERVAL messages (Currently set at
//! 100)
//!
//! Consumer implements Iterator.
use std::collections::HashMap;
use error::Result;
use utils::{TopicMessage, TopicPartitionOffset};
use client::KafkaClient;
const COMMIT_INTERVAL: i32 = 100; // Commit after every 100 message
#[derive(Default, Debug)]
pub struct Consumer {
client: KafkaClient,
group: String,
topic: String,
partitions: Vec<i32>,
initialized: bool,
messages: Vec<TopicMessage>,
index: usize,
offsets: HashMap<i32, i64>,
consumed: i32
}
impl Consumer {
/// Constructor
///
/// Create a new consumer. Expects a KafkaClient, group, and topic as arguments.
pub fn new(client: KafkaClient, group: String, topic: String) -> Consumer {
Consumer{
client: client,
group: group,
topic: topic,
initialized: false,
index: 0,
..Consumer::default()
}
}
/// Set the partitions of this consumer.
///
/// If this function is never called, all partitions are assumed.
/// This function call be called multiple times to add more than 1 partitions.
pub fn partition(mut self, partition: i32) -> Consumer {
self.partitions.push(partition);
self
}
fn commit_offsets(&mut self) -> Result<()> {
let tpos = self.offsets.iter()
.map(|(p, o)| TopicPartitionOffset{
topic: self.topic.clone(),
partition: p.clone(),
offset: o.clone()
})
.collect();
self.client.commit_offsets(self.group.clone(), tpos)
}
fn fetch_offsets(&mut self) -> Result<()> {
let tpos = try!(self.client.fetch_group_topic_offset(self.group.clone(), self.topic.clone()));
if self.partitions.len() == 0 {
self.partitions = self.client.topic_partitions.get(&self.topic).unwrap_or(&vec!()).clone();
}
for tpo in tpos {
if self.partitions.contains(&tpo.partition) {
self.offsets.insert(tpo.partition, tpo.offset);
}
}
Ok(())
}
fn make_request(&mut self) -> Result<()>{
if! self.initialized {
try!(self.fetch_offsets());
}
let tpos = self.offsets.iter()
.map(|(p, o)| TopicPartitionOffset{
topic: self.topic.clone(),
partition: p.clone(),
offset: o.clone()
})
.collect();
self.messages = try!(self.client.fetch_messages_multi(tpos));
self.initialized = true;
self.index = 0;
Ok(())
}
}
impl Iterator for Consumer {
type Item = TopicMessage;
fn next(&mut self) -> Option<TopicMessage> {
|
if self.initialized {
self.index += 1;
self.consumed += 1;
if self.consumed % COMMIT_INTERVAL == 0 {
let _ = self.commit_offsets();
}
if self.index <= self.messages.len() {
if self.messages[self.index-1].error.is_none() {
let curr = self.offsets.entry(self.messages[self.index-1].partition).or_insert(0);
*curr = *curr+1;
return Some(self.messages[self.index-1].clone());
}
return None;
}
let _ = self.commit_offsets();
if self.messages.len() == 0 {
return None;
}
}
match self.make_request() {
Err(_) => None,
Ok(_) => self.next()
}
}
}
|
random_line_split
|
|
main.rs
|
/*
* TLRW algorithm implementation for Rust.
* Let's see what we can do here.
*
* (c) Silly Hat, 2016.
*/
/*
* Features, additional crates and constants go here.
*/
#![allow(dead_code)]
extern crate libc;
extern crate rand;
extern crate time;
use rand::Rng;
const NELEMS: usize = 4;
/*
* External C functions.
*/
extern {
/* tut budet tlrw */
}
/* =======
* MAIN!
* =======
*/
fn main() {
println!("hellooooo");
let mut stuff = vec![0; NELEMS];
print_vec(&stuff);
}
/* ==============
* USEFUL STUFF
* ==============
*/
/*
* Getting current time.
*/
fn nownow() -> f64
|
/*
* Printing a vector of unsigned 32-bit integers.
*/
fn print_vec(vec: &Vec<u32>) {
for item in vec.iter() {
print!("{}\t", item);
}
print!("\n");
}
|
{
let timetime = time::get_time();
let current: f64 = timetime.sec as f64 + (timetime.nsec as f64 /
1000.0 / 1000.0 / 1000.0);
current
}
|
identifier_body
|
main.rs
|
/*
* TLRW algorithm implementation for Rust.
* Let's see what we can do here.
*
* (c) Silly Hat, 2016.
*/
/*
* Features, additional crates and constants go here.
*/
#![allow(dead_code)]
extern crate libc;
extern crate rand;
extern crate time;
use rand::Rng;
const NELEMS: usize = 4;
/*
* External C functions.
*/
extern {
/* tut budet tlrw */
}
/* =======
* MAIN!
* =======
*/
fn
|
() {
println!("hellooooo");
let mut stuff = vec![0; NELEMS];
print_vec(&stuff);
}
/* ==============
* USEFUL STUFF
* ==============
*/
/*
* Getting current time.
*/
fn nownow() -> f64 {
let timetime = time::get_time();
let current: f64 = timetime.sec as f64 + (timetime.nsec as f64 /
1000.0 / 1000.0 / 1000.0);
current
}
/*
* Printing a vector of unsigned 32-bit integers.
*/
fn print_vec(vec: &Vec<u32>) {
for item in vec.iter() {
print!("{}\t", item);
}
print!("\n");
}
|
main
|
identifier_name
|
main.rs
|
/*
* TLRW algorithm implementation for Rust.
* Let's see what we can do here.
*
* (c) Silly Hat, 2016.
*/
/*
* Features, additional crates and constants go here.
*/
#![allow(dead_code)]
extern crate libc;
extern crate rand;
extern crate time;
use rand::Rng;
const NELEMS: usize = 4;
/*
* External C functions.
*/
extern {
/* tut budet tlrw */
}
/* =======
* MAIN!
* =======
*/
fn main() {
println!("hellooooo");
let mut stuff = vec![0; NELEMS];
print_vec(&stuff);
}
/* ==============
* USEFUL STUFF
* ==============
*/
/*
* Getting current time.
*/
fn nownow() -> f64 {
let timetime = time::get_time();
let current: f64 = timetime.sec as f64 + (timetime.nsec as f64 /
1000.0 / 1000.0 / 1000.0);
current
|
/*
* Printing a vector of unsigned 32-bit integers.
*/
fn print_vec(vec: &Vec<u32>) {
for item in vec.iter() {
print!("{}\t", item);
}
print!("\n");
}
|
}
|
random_line_split
|
fs.rs
|
use std::path::{Path};
use std::fs;
|
fn exists(&self) -> bool;
}
impl<P: AsRef<Path>> PathExt for P {
fn exists(&self) -> bool { fs::metadata(self).is_ok() }
}
#[cfg(test)]
mod test {
use std::fs::{remove_file, File};
use std::path::{Path, PathBuf};
use super::PathExt;
#[test]
fn test_pathext() {
// XXX: Unit test uses filesystem, expects file creation and
// destruction to work right.
File::create(".pathext_test").unwrap();
assert!(Path::new(".pathext_test").exists());
assert!(PathBuf::from(".pathext_test").exists());
remove_file(".pathext_test").unwrap();
assert!(!Path::new(".pathext_test").exists());
assert!(!PathBuf::from(".pathext_test").exists());
}
}
|
/// Temporary replacement for unstable library PathExt.
pub trait PathExt {
|
random_line_split
|
fs.rs
|
use std::path::{Path};
use std::fs;
/// Temporary replacement for unstable library PathExt.
pub trait PathExt {
fn exists(&self) -> bool;
}
impl<P: AsRef<Path>> PathExt for P {
fn exists(&self) -> bool
|
}
#[cfg(test)]
mod test {
use std::fs::{remove_file, File};
use std::path::{Path, PathBuf};
use super::PathExt;
#[test]
fn test_pathext() {
// XXX: Unit test uses filesystem, expects file creation and
// destruction to work right.
File::create(".pathext_test").unwrap();
assert!(Path::new(".pathext_test").exists());
assert!(PathBuf::from(".pathext_test").exists());
remove_file(".pathext_test").unwrap();
assert!(!Path::new(".pathext_test").exists());
assert!(!PathBuf::from(".pathext_test").exists());
}
}
|
{ fs::metadata(self).is_ok() }
|
identifier_body
|
fs.rs
|
use std::path::{Path};
use std::fs;
/// Temporary replacement for unstable library PathExt.
pub trait PathExt {
fn exists(&self) -> bool;
}
impl<P: AsRef<Path>> PathExt for P {
fn
|
(&self) -> bool { fs::metadata(self).is_ok() }
}
#[cfg(test)]
mod test {
use std::fs::{remove_file, File};
use std::path::{Path, PathBuf};
use super::PathExt;
#[test]
fn test_pathext() {
// XXX: Unit test uses filesystem, expects file creation and
// destruction to work right.
File::create(".pathext_test").unwrap();
assert!(Path::new(".pathext_test").exists());
assert!(PathBuf::from(".pathext_test").exists());
remove_file(".pathext_test").unwrap();
assert!(!Path::new(".pathext_test").exists());
assert!(!PathBuf::from(".pathext_test").exists());
}
}
|
exists
|
identifier_name
|
01a_quick_example.rs
|
extern crate clap;
use clap::{App, SubCommand};
fn main()
|
// > A list flag
// = Uses "-l" (usage is "$ myapp test -l"
// > A help flag (automatically generated by clap
// = Uses "-h" or "--help" (full usage "$ myapp test -h" or "$ myapp test --help")
// > A version flag (automatically generated by clap
// = Uses "-V" or "--version" (full usage "$ myapp test -V" or "$ myapp test --version")
// - A subcommand "help" (automatically generated by clap because we specified a subcommand of our own)
// + Used by "$ myapp help" (same functionality as "-h" or "--help")
let matches = App::new("MyApp")
.version("1.0")
.author("Kevin K. <[email protected]>")
.about("Does awesome things")
.args_from_usage("-c --config=[conf] 'Sets a custom config file'
[output] 'Sets an optional output file'
[debug]... -d 'Turn debugging information on'")
.subcommand(SubCommand::with_name("test")
.about("does testing things")
.arg_from_usage("[list] -l 'lists test values'"))
.get_matches();
// You can check the value provided by positional arguments, or option arguments
if let Some(o) = matches.value_of("output") {
println!("Value for output: {}", o);
}
if let Some(c) = matches.value_of("config") {
println!("Value for config: {}", c);
}
// You can see how many times a particular flag or argument occurred
// Note, only flags can have multiple occurrences
match matches.occurrences_of("debug") {
0 => println!("Debug mode is off"),
1 => println!("Debug mode is kind of on"),
2 => println!("Debug mode is on"),
3 | _ => println!("Don't be crazy"),
}
// You can check for the existence of subcommands, and if found use their
// matches just as you would the top level app
if let Some(ref matches) = matches.subcommand_matches("test") {
// "$ myapp test" was run
if matches.is_present("list") {
// "$ myapp test -l" was run
println!("Printing testing lists...");
} else {
println!("Not printing testing lists...");
}
}
// Continued program logic goes here...
}
|
{
// This example shows how to create an application with several arguments using usage strings, which can be
// far less verbose that shown in 01b_QuickExample.rs, but is more readable. The downside is you cannot set
// the more advanced configuration options using this method (well...actually you can, you'll see ;) )
//
// Create an application with 5 possible arguments (2 auto generated) and 2 subcommands (1 auto generated)
// - A config file
// + Uses "-c filename" or "--config filename"
// - An output file
// + A positional argument (i.e. "$ myapp output_filename")
// - A debug flag
// + Uses "-d" or "--debug"
// + Allows multiple occurrences of such as "-dd" (for vary levels of debugging, as an example)
// - A help flag (automatically generated by clap)
// + Uses "-h" or "--help" (Only autogenerated if you do NOT specify your own "-h" or "--help")
// - A version flag (automatically generated by clap)
// + Uses "-V" or "--version" (Only autogenerated if you do NOT specify your own "-V" or "--version")
// - A subcommand "test" (subcommands behave like their own apps, with their own arguments
// + Used by "$ myapp test" with the following arguments
|
identifier_body
|
01a_quick_example.rs
|
extern crate clap;
use clap::{App, SubCommand};
fn main() {
// This example shows how to create an application with several arguments using usage strings, which can be
// far less verbose that shown in 01b_QuickExample.rs, but is more readable. The downside is you cannot set
// the more advanced configuration options using this method (well...actually you can, you'll see ;) )
//
// Create an application with 5 possible arguments (2 auto generated) and 2 subcommands (1 auto generated)
// - A config file
// + Uses "-c filename" or "--config filename"
// - An output file
// + A positional argument (i.e. "$ myapp output_filename")
// - A debug flag
// + Uses "-d" or "--debug"
// + Allows multiple occurrences of such as "-dd" (for vary levels of debugging, as an example)
// - A help flag (automatically generated by clap)
// + Uses "-h" or "--help" (Only autogenerated if you do NOT specify your own "-h" or "--help")
// - A version flag (automatically generated by clap)
// + Uses "-V" or "--version" (Only autogenerated if you do NOT specify your own "-V" or "--version")
// - A subcommand "test" (subcommands behave like their own apps, with their own arguments
// + Used by "$ myapp test" with the following arguments
// > A list flag
// = Uses "-l" (usage is "$ myapp test -l"
// > A help flag (automatically generated by clap
|
// = Uses "-V" or "--version" (full usage "$ myapp test -V" or "$ myapp test --version")
// - A subcommand "help" (automatically generated by clap because we specified a subcommand of our own)
// + Used by "$ myapp help" (same functionality as "-h" or "--help")
let matches = App::new("MyApp")
.version("1.0")
.author("Kevin K. <[email protected]>")
.about("Does awesome things")
.args_from_usage("-c --config=[conf] 'Sets a custom config file'
[output] 'Sets an optional output file'
[debug]... -d 'Turn debugging information on'")
.subcommand(SubCommand::with_name("test")
.about("does testing things")
.arg_from_usage("[list] -l 'lists test values'"))
.get_matches();
// You can check the value provided by positional arguments, or option arguments
if let Some(o) = matches.value_of("output") {
println!("Value for output: {}", o);
}
if let Some(c) = matches.value_of("config") {
println!("Value for config: {}", c);
}
// You can see how many times a particular flag or argument occurred
// Note, only flags can have multiple occurrences
match matches.occurrences_of("debug") {
0 => println!("Debug mode is off"),
1 => println!("Debug mode is kind of on"),
2 => println!("Debug mode is on"),
3 | _ => println!("Don't be crazy"),
}
// You can check for the existence of subcommands, and if found use their
// matches just as you would the top level app
if let Some(ref matches) = matches.subcommand_matches("test") {
// "$ myapp test" was run
if matches.is_present("list") {
// "$ myapp test -l" was run
println!("Printing testing lists...");
} else {
println!("Not printing testing lists...");
}
}
// Continued program logic goes here...
}
|
// = Uses "-h" or "--help" (full usage "$ myapp test -h" or "$ myapp test --help")
// > A version flag (automatically generated by clap
|
random_line_split
|
01a_quick_example.rs
|
extern crate clap;
use clap::{App, SubCommand};
fn
|
() {
// This example shows how to create an application with several arguments using usage strings, which can be
// far less verbose that shown in 01b_QuickExample.rs, but is more readable. The downside is you cannot set
// the more advanced configuration options using this method (well...actually you can, you'll see ;) )
//
// Create an application with 5 possible arguments (2 auto generated) and 2 subcommands (1 auto generated)
// - A config file
// + Uses "-c filename" or "--config filename"
// - An output file
// + A positional argument (i.e. "$ myapp output_filename")
// - A debug flag
// + Uses "-d" or "--debug"
// + Allows multiple occurrences of such as "-dd" (for vary levels of debugging, as an example)
// - A help flag (automatically generated by clap)
// + Uses "-h" or "--help" (Only autogenerated if you do NOT specify your own "-h" or "--help")
// - A version flag (automatically generated by clap)
// + Uses "-V" or "--version" (Only autogenerated if you do NOT specify your own "-V" or "--version")
// - A subcommand "test" (subcommands behave like their own apps, with their own arguments
// + Used by "$ myapp test" with the following arguments
// > A list flag
// = Uses "-l" (usage is "$ myapp test -l"
// > A help flag (automatically generated by clap
// = Uses "-h" or "--help" (full usage "$ myapp test -h" or "$ myapp test --help")
// > A version flag (automatically generated by clap
// = Uses "-V" or "--version" (full usage "$ myapp test -V" or "$ myapp test --version")
// - A subcommand "help" (automatically generated by clap because we specified a subcommand of our own)
// + Used by "$ myapp help" (same functionality as "-h" or "--help")
let matches = App::new("MyApp")
.version("1.0")
.author("Kevin K. <[email protected]>")
.about("Does awesome things")
.args_from_usage("-c --config=[conf] 'Sets a custom config file'
[output] 'Sets an optional output file'
[debug]... -d 'Turn debugging information on'")
.subcommand(SubCommand::with_name("test")
.about("does testing things")
.arg_from_usage("[list] -l 'lists test values'"))
.get_matches();
// You can check the value provided by positional arguments, or option arguments
if let Some(o) = matches.value_of("output") {
println!("Value for output: {}", o);
}
if let Some(c) = matches.value_of("config") {
println!("Value for config: {}", c);
}
// You can see how many times a particular flag or argument occurred
// Note, only flags can have multiple occurrences
match matches.occurrences_of("debug") {
0 => println!("Debug mode is off"),
1 => println!("Debug mode is kind of on"),
2 => println!("Debug mode is on"),
3 | _ => println!("Don't be crazy"),
}
// You can check for the existence of subcommands, and if found use their
// matches just as you would the top level app
if let Some(ref matches) = matches.subcommand_matches("test") {
// "$ myapp test" was run
if matches.is_present("list") {
// "$ myapp test -l" was run
println!("Printing testing lists...");
} else {
println!("Not printing testing lists...");
}
}
// Continued program logic goes here...
}
|
main
|
identifier_name
|
logger.rs
|
/* Copyright (C) 2020 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use super::ike::{IKEState, IKETransaction};
use super::ipsec_parser::IKEV2_FLAG_INITIATOR;
use crate::ike::parser::{ExchangeType, IsakmpPayloadType, SaAttribute};
use crate::jsonbuilder::{JsonBuilder, JsonError};
use std;
use std::convert::TryFrom;
use num_traits::FromPrimitive;
const LOG_EXTENDED: u32 = 0x01;
fn add_attributes(transform: &Vec<SaAttribute>, js: &mut JsonBuilder) -> Result<(), JsonError> {
for attribute in transform {
js.set_string(
attribute.attribute_type.to_string().as_str(),
format!("{}", attribute.attribute_value.to_string()).as_str(),
)?;
if let Some(numeric_value) = attribute.numeric_value {
js.set_uint(
format!("{}_raw", attribute.attribute_type).as_str(),
numeric_value as u64,
)?;
} else if let Some(hex_value) = &attribute.hex_value {
js.set_string(
format!("{}_raw", attribute.attribute_type).as_str(),
&hex_value,
)?;
}
}
return Ok(());
}
fn log_ike(
state: &IKEState, tx: &IKETransaction, flags: u32, jb: &mut JsonBuilder,
) -> Result<(), JsonError> {
jb.open_object("ike")?;
jb.set_uint("version_major", tx.hdr.maj_ver as u64)?;
jb.set_uint("version_minor", tx.hdr.min_ver as u64)?;
jb.set_string("init_spi", &tx.hdr.spi_initiator)?;
jb.set_string("resp_spi", &tx.hdr.spi_responder)?;
jb.set_uint("message_id", tx.hdr.msg_id as u64)?;
if tx.ike_version == 1 {
if let Some(exchange_type) = tx.hdr.ikev1_header.exchange_type {
jb.set_uint("exchange_type", exchange_type as u64)?;
if (flags & LOG_EXTENDED) == LOG_EXTENDED {
if let Some(etype) = ExchangeType::from_u8(exchange_type) {
jb.set_string("exchange_type_verbose", etype.to_string().as_str())?;
};
}
}
} else if tx.ike_version == 2 {
jb.set_uint("exchange_type", tx.hdr.ikev2_header.exch_type.0 as u64)?;
}
if tx.ike_version == 1 {
if state.ikev1_container.server.transforms.len() > 0 {
// log the first transform as the chosen one
add_attributes(&state.ikev1_container.server.transforms[0], jb)?;
}
if state.ikev1_container.server.transforms.len() > 1 {
// in case we have multiple server transforms log them in a list
jb.open_array("server_proposals")?;
for server_transform in &state.ikev1_container.server.transforms {
jb.start_object()?;
add_attributes(server_transform, jb)?;
jb.close()?;
}
jb.close()?;
}
} else if tx.ike_version == 2 {
if tx.hdr.flags & IKEV2_FLAG_INITIATOR!= 0 {
jb.set_string("role", &"initiator")?;
} else {
jb.set_string("role", &"responder")?;
jb.set_string("alg_enc", &format!("{:?}", state.ikev2_container.alg_enc))?;
jb.set_string("alg_auth", &format!("{:?}", state.ikev2_container.alg_auth))?;
jb.set_string("alg_prf", &format!("{:?}", state.ikev2_container.alg_prf))?;
jb.set_string("alg_dh", &format!("{:?}", state.ikev2_container.alg_dh))?;
jb.set_string("alg_esn", &format!("{:?}", state.ikev2_container.alg_esn))?;
}
}
// payloads in packet
jb.open_array("payload")?;
if tx.ike_version == 1 {
if let Some(payload_types) = &tx.payload_types.ikev1_payload_types {
for pt in payload_types {
append_payload_type_extended(jb, pt)?;
}
}
} else if tx.ike_version == 2 {
for payload in tx.payload_types.ikev2_payload_types.iter() {
jb.append_string(&format!("{:?}", payload))?;
}
}
jb.close()?;
if tx.ike_version == 1 {
log_ikev1(state, tx, jb)?;
} else if tx.ike_version == 2 {
log_ikev2(tx, jb)?;
}
jb.close()?;
return Ok(());
|
fn log_ikev1(state: &IKEState, tx: &IKETransaction, jb: &mut JsonBuilder) -> Result<(), JsonError> {
jb.open_object("ikev1")?;
if let Some(doi) = state.ikev1_container.domain_of_interpretation {
jb.set_uint("doi", doi as u64)?;
}
jb.set_bool("encrypted_payloads", tx.hdr.ikev1_header.encrypted_payloads)?;
if!tx.hdr.ikev1_header.encrypted_payloads {
// enable logging of collected state if not-encrypted payloads
// client data
jb.open_object("client")?;
if state.ikev1_container.client.key_exchange.len() > 0 {
jb.set_string(
"key_exchange_payload",
&state.ikev1_container.client.key_exchange,
)?;
if let Ok(client_key_length) =
u64::try_from(state.ikev1_container.client.key_exchange.len())
{
jb.set_uint("key_exchange_payload_length", client_key_length / 2)?;
}
}
if state.ikev1_container.client.nonce.len() > 0 {
jb.set_string("nonce_payload", &state.ikev1_container.client.nonce)?;
if let Ok(client_nonce_length) = u64::try_from(state.ikev1_container.client.nonce.len())
{
jb.set_uint("nonce_payload_length", client_nonce_length / 2)?;
}
}
jb.open_array("proposals")?;
for client_transform in &state.ikev1_container.client.transforms {
jb.start_object()?;
add_attributes(client_transform, jb)?;
jb.close()?;
}
jb.close()?; // proposals
jb.close()?; // client
// server data
jb.open_object("server")?;
if state.ikev1_container.server.key_exchange.len() > 0 {
jb.set_string(
"key_exchange_payload",
&state.ikev1_container.server.key_exchange,
)?;
if let Ok(server_key_length) =
u64::try_from(state.ikev1_container.server.key_exchange.len())
{
jb.set_uint("key_exchange_payload_length", server_key_length / 2)?;
}
}
if state.ikev1_container.server.nonce.len() > 0 {
jb.set_string("nonce_payload", &state.ikev1_container.server.nonce)?;
if let Ok(server_nonce_length) = u64::try_from(state.ikev1_container.server.nonce.len())
{
jb.set_uint("nonce_payload_length", server_nonce_length / 2)?;
}
}
jb.close()?; // server
jb.open_array("vendor_ids")?;
for vendor in state
.ikev1_container
.client
.vendor_ids
.union(&state.ikev1_container.server.vendor_ids)
{
jb.append_string(vendor)?;
}
jb.close()?; // vendor_ids
}
jb.close()?;
return Ok(());
}
fn append_payload_type_extended(js: &mut JsonBuilder, pt: &u8) -> Result<(), JsonError> {
if let Some(v) = IsakmpPayloadType::from_u8(*pt) {
js.append_string(&format!("{:?}", v))?;
}
Ok(())
}
fn log_ikev2(tx: &IKETransaction, jb: &mut JsonBuilder) -> Result<(), JsonError> {
jb.open_object("ikev2")?;
jb.set_uint("errors", tx.errors as u64)?;
jb.open_array("notify")?;
for notify in tx.notify_types.iter() {
jb.append_string(&format!("{:?}", notify))?;
}
jb.close()?;
jb.close()?;
Ok(())
}
#[no_mangle]
pub extern "C" fn rs_ike_logger_log(
state: &mut IKEState, tx: *mut std::os::raw::c_void, flags: u32, js: &mut JsonBuilder,
) -> bool {
let tx = cast_pointer!(tx, IKETransaction);
log_ike(state, tx, flags, js).is_ok()
}
|
}
|
random_line_split
|
logger.rs
|
/* Copyright (C) 2020 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use super::ike::{IKEState, IKETransaction};
use super::ipsec_parser::IKEV2_FLAG_INITIATOR;
use crate::ike::parser::{ExchangeType, IsakmpPayloadType, SaAttribute};
use crate::jsonbuilder::{JsonBuilder, JsonError};
use std;
use std::convert::TryFrom;
use num_traits::FromPrimitive;
const LOG_EXTENDED: u32 = 0x01;
fn add_attributes(transform: &Vec<SaAttribute>, js: &mut JsonBuilder) -> Result<(), JsonError> {
for attribute in transform {
js.set_string(
attribute.attribute_type.to_string().as_str(),
format!("{}", attribute.attribute_value.to_string()).as_str(),
)?;
if let Some(numeric_value) = attribute.numeric_value {
js.set_uint(
format!("{}_raw", attribute.attribute_type).as_str(),
numeric_value as u64,
)?;
} else if let Some(hex_value) = &attribute.hex_value {
js.set_string(
format!("{}_raw", attribute.attribute_type).as_str(),
&hex_value,
)?;
}
}
return Ok(());
}
fn log_ike(
state: &IKEState, tx: &IKETransaction, flags: u32, jb: &mut JsonBuilder,
) -> Result<(), JsonError> {
jb.open_object("ike")?;
jb.set_uint("version_major", tx.hdr.maj_ver as u64)?;
jb.set_uint("version_minor", tx.hdr.min_ver as u64)?;
jb.set_string("init_spi", &tx.hdr.spi_initiator)?;
jb.set_string("resp_spi", &tx.hdr.spi_responder)?;
jb.set_uint("message_id", tx.hdr.msg_id as u64)?;
if tx.ike_version == 1 {
if let Some(exchange_type) = tx.hdr.ikev1_header.exchange_type {
jb.set_uint("exchange_type", exchange_type as u64)?;
if (flags & LOG_EXTENDED) == LOG_EXTENDED {
if let Some(etype) = ExchangeType::from_u8(exchange_type) {
jb.set_string("exchange_type_verbose", etype.to_string().as_str())?;
};
}
}
} else if tx.ike_version == 2 {
jb.set_uint("exchange_type", tx.hdr.ikev2_header.exch_type.0 as u64)?;
}
if tx.ike_version == 1 {
if state.ikev1_container.server.transforms.len() > 0 {
// log the first transform as the chosen one
add_attributes(&state.ikev1_container.server.transforms[0], jb)?;
}
if state.ikev1_container.server.transforms.len() > 1 {
// in case we have multiple server transforms log them in a list
jb.open_array("server_proposals")?;
for server_transform in &state.ikev1_container.server.transforms {
jb.start_object()?;
add_attributes(server_transform, jb)?;
jb.close()?;
}
jb.close()?;
}
} else if tx.ike_version == 2 {
if tx.hdr.flags & IKEV2_FLAG_INITIATOR!= 0 {
jb.set_string("role", &"initiator")?;
} else {
jb.set_string("role", &"responder")?;
jb.set_string("alg_enc", &format!("{:?}", state.ikev2_container.alg_enc))?;
jb.set_string("alg_auth", &format!("{:?}", state.ikev2_container.alg_auth))?;
jb.set_string("alg_prf", &format!("{:?}", state.ikev2_container.alg_prf))?;
jb.set_string("alg_dh", &format!("{:?}", state.ikev2_container.alg_dh))?;
jb.set_string("alg_esn", &format!("{:?}", state.ikev2_container.alg_esn))?;
}
}
// payloads in packet
jb.open_array("payload")?;
if tx.ike_version == 1 {
if let Some(payload_types) = &tx.payload_types.ikev1_payload_types {
for pt in payload_types {
append_payload_type_extended(jb, pt)?;
}
}
} else if tx.ike_version == 2 {
for payload in tx.payload_types.ikev2_payload_types.iter() {
jb.append_string(&format!("{:?}", payload))?;
}
}
jb.close()?;
if tx.ike_version == 1 {
log_ikev1(state, tx, jb)?;
} else if tx.ike_version == 2 {
log_ikev2(tx, jb)?;
}
jb.close()?;
return Ok(());
}
fn log_ikev1(state: &IKEState, tx: &IKETransaction, jb: &mut JsonBuilder) -> Result<(), JsonError> {
jb.open_object("ikev1")?;
if let Some(doi) = state.ikev1_container.domain_of_interpretation {
jb.set_uint("doi", doi as u64)?;
}
jb.set_bool("encrypted_payloads", tx.hdr.ikev1_header.encrypted_payloads)?;
if!tx.hdr.ikev1_header.encrypted_payloads
|
jb.set_uint("nonce_payload_length", client_nonce_length / 2)?;
}
}
jb.open_array("proposals")?;
for client_transform in &state.ikev1_container.client.transforms {
jb.start_object()?;
add_attributes(client_transform, jb)?;
jb.close()?;
}
jb.close()?; // proposals
jb.close()?; // client
// server data
jb.open_object("server")?;
if state.ikev1_container.server.key_exchange.len() > 0 {
jb.set_string(
"key_exchange_payload",
&state.ikev1_container.server.key_exchange,
)?;
if let Ok(server_key_length) =
u64::try_from(state.ikev1_container.server.key_exchange.len())
{
jb.set_uint("key_exchange_payload_length", server_key_length / 2)?;
}
}
if state.ikev1_container.server.nonce.len() > 0 {
jb.set_string("nonce_payload", &state.ikev1_container.server.nonce)?;
if let Ok(server_nonce_length) = u64::try_from(state.ikev1_container.server.nonce.len())
{
jb.set_uint("nonce_payload_length", server_nonce_length / 2)?;
}
}
jb.close()?; // server
jb.open_array("vendor_ids")?;
for vendor in state
.ikev1_container
.client
.vendor_ids
.union(&state.ikev1_container.server.vendor_ids)
{
jb.append_string(vendor)?;
}
jb.close()?; // vendor_ids
}
jb.close()?;
return Ok(());
}
fn append_payload_type_extended(js: &mut JsonBuilder, pt: &u8) -> Result<(), JsonError> {
if let Some(v) = IsakmpPayloadType::from_u8(*pt) {
js.append_string(&format!("{:?}", v))?;
}
Ok(())
}
fn log_ikev2(tx: &IKETransaction, jb: &mut JsonBuilder) -> Result<(), JsonError> {
jb.open_object("ikev2")?;
jb.set_uint("errors", tx.errors as u64)?;
jb.open_array("notify")?;
for notify in tx.notify_types.iter() {
jb.append_string(&format!("{:?}", notify))?;
}
jb.close()?;
jb.close()?;
Ok(())
}
#[no_mangle]
pub extern "C" fn rs_ike_logger_log(
state: &mut IKEState, tx: *mut std::os::raw::c_void, flags: u32, js: &mut JsonBuilder,
) -> bool {
let tx = cast_pointer!(tx, IKETransaction);
log_ike(state, tx, flags, js).is_ok()
}
|
{
// enable logging of collected state if not-encrypted payloads
// client data
jb.open_object("client")?;
if state.ikev1_container.client.key_exchange.len() > 0 {
jb.set_string(
"key_exchange_payload",
&state.ikev1_container.client.key_exchange,
)?;
if let Ok(client_key_length) =
u64::try_from(state.ikev1_container.client.key_exchange.len())
{
jb.set_uint("key_exchange_payload_length", client_key_length / 2)?;
}
}
if state.ikev1_container.client.nonce.len() > 0 {
jb.set_string("nonce_payload", &state.ikev1_container.client.nonce)?;
if let Ok(client_nonce_length) = u64::try_from(state.ikev1_container.client.nonce.len())
{
|
conditional_block
|
logger.rs
|
/* Copyright (C) 2020 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use super::ike::{IKEState, IKETransaction};
use super::ipsec_parser::IKEV2_FLAG_INITIATOR;
use crate::ike::parser::{ExchangeType, IsakmpPayloadType, SaAttribute};
use crate::jsonbuilder::{JsonBuilder, JsonError};
use std;
use std::convert::TryFrom;
use num_traits::FromPrimitive;
const LOG_EXTENDED: u32 = 0x01;
fn add_attributes(transform: &Vec<SaAttribute>, js: &mut JsonBuilder) -> Result<(), JsonError> {
for attribute in transform {
js.set_string(
attribute.attribute_type.to_string().as_str(),
format!("{}", attribute.attribute_value.to_string()).as_str(),
)?;
if let Some(numeric_value) = attribute.numeric_value {
js.set_uint(
format!("{}_raw", attribute.attribute_type).as_str(),
numeric_value as u64,
)?;
} else if let Some(hex_value) = &attribute.hex_value {
js.set_string(
format!("{}_raw", attribute.attribute_type).as_str(),
&hex_value,
)?;
}
}
return Ok(());
}
fn log_ike(
state: &IKEState, tx: &IKETransaction, flags: u32, jb: &mut JsonBuilder,
) -> Result<(), JsonError> {
jb.open_object("ike")?;
jb.set_uint("version_major", tx.hdr.maj_ver as u64)?;
jb.set_uint("version_minor", tx.hdr.min_ver as u64)?;
jb.set_string("init_spi", &tx.hdr.spi_initiator)?;
jb.set_string("resp_spi", &tx.hdr.spi_responder)?;
jb.set_uint("message_id", tx.hdr.msg_id as u64)?;
if tx.ike_version == 1 {
if let Some(exchange_type) = tx.hdr.ikev1_header.exchange_type {
jb.set_uint("exchange_type", exchange_type as u64)?;
if (flags & LOG_EXTENDED) == LOG_EXTENDED {
if let Some(etype) = ExchangeType::from_u8(exchange_type) {
jb.set_string("exchange_type_verbose", etype.to_string().as_str())?;
};
}
}
} else if tx.ike_version == 2 {
jb.set_uint("exchange_type", tx.hdr.ikev2_header.exch_type.0 as u64)?;
}
if tx.ike_version == 1 {
if state.ikev1_container.server.transforms.len() > 0 {
// log the first transform as the chosen one
add_attributes(&state.ikev1_container.server.transforms[0], jb)?;
}
if state.ikev1_container.server.transforms.len() > 1 {
// in case we have multiple server transforms log them in a list
jb.open_array("server_proposals")?;
for server_transform in &state.ikev1_container.server.transforms {
jb.start_object()?;
add_attributes(server_transform, jb)?;
jb.close()?;
}
jb.close()?;
}
} else if tx.ike_version == 2 {
if tx.hdr.flags & IKEV2_FLAG_INITIATOR!= 0 {
jb.set_string("role", &"initiator")?;
} else {
jb.set_string("role", &"responder")?;
jb.set_string("alg_enc", &format!("{:?}", state.ikev2_container.alg_enc))?;
jb.set_string("alg_auth", &format!("{:?}", state.ikev2_container.alg_auth))?;
jb.set_string("alg_prf", &format!("{:?}", state.ikev2_container.alg_prf))?;
jb.set_string("alg_dh", &format!("{:?}", state.ikev2_container.alg_dh))?;
jb.set_string("alg_esn", &format!("{:?}", state.ikev2_container.alg_esn))?;
}
}
// payloads in packet
jb.open_array("payload")?;
if tx.ike_version == 1 {
if let Some(payload_types) = &tx.payload_types.ikev1_payload_types {
for pt in payload_types {
append_payload_type_extended(jb, pt)?;
}
}
} else if tx.ike_version == 2 {
for payload in tx.payload_types.ikev2_payload_types.iter() {
jb.append_string(&format!("{:?}", payload))?;
}
}
jb.close()?;
if tx.ike_version == 1 {
log_ikev1(state, tx, jb)?;
} else if tx.ike_version == 2 {
log_ikev2(tx, jb)?;
}
jb.close()?;
return Ok(());
}
fn log_ikev1(state: &IKEState, tx: &IKETransaction, jb: &mut JsonBuilder) -> Result<(), JsonError> {
jb.open_object("ikev1")?;
if let Some(doi) = state.ikev1_container.domain_of_interpretation {
jb.set_uint("doi", doi as u64)?;
}
jb.set_bool("encrypted_payloads", tx.hdr.ikev1_header.encrypted_payloads)?;
if!tx.hdr.ikev1_header.encrypted_payloads {
// enable logging of collected state if not-encrypted payloads
// client data
jb.open_object("client")?;
if state.ikev1_container.client.key_exchange.len() > 0 {
jb.set_string(
"key_exchange_payload",
&state.ikev1_container.client.key_exchange,
)?;
if let Ok(client_key_length) =
u64::try_from(state.ikev1_container.client.key_exchange.len())
{
jb.set_uint("key_exchange_payload_length", client_key_length / 2)?;
}
}
if state.ikev1_container.client.nonce.len() > 0 {
jb.set_string("nonce_payload", &state.ikev1_container.client.nonce)?;
if let Ok(client_nonce_length) = u64::try_from(state.ikev1_container.client.nonce.len())
{
jb.set_uint("nonce_payload_length", client_nonce_length / 2)?;
}
}
jb.open_array("proposals")?;
for client_transform in &state.ikev1_container.client.transforms {
jb.start_object()?;
add_attributes(client_transform, jb)?;
jb.close()?;
}
jb.close()?; // proposals
jb.close()?; // client
// server data
jb.open_object("server")?;
if state.ikev1_container.server.key_exchange.len() > 0 {
jb.set_string(
"key_exchange_payload",
&state.ikev1_container.server.key_exchange,
)?;
if let Ok(server_key_length) =
u64::try_from(state.ikev1_container.server.key_exchange.len())
{
jb.set_uint("key_exchange_payload_length", server_key_length / 2)?;
}
}
if state.ikev1_container.server.nonce.len() > 0 {
jb.set_string("nonce_payload", &state.ikev1_container.server.nonce)?;
if let Ok(server_nonce_length) = u64::try_from(state.ikev1_container.server.nonce.len())
{
jb.set_uint("nonce_payload_length", server_nonce_length / 2)?;
}
}
jb.close()?; // server
jb.open_array("vendor_ids")?;
for vendor in state
.ikev1_container
.client
.vendor_ids
.union(&state.ikev1_container.server.vendor_ids)
{
jb.append_string(vendor)?;
}
jb.close()?; // vendor_ids
}
jb.close()?;
return Ok(());
}
fn append_payload_type_extended(js: &mut JsonBuilder, pt: &u8) -> Result<(), JsonError> {
if let Some(v) = IsakmpPayloadType::from_u8(*pt) {
js.append_string(&format!("{:?}", v))?;
}
Ok(())
}
fn
|
(tx: &IKETransaction, jb: &mut JsonBuilder) -> Result<(), JsonError> {
jb.open_object("ikev2")?;
jb.set_uint("errors", tx.errors as u64)?;
jb.open_array("notify")?;
for notify in tx.notify_types.iter() {
jb.append_string(&format!("{:?}", notify))?;
}
jb.close()?;
jb.close()?;
Ok(())
}
#[no_mangle]
pub extern "C" fn rs_ike_logger_log(
state: &mut IKEState, tx: *mut std::os::raw::c_void, flags: u32, js: &mut JsonBuilder,
) -> bool {
let tx = cast_pointer!(tx, IKETransaction);
log_ike(state, tx, flags, js).is_ok()
}
|
log_ikev2
|
identifier_name
|
logger.rs
|
/* Copyright (C) 2020 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use super::ike::{IKEState, IKETransaction};
use super::ipsec_parser::IKEV2_FLAG_INITIATOR;
use crate::ike::parser::{ExchangeType, IsakmpPayloadType, SaAttribute};
use crate::jsonbuilder::{JsonBuilder, JsonError};
use std;
use std::convert::TryFrom;
use num_traits::FromPrimitive;
const LOG_EXTENDED: u32 = 0x01;
fn add_attributes(transform: &Vec<SaAttribute>, js: &mut JsonBuilder) -> Result<(), JsonError> {
for attribute in transform {
js.set_string(
attribute.attribute_type.to_string().as_str(),
format!("{}", attribute.attribute_value.to_string()).as_str(),
)?;
if let Some(numeric_value) = attribute.numeric_value {
js.set_uint(
format!("{}_raw", attribute.attribute_type).as_str(),
numeric_value as u64,
)?;
} else if let Some(hex_value) = &attribute.hex_value {
js.set_string(
format!("{}_raw", attribute.attribute_type).as_str(),
&hex_value,
)?;
}
}
return Ok(());
}
fn log_ike(
state: &IKEState, tx: &IKETransaction, flags: u32, jb: &mut JsonBuilder,
) -> Result<(), JsonError>
|
}
if tx.ike_version == 1 {
if state.ikev1_container.server.transforms.len() > 0 {
// log the first transform as the chosen one
add_attributes(&state.ikev1_container.server.transforms[0], jb)?;
}
if state.ikev1_container.server.transforms.len() > 1 {
// in case we have multiple server transforms log them in a list
jb.open_array("server_proposals")?;
for server_transform in &state.ikev1_container.server.transforms {
jb.start_object()?;
add_attributes(server_transform, jb)?;
jb.close()?;
}
jb.close()?;
}
} else if tx.ike_version == 2 {
if tx.hdr.flags & IKEV2_FLAG_INITIATOR!= 0 {
jb.set_string("role", &"initiator")?;
} else {
jb.set_string("role", &"responder")?;
jb.set_string("alg_enc", &format!("{:?}", state.ikev2_container.alg_enc))?;
jb.set_string("alg_auth", &format!("{:?}", state.ikev2_container.alg_auth))?;
jb.set_string("alg_prf", &format!("{:?}", state.ikev2_container.alg_prf))?;
jb.set_string("alg_dh", &format!("{:?}", state.ikev2_container.alg_dh))?;
jb.set_string("alg_esn", &format!("{:?}", state.ikev2_container.alg_esn))?;
}
}
// payloads in packet
jb.open_array("payload")?;
if tx.ike_version == 1 {
if let Some(payload_types) = &tx.payload_types.ikev1_payload_types {
for pt in payload_types {
append_payload_type_extended(jb, pt)?;
}
}
} else if tx.ike_version == 2 {
for payload in tx.payload_types.ikev2_payload_types.iter() {
jb.append_string(&format!("{:?}", payload))?;
}
}
jb.close()?;
if tx.ike_version == 1 {
log_ikev1(state, tx, jb)?;
} else if tx.ike_version == 2 {
log_ikev2(tx, jb)?;
}
jb.close()?;
return Ok(());
}
fn log_ikev1(state: &IKEState, tx: &IKETransaction, jb: &mut JsonBuilder) -> Result<(), JsonError> {
jb.open_object("ikev1")?;
if let Some(doi) = state.ikev1_container.domain_of_interpretation {
jb.set_uint("doi", doi as u64)?;
}
jb.set_bool("encrypted_payloads", tx.hdr.ikev1_header.encrypted_payloads)?;
if!tx.hdr.ikev1_header.encrypted_payloads {
// enable logging of collected state if not-encrypted payloads
// client data
jb.open_object("client")?;
if state.ikev1_container.client.key_exchange.len() > 0 {
jb.set_string(
"key_exchange_payload",
&state.ikev1_container.client.key_exchange,
)?;
if let Ok(client_key_length) =
u64::try_from(state.ikev1_container.client.key_exchange.len())
{
jb.set_uint("key_exchange_payload_length", client_key_length / 2)?;
}
}
if state.ikev1_container.client.nonce.len() > 0 {
jb.set_string("nonce_payload", &state.ikev1_container.client.nonce)?;
if let Ok(client_nonce_length) = u64::try_from(state.ikev1_container.client.nonce.len())
{
jb.set_uint("nonce_payload_length", client_nonce_length / 2)?;
}
}
jb.open_array("proposals")?;
for client_transform in &state.ikev1_container.client.transforms {
jb.start_object()?;
add_attributes(client_transform, jb)?;
jb.close()?;
}
jb.close()?; // proposals
jb.close()?; // client
// server data
jb.open_object("server")?;
if state.ikev1_container.server.key_exchange.len() > 0 {
jb.set_string(
"key_exchange_payload",
&state.ikev1_container.server.key_exchange,
)?;
if let Ok(server_key_length) =
u64::try_from(state.ikev1_container.server.key_exchange.len())
{
jb.set_uint("key_exchange_payload_length", server_key_length / 2)?;
}
}
if state.ikev1_container.server.nonce.len() > 0 {
jb.set_string("nonce_payload", &state.ikev1_container.server.nonce)?;
if let Ok(server_nonce_length) = u64::try_from(state.ikev1_container.server.nonce.len())
{
jb.set_uint("nonce_payload_length", server_nonce_length / 2)?;
}
}
jb.close()?; // server
jb.open_array("vendor_ids")?;
for vendor in state
.ikev1_container
.client
.vendor_ids
.union(&state.ikev1_container.server.vendor_ids)
{
jb.append_string(vendor)?;
}
jb.close()?; // vendor_ids
}
jb.close()?;
return Ok(());
}
fn append_payload_type_extended(js: &mut JsonBuilder, pt: &u8) -> Result<(), JsonError> {
if let Some(v) = IsakmpPayloadType::from_u8(*pt) {
js.append_string(&format!("{:?}", v))?;
}
Ok(())
}
fn log_ikev2(tx: &IKETransaction, jb: &mut JsonBuilder) -> Result<(), JsonError> {
jb.open_object("ikev2")?;
jb.set_uint("errors", tx.errors as u64)?;
jb.open_array("notify")?;
for notify in tx.notify_types.iter() {
jb.append_string(&format!("{:?}", notify))?;
}
jb.close()?;
jb.close()?;
Ok(())
}
#[no_mangle]
pub extern "C" fn rs_ike_logger_log(
state: &mut IKEState, tx: *mut std::os::raw::c_void, flags: u32, js: &mut JsonBuilder,
) -> bool {
let tx = cast_pointer!(tx, IKETransaction);
log_ike(state, tx, flags, js).is_ok()
}
|
{
jb.open_object("ike")?;
jb.set_uint("version_major", tx.hdr.maj_ver as u64)?;
jb.set_uint("version_minor", tx.hdr.min_ver as u64)?;
jb.set_string("init_spi", &tx.hdr.spi_initiator)?;
jb.set_string("resp_spi", &tx.hdr.spi_responder)?;
jb.set_uint("message_id", tx.hdr.msg_id as u64)?;
if tx.ike_version == 1 {
if let Some(exchange_type) = tx.hdr.ikev1_header.exchange_type {
jb.set_uint("exchange_type", exchange_type as u64)?;
if (flags & LOG_EXTENDED) == LOG_EXTENDED {
if let Some(etype) = ExchangeType::from_u8(exchange_type) {
jb.set_string("exchange_type_verbose", etype.to_string().as_str())?;
};
}
}
} else if tx.ike_version == 2 {
jb.set_uint("exchange_type", tx.hdr.ikev2_header.exch_type.0 as u64)?;
|
identifier_body
|
attr.rs
|
use syn::{
parse::{Parse, ParseStream},
LitStr, Token,
};
use super::case::RenameRule;
mod kw {
syn::custom_keyword!(rename);
syn::custom_keyword!(rename_all);
}
pub struct RenameAttr(LitStr);
impl RenameAttr {
pub fn into_inner(self) -> LitStr {
self.0
}
}
impl Parse for RenameAttr {
fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
let _: kw::rename = input.parse()?;
let _: Token![=] = input.parse()?;
Ok(Self(input.parse()?))
}
|
impl RenameAllAttr {
pub fn into_inner(self) -> RenameRule {
self.0
}
}
impl Parse for RenameAllAttr {
fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
let _: kw::rename_all = input.parse()?;
let _: Token![=] = input.parse()?;
let s: LitStr = input.parse()?;
Ok(Self(
s.value()
.parse()
.map_err(|_| syn::Error::new_spanned(s, "invalid value for rename_all"))?,
))
}
}
|
}
pub struct RenameAllAttr(RenameRule);
|
random_line_split
|
attr.rs
|
use syn::{
parse::{Parse, ParseStream},
LitStr, Token,
};
use super::case::RenameRule;
mod kw {
syn::custom_keyword!(rename);
syn::custom_keyword!(rename_all);
}
pub struct RenameAttr(LitStr);
impl RenameAttr {
pub fn into_inner(self) -> LitStr {
self.0
}
}
impl Parse for RenameAttr {
fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
let _: kw::rename = input.parse()?;
let _: Token![=] = input.parse()?;
Ok(Self(input.parse()?))
}
}
pub struct
|
(RenameRule);
impl RenameAllAttr {
pub fn into_inner(self) -> RenameRule {
self.0
}
}
impl Parse for RenameAllAttr {
fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
let _: kw::rename_all = input.parse()?;
let _: Token![=] = input.parse()?;
let s: LitStr = input.parse()?;
Ok(Self(
s.value()
.parse()
.map_err(|_| syn::Error::new_spanned(s, "invalid value for rename_all"))?,
))
}
}
|
RenameAllAttr
|
identifier_name
|
mod.rs
|
/*!
Test supports module.
*/
#![allow(dead_code)]
use glium::{self, glutin, DisplayBuild};
use glium::backend::Facade;
use glium::index::PrimitiveType;
use std::env;
/// Builds a headless display for tests.
pub fn
|
() -> glium::Display {
let version = parse_version();
let display = if env::var("GLIUM_HEADLESS_TESTS").is_ok() {
glutin::HeadlessRendererBuilder::new(1024, 768).with_gl_debug_flag(true)
.with_gl(version)
.build_glium().unwrap()
} else {
glutin::WindowBuilder::new().with_gl_debug_flag(true).with_visibility(false)
.with_gl(version).build_glium().unwrap()
};
display
}
/// Rebuilds an existing display.
///
/// In real applications this is used for things such as switching to fullscreen. Some things are
/// invalidated during a rebuild, and this has to be handled by glium.
pub fn rebuild_display(display: &glium::Display) {
let version = parse_version();
if env::var("GLIUM_HEADLESS_TESTS").is_ok() {
glutin::HeadlessRendererBuilder::new(1024, 768).with_gl_debug_flag(true)
.with_gl(version)
.rebuild_glium(display).unwrap();
} else {
glutin::WindowBuilder::new().with_gl_debug_flag(true).with_visibility(false)
.with_gl(version).rebuild_glium(display).unwrap();
}
}
fn parse_version() -> glutin::GlRequest {
match env::var("GLIUM_GL_VERSION") {
Ok(version) => {
// expects "OpenGL 3.3" for example
let mut iter = version.rsplitn(2,'');
let version = iter.next().unwrap();
let ty = iter.next().unwrap();
let mut iter = version.split('.');
let major = iter.next().unwrap().parse().unwrap();
let minor = iter.next().unwrap().parse().unwrap();
let ty = if ty == "OpenGL" {
glutin::Api::OpenGl
} else if ty == "OpenGL ES" {
glutin::Api::OpenGlEs
} else if ty == "WebGL" {
glutin::Api::WebGl
} else {
panic!();
};
glutin::GlRequest::Specific(ty, (major, minor))
},
Err(_) => glutin::GlRequest::Latest,
}
}
/// Builds a 2x2 unicolor texture.
pub fn build_unicolor_texture2d<F>(facade: &F, red: f32, green: f32, blue: f32)
-> glium::Texture2d where F: Facade
{
let color = ((red * 255.0) as u8, (green * 255.0) as u8, (blue * 255.0) as u8);
glium::texture::Texture2d::new(facade, vec![
vec![color, color],
vec![color, color],
]).unwrap()
}
/// Builds a vertex buffer, index buffer, and program, to draw red `(1.0, 0.0, 0.0, 1.0)` to the whole screen.
pub fn build_fullscreen_red_pipeline<F>(facade: &F) -> (glium::vertex::VertexBufferAny,
glium::index::IndexBufferAny, glium::Program) where F: Facade
{
#[derive(Copy, Clone)]
struct Vertex {
position: [f32; 2],
}
implement_vertex!(Vertex, position);
(
glium::VertexBuffer::new(facade, &[
Vertex { position: [-1.0, 1.0] }, Vertex { position: [1.0, 1.0] },
Vertex { position: [-1.0, -1.0] }, Vertex { position: [1.0, -1.0] },
]).unwrap().into_vertex_buffer_any(),
glium::IndexBuffer::new(facade, PrimitiveType::TriangleStrip, &[0u8, 1, 2, 3]).unwrap().into(),
program!(facade,
110 => {
vertex: "
#version 110
attribute vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
",
fragment: "
#version 110
void main() {
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
",
},
100 => {
vertex: "
#version 100
attribute lowp vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
",
fragment: "
#version 100
void main() {
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
",
},
).unwrap()
)
}
/// Builds a vertex buffer and an index buffer corresponding to a rectangle.
///
/// The vertex buffer has the "position" attribute of type "vec2".
pub fn build_rectangle_vb_ib<F>(facade: &F)
-> (glium::vertex::VertexBufferAny, glium::index::IndexBufferAny) where F: Facade
{
#[derive(Copy, Clone)]
struct Vertex {
position: [f32; 2],
}
implement_vertex!(Vertex, position);
(
glium::VertexBuffer::new(facade, &[
Vertex { position: [-1.0, 1.0] }, Vertex { position: [1.0, 1.0] },
Vertex { position: [-1.0, -1.0] }, Vertex { position: [1.0, -1.0] },
]).unwrap().into_vertex_buffer_any(),
glium::IndexBuffer::new(facade, PrimitiveType::TriangleStrip, &[0u8, 1, 2, 3]).unwrap().into(),
)
}
/// Builds a texture suitable for rendering.
pub fn build_renderable_texture<F>(facade: &F) -> glium::Texture2d where F: Facade {
glium::Texture2d::empty(facade, 1024, 1024).unwrap()
}
|
build_display
|
identifier_name
|
mod.rs
|
/*!
Test supports module.
*/
#![allow(dead_code)]
use glium::{self, glutin, DisplayBuild};
use glium::backend::Facade;
use glium::index::PrimitiveType;
use std::env;
/// Builds a headless display for tests.
pub fn build_display() -> glium::Display {
let version = parse_version();
let display = if env::var("GLIUM_HEADLESS_TESTS").is_ok() {
glutin::HeadlessRendererBuilder::new(1024, 768).with_gl_debug_flag(true)
.with_gl(version)
.build_glium().unwrap()
} else {
glutin::WindowBuilder::new().with_gl_debug_flag(true).with_visibility(false)
.with_gl(version).build_glium().unwrap()
};
display
}
/// Rebuilds an existing display.
///
/// In real applications this is used for things such as switching to fullscreen. Some things are
/// invalidated during a rebuild, and this has to be handled by glium.
pub fn rebuild_display(display: &glium::Display) {
let version = parse_version();
if env::var("GLIUM_HEADLESS_TESTS").is_ok() {
glutin::HeadlessRendererBuilder::new(1024, 768).with_gl_debug_flag(true)
.with_gl(version)
.rebuild_glium(display).unwrap();
} else {
glutin::WindowBuilder::new().with_gl_debug_flag(true).with_visibility(false)
.with_gl(version).rebuild_glium(display).unwrap();
}
}
fn parse_version() -> glutin::GlRequest {
match env::var("GLIUM_GL_VERSION") {
Ok(version) => {
// expects "OpenGL 3.3" for example
let mut iter = version.rsplitn(2,'');
let version = iter.next().unwrap();
let ty = iter.next().unwrap();
let mut iter = version.split('.');
let major = iter.next().unwrap().parse().unwrap();
let minor = iter.next().unwrap().parse().unwrap();
let ty = if ty == "OpenGL" {
glutin::Api::OpenGl
} else if ty == "OpenGL ES" {
glutin::Api::OpenGlEs
} else if ty == "WebGL" {
glutin::Api::WebGl
} else {
panic!();
};
glutin::GlRequest::Specific(ty, (major, minor))
},
Err(_) => glutin::GlRequest::Latest,
}
}
/// Builds a 2x2 unicolor texture.
pub fn build_unicolor_texture2d<F>(facade: &F, red: f32, green: f32, blue: f32)
-> glium::Texture2d where F: Facade
{
let color = ((red * 255.0) as u8, (green * 255.0) as u8, (blue * 255.0) as u8);
glium::texture::Texture2d::new(facade, vec![
vec![color, color],
vec![color, color],
]).unwrap()
}
/// Builds a vertex buffer, index buffer, and program, to draw red `(1.0, 0.0, 0.0, 1.0)` to the whole screen.
pub fn build_fullscreen_red_pipeline<F>(facade: &F) -> (glium::vertex::VertexBufferAny,
glium::index::IndexBufferAny, glium::Program) where F: Facade
{
#[derive(Copy, Clone)]
struct Vertex {
position: [f32; 2],
|
}
implement_vertex!(Vertex, position);
(
glium::VertexBuffer::new(facade, &[
Vertex { position: [-1.0, 1.0] }, Vertex { position: [1.0, 1.0] },
Vertex { position: [-1.0, -1.0] }, Vertex { position: [1.0, -1.0] },
]).unwrap().into_vertex_buffer_any(),
glium::IndexBuffer::new(facade, PrimitiveType::TriangleStrip, &[0u8, 1, 2, 3]).unwrap().into(),
program!(facade,
110 => {
vertex: "
#version 110
attribute vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
",
fragment: "
#version 110
void main() {
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
",
},
100 => {
vertex: "
#version 100
attribute lowp vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
",
fragment: "
#version 100
void main() {
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
",
},
).unwrap()
)
}
/// Builds a vertex buffer and an index buffer corresponding to a rectangle.
///
/// The vertex buffer has the "position" attribute of type "vec2".
pub fn build_rectangle_vb_ib<F>(facade: &F)
-> (glium::vertex::VertexBufferAny, glium::index::IndexBufferAny) where F: Facade
{
#[derive(Copy, Clone)]
struct Vertex {
position: [f32; 2],
}
implement_vertex!(Vertex, position);
(
glium::VertexBuffer::new(facade, &[
Vertex { position: [-1.0, 1.0] }, Vertex { position: [1.0, 1.0] },
Vertex { position: [-1.0, -1.0] }, Vertex { position: [1.0, -1.0] },
]).unwrap().into_vertex_buffer_any(),
glium::IndexBuffer::new(facade, PrimitiveType::TriangleStrip, &[0u8, 1, 2, 3]).unwrap().into(),
)
}
/// Builds a texture suitable for rendering.
pub fn build_renderable_texture<F>(facade: &F) -> glium::Texture2d where F: Facade {
glium::Texture2d::empty(facade, 1024, 1024).unwrap()
}
|
random_line_split
|
|
mod.rs
|
/*!
Test supports module.
*/
#![allow(dead_code)]
use glium::{self, glutin, DisplayBuild};
use glium::backend::Facade;
use glium::index::PrimitiveType;
use std::env;
/// Builds a headless display for tests.
pub fn build_display() -> glium::Display {
let version = parse_version();
let display = if env::var("GLIUM_HEADLESS_TESTS").is_ok() {
glutin::HeadlessRendererBuilder::new(1024, 768).with_gl_debug_flag(true)
.with_gl(version)
.build_glium().unwrap()
} else {
glutin::WindowBuilder::new().with_gl_debug_flag(true).with_visibility(false)
.with_gl(version).build_glium().unwrap()
};
display
}
/// Rebuilds an existing display.
///
/// In real applications this is used for things such as switching to fullscreen. Some things are
/// invalidated during a rebuild, and this has to be handled by glium.
pub fn rebuild_display(display: &glium::Display) {
let version = parse_version();
if env::var("GLIUM_HEADLESS_TESTS").is_ok() {
glutin::HeadlessRendererBuilder::new(1024, 768).with_gl_debug_flag(true)
.with_gl(version)
.rebuild_glium(display).unwrap();
} else {
glutin::WindowBuilder::new().with_gl_debug_flag(true).with_visibility(false)
.with_gl(version).rebuild_glium(display).unwrap();
}
}
fn parse_version() -> glutin::GlRequest {
match env::var("GLIUM_GL_VERSION") {
Ok(version) => {
// expects "OpenGL 3.3" for example
let mut iter = version.rsplitn(2,'');
let version = iter.next().unwrap();
let ty = iter.next().unwrap();
let mut iter = version.split('.');
let major = iter.next().unwrap().parse().unwrap();
let minor = iter.next().unwrap().parse().unwrap();
let ty = if ty == "OpenGL" {
glutin::Api::OpenGl
} else if ty == "OpenGL ES" {
glutin::Api::OpenGlEs
} else if ty == "WebGL" {
glutin::Api::WebGl
} else {
panic!();
};
glutin::GlRequest::Specific(ty, (major, minor))
},
Err(_) => glutin::GlRequest::Latest,
}
}
/// Builds a 2x2 unicolor texture.
pub fn build_unicolor_texture2d<F>(facade: &F, red: f32, green: f32, blue: f32)
-> glium::Texture2d where F: Facade
{
let color = ((red * 255.0) as u8, (green * 255.0) as u8, (blue * 255.0) as u8);
glium::texture::Texture2d::new(facade, vec![
vec![color, color],
vec![color, color],
]).unwrap()
}
/// Builds a vertex buffer, index buffer, and program, to draw red `(1.0, 0.0, 0.0, 1.0)` to the whole screen.
pub fn build_fullscreen_red_pipeline<F>(facade: &F) -> (glium::vertex::VertexBufferAny,
glium::index::IndexBufferAny, glium::Program) where F: Facade
|
attribute vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
",
fragment: "
#version 110
void main() {
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
",
},
100 => {
vertex: "
#version 100
attribute lowp vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
",
fragment: "
#version 100
void main() {
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
",
},
).unwrap()
)
}
/// Builds a vertex buffer and an index buffer corresponding to a rectangle.
///
/// The vertex buffer has the "position" attribute of type "vec2".
pub fn build_rectangle_vb_ib<F>(facade: &F)
-> (glium::vertex::VertexBufferAny, glium::index::IndexBufferAny) where F: Facade
{
#[derive(Copy, Clone)]
struct Vertex {
position: [f32; 2],
}
implement_vertex!(Vertex, position);
(
glium::VertexBuffer::new(facade, &[
Vertex { position: [-1.0, 1.0] }, Vertex { position: [1.0, 1.0] },
Vertex { position: [-1.0, -1.0] }, Vertex { position: [1.0, -1.0] },
]).unwrap().into_vertex_buffer_any(),
glium::IndexBuffer::new(facade, PrimitiveType::TriangleStrip, &[0u8, 1, 2, 3]).unwrap().into(),
)
}
/// Builds a texture suitable for rendering.
pub fn build_renderable_texture<F>(facade: &F) -> glium::Texture2d where F: Facade {
glium::Texture2d::empty(facade, 1024, 1024).unwrap()
}
|
{
#[derive(Copy, Clone)]
struct Vertex {
position: [f32; 2],
}
implement_vertex!(Vertex, position);
(
glium::VertexBuffer::new(facade, &[
Vertex { position: [-1.0, 1.0] }, Vertex { position: [1.0, 1.0] },
Vertex { position: [-1.0, -1.0] }, Vertex { position: [1.0, -1.0] },
]).unwrap().into_vertex_buffer_any(),
glium::IndexBuffer::new(facade, PrimitiveType::TriangleStrip, &[0u8, 1, 2, 3]).unwrap().into(),
program!(facade,
110 => {
vertex: "
#version 110
|
identifier_body
|
slog_support.rs
|
// Copyright 2013-2014 The Rust Project Developers.
// Copyright 2018 The Uuid Project Developers.
//
// See the COPYRIGHT file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
use slog;
impl slog::Value for Uuid {
fn serialize(
&self,
_: &slog::Record<'_>,
key: slog::Key,
serializer: &mut dyn slog::Serializer,
) -> Result<(), slog::Error> {
serializer.emit_arguments(key, &format_args!("{}", self))
}
}
#[cfg(test)]
mod tests {
#[test]
fn test_slog_kv() {
use crate::test_util;
use slog;
use slog::{crit, Drain};
let root = slog::Logger::root(slog::Discard.fuse(), slog::o!());
let u1 = test_util::new();
crit!(root, "test"; "u1" => u1);
}
}
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use crate::prelude::*;
|
random_line_split
|
slog_support.rs
|
// Copyright 2013-2014 The Rust Project Developers.
// Copyright 2018 The Uuid Project Developers.
//
// See the COPYRIGHT file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use crate::prelude::*;
use slog;
impl slog::Value for Uuid {
fn serialize(
&self,
_: &slog::Record<'_>,
key: slog::Key,
serializer: &mut dyn slog::Serializer,
) -> Result<(), slog::Error> {
serializer.emit_arguments(key, &format_args!("{}", self))
}
}
#[cfg(test)]
mod tests {
#[test]
fn
|
() {
use crate::test_util;
use slog;
use slog::{crit, Drain};
let root = slog::Logger::root(slog::Discard.fuse(), slog::o!());
let u1 = test_util::new();
crit!(root, "test"; "u1" => u1);
}
}
|
test_slog_kv
|
identifier_name
|
slog_support.rs
|
// Copyright 2013-2014 The Rust Project Developers.
// Copyright 2018 The Uuid Project Developers.
//
// See the COPYRIGHT file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use crate::prelude::*;
use slog;
impl slog::Value for Uuid {
fn serialize(
&self,
_: &slog::Record<'_>,
key: slog::Key,
serializer: &mut dyn slog::Serializer,
) -> Result<(), slog::Error> {
serializer.emit_arguments(key, &format_args!("{}", self))
}
}
#[cfg(test)]
mod tests {
#[test]
fn test_slog_kv()
|
}
|
{
use crate::test_util;
use slog;
use slog::{crit, Drain};
let root = slog::Logger::root(slog::Discard.fuse(), slog::o!());
let u1 = test_util::new();
crit!(root, "test"; "u1" => u1);
}
|
identifier_body
|
alignment-gep-tup-like-1.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)]
#![allow(dead_code)]
#![feature(box_syntax)]
struct pair<A,B> {
a: A, b: B
}
trait Invokable<A> {
fn f(&self) -> (A, u16);
}
struct Invoker<A> {
a: A,
b: u16,
}
impl<A:Clone> Invokable<A> for Invoker<A> {
fn f(&self) -> (A, u16) {
(self.a.clone(), self.b)
}
}
fn f<A:Clone +'static>(a: A, b: u16) -> Box<Invokable<A>+'static> {
box Invoker {
a: a,
b: b,
|
pub fn main() {
let (a, b) = f(22_u64, 44u16).f();
println!("a={} b={}", a, b);
assert_eq!(a, 22u64);
assert_eq!(b, 44u16);
}
|
} as (Box<Invokable<A>+'static>)
}
|
random_line_split
|
alignment-gep-tup-like-1.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)]
#![allow(dead_code)]
#![feature(box_syntax)]
struct
|
<A,B> {
a: A, b: B
}
trait Invokable<A> {
fn f(&self) -> (A, u16);
}
struct Invoker<A> {
a: A,
b: u16,
}
impl<A:Clone> Invokable<A> for Invoker<A> {
fn f(&self) -> (A, u16) {
(self.a.clone(), self.b)
}
}
fn f<A:Clone +'static>(a: A, b: u16) -> Box<Invokable<A>+'static> {
box Invoker {
a: a,
b: b,
} as (Box<Invokable<A>+'static>)
}
pub fn main() {
let (a, b) = f(22_u64, 44u16).f();
println!("a={} b={}", a, b);
assert_eq!(a, 22u64);
assert_eq!(b, 44u16);
}
|
pair
|
identifier_name
|
alignment-gep-tup-like-1.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)]
#![allow(dead_code)]
#![feature(box_syntax)]
struct pair<A,B> {
a: A, b: B
}
trait Invokable<A> {
fn f(&self) -> (A, u16);
}
struct Invoker<A> {
a: A,
b: u16,
}
impl<A:Clone> Invokable<A> for Invoker<A> {
fn f(&self) -> (A, u16)
|
}
fn f<A:Clone +'static>(a: A, b: u16) -> Box<Invokable<A>+'static> {
box Invoker {
a: a,
b: b,
} as (Box<Invokable<A>+'static>)
}
pub fn main() {
let (a, b) = f(22_u64, 44u16).f();
println!("a={} b={}", a, b);
assert_eq!(a, 22u64);
assert_eq!(b, 44u16);
}
|
{
(self.a.clone(), self.b)
}
|
identifier_body
|
manifest.rs
|
use crate::Builder;
use serde::{Serialize, Serializer};
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
#[derive(Serialize)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct Manifest {
pub(crate) manifest_version: String,
pub(crate) date: String,
pub(crate) pkg: BTreeMap<String, Package>,
pub(crate) artifacts: BTreeMap<String, Artifact>,
pub(crate) renames: BTreeMap<String, Rename>,
pub(crate) profiles: BTreeMap<String, Vec<String>>,
}
impl Manifest {
pub(crate) fn add_artifact(&mut self, name: &str, f: impl FnOnce(&mut Artifact)) {
let mut artifact = Artifact { target: BTreeMap::new() };
f(&mut artifact);
self.artifacts.insert(name.to_string(), artifact);
}
}
#[derive(Serialize)]
pub(crate) struct Package {
pub(crate) version: String,
pub(crate) git_commit_hash: Option<String>,
pub(crate) target: BTreeMap<String, Target>,
}
#[derive(Serialize)]
pub(crate) struct Rename {
pub(crate) to: String,
}
#[derive(Serialize)]
pub(crate) struct Artifact {
pub(crate) target: BTreeMap<String, Vec<ArtifactFile>>,
}
impl Artifact {
pub(crate) fn add_file(&mut self, builder: &mut Builder, target: &str, path: &str) {
if let Some(path) = record_shipped_file(builder, builder.input.join(path)) {
self.target.entry(target.into()).or_insert_with(Vec::new).push(ArtifactFile {
url: builder.url(&path),
hash_sha256: FileHash::Missing(path),
});
}
}
pub(crate) fn add_tarball(&mut self, builder: &mut Builder, target: &str, base_path: &str) {
let files = self.target.entry(target.into()).or_insert_with(Vec::new);
let base_path = builder.input.join(base_path);
for compression in &["gz", "xz"] {
if let Some(tarball) = tarball_variant(builder, &base_path, compression) {
files.push(ArtifactFile {
url: builder.url(&tarball),
hash_sha256: FileHash::Missing(tarball),
});
}
}
}
}
#[derive(Serialize)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct ArtifactFile {
pub(crate) url: String,
pub(crate) hash_sha256: FileHash,
}
#[derive(Serialize, Default)]
pub(crate) struct Target {
pub(crate) available: bool,
pub(crate) url: Option<String>,
pub(crate) hash: Option<FileHash>,
pub(crate) xz_url: Option<String>,
pub(crate) xz_hash: Option<FileHash>,
pub(crate) components: Option<Vec<Component>>,
pub(crate) extensions: Option<Vec<Component>>,
}
impl Target {
pub(crate) fn from_compressed_tar(builder: &mut Builder, base_path: &str) -> Self {
let base_path = builder.input.join(base_path);
let gz = tarball_variant(builder, &base_path, "gz");
let xz = tarball_variant(builder, &base_path, "xz");
if gz.is_none() {
return Self::unavailable();
}
Self {
available: true,
components: None,
extensions: None,
//.gz
url: gz.as_ref().map(|path| builder.url(path)),
hash: gz.map(FileHash::Missing),
|
}
}
pub(crate) fn unavailable() -> Self {
Self::default()
}
}
#[derive(Serialize)]
pub(crate) struct Component {
pub(crate) pkg: String,
pub(crate) target: String,
}
impl Component {
pub(crate) fn from_str(pkg: &str, target: &str) -> Self {
Self { pkg: pkg.to_string(), target: target.to_string() }
}
}
#[allow(unused)]
pub(crate) enum FileHash {
Missing(PathBuf),
Present(String),
}
impl Serialize for FileHash {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
match self {
FileHash::Missing(path) => Err(serde::ser::Error::custom(format!(
"can't serialize a missing hash for file {}",
path.display()
))),
FileHash::Present(inner) => inner.serialize(serializer),
}
}
}
fn tarball_variant(builder: &mut Builder, base: &Path, ext: &str) -> Option<PathBuf> {
let mut path = base.to_path_buf();
path.set_extension(ext);
record_shipped_file(builder, path)
}
fn record_shipped_file(builder: &mut Builder, path: PathBuf) -> Option<PathBuf> {
if path.is_file() {
builder.shipped_files.insert(
path.file_name()
.expect("missing filename")
.to_str()
.expect("non-utf-8 filename")
.to_string(),
);
Some(path)
} else {
None
}
}
pub(crate) fn visit_file_hashes(manifest: &mut Manifest, mut f: impl FnMut(&mut FileHash)) {
for pkg in manifest.pkg.values_mut() {
for target in pkg.target.values_mut() {
if let Some(hash) = &mut target.hash {
f(hash);
}
if let Some(hash) = &mut target.xz_hash {
f(hash);
}
}
}
for artifact in manifest.artifacts.values_mut() {
for target in artifact.target.values_mut() {
for file in target {
f(&mut file.hash_sha256);
}
}
}
}
|
// .xz
xz_url: xz.as_ref().map(|path| builder.url(path)),
xz_hash: xz.map(FileHash::Missing),
|
random_line_split
|
manifest.rs
|
use crate::Builder;
use serde::{Serialize, Serializer};
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
#[derive(Serialize)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct Manifest {
pub(crate) manifest_version: String,
pub(crate) date: String,
pub(crate) pkg: BTreeMap<String, Package>,
pub(crate) artifacts: BTreeMap<String, Artifact>,
pub(crate) renames: BTreeMap<String, Rename>,
pub(crate) profiles: BTreeMap<String, Vec<String>>,
}
impl Manifest {
pub(crate) fn add_artifact(&mut self, name: &str, f: impl FnOnce(&mut Artifact)) {
let mut artifact = Artifact { target: BTreeMap::new() };
f(&mut artifact);
self.artifacts.insert(name.to_string(), artifact);
}
}
#[derive(Serialize)]
pub(crate) struct
|
{
pub(crate) version: String,
pub(crate) git_commit_hash: Option<String>,
pub(crate) target: BTreeMap<String, Target>,
}
#[derive(Serialize)]
pub(crate) struct Rename {
pub(crate) to: String,
}
#[derive(Serialize)]
pub(crate) struct Artifact {
pub(crate) target: BTreeMap<String, Vec<ArtifactFile>>,
}
impl Artifact {
pub(crate) fn add_file(&mut self, builder: &mut Builder, target: &str, path: &str) {
if let Some(path) = record_shipped_file(builder, builder.input.join(path)) {
self.target.entry(target.into()).or_insert_with(Vec::new).push(ArtifactFile {
url: builder.url(&path),
hash_sha256: FileHash::Missing(path),
});
}
}
pub(crate) fn add_tarball(&mut self, builder: &mut Builder, target: &str, base_path: &str) {
let files = self.target.entry(target.into()).or_insert_with(Vec::new);
let base_path = builder.input.join(base_path);
for compression in &["gz", "xz"] {
if let Some(tarball) = tarball_variant(builder, &base_path, compression) {
files.push(ArtifactFile {
url: builder.url(&tarball),
hash_sha256: FileHash::Missing(tarball),
});
}
}
}
}
#[derive(Serialize)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct ArtifactFile {
pub(crate) url: String,
pub(crate) hash_sha256: FileHash,
}
#[derive(Serialize, Default)]
pub(crate) struct Target {
pub(crate) available: bool,
pub(crate) url: Option<String>,
pub(crate) hash: Option<FileHash>,
pub(crate) xz_url: Option<String>,
pub(crate) xz_hash: Option<FileHash>,
pub(crate) components: Option<Vec<Component>>,
pub(crate) extensions: Option<Vec<Component>>,
}
impl Target {
pub(crate) fn from_compressed_tar(builder: &mut Builder, base_path: &str) -> Self {
let base_path = builder.input.join(base_path);
let gz = tarball_variant(builder, &base_path, "gz");
let xz = tarball_variant(builder, &base_path, "xz");
if gz.is_none() {
return Self::unavailable();
}
Self {
available: true,
components: None,
extensions: None,
//.gz
url: gz.as_ref().map(|path| builder.url(path)),
hash: gz.map(FileHash::Missing),
//.xz
xz_url: xz.as_ref().map(|path| builder.url(path)),
xz_hash: xz.map(FileHash::Missing),
}
}
pub(crate) fn unavailable() -> Self {
Self::default()
}
}
#[derive(Serialize)]
pub(crate) struct Component {
pub(crate) pkg: String,
pub(crate) target: String,
}
impl Component {
pub(crate) fn from_str(pkg: &str, target: &str) -> Self {
Self { pkg: pkg.to_string(), target: target.to_string() }
}
}
#[allow(unused)]
pub(crate) enum FileHash {
Missing(PathBuf),
Present(String),
}
impl Serialize for FileHash {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
match self {
FileHash::Missing(path) => Err(serde::ser::Error::custom(format!(
"can't serialize a missing hash for file {}",
path.display()
))),
FileHash::Present(inner) => inner.serialize(serializer),
}
}
}
fn tarball_variant(builder: &mut Builder, base: &Path, ext: &str) -> Option<PathBuf> {
let mut path = base.to_path_buf();
path.set_extension(ext);
record_shipped_file(builder, path)
}
fn record_shipped_file(builder: &mut Builder, path: PathBuf) -> Option<PathBuf> {
if path.is_file() {
builder.shipped_files.insert(
path.file_name()
.expect("missing filename")
.to_str()
.expect("non-utf-8 filename")
.to_string(),
);
Some(path)
} else {
None
}
}
pub(crate) fn visit_file_hashes(manifest: &mut Manifest, mut f: impl FnMut(&mut FileHash)) {
for pkg in manifest.pkg.values_mut() {
for target in pkg.target.values_mut() {
if let Some(hash) = &mut target.hash {
f(hash);
}
if let Some(hash) = &mut target.xz_hash {
f(hash);
}
}
}
for artifact in manifest.artifacts.values_mut() {
for target in artifact.target.values_mut() {
for file in target {
f(&mut file.hash_sha256);
}
}
}
}
|
Package
|
identifier_name
|
manifest.rs
|
use crate::Builder;
use serde::{Serialize, Serializer};
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
#[derive(Serialize)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct Manifest {
pub(crate) manifest_version: String,
pub(crate) date: String,
pub(crate) pkg: BTreeMap<String, Package>,
pub(crate) artifacts: BTreeMap<String, Artifact>,
pub(crate) renames: BTreeMap<String, Rename>,
pub(crate) profiles: BTreeMap<String, Vec<String>>,
}
impl Manifest {
pub(crate) fn add_artifact(&mut self, name: &str, f: impl FnOnce(&mut Artifact)) {
let mut artifact = Artifact { target: BTreeMap::new() };
f(&mut artifact);
self.artifacts.insert(name.to_string(), artifact);
}
}
#[derive(Serialize)]
pub(crate) struct Package {
pub(crate) version: String,
pub(crate) git_commit_hash: Option<String>,
pub(crate) target: BTreeMap<String, Target>,
}
#[derive(Serialize)]
pub(crate) struct Rename {
pub(crate) to: String,
}
#[derive(Serialize)]
pub(crate) struct Artifact {
pub(crate) target: BTreeMap<String, Vec<ArtifactFile>>,
}
impl Artifact {
pub(crate) fn add_file(&mut self, builder: &mut Builder, target: &str, path: &str) {
if let Some(path) = record_shipped_file(builder, builder.input.join(path)) {
self.target.entry(target.into()).or_insert_with(Vec::new).push(ArtifactFile {
url: builder.url(&path),
hash_sha256: FileHash::Missing(path),
});
}
}
pub(crate) fn add_tarball(&mut self, builder: &mut Builder, target: &str, base_path: &str) {
let files = self.target.entry(target.into()).or_insert_with(Vec::new);
let base_path = builder.input.join(base_path);
for compression in &["gz", "xz"] {
if let Some(tarball) = tarball_variant(builder, &base_path, compression)
|
}
}
}
#[derive(Serialize)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct ArtifactFile {
pub(crate) url: String,
pub(crate) hash_sha256: FileHash,
}
#[derive(Serialize, Default)]
pub(crate) struct Target {
pub(crate) available: bool,
pub(crate) url: Option<String>,
pub(crate) hash: Option<FileHash>,
pub(crate) xz_url: Option<String>,
pub(crate) xz_hash: Option<FileHash>,
pub(crate) components: Option<Vec<Component>>,
pub(crate) extensions: Option<Vec<Component>>,
}
impl Target {
pub(crate) fn from_compressed_tar(builder: &mut Builder, base_path: &str) -> Self {
let base_path = builder.input.join(base_path);
let gz = tarball_variant(builder, &base_path, "gz");
let xz = tarball_variant(builder, &base_path, "xz");
if gz.is_none() {
return Self::unavailable();
}
Self {
available: true,
components: None,
extensions: None,
//.gz
url: gz.as_ref().map(|path| builder.url(path)),
hash: gz.map(FileHash::Missing),
//.xz
xz_url: xz.as_ref().map(|path| builder.url(path)),
xz_hash: xz.map(FileHash::Missing),
}
}
pub(crate) fn unavailable() -> Self {
Self::default()
}
}
#[derive(Serialize)]
pub(crate) struct Component {
pub(crate) pkg: String,
pub(crate) target: String,
}
impl Component {
pub(crate) fn from_str(pkg: &str, target: &str) -> Self {
Self { pkg: pkg.to_string(), target: target.to_string() }
}
}
#[allow(unused)]
pub(crate) enum FileHash {
Missing(PathBuf),
Present(String),
}
impl Serialize for FileHash {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
match self {
FileHash::Missing(path) => Err(serde::ser::Error::custom(format!(
"can't serialize a missing hash for file {}",
path.display()
))),
FileHash::Present(inner) => inner.serialize(serializer),
}
}
}
fn tarball_variant(builder: &mut Builder, base: &Path, ext: &str) -> Option<PathBuf> {
let mut path = base.to_path_buf();
path.set_extension(ext);
record_shipped_file(builder, path)
}
fn record_shipped_file(builder: &mut Builder, path: PathBuf) -> Option<PathBuf> {
if path.is_file() {
builder.shipped_files.insert(
path.file_name()
.expect("missing filename")
.to_str()
.expect("non-utf-8 filename")
.to_string(),
);
Some(path)
} else {
None
}
}
pub(crate) fn visit_file_hashes(manifest: &mut Manifest, mut f: impl FnMut(&mut FileHash)) {
for pkg in manifest.pkg.values_mut() {
for target in pkg.target.values_mut() {
if let Some(hash) = &mut target.hash {
f(hash);
}
if let Some(hash) = &mut target.xz_hash {
f(hash);
}
}
}
for artifact in manifest.artifacts.values_mut() {
for target in artifact.target.values_mut() {
for file in target {
f(&mut file.hash_sha256);
}
}
}
}
|
{
files.push(ArtifactFile {
url: builder.url(&tarball),
hash_sha256: FileHash::Missing(tarball),
});
}
|
conditional_block
|
match-vec-unreachable.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
let x: Vec<(int, int)> = Vec::new();
let x: &[(int, int)] = x.as_slice();
match x {
[a, (2, 3), _] => (),
[(1, 2), (2, 3), b] => (), //~ ERROR unreachable pattern
_ => ()
}
let x: Vec<String> = vec!["foo".to_string(),
"bar".to_string(),
"baz".to_string()];
let x: &[String] = x.as_slice();
match x {
[a, _, _,..] => { println!("{}", a); }
[_, _, _, _, _] => { } //~ ERROR unreachable pattern
_ =>
|
}
let x: Vec<char> = vec!('a', 'b', 'c');
let x: &[char] = x.as_slice();
match x {
['a', 'b', 'c', _tail..] => {}
['a', 'b', 'c'] => {} //~ ERROR unreachable pattern
_ => {}
}
}
|
{ }
|
conditional_block
|
match-vec-unreachable.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
[(1, 2), (2, 3), b] => (), //~ ERROR unreachable pattern
_ => ()
}
let x: Vec<String> = vec!["foo".to_string(),
"bar".to_string(),
"baz".to_string()];
let x: &[String] = x.as_slice();
match x {
[a, _, _,..] => { println!("{}", a); }
[_, _, _, _, _] => { } //~ ERROR unreachable pattern
_ => { }
}
let x: Vec<char> = vec!('a', 'b', 'c');
let x: &[char] = x.as_slice();
match x {
['a', 'b', 'c', _tail..] => {}
['a', 'b', 'c'] => {} //~ ERROR unreachable pattern
_ => {}
}
}
|
fn main() {
let x: Vec<(int, int)> = Vec::new();
let x: &[(int, int)] = x.as_slice();
match x {
[a, (2, 3), _] => (),
|
random_line_split
|
match-vec-unreachable.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main()
|
let x: &[char] = x.as_slice();
match x {
['a', 'b', 'c', _tail..] => {}
['a', 'b', 'c'] => {} //~ ERROR unreachable pattern
_ => {}
}
}
|
{
let x: Vec<(int, int)> = Vec::new();
let x: &[(int, int)] = x.as_slice();
match x {
[a, (2, 3), _] => (),
[(1, 2), (2, 3), b] => (), //~ ERROR unreachable pattern
_ => ()
}
let x: Vec<String> = vec!["foo".to_string(),
"bar".to_string(),
"baz".to_string()];
let x: &[String] = x.as_slice();
match x {
[a, _, _, ..] => { println!("{}", a); }
[_, _, _, _, _] => { } //~ ERROR unreachable pattern
_ => { }
}
let x: Vec<char> = vec!('a', 'b', 'c');
|
identifier_body
|
match-vec-unreachable.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn
|
() {
let x: Vec<(int, int)> = Vec::new();
let x: &[(int, int)] = x.as_slice();
match x {
[a, (2, 3), _] => (),
[(1, 2), (2, 3), b] => (), //~ ERROR unreachable pattern
_ => ()
}
let x: Vec<String> = vec!["foo".to_string(),
"bar".to_string(),
"baz".to_string()];
let x: &[String] = x.as_slice();
match x {
[a, _, _,..] => { println!("{}", a); }
[_, _, _, _, _] => { } //~ ERROR unreachable pattern
_ => { }
}
let x: Vec<char> = vec!('a', 'b', 'c');
let x: &[char] = x.as_slice();
match x {
['a', 'b', 'c', _tail..] => {}
['a', 'b', 'c'] => {} //~ ERROR unreachable pattern
_ => {}
}
}
|
main
|
identifier_name
|
main.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `servo` test application.
//!
//! Creates a `Browser` instance with a simple implementation of
//! the compositor's `WindowMethods` to create a working web browser.
//!
//! This browser's implementation of `WindowMethods` is built on top
//! of [glutin], the cross-platform OpenGL utility and windowing
//! library.
//!
//! For the engine itself look next door in lib.rs.
//!
//! [glutin]: https://github.com/tomaka/glutin
#![feature(start)]
#[cfg(target_os = "android")]
#[macro_use]
extern crate android_glue;
// The window backed by glutin
extern crate glutin_app as app;
extern crate env_logger;
#[cfg(target_os = "android")]
extern crate libc;
#[macro_use]
extern crate log;
// The Servo engine
extern crate servo;
extern crate time;
extern crate gleam;
extern crate offscreen_gl_context;
use gleam::gl;
use offscreen_gl_context::GLContext;
use servo::Browser;
use servo::compositing::windowing::WindowEvent;
use servo::net_traits::hosts;
use servo::util::opts;
use std::rc::Rc;
#[cfg(not(target_os = "android"))]
fn load_gl_when_headless() {
gl::load_with(|addr| GLContext::get_proc_address(addr) as *const _);
}
#[cfg(target_os = "android")]
fn load_gl_when_headless() {}
fn main() {
// Parse the command line options and store them globally
opts::from_cmdline_args(&*args());
if opts::get().is_running_problem_test && ::std::env::var("RUST_LOG").is_err() {
::std::env::set_var("RUST_LOG", "compositing::constellation");
}
env_logger::init().unwrap();
setup_logging();
// Possibly interpret the `HOST_FILE` environment variable
hosts::global_init();
let window = if opts::get().headless {
// Load gl functions even when in headless mode,
// to avoid crashing with webgl
load_gl_when_headless();
None
} else {
Some(app::create_window(None))
};
// Our wrapper around `Browser` that also implements some
// callbacks required by the glutin window implementation.
let mut browser = BrowserWrapper {
browser: Browser::new(window.clone()),
};
maybe_register_glutin_resize_handler(&window, &mut browser);
browser.browser.handle_events(vec![WindowEvent::InitializeCompositing]);
// Feed events from the window to the browser until the browser
// says to stop.
loop {
let should_continue = match window {
None => browser.browser.handle_events(Vec::new()),
Some(ref window) => browser.browser.handle_events(window.wait_events()),
};
if!should_continue {
break
}
};
maybe_unregister_glutin_resize_handler(&window);
}
fn maybe_register_glutin_resize_handler(window: &Option<Rc<app::window::Window>>,
browser: &mut BrowserWrapper) {
match *window {
None => {}
Some(ref window) => {
unsafe {
window.set_nested_event_loop_listener(browser);
}
}
}
}
fn
|
(window: &Option<Rc<app::window::Window>>) {
match *window {
None => {}
Some(ref window) => {
unsafe {
window.remove_nested_event_loop_listener();
}
}
}
}
struct BrowserWrapper {
browser: Browser,
}
impl app::NestedEventLoopListener for BrowserWrapper {
fn handle_event_from_nested_event_loop(&mut self, event: WindowEvent) -> bool {
let is_resize = match event {
WindowEvent::Resize(..) => true,
_ => false,
};
if!self.browser.handle_events(vec![event]) {
return false
}
if is_resize {
self.browser.repaint_synchronously()
}
true
}
}
#[cfg(target_os = "android")]
fn setup_logging() {
android::setup_logging();
}
#[cfg(not(target_os = "android"))]
fn setup_logging() {
}
#[cfg(target_os = "android")]
/// Attempt to read parameters from a file since they are not passed to us in Android environments.
/// The first line should be the "servo" argument and the last should be the URL to load.
/// Blank lines and those beginning with a '#' are ignored.
/// Each line should be a separate parameter as would be parsed by the shell.
/// For example, "servo -p 10 http://en.wikipedia.org/wiki/Rust" would take 4 lines.
fn args() -> Vec<String> {
use std::error::Error;
use std::fs::File;
use std::io::{BufRead, BufReader};
const PARAMS_FILE: &'static str = "/sdcard/servo/android_params";
match File::open(PARAMS_FILE) {
Ok(f) => {
let mut vec = Vec::new();
let file = BufReader::new(&f);
for line in file.lines() {
let l = line.unwrap().trim().to_owned();
// ignore blank lines and those that start with a '#'
match l.is_empty() || l.as_bytes()[0] == b'#' {
true => (),
false => vec.push(l),
}
}
vec
},
Err(e) => {
debug!("Failed to open params file '{}': {}", PARAMS_FILE, Error::description(&e));
vec![
"servo".to_owned(),
"http://en.wikipedia.org/wiki/Rust".to_owned()
]
},
}
}
#[cfg(not(target_os = "android"))]
fn args() -> Vec<String> {
use std::env;
env::args().collect()
}
// This extern definition ensures that the linker will not discard
// the static native lib bits, which are brought in from the NDK libraries
// we link in from build.rs.
#[cfg(target_os = "android")]
extern {
fn app_dummy() -> libc::c_void;
}
// This macro must be used at toplevel because it defines a nested
// module, but macros can only accept identifiers - not paths -
// preventing the expansion of this macro within the android module
// without use of an additionl stub method or other hackery.
#[cfg(target_os = "android")]
android_start!(main);
#[cfg(target_os = "android")]
mod android {
extern crate libc;
extern crate android_glue;
use self::libc::c_int;
use std::borrow::ToOwned;
pub fn setup_logging() {
use self::libc::consts::os::posix88::{STDERR_FILENO, STDOUT_FILENO};
//use std::env;
//env::set_var("RUST_LOG", "servo,gfx,msg,util,layers,js,std,rt,extra");
redirect_output(STDERR_FILENO);
redirect_output(STDOUT_FILENO);
unsafe { super::app_dummy(); }
}
struct FilePtr(*mut self::libc::types::common::c95::FILE);
unsafe impl Send for FilePtr {}
fn redirect_output(file_no: c_int) {
use self::libc::funcs::c95::stdio::fgets;
use self::libc::funcs::posix88::stdio::fdopen;
use self::libc::funcs::posix88::unistd::{pipe, dup2};
use servo::util::task::spawn_named;
use std::ffi::CStr;
use std::ffi::CString;
use std::str::from_utf8;
unsafe {
let mut pipes: [c_int; 2] = [ 0, 0 ];
pipe(pipes.as_mut_ptr());
dup2(pipes[1], file_no);
let mode = CString::new("r").unwrap();
let input_file = FilePtr(fdopen(pipes[0], mode.as_ptr()));
spawn_named("android-logger".to_owned(), move || {
static READ_SIZE: usize = 1024;
let mut read_buffer = vec![0; READ_SIZE];
let FilePtr(input_file) = input_file;
loop {
fgets(read_buffer.as_mut_ptr(), (read_buffer.len() as i32)-1, input_file);
let c_str = CStr::from_ptr(read_buffer.as_ptr());
let slice = from_utf8(c_str.to_bytes()).unwrap();
android_glue::write_log(slice);
}
});
}
}
}
|
maybe_unregister_glutin_resize_handler
|
identifier_name
|
main.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `servo` test application.
//!
//! Creates a `Browser` instance with a simple implementation of
//! the compositor's `WindowMethods` to create a working web browser.
//!
//! This browser's implementation of `WindowMethods` is built on top
//! of [glutin], the cross-platform OpenGL utility and windowing
//! library.
//!
//! For the engine itself look next door in lib.rs.
//!
//! [glutin]: https://github.com/tomaka/glutin
#![feature(start)]
#[cfg(target_os = "android")]
#[macro_use]
extern crate android_glue;
// The window backed by glutin
extern crate glutin_app as app;
extern crate env_logger;
#[cfg(target_os = "android")]
extern crate libc;
#[macro_use]
extern crate log;
// The Servo engine
extern crate servo;
extern crate time;
extern crate gleam;
extern crate offscreen_gl_context;
use gleam::gl;
use offscreen_gl_context::GLContext;
use servo::Browser;
use servo::compositing::windowing::WindowEvent;
use servo::net_traits::hosts;
use servo::util::opts;
use std::rc::Rc;
#[cfg(not(target_os = "android"))]
fn load_gl_when_headless() {
gl::load_with(|addr| GLContext::get_proc_address(addr) as *const _);
}
|
fn main() {
// Parse the command line options and store them globally
opts::from_cmdline_args(&*args());
if opts::get().is_running_problem_test && ::std::env::var("RUST_LOG").is_err() {
::std::env::set_var("RUST_LOG", "compositing::constellation");
}
env_logger::init().unwrap();
setup_logging();
// Possibly interpret the `HOST_FILE` environment variable
hosts::global_init();
let window = if opts::get().headless {
// Load gl functions even when in headless mode,
// to avoid crashing with webgl
load_gl_when_headless();
None
} else {
Some(app::create_window(None))
};
// Our wrapper around `Browser` that also implements some
// callbacks required by the glutin window implementation.
let mut browser = BrowserWrapper {
browser: Browser::new(window.clone()),
};
maybe_register_glutin_resize_handler(&window, &mut browser);
browser.browser.handle_events(vec![WindowEvent::InitializeCompositing]);
// Feed events from the window to the browser until the browser
// says to stop.
loop {
let should_continue = match window {
None => browser.browser.handle_events(Vec::new()),
Some(ref window) => browser.browser.handle_events(window.wait_events()),
};
if!should_continue {
break
}
};
maybe_unregister_glutin_resize_handler(&window);
}
fn maybe_register_glutin_resize_handler(window: &Option<Rc<app::window::Window>>,
browser: &mut BrowserWrapper) {
match *window {
None => {}
Some(ref window) => {
unsafe {
window.set_nested_event_loop_listener(browser);
}
}
}
}
fn maybe_unregister_glutin_resize_handler(window: &Option<Rc<app::window::Window>>) {
match *window {
None => {}
Some(ref window) => {
unsafe {
window.remove_nested_event_loop_listener();
}
}
}
}
struct BrowserWrapper {
browser: Browser,
}
impl app::NestedEventLoopListener for BrowserWrapper {
fn handle_event_from_nested_event_loop(&mut self, event: WindowEvent) -> bool {
let is_resize = match event {
WindowEvent::Resize(..) => true,
_ => false,
};
if!self.browser.handle_events(vec![event]) {
return false
}
if is_resize {
self.browser.repaint_synchronously()
}
true
}
}
#[cfg(target_os = "android")]
fn setup_logging() {
android::setup_logging();
}
#[cfg(not(target_os = "android"))]
fn setup_logging() {
}
#[cfg(target_os = "android")]
/// Attempt to read parameters from a file since they are not passed to us in Android environments.
/// The first line should be the "servo" argument and the last should be the URL to load.
/// Blank lines and those beginning with a '#' are ignored.
/// Each line should be a separate parameter as would be parsed by the shell.
/// For example, "servo -p 10 http://en.wikipedia.org/wiki/Rust" would take 4 lines.
fn args() -> Vec<String> {
use std::error::Error;
use std::fs::File;
use std::io::{BufRead, BufReader};
const PARAMS_FILE: &'static str = "/sdcard/servo/android_params";
match File::open(PARAMS_FILE) {
Ok(f) => {
let mut vec = Vec::new();
let file = BufReader::new(&f);
for line in file.lines() {
let l = line.unwrap().trim().to_owned();
// ignore blank lines and those that start with a '#'
match l.is_empty() || l.as_bytes()[0] == b'#' {
true => (),
false => vec.push(l),
}
}
vec
},
Err(e) => {
debug!("Failed to open params file '{}': {}", PARAMS_FILE, Error::description(&e));
vec![
"servo".to_owned(),
"http://en.wikipedia.org/wiki/Rust".to_owned()
]
},
}
}
#[cfg(not(target_os = "android"))]
fn args() -> Vec<String> {
use std::env;
env::args().collect()
}
// This extern definition ensures that the linker will not discard
// the static native lib bits, which are brought in from the NDK libraries
// we link in from build.rs.
#[cfg(target_os = "android")]
extern {
fn app_dummy() -> libc::c_void;
}
// This macro must be used at toplevel because it defines a nested
// module, but macros can only accept identifiers - not paths -
// preventing the expansion of this macro within the android module
// without use of an additionl stub method or other hackery.
#[cfg(target_os = "android")]
android_start!(main);
#[cfg(target_os = "android")]
mod android {
extern crate libc;
extern crate android_glue;
use self::libc::c_int;
use std::borrow::ToOwned;
pub fn setup_logging() {
use self::libc::consts::os::posix88::{STDERR_FILENO, STDOUT_FILENO};
//use std::env;
//env::set_var("RUST_LOG", "servo,gfx,msg,util,layers,js,std,rt,extra");
redirect_output(STDERR_FILENO);
redirect_output(STDOUT_FILENO);
unsafe { super::app_dummy(); }
}
struct FilePtr(*mut self::libc::types::common::c95::FILE);
unsafe impl Send for FilePtr {}
fn redirect_output(file_no: c_int) {
use self::libc::funcs::c95::stdio::fgets;
use self::libc::funcs::posix88::stdio::fdopen;
use self::libc::funcs::posix88::unistd::{pipe, dup2};
use servo::util::task::spawn_named;
use std::ffi::CStr;
use std::ffi::CString;
use std::str::from_utf8;
unsafe {
let mut pipes: [c_int; 2] = [ 0, 0 ];
pipe(pipes.as_mut_ptr());
dup2(pipes[1], file_no);
let mode = CString::new("r").unwrap();
let input_file = FilePtr(fdopen(pipes[0], mode.as_ptr()));
spawn_named("android-logger".to_owned(), move || {
static READ_SIZE: usize = 1024;
let mut read_buffer = vec![0; READ_SIZE];
let FilePtr(input_file) = input_file;
loop {
fgets(read_buffer.as_mut_ptr(), (read_buffer.len() as i32)-1, input_file);
let c_str = CStr::from_ptr(read_buffer.as_ptr());
let slice = from_utf8(c_str.to_bytes()).unwrap();
android_glue::write_log(slice);
}
});
}
}
}
|
#[cfg(target_os = "android")]
fn load_gl_when_headless() {}
|
random_line_split
|
xhci.rs
|
use alloc::boxed::Box;
//use arch::memory::*;
use drivers::pci::config::PciConfig;
//use core::mem::size_of;
use fs::KScheme;
#[repr(packed)]
struct Ste {
pub ptr: u64,
pub length: u64,
}
#[repr(packed)]
struct Trb {
pub data: u64,
pub status: u32,
pub control: u32,
}
impl Trb {
pub fn new() -> Self {
Trb {
data: 0,
status: 0,
control: 0,
}
}
pub fn from_type(trb_type: u32) -> Self
|
}
pub struct Xhci {
pub pci: PciConfig,
pub base: usize,
pub irq: u8,
}
impl KScheme for Xhci {
fn on_irq(&mut self, irq: u8) {
if irq == self.irq {
debug!("XHCI handle\n");
}
}
}
impl Xhci {
pub unsafe fn new(mut pci: PciConfig) -> Box<Xhci> {
let mut module = box Xhci {
pci: pci,
base: pci.read(0x10) as usize & 0xFFFFFFF0,
irq: pci.read(0x3C) as u8 & 0xF,
};
module.init();
module
}
pub unsafe fn init(&mut self) {
debugln!(" + XHCI on: {:X}, IRQ: {:X}", self.base, self.irq);
/*
self.pci.flag(4, 4, true); // Bus mastering
let cap_base = self.base;
let op_base = cap_base + *(cap_base as *mut u8) as usize;
let db_base = cap_base + *((cap_base + 0x14) as *mut u32) as usize;
let rt_base = cap_base + *((cap_base + 0x18) as *mut u32) as usize;
d("CAP_BASE: ");
dh(cap_base);
d(" OP_BASE: ");
dh(op_base);
d(" DB_BASE: ");
dh(db_base);
d(" RT_BASE: ");
dh(rt_base);
dl();
//Set FLADJ Frame Length Adjustment (optional?)
//Set I/O memory maps (optional?)
//Wait until the Controller Not Ready flag in USBSTS is 0
let usbsts = (op_base + 0x04) as *mut u32;
while *usbsts & (1 << 11) == (1 << 11) {
d("Controller Not Ready\n");
}
d("Controller Ready ");
dh(*usbsts as usize);
dl();
//Set Run/Stop to 0
let usbcmd = op_base as *mut u32;
*usbcmd = *usbcmd & 0xFFFFFFFE;
while *usbsts & 1 == 0 {
d("Command Not Ready\n");
}
d("Command Ready ");
dh(*usbcmd as usize);
dl();
//Program the Max Device Slots Enabled in the CONFIG register
let hcsparams1 = (cap_base + 0x04) as *const u32;
let max_slots = *hcsparams1 & 0xFF;
let max_ports = (*hcsparams1 >> 24) & 0xFF;
d("Max Slots ");
dd(max_slots as usize);
dl();
d("Max Ports ");
dd(max_ports as usize);
dl();
let config = (op_base + 0x38) as *mut u32;
*config = max_slots;
d("Slots Enabled ");
dd(*config as usize);
dl();
//Program the Device Context Base Address Array Pointer with a pointer to the Device Context Base Address Array
let device_context_base_address_array = alloc(max_slots as usize * size_of::<u64>()) as *mut u64;
for slot in 0..max_slots as isize {
*device_context_base_address_array.offset(slot) = alloc(2048) as u64;
}
let dcbaap = (op_base + 0x30) as *mut u64;
*dcbaap = device_context_base_address_array as u64;
d("Set Device Context Base Address Array ");
dh(*dcbaap as usize);
dl();
//Define the Command Ring Dequeue Pointer by programming the Command ring Control register with a pointer to the first Trb
let command_ring_length = 256;
let mut command_ring_offset = 0;
let command_ring = alloc(command_ring_length * size_of::<Trb>()) as *mut Trb;
for i in 0..command_ring_length {
*command_ring.offset(i as isize) = Trb::new();
d("."); //Timing issue?
}
dl();
let crcr = (op_base + 0x18) as *mut u64;
*crcr = command_ring as u64;
d("Set Command Ring Dequeue Pointer ");
dh(*crcr as usize);
dl();
//Define the Event Ring for interrupter 0
let event_ring_segments = 1;
let event_ring_segment_table = alloc(event_ring_segments * size_of::<Ste>()) as *mut Ste;
let mut event_ring_dequeue = 0;
for segment in 0..event_ring_segments {
let ste = &mut *event_ring_segment_table.offset(segment as isize);
ste.length = 256;
ste.ptr = alloc(ste.length as usize * size_of::<Trb>()) as u64;
for i in 0..ste.length as isize {
*(ste.ptr as *mut Trb).offset(i) = Trb::new();
dd(i as usize);
d(" ");
}
dl();
if segment == 0 {
event_ring_dequeue = ste.ptr;
}
}
let erstsz = (rt_base + 0x28) as *mut u32;
*erstsz = event_ring_segments as u32;
let erdp = (rt_base + 0x38) as *mut u64;
*erdp = event_ring_dequeue;
let erstba = (rt_base + 0x30) as *mut u64;
*erstba = event_ring_segment_table as u64;
d("Set Event Ring Segment Table ");
dh(*erstba as usize);
d(" ");
dd(*erstsz as usize);
d(" ");
dh(*erdp as usize);
dl();
//Write the USBCMD to turn on the host controller by setting Run/Stop to 1
let usbcmd = op_base as *mut u32;
*usbcmd = *usbcmd | 1;
while *usbsts & 1!= 0 {
d("Not Running\n");
}
d("Running ");
dh(*usbcmd as usize);
dl();
for i in 0..max_ports as usize {
let portsc = (op_base + 0x400 + (0x10 * i)) as *mut u32;
d("Port ");
dd(i + 1);
d(" is ");
dh(*portsc as usize);
dl();
if *portsc & 1 == 1 {
d("Connected\n");
}
if *portsc & 2 == 2 {
d("Enabled\n");
}
if *portsc & 3 == 3 {
d("Enabling slot\n");
*command_ring.offset(command_ring_offset as isize) = Trb::from_type(23);
command_ring_offset += 1;
if command_ring_offset >= command_ring_length {
command_ring_offset = 0;
}
*command_ring.offset(command_ring_offset as isize) = Trb::from_type(9);
command_ring_offset += 1;
if command_ring_offset >= command_ring_length {
command_ring_offset = 0;
}
d("Write Doorbell\n");
let doorbell = db_base as *mut u32;
*doorbell = 0;
}
}
*/
}
}
|
{
Trb {
data: 0,
status: 0,
control: (trb_type & 0x3F) << 10,
}
}
|
identifier_body
|
xhci.rs
|
use alloc::boxed::Box;
//use arch::memory::*;
use drivers::pci::config::PciConfig;
//use core::mem::size_of;
use fs::KScheme;
#[repr(packed)]
struct Ste {
pub ptr: u64,
pub length: u64,
}
#[repr(packed)]
struct Trb {
pub data: u64,
pub status: u32,
pub control: u32,
}
impl Trb {
pub fn new() -> Self {
Trb {
data: 0,
status: 0,
control: 0,
}
}
pub fn from_type(trb_type: u32) -> Self {
Trb {
data: 0,
status: 0,
control: (trb_type & 0x3F) << 10,
}
}
}
pub struct Xhci {
pub pci: PciConfig,
pub base: usize,
pub irq: u8,
}
impl KScheme for Xhci {
fn on_irq(&mut self, irq: u8) {
if irq == self.irq {
debug!("XHCI handle\n");
}
}
}
impl Xhci {
pub unsafe fn new(mut pci: PciConfig) -> Box<Xhci> {
let mut module = box Xhci {
pci: pci,
base: pci.read(0x10) as usize & 0xFFFFFFF0,
irq: pci.read(0x3C) as u8 & 0xF,
};
module.init();
module
}
pub unsafe fn init(&mut self) {
debugln!(" + XHCI on: {:X}, IRQ: {:X}", self.base, self.irq);
/*
self.pci.flag(4, 4, true); // Bus mastering
let cap_base = self.base;
let op_base = cap_base + *(cap_base as *mut u8) as usize;
let db_base = cap_base + *((cap_base + 0x14) as *mut u32) as usize;
let rt_base = cap_base + *((cap_base + 0x18) as *mut u32) as usize;
d("CAP_BASE: ");
dh(cap_base);
d(" OP_BASE: ");
dh(op_base);
d(" DB_BASE: ");
dh(db_base);
d(" RT_BASE: ");
dh(rt_base);
dl();
//Set FLADJ Frame Length Adjustment (optional?)
//Set I/O memory maps (optional?)
//Wait until the Controller Not Ready flag in USBSTS is 0
let usbsts = (op_base + 0x04) as *mut u32;
while *usbsts & (1 << 11) == (1 << 11) {
d("Controller Not Ready\n");
}
d("Controller Ready ");
dh(*usbsts as usize);
dl();
//Set Run/Stop to 0
let usbcmd = op_base as *mut u32;
*usbcmd = *usbcmd & 0xFFFFFFFE;
|
d("Command Ready ");
dh(*usbcmd as usize);
dl();
//Program the Max Device Slots Enabled in the CONFIG register
let hcsparams1 = (cap_base + 0x04) as *const u32;
let max_slots = *hcsparams1 & 0xFF;
let max_ports = (*hcsparams1 >> 24) & 0xFF;
d("Max Slots ");
dd(max_slots as usize);
dl();
d("Max Ports ");
dd(max_ports as usize);
dl();
let config = (op_base + 0x38) as *mut u32;
*config = max_slots;
d("Slots Enabled ");
dd(*config as usize);
dl();
//Program the Device Context Base Address Array Pointer with a pointer to the Device Context Base Address Array
let device_context_base_address_array = alloc(max_slots as usize * size_of::<u64>()) as *mut u64;
for slot in 0..max_slots as isize {
*device_context_base_address_array.offset(slot) = alloc(2048) as u64;
}
let dcbaap = (op_base + 0x30) as *mut u64;
*dcbaap = device_context_base_address_array as u64;
d("Set Device Context Base Address Array ");
dh(*dcbaap as usize);
dl();
//Define the Command Ring Dequeue Pointer by programming the Command ring Control register with a pointer to the first Trb
let command_ring_length = 256;
let mut command_ring_offset = 0;
let command_ring = alloc(command_ring_length * size_of::<Trb>()) as *mut Trb;
for i in 0..command_ring_length {
*command_ring.offset(i as isize) = Trb::new();
d("."); //Timing issue?
}
dl();
let crcr = (op_base + 0x18) as *mut u64;
*crcr = command_ring as u64;
d("Set Command Ring Dequeue Pointer ");
dh(*crcr as usize);
dl();
//Define the Event Ring for interrupter 0
let event_ring_segments = 1;
let event_ring_segment_table = alloc(event_ring_segments * size_of::<Ste>()) as *mut Ste;
let mut event_ring_dequeue = 0;
for segment in 0..event_ring_segments {
let ste = &mut *event_ring_segment_table.offset(segment as isize);
ste.length = 256;
ste.ptr = alloc(ste.length as usize * size_of::<Trb>()) as u64;
for i in 0..ste.length as isize {
*(ste.ptr as *mut Trb).offset(i) = Trb::new();
dd(i as usize);
d(" ");
}
dl();
if segment == 0 {
event_ring_dequeue = ste.ptr;
}
}
let erstsz = (rt_base + 0x28) as *mut u32;
*erstsz = event_ring_segments as u32;
let erdp = (rt_base + 0x38) as *mut u64;
*erdp = event_ring_dequeue;
let erstba = (rt_base + 0x30) as *mut u64;
*erstba = event_ring_segment_table as u64;
d("Set Event Ring Segment Table ");
dh(*erstba as usize);
d(" ");
dd(*erstsz as usize);
d(" ");
dh(*erdp as usize);
dl();
//Write the USBCMD to turn on the host controller by setting Run/Stop to 1
let usbcmd = op_base as *mut u32;
*usbcmd = *usbcmd | 1;
while *usbsts & 1!= 0 {
d("Not Running\n");
}
d("Running ");
dh(*usbcmd as usize);
dl();
for i in 0..max_ports as usize {
let portsc = (op_base + 0x400 + (0x10 * i)) as *mut u32;
d("Port ");
dd(i + 1);
d(" is ");
dh(*portsc as usize);
dl();
if *portsc & 1 == 1 {
d("Connected\n");
}
if *portsc & 2 == 2 {
d("Enabled\n");
}
if *portsc & 3 == 3 {
d("Enabling slot\n");
*command_ring.offset(command_ring_offset as isize) = Trb::from_type(23);
command_ring_offset += 1;
if command_ring_offset >= command_ring_length {
command_ring_offset = 0;
}
*command_ring.offset(command_ring_offset as isize) = Trb::from_type(9);
command_ring_offset += 1;
if command_ring_offset >= command_ring_length {
command_ring_offset = 0;
}
d("Write Doorbell\n");
let doorbell = db_base as *mut u32;
*doorbell = 0;
}
}
*/
}
}
|
while *usbsts & 1 == 0 {
d("Command Not Ready\n");
}
|
random_line_split
|
xhci.rs
|
use alloc::boxed::Box;
//use arch::memory::*;
use drivers::pci::config::PciConfig;
//use core::mem::size_of;
use fs::KScheme;
#[repr(packed)]
struct Ste {
pub ptr: u64,
pub length: u64,
}
#[repr(packed)]
struct Trb {
pub data: u64,
pub status: u32,
pub control: u32,
}
impl Trb {
pub fn new() -> Self {
Trb {
data: 0,
status: 0,
control: 0,
}
}
pub fn from_type(trb_type: u32) -> Self {
Trb {
data: 0,
status: 0,
control: (trb_type & 0x3F) << 10,
}
}
}
pub struct Xhci {
pub pci: PciConfig,
pub base: usize,
pub irq: u8,
}
impl KScheme for Xhci {
fn on_irq(&mut self, irq: u8) {
if irq == self.irq
|
}
}
impl Xhci {
pub unsafe fn new(mut pci: PciConfig) -> Box<Xhci> {
let mut module = box Xhci {
pci: pci,
base: pci.read(0x10) as usize & 0xFFFFFFF0,
irq: pci.read(0x3C) as u8 & 0xF,
};
module.init();
module
}
pub unsafe fn init(&mut self) {
debugln!(" + XHCI on: {:X}, IRQ: {:X}", self.base, self.irq);
/*
self.pci.flag(4, 4, true); // Bus mastering
let cap_base = self.base;
let op_base = cap_base + *(cap_base as *mut u8) as usize;
let db_base = cap_base + *((cap_base + 0x14) as *mut u32) as usize;
let rt_base = cap_base + *((cap_base + 0x18) as *mut u32) as usize;
d("CAP_BASE: ");
dh(cap_base);
d(" OP_BASE: ");
dh(op_base);
d(" DB_BASE: ");
dh(db_base);
d(" RT_BASE: ");
dh(rt_base);
dl();
//Set FLADJ Frame Length Adjustment (optional?)
//Set I/O memory maps (optional?)
//Wait until the Controller Not Ready flag in USBSTS is 0
let usbsts = (op_base + 0x04) as *mut u32;
while *usbsts & (1 << 11) == (1 << 11) {
d("Controller Not Ready\n");
}
d("Controller Ready ");
dh(*usbsts as usize);
dl();
//Set Run/Stop to 0
let usbcmd = op_base as *mut u32;
*usbcmd = *usbcmd & 0xFFFFFFFE;
while *usbsts & 1 == 0 {
d("Command Not Ready\n");
}
d("Command Ready ");
dh(*usbcmd as usize);
dl();
//Program the Max Device Slots Enabled in the CONFIG register
let hcsparams1 = (cap_base + 0x04) as *const u32;
let max_slots = *hcsparams1 & 0xFF;
let max_ports = (*hcsparams1 >> 24) & 0xFF;
d("Max Slots ");
dd(max_slots as usize);
dl();
d("Max Ports ");
dd(max_ports as usize);
dl();
let config = (op_base + 0x38) as *mut u32;
*config = max_slots;
d("Slots Enabled ");
dd(*config as usize);
dl();
//Program the Device Context Base Address Array Pointer with a pointer to the Device Context Base Address Array
let device_context_base_address_array = alloc(max_slots as usize * size_of::<u64>()) as *mut u64;
for slot in 0..max_slots as isize {
*device_context_base_address_array.offset(slot) = alloc(2048) as u64;
}
let dcbaap = (op_base + 0x30) as *mut u64;
*dcbaap = device_context_base_address_array as u64;
d("Set Device Context Base Address Array ");
dh(*dcbaap as usize);
dl();
//Define the Command Ring Dequeue Pointer by programming the Command ring Control register with a pointer to the first Trb
let command_ring_length = 256;
let mut command_ring_offset = 0;
let command_ring = alloc(command_ring_length * size_of::<Trb>()) as *mut Trb;
for i in 0..command_ring_length {
*command_ring.offset(i as isize) = Trb::new();
d("."); //Timing issue?
}
dl();
let crcr = (op_base + 0x18) as *mut u64;
*crcr = command_ring as u64;
d("Set Command Ring Dequeue Pointer ");
dh(*crcr as usize);
dl();
//Define the Event Ring for interrupter 0
let event_ring_segments = 1;
let event_ring_segment_table = alloc(event_ring_segments * size_of::<Ste>()) as *mut Ste;
let mut event_ring_dequeue = 0;
for segment in 0..event_ring_segments {
let ste = &mut *event_ring_segment_table.offset(segment as isize);
ste.length = 256;
ste.ptr = alloc(ste.length as usize * size_of::<Trb>()) as u64;
for i in 0..ste.length as isize {
*(ste.ptr as *mut Trb).offset(i) = Trb::new();
dd(i as usize);
d(" ");
}
dl();
if segment == 0 {
event_ring_dequeue = ste.ptr;
}
}
let erstsz = (rt_base + 0x28) as *mut u32;
*erstsz = event_ring_segments as u32;
let erdp = (rt_base + 0x38) as *mut u64;
*erdp = event_ring_dequeue;
let erstba = (rt_base + 0x30) as *mut u64;
*erstba = event_ring_segment_table as u64;
d("Set Event Ring Segment Table ");
dh(*erstba as usize);
d(" ");
dd(*erstsz as usize);
d(" ");
dh(*erdp as usize);
dl();
//Write the USBCMD to turn on the host controller by setting Run/Stop to 1
let usbcmd = op_base as *mut u32;
*usbcmd = *usbcmd | 1;
while *usbsts & 1!= 0 {
d("Not Running\n");
}
d("Running ");
dh(*usbcmd as usize);
dl();
for i in 0..max_ports as usize {
let portsc = (op_base + 0x400 + (0x10 * i)) as *mut u32;
d("Port ");
dd(i + 1);
d(" is ");
dh(*portsc as usize);
dl();
if *portsc & 1 == 1 {
d("Connected\n");
}
if *portsc & 2 == 2 {
d("Enabled\n");
}
if *portsc & 3 == 3 {
d("Enabling slot\n");
*command_ring.offset(command_ring_offset as isize) = Trb::from_type(23);
command_ring_offset += 1;
if command_ring_offset >= command_ring_length {
command_ring_offset = 0;
}
*command_ring.offset(command_ring_offset as isize) = Trb::from_type(9);
command_ring_offset += 1;
if command_ring_offset >= command_ring_length {
command_ring_offset = 0;
}
d("Write Doorbell\n");
let doorbell = db_base as *mut u32;
*doorbell = 0;
}
}
*/
}
}
|
{
debug!("XHCI handle\n");
}
|
conditional_block
|
xhci.rs
|
use alloc::boxed::Box;
//use arch::memory::*;
use drivers::pci::config::PciConfig;
//use core::mem::size_of;
use fs::KScheme;
#[repr(packed)]
struct Ste {
pub ptr: u64,
pub length: u64,
}
#[repr(packed)]
struct Trb {
pub data: u64,
pub status: u32,
pub control: u32,
}
impl Trb {
pub fn new() -> Self {
Trb {
data: 0,
status: 0,
control: 0,
}
}
pub fn from_type(trb_type: u32) -> Self {
Trb {
data: 0,
status: 0,
control: (trb_type & 0x3F) << 10,
}
}
}
pub struct Xhci {
pub pci: PciConfig,
pub base: usize,
pub irq: u8,
}
impl KScheme for Xhci {
fn on_irq(&mut self, irq: u8) {
if irq == self.irq {
debug!("XHCI handle\n");
}
}
}
impl Xhci {
pub unsafe fn
|
(mut pci: PciConfig) -> Box<Xhci> {
let mut module = box Xhci {
pci: pci,
base: pci.read(0x10) as usize & 0xFFFFFFF0,
irq: pci.read(0x3C) as u8 & 0xF,
};
module.init();
module
}
pub unsafe fn init(&mut self) {
debugln!(" + XHCI on: {:X}, IRQ: {:X}", self.base, self.irq);
/*
self.pci.flag(4, 4, true); // Bus mastering
let cap_base = self.base;
let op_base = cap_base + *(cap_base as *mut u8) as usize;
let db_base = cap_base + *((cap_base + 0x14) as *mut u32) as usize;
let rt_base = cap_base + *((cap_base + 0x18) as *mut u32) as usize;
d("CAP_BASE: ");
dh(cap_base);
d(" OP_BASE: ");
dh(op_base);
d(" DB_BASE: ");
dh(db_base);
d(" RT_BASE: ");
dh(rt_base);
dl();
//Set FLADJ Frame Length Adjustment (optional?)
//Set I/O memory maps (optional?)
//Wait until the Controller Not Ready flag in USBSTS is 0
let usbsts = (op_base + 0x04) as *mut u32;
while *usbsts & (1 << 11) == (1 << 11) {
d("Controller Not Ready\n");
}
d("Controller Ready ");
dh(*usbsts as usize);
dl();
//Set Run/Stop to 0
let usbcmd = op_base as *mut u32;
*usbcmd = *usbcmd & 0xFFFFFFFE;
while *usbsts & 1 == 0 {
d("Command Not Ready\n");
}
d("Command Ready ");
dh(*usbcmd as usize);
dl();
//Program the Max Device Slots Enabled in the CONFIG register
let hcsparams1 = (cap_base + 0x04) as *const u32;
let max_slots = *hcsparams1 & 0xFF;
let max_ports = (*hcsparams1 >> 24) & 0xFF;
d("Max Slots ");
dd(max_slots as usize);
dl();
d("Max Ports ");
dd(max_ports as usize);
dl();
let config = (op_base + 0x38) as *mut u32;
*config = max_slots;
d("Slots Enabled ");
dd(*config as usize);
dl();
//Program the Device Context Base Address Array Pointer with a pointer to the Device Context Base Address Array
let device_context_base_address_array = alloc(max_slots as usize * size_of::<u64>()) as *mut u64;
for slot in 0..max_slots as isize {
*device_context_base_address_array.offset(slot) = alloc(2048) as u64;
}
let dcbaap = (op_base + 0x30) as *mut u64;
*dcbaap = device_context_base_address_array as u64;
d("Set Device Context Base Address Array ");
dh(*dcbaap as usize);
dl();
//Define the Command Ring Dequeue Pointer by programming the Command ring Control register with a pointer to the first Trb
let command_ring_length = 256;
let mut command_ring_offset = 0;
let command_ring = alloc(command_ring_length * size_of::<Trb>()) as *mut Trb;
for i in 0..command_ring_length {
*command_ring.offset(i as isize) = Trb::new();
d("."); //Timing issue?
}
dl();
let crcr = (op_base + 0x18) as *mut u64;
*crcr = command_ring as u64;
d("Set Command Ring Dequeue Pointer ");
dh(*crcr as usize);
dl();
//Define the Event Ring for interrupter 0
let event_ring_segments = 1;
let event_ring_segment_table = alloc(event_ring_segments * size_of::<Ste>()) as *mut Ste;
let mut event_ring_dequeue = 0;
for segment in 0..event_ring_segments {
let ste = &mut *event_ring_segment_table.offset(segment as isize);
ste.length = 256;
ste.ptr = alloc(ste.length as usize * size_of::<Trb>()) as u64;
for i in 0..ste.length as isize {
*(ste.ptr as *mut Trb).offset(i) = Trb::new();
dd(i as usize);
d(" ");
}
dl();
if segment == 0 {
event_ring_dequeue = ste.ptr;
}
}
let erstsz = (rt_base + 0x28) as *mut u32;
*erstsz = event_ring_segments as u32;
let erdp = (rt_base + 0x38) as *mut u64;
*erdp = event_ring_dequeue;
let erstba = (rt_base + 0x30) as *mut u64;
*erstba = event_ring_segment_table as u64;
d("Set Event Ring Segment Table ");
dh(*erstba as usize);
d(" ");
dd(*erstsz as usize);
d(" ");
dh(*erdp as usize);
dl();
//Write the USBCMD to turn on the host controller by setting Run/Stop to 1
let usbcmd = op_base as *mut u32;
*usbcmd = *usbcmd | 1;
while *usbsts & 1!= 0 {
d("Not Running\n");
}
d("Running ");
dh(*usbcmd as usize);
dl();
for i in 0..max_ports as usize {
let portsc = (op_base + 0x400 + (0x10 * i)) as *mut u32;
d("Port ");
dd(i + 1);
d(" is ");
dh(*portsc as usize);
dl();
if *portsc & 1 == 1 {
d("Connected\n");
}
if *portsc & 2 == 2 {
d("Enabled\n");
}
if *portsc & 3 == 3 {
d("Enabling slot\n");
*command_ring.offset(command_ring_offset as isize) = Trb::from_type(23);
command_ring_offset += 1;
if command_ring_offset >= command_ring_length {
command_ring_offset = 0;
}
*command_ring.offset(command_ring_offset as isize) = Trb::from_type(9);
command_ring_offset += 1;
if command_ring_offset >= command_ring_length {
command_ring_offset = 0;
}
d("Write Doorbell\n");
let doorbell = db_base as *mut u32;
*doorbell = 0;
}
}
*/
}
}
|
new
|
identifier_name
|
sketch.rs
|
extern crate clap;
use std::io::stdout;
use std::convert::AsRef;
use std::path::Path;
use std::fs::File;
use std::fmt::Display;
use io::bed::*;
use model::{Region,SimpleRegion};
use io::fasta::{FastaReader,IndexedFastaFile};
use sequence::aminoacid::*;
use sequence::dna::*;
use sketch;
use sketch::Canvas;
use tool::Tool;
use util;
pub struct Sketch {}
impl Tool for Sketch {
fn args<'a, 'b>(s: clap::App<'a, 'b>) -> clap::App<'a, 'b>
|
.long("out")
.visible_alias("svg")
.help("Write to this file instead of stdout")
.value_name("filename")
.takes_value(true),
)
.arg(
clap::Arg::with_name("image-width")
.long("image-width")
.help("Set the desired width of the output image")
.takes_value(true),
)
.arg(
clap::Arg::with_name("reference")
.short("f")
.long("fasta-reference")
.visible_alias("reference")
.help("Use this file to load the reference sequence from (must be a faidx-indexed FASTA file)")
.value_name("filename")
.takes_value(true)
.required(true)
)
.arg(
clap::Arg::with_name("tracks")
.help("Visualize these files")
.value_name("filename")
.takes_value(true)
.multiple(true)
.required(true)
)
}
fn run(args: &clap::ArgMatches) {
// Check for a given region
let (template, offset, length) = match util::parse_region_string(args.value_of("region").unwrap()) {
Ok(a) => a,
Err(e) => { error!("Can not parse region string '{}': {}", args.value_of("region").unwrap(), e); return },
};
let mut region = SimpleRegion::new(template, offset, length);
debug!("Start visualization of region: {}", region);
// Load the reference sequence
let reference_filename = match args.value_of("reference") {
None => { error!("Did not found reference file parameter"); return; },
Some(s) => s
};
let reference = match Self::load_reference_sequence(&reference_filename, ®ion) {
Err(e) => { error!("{}", e); return }
Ok(r) => r
};
if reference.length() < region.length() {
region = SimpleRegion::new(region.template(), region.offset(), reference.length())
}
//moz-extension://e94c6c94-63b8-4a7f-aadf-380f4931c605/main-blocked.html?details=eyJ1cmwiOiJodHRwOi8vYml0LmRvLzNEQmlvbG9neSIsImhuIjoiYml0LmRvIiwid2h5IjoiPyJ9 Create the drawing
let mut drawing = sketch::Sketch::new(sketch::canvas::Svg::new(region.clone()));
// Parse output image information
drawing = match args.value_of("image-width") {
Some(s) => {
match f64::from_str(s) {
Ok(w) => drawing.with_canvas_width(w),
Err(e) => { error!("Can not parse --image-width parameter '{}': {}", s, e); return },
}
}
None => drawing.with_canvas_width(reference.length() as f64 * 15f64)
};
// Write given title or use the region to display
match args.value_of("title") {
Some(s) => {
drawing.append_title(s);
}
None => drawing.append_title(format!("{}: {} - {} ({} bp)",
region.template(),
region.offset() + 1usize,
region.end(),
region.length()))
}
drawing.append_section(&reference_filename);
drawing.append_dna_sequence(reference);
match args.values_of("tracks") {
None => {}
Some(values) => {
for filename in values {
debug!("Processing track: {}", filename);
drawing.append_section(filename);
drawing = Self::draw_from_file(drawing, ®ion, &filename);
}
}
}
match args.value_of("outfile") {
Some(p) => {
match File::create(p) {
Ok(f) => {
debug!("Writing to output file: {}", p);
drawing.write(f);
}
Err(e) => { error!("Can not open '{}' for writing: {}", p, e); }
}
}
None => { drawing.write(stdout()); }
}
}
}
impl Sketch {
fn load_reference_sequence<P: AsRef<Path> + Display, R: Region>(filename: &P, region: &R) -> Result<DnaSequence,String> {
let mut fasta = match IndexedFastaFile::open(filename) {
Ok(f) => f,
Err(e) => return Err(format!("{}", e))
};
let seq = match fasta.search_region_as_dna(region.template(), region.offset(), region.length()) {
Some(s) => s,
None => return Err(format!("Can not find region '{}' in: {}", region.template(), filename))
};
return Ok(seq);
}
fn draw_from_file<P: AsRef<Path> + Display, C: sketch::Canvas, R: Region>(mut drawing: sketch::Sketch<C>, region: &R, filename: &P) -> sketch::Sketch<C> {
let fss = filename.to_string();
if fss.ends_with("bam") {
error!("BAM visualization not yet implemented: {}", fss);
}
else if fss.ends_with("bed") || fss.ends_with("bed.gz") {
match BedStream::open(fss.clone()) {
Ok(mut r) => drawing.append_bed_records(r.read_records_in_region(region)),
Err(e) => error!("Can not read BED records from '{}': {}", fss, e)
}
}
else if fss.ends_with("vcf") || fss.ends_with("vcf.gz") {
error!("VCF visualization not yet implemented: {}", fss);
}
else {
error!("Don't know how to visualize file: {}", fss);
}
drawing
}
}
|
{
s.about("Generate a sketch of the data in the given region")
.arg(
clap::Arg::with_name("region")
.short("r")
.long("region")
.help("Visualize the given region")
.takes_value(true)
.required(true)
)
.arg(
clap::Arg::with_name("title")
.short("t")
.long("title")
.help("Use the given title (defaults to visualized region)")
.takes_value(true),
)
.arg(
clap::Arg::with_name("outfile")
.short("o")
|
identifier_body
|
sketch.rs
|
extern crate clap;
use std::io::stdout;
use std::convert::AsRef;
use std::path::Path;
use std::fs::File;
use std::fmt::Display;
use io::bed::*;
use model::{Region,SimpleRegion};
use io::fasta::{FastaReader,IndexedFastaFile};
use sequence::aminoacid::*;
use sequence::dna::*;
use sketch;
use sketch::Canvas;
use tool::Tool;
use util;
pub struct Sketch {}
impl Tool for Sketch {
fn args<'a, 'b>(s: clap::App<'a, 'b>) -> clap::App<'a, 'b> {
s.about("Generate a sketch of the data in the given region")
.arg(
clap::Arg::with_name("region")
.short("r")
.long("region")
.help("Visualize the given region")
.takes_value(true)
.required(true)
)
.arg(
clap::Arg::with_name("title")
.short("t")
.long("title")
.help("Use the given title (defaults to visualized region)")
.takes_value(true),
)
.arg(
clap::Arg::with_name("outfile")
.short("o")
.long("out")
.visible_alias("svg")
.help("Write to this file instead of stdout")
.value_name("filename")
.takes_value(true),
)
.arg(
clap::Arg::with_name("image-width")
.long("image-width")
.help("Set the desired width of the output image")
.takes_value(true),
)
.arg(
clap::Arg::with_name("reference")
.short("f")
.long("fasta-reference")
.visible_alias("reference")
.help("Use this file to load the reference sequence from (must be a faidx-indexed FASTA file)")
.value_name("filename")
.takes_value(true)
.required(true)
)
.arg(
clap::Arg::with_name("tracks")
.help("Visualize these files")
.value_name("filename")
.takes_value(true)
.multiple(true)
.required(true)
)
}
fn run(args: &clap::ArgMatches) {
// Check for a given region
let (template, offset, length) = match util::parse_region_string(args.value_of("region").unwrap()) {
Ok(a) => a,
Err(e) => { error!("Can not parse region string '{}': {}", args.value_of("region").unwrap(), e); return },
};
let mut region = SimpleRegion::new(template, offset, length);
debug!("Start visualization of region: {}", region);
// Load the reference sequence
let reference_filename = match args.value_of("reference") {
None => { error!("Did not found reference file parameter"); return; },
Some(s) => s
};
let reference = match Self::load_reference_sequence(&reference_filename, ®ion) {
Err(e) => { error!("{}", e); return }
Ok(r) => r
};
if reference.length() < region.length() {
region = SimpleRegion::new(region.template(), region.offset(), reference.length())
}
//moz-extension://e94c6c94-63b8-4a7f-aadf-380f4931c605/main-blocked.html?details=eyJ1cmwiOiJodHRwOi8vYml0LmRvLzNEQmlvbG9neSIsImhuIjoiYml0LmRvIiwid2h5IjoiPyJ9 Create the drawing
let mut drawing = sketch::Sketch::new(sketch::canvas::Svg::new(region.clone()));
// Parse output image information
drawing = match args.value_of("image-width") {
Some(s) => {
match f64::from_str(s) {
Ok(w) => drawing.with_canvas_width(w),
Err(e) => { error!("Can not parse --image-width parameter '{}': {}", s, e); return },
}
}
None => drawing.with_canvas_width(reference.length() as f64 * 15f64)
};
// Write given title or use the region to display
match args.value_of("title") {
Some(s) => {
drawing.append_title(s);
}
None => drawing.append_title(format!("{}: {} - {} ({} bp)",
region.template(),
region.offset() + 1usize,
region.end(),
region.length()))
}
drawing.append_section(&reference_filename);
drawing.append_dna_sequence(reference);
match args.values_of("tracks") {
None => {}
Some(values) => {
for filename in values {
debug!("Processing track: {}", filename);
drawing.append_section(filename);
drawing = Self::draw_from_file(drawing, ®ion, &filename);
}
}
}
match args.value_of("outfile") {
Some(p) => {
match File::create(p) {
Ok(f) => {
debug!("Writing to output file: {}", p);
drawing.write(f);
}
Err(e) => { error!("Can not open '{}' for writing: {}", p, e); }
}
}
None => { drawing.write(stdout()); }
}
}
}
impl Sketch {
fn
|
<P: AsRef<Path> + Display, R: Region>(filename: &P, region: &R) -> Result<DnaSequence,String> {
let mut fasta = match IndexedFastaFile::open(filename) {
Ok(f) => f,
Err(e) => return Err(format!("{}", e))
};
let seq = match fasta.search_region_as_dna(region.template(), region.offset(), region.length()) {
Some(s) => s,
None => return Err(format!("Can not find region '{}' in: {}", region.template(), filename))
};
return Ok(seq);
}
fn draw_from_file<P: AsRef<Path> + Display, C: sketch::Canvas, R: Region>(mut drawing: sketch::Sketch<C>, region: &R, filename: &P) -> sketch::Sketch<C> {
let fss = filename.to_string();
if fss.ends_with("bam") {
error!("BAM visualization not yet implemented: {}", fss);
}
else if fss.ends_with("bed") || fss.ends_with("bed.gz") {
match BedStream::open(fss.clone()) {
Ok(mut r) => drawing.append_bed_records(r.read_records_in_region(region)),
Err(e) => error!("Can not read BED records from '{}': {}", fss, e)
}
}
else if fss.ends_with("vcf") || fss.ends_with("vcf.gz") {
error!("VCF visualization not yet implemented: {}", fss);
}
else {
error!("Don't know how to visualize file: {}", fss);
}
drawing
}
}
|
load_reference_sequence
|
identifier_name
|
sketch.rs
|
extern crate clap;
use std::io::stdout;
use std::convert::AsRef;
use std::path::Path;
use std::fs::File;
use std::fmt::Display;
use io::bed::*;
use model::{Region,SimpleRegion};
use io::fasta::{FastaReader,IndexedFastaFile};
use sequence::aminoacid::*;
use sequence::dna::*;
use sketch;
use sketch::Canvas;
use tool::Tool;
use util;
pub struct Sketch {}
impl Tool for Sketch {
fn args<'a, 'b>(s: clap::App<'a, 'b>) -> clap::App<'a, 'b> {
s.about("Generate a sketch of the data in the given region")
.arg(
clap::Arg::with_name("region")
.short("r")
.long("region")
.help("Visualize the given region")
.takes_value(true)
.required(true)
)
.arg(
clap::Arg::with_name("title")
.short("t")
.long("title")
.help("Use the given title (defaults to visualized region)")
.takes_value(true),
)
.arg(
clap::Arg::with_name("outfile")
.short("o")
.long("out")
.visible_alias("svg")
.help("Write to this file instead of stdout")
.value_name("filename")
.takes_value(true),
)
.arg(
clap::Arg::with_name("image-width")
.long("image-width")
.help("Set the desired width of the output image")
.takes_value(true),
)
.arg(
clap::Arg::with_name("reference")
.short("f")
.long("fasta-reference")
.visible_alias("reference")
.help("Use this file to load the reference sequence from (must be a faidx-indexed FASTA file)")
.value_name("filename")
.takes_value(true)
.required(true)
)
.arg(
clap::Arg::with_name("tracks")
.help("Visualize these files")
.value_name("filename")
.takes_value(true)
.multiple(true)
.required(true)
)
}
fn run(args: &clap::ArgMatches) {
// Check for a given region
let (template, offset, length) = match util::parse_region_string(args.value_of("region").unwrap()) {
Ok(a) => a,
Err(e) => { error!("Can not parse region string '{}': {}", args.value_of("region").unwrap(), e); return },
};
let mut region = SimpleRegion::new(template, offset, length);
debug!("Start visualization of region: {}", region);
// Load the reference sequence
let reference_filename = match args.value_of("reference") {
None => { error!("Did not found reference file parameter"); return; },
Some(s) => s
};
let reference = match Self::load_reference_sequence(&reference_filename, ®ion) {
Err(e) => { error!("{}", e); return }
Ok(r) => r
};
if reference.length() < region.length() {
region = SimpleRegion::new(region.template(), region.offset(), reference.length())
}
//moz-extension://e94c6c94-63b8-4a7f-aadf-380f4931c605/main-blocked.html?details=eyJ1cmwiOiJodHRwOi8vYml0LmRvLzNEQmlvbG9neSIsImhuIjoiYml0LmRvIiwid2h5IjoiPyJ9 Create the drawing
let mut drawing = sketch::Sketch::new(sketch::canvas::Svg::new(region.clone()));
// Parse output image information
drawing = match args.value_of("image-width") {
Some(s) => {
match f64::from_str(s) {
Ok(w) => drawing.with_canvas_width(w),
Err(e) => { error!("Can not parse --image-width parameter '{}': {}", s, e); return },
}
}
None => drawing.with_canvas_width(reference.length() as f64 * 15f64)
};
// Write given title or use the region to display
match args.value_of("title") {
Some(s) => {
drawing.append_title(s);
}
None => drawing.append_title(format!("{}: {} - {} ({} bp)",
region.template(),
region.offset() + 1usize,
region.end(),
region.length()))
}
drawing.append_section(&reference_filename);
drawing.append_dna_sequence(reference);
|
for filename in values {
debug!("Processing track: {}", filename);
drawing.append_section(filename);
drawing = Self::draw_from_file(drawing, ®ion, &filename);
}
}
}
match args.value_of("outfile") {
Some(p) => {
match File::create(p) {
Ok(f) => {
debug!("Writing to output file: {}", p);
drawing.write(f);
}
Err(e) => { error!("Can not open '{}' for writing: {}", p, e); }
}
}
None => { drawing.write(stdout()); }
}
}
}
impl Sketch {
fn load_reference_sequence<P: AsRef<Path> + Display, R: Region>(filename: &P, region: &R) -> Result<DnaSequence,String> {
let mut fasta = match IndexedFastaFile::open(filename) {
Ok(f) => f,
Err(e) => return Err(format!("{}", e))
};
let seq = match fasta.search_region_as_dna(region.template(), region.offset(), region.length()) {
Some(s) => s,
None => return Err(format!("Can not find region '{}' in: {}", region.template(), filename))
};
return Ok(seq);
}
fn draw_from_file<P: AsRef<Path> + Display, C: sketch::Canvas, R: Region>(mut drawing: sketch::Sketch<C>, region: &R, filename: &P) -> sketch::Sketch<C> {
let fss = filename.to_string();
if fss.ends_with("bam") {
error!("BAM visualization not yet implemented: {}", fss);
}
else if fss.ends_with("bed") || fss.ends_with("bed.gz") {
match BedStream::open(fss.clone()) {
Ok(mut r) => drawing.append_bed_records(r.read_records_in_region(region)),
Err(e) => error!("Can not read BED records from '{}': {}", fss, e)
}
}
else if fss.ends_with("vcf") || fss.ends_with("vcf.gz") {
error!("VCF visualization not yet implemented: {}", fss);
}
else {
error!("Don't know how to visualize file: {}", fss);
}
drawing
}
}
|
match args.values_of("tracks") {
None => {}
Some(values) => {
|
random_line_split
|
sketch.rs
|
extern crate clap;
use std::io::stdout;
use std::convert::AsRef;
use std::path::Path;
use std::fs::File;
use std::fmt::Display;
use io::bed::*;
use model::{Region,SimpleRegion};
use io::fasta::{FastaReader,IndexedFastaFile};
use sequence::aminoacid::*;
use sequence::dna::*;
use sketch;
use sketch::Canvas;
use tool::Tool;
use util;
pub struct Sketch {}
impl Tool for Sketch {
fn args<'a, 'b>(s: clap::App<'a, 'b>) -> clap::App<'a, 'b> {
s.about("Generate a sketch of the data in the given region")
.arg(
clap::Arg::with_name("region")
.short("r")
.long("region")
.help("Visualize the given region")
.takes_value(true)
.required(true)
)
.arg(
clap::Arg::with_name("title")
.short("t")
.long("title")
.help("Use the given title (defaults to visualized region)")
.takes_value(true),
)
.arg(
clap::Arg::with_name("outfile")
.short("o")
.long("out")
.visible_alias("svg")
.help("Write to this file instead of stdout")
.value_name("filename")
.takes_value(true),
)
.arg(
clap::Arg::with_name("image-width")
.long("image-width")
.help("Set the desired width of the output image")
.takes_value(true),
)
.arg(
clap::Arg::with_name("reference")
.short("f")
.long("fasta-reference")
.visible_alias("reference")
.help("Use this file to load the reference sequence from (must be a faidx-indexed FASTA file)")
.value_name("filename")
.takes_value(true)
.required(true)
)
.arg(
clap::Arg::with_name("tracks")
.help("Visualize these files")
.value_name("filename")
.takes_value(true)
.multiple(true)
.required(true)
)
}
fn run(args: &clap::ArgMatches) {
// Check for a given region
let (template, offset, length) = match util::parse_region_string(args.value_of("region").unwrap()) {
Ok(a) => a,
Err(e) => { error!("Can not parse region string '{}': {}", args.value_of("region").unwrap(), e); return },
};
let mut region = SimpleRegion::new(template, offset, length);
debug!("Start visualization of region: {}", region);
// Load the reference sequence
let reference_filename = match args.value_of("reference") {
None => { error!("Did not found reference file parameter"); return; },
Some(s) => s
};
let reference = match Self::load_reference_sequence(&reference_filename, ®ion) {
Err(e) => { error!("{}", e); return }
Ok(r) => r
};
if reference.length() < region.length() {
region = SimpleRegion::new(region.template(), region.offset(), reference.length())
}
//moz-extension://e94c6c94-63b8-4a7f-aadf-380f4931c605/main-blocked.html?details=eyJ1cmwiOiJodHRwOi8vYml0LmRvLzNEQmlvbG9neSIsImhuIjoiYml0LmRvIiwid2h5IjoiPyJ9 Create the drawing
let mut drawing = sketch::Sketch::new(sketch::canvas::Svg::new(region.clone()));
// Parse output image information
drawing = match args.value_of("image-width") {
Some(s) => {
match f64::from_str(s) {
Ok(w) => drawing.with_canvas_width(w),
Err(e) => { error!("Can not parse --image-width parameter '{}': {}", s, e); return },
}
}
None => drawing.with_canvas_width(reference.length() as f64 * 15f64)
};
// Write given title or use the region to display
match args.value_of("title") {
Some(s) => {
drawing.append_title(s);
}
None => drawing.append_title(format!("{}: {} - {} ({} bp)",
region.template(),
region.offset() + 1usize,
region.end(),
region.length()))
}
drawing.append_section(&reference_filename);
drawing.append_dna_sequence(reference);
match args.values_of("tracks") {
None => {}
Some(values) => {
for filename in values {
debug!("Processing track: {}", filename);
drawing.append_section(filename);
drawing = Self::draw_from_file(drawing, ®ion, &filename);
}
}
}
match args.value_of("outfile") {
Some(p) => {
match File::create(p) {
Ok(f) =>
|
Err(e) => { error!("Can not open '{}' for writing: {}", p, e); }
}
}
None => { drawing.write(stdout()); }
}
}
}
impl Sketch {
fn load_reference_sequence<P: AsRef<Path> + Display, R: Region>(filename: &P, region: &R) -> Result<DnaSequence,String> {
let mut fasta = match IndexedFastaFile::open(filename) {
Ok(f) => f,
Err(e) => return Err(format!("{}", e))
};
let seq = match fasta.search_region_as_dna(region.template(), region.offset(), region.length()) {
Some(s) => s,
None => return Err(format!("Can not find region '{}' in: {}", region.template(), filename))
};
return Ok(seq);
}
fn draw_from_file<P: AsRef<Path> + Display, C: sketch::Canvas, R: Region>(mut drawing: sketch::Sketch<C>, region: &R, filename: &P) -> sketch::Sketch<C> {
let fss = filename.to_string();
if fss.ends_with("bam") {
error!("BAM visualization not yet implemented: {}", fss);
}
else if fss.ends_with("bed") || fss.ends_with("bed.gz") {
match BedStream::open(fss.clone()) {
Ok(mut r) => drawing.append_bed_records(r.read_records_in_region(region)),
Err(e) => error!("Can not read BED records from '{}': {}", fss, e)
}
}
else if fss.ends_with("vcf") || fss.ends_with("vcf.gz") {
error!("VCF visualization not yet implemented: {}", fss);
}
else {
error!("Don't know how to visualize file: {}", fss);
}
drawing
}
}
|
{
debug!("Writing to output file: {}", p);
drawing.write(f);
}
|
conditional_block
|
cycle-projection-based-on-where-clause.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Example cycle where a bound on `T` uses a shorthand for `T`. This
// creates a cycle because we have to know the bounds on `T` to figure
// out what trait defines `Item`, but we can't know the bounds on `T`
// without knowing how to handle `T::Item`.
//
// Note that in the future cases like this could perhaps become legal,
// if we got more fine-grained about our cycle detection or changed
// how we handle `T::Item` resolution.
use std::ops::Add;
// Preamble.
trait Trait { type Item; }
struct A<T>
where T : Trait,
T : Add<T::Item>
//~^ ERROR unsupported cyclic reference between types/traits detected
{
data: T
}
fn main()
|
{
}
|
identifier_body
|
|
cycle-projection-based-on-where-clause.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Example cycle where a bound on `T` uses a shorthand for `T`. This
// creates a cycle because we have to know the bounds on `T` to figure
// out what trait defines `Item`, but we can't know the bounds on `T`
// without knowing how to handle `T::Item`.
//
// Note that in the future cases like this could perhaps become legal,
// if we got more fine-grained about our cycle detection or changed
// how we handle `T::Item` resolution.
use std::ops::Add;
// Preamble.
trait Trait { type Item; }
struct
|
<T>
where T : Trait,
T : Add<T::Item>
//~^ ERROR unsupported cyclic reference between types/traits detected
{
data: T
}
fn main() {
}
|
A
|
identifier_name
|
cycle-projection-based-on-where-clause.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Example cycle where a bound on `T` uses a shorthand for `T`. This
// creates a cycle because we have to know the bounds on `T` to figure
// out what trait defines `Item`, but we can't know the bounds on `T`
// without knowing how to handle `T::Item`.
|
// how we handle `T::Item` resolution.
use std::ops::Add;
// Preamble.
trait Trait { type Item; }
struct A<T>
where T : Trait,
T : Add<T::Item>
//~^ ERROR unsupported cyclic reference between types/traits detected
{
data: T
}
fn main() {
}
|
//
// Note that in the future cases like this could perhaps become legal,
// if we got more fine-grained about our cycle detection or changed
|
random_line_split
|
mod.rs
|
//////////////////////////////////////////////////////////////////////////////
// File: rust-snek/snek/mod.rs
//////////////////////////////////////////////////////////////////////////////
// Copyright 2016 Samuel Sleight
//
|
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//////////////////////////////////////////////////////////////////////////////
extern crate libc;
use ::{Error, Symbol};
use std::path::Path;
use libc::c_void;
#[cfg(unix)]
pub use self::unix::{load_library, load_symbol, drop_library};
#[cfg(windows)]
pub use self::windows::{load_library, load_symbol, drop_library};
mod unix;
mod windows;
/// This provides an interface for manually loading a dynamic library and
/// symbols from it. While this exists, it is more recommended to use the
/// [`snek!`](macro.snek!.html) macro to generate a wrapper for a library
/// automatically.
///
/// A `Snek` instance contains nothing but a handle to the loaded library,
/// and provides a single method for loading symbols from the library. When
/// the instance is dropped, it unloads the library, so the lifetime of
/// any loaded symbols is tied to the lifetime of the `Snek` instance.
///
/// For more information about using the loaded symbols see the
/// [`Symbol`](struct.Symbol.html) documentation.
///
/// # Example
/// ```
/// # extern crate libc;
/// # extern crate snek;
/// # use snek::Snek;
/// # use libc::c_int;
/// # fn main() {
/// match Snek::load("libexample.so") {
/// Ok(snek) => match snek.symbol("add") {
/// Ok(symbol) => println!("{}", unsafe { symbol.with(
/// |add: extern fn(c_int, c_int) -> c_int| add(3, 7)
/// ) }),
///
/// _ => ()
/// },
///
/// _ => ()
/// }
/// # }
/// ```
#[derive(Debug)]
pub struct Snek {
handle: *mut c_void
}
impl Snek {
/// Attempt to load a dynamic library from the given path, returning a `Snek`
/// instance wrapping the handle.
///
/// If the load fails, this will return [`Error::LibraryLoadError`](enum.Error.html)
pub fn load<P>(path: P) -> Result<Snek, Error> where P: AsRef<Path> {
load_library(path).map(|result| Snek { handle: result })
}
/// Attempt to load a symbol from the dynamic library, returning a
/// [`Symbol`](struct.Symbol.html) instance wrapping it.
///
/// If the load fails, this will return [`Error::SymbolLoadError`](enum.Error.html)
pub fn symbol<'a>(&'a self, symbol: &str) -> Result<Symbol<'a>, Error> {
load_symbol(self.handle, symbol).map(|result| Symbol::new(result))
}
}
impl Drop for Snek {
fn drop(&mut self) {
drop_library(self.handle)
}
}
|
random_line_split
|
|
mod.rs
|
//////////////////////////////////////////////////////////////////////////////
// File: rust-snek/snek/mod.rs
//////////////////////////////////////////////////////////////////////////////
// Copyright 2016 Samuel Sleight
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//////////////////////////////////////////////////////////////////////////////
extern crate libc;
use ::{Error, Symbol};
use std::path::Path;
use libc::c_void;
#[cfg(unix)]
pub use self::unix::{load_library, load_symbol, drop_library};
#[cfg(windows)]
pub use self::windows::{load_library, load_symbol, drop_library};
mod unix;
mod windows;
/// This provides an interface for manually loading a dynamic library and
/// symbols from it. While this exists, it is more recommended to use the
/// [`snek!`](macro.snek!.html) macro to generate a wrapper for a library
/// automatically.
///
/// A `Snek` instance contains nothing but a handle to the loaded library,
/// and provides a single method for loading symbols from the library. When
/// the instance is dropped, it unloads the library, so the lifetime of
/// any loaded symbols is tied to the lifetime of the `Snek` instance.
///
/// For more information about using the loaded symbols see the
/// [`Symbol`](struct.Symbol.html) documentation.
///
/// # Example
/// ```
/// # extern crate libc;
/// # extern crate snek;
/// # use snek::Snek;
/// # use libc::c_int;
/// # fn main() {
/// match Snek::load("libexample.so") {
/// Ok(snek) => match snek.symbol("add") {
/// Ok(symbol) => println!("{}", unsafe { symbol.with(
/// |add: extern fn(c_int, c_int) -> c_int| add(3, 7)
/// ) }),
///
/// _ => ()
/// },
///
/// _ => ()
/// }
/// # }
/// ```
#[derive(Debug)]
pub struct
|
{
handle: *mut c_void
}
impl Snek {
/// Attempt to load a dynamic library from the given path, returning a `Snek`
/// instance wrapping the handle.
///
/// If the load fails, this will return [`Error::LibraryLoadError`](enum.Error.html)
pub fn load<P>(path: P) -> Result<Snek, Error> where P: AsRef<Path> {
load_library(path).map(|result| Snek { handle: result })
}
/// Attempt to load a symbol from the dynamic library, returning a
/// [`Symbol`](struct.Symbol.html) instance wrapping it.
///
/// If the load fails, this will return [`Error::SymbolLoadError`](enum.Error.html)
pub fn symbol<'a>(&'a self, symbol: &str) -> Result<Symbol<'a>, Error> {
load_symbol(self.handle, symbol).map(|result| Symbol::new(result))
}
}
impl Drop for Snek {
fn drop(&mut self) {
drop_library(self.handle)
}
}
|
Snek
|
identifier_name
|
main.rs
|
extern crate image;
extern crate piston_window;
extern crate rand;
#[macro_use]
extern crate clap;
mod cpu;
mod display;
mod instruction;
mod keyboard;
use clap::App;
use cpu::Cpu;
use keyboard::{KeyMapping, Keyboard};
use piston_window::*;
use std::fs::File;
use std::io::Read;
use std::process;
use std::time::Instant;
const ENLARGEMENT_FACTOR: u32 = 8;
const WINDOW_WIDTH: u32 = 64;
const WINDOW_HEIGHT: u32 = 32;
const CLOCK_SPEED_HZ_HALF: u32 = 180;
const CLOCK_SPEED_HZ_DEFAULT: u32 = 360;
const CLOCK_SPEED_HZ_DOUBLE: u32 = 720;
enum
|
{
Half,
Normal,
Double,
}
struct Arguments {
rom: String,
step: bool,
debug: bool,
speed: EmulatorSpeed,
}
fn main() {
let arguments = match parse_args() {
Ok(args) => args,
Err(_) => {
println!("Usage: chip8 [rom] [--debug (optional)] [--step (optional)]");
process::exit(0);
}
};
let width = WINDOW_WIDTH * ENLARGEMENT_FACTOR;
let height = WINDOW_HEIGHT * ENLARGEMENT_FACTOR;
let mut window = create_window(width, height);
let mut file = File::open(arguments.rom).expect("Unable to open the ROM file.");
let mut game_data = Vec::new();
file.read_to_end(&mut game_data)
.expect("Unable to read the ROM file.");
let clock_speed = match arguments.speed {
EmulatorSpeed::Half => CLOCK_SPEED_HZ_HALF,
EmulatorSpeed::Normal => CLOCK_SPEED_HZ_DEFAULT,
EmulatorSpeed::Double => CLOCK_SPEED_HZ_DOUBLE,
};
let mut cpu = Cpu::new(game_data, clock_speed, arguments.debug);
let keyboard = Keyboard::new(KeyMapping::Improved);
let cycle_time_millis: u128 = (1000 / clock_speed).into();
let mut clock = Instant::now();
while let Some(e) = window.next() {
let mut step_forward = false;
if let Some(_) = e.render_args() {
if cpu.draw_flag {
draw_screen(&e, &cpu.get_screen(), &mut window);
cpu.draw_flag = false
}
}
if let Some(button) = e.press_args() {
if button == Button::Keyboard(Key::Space) && arguments.step {
step_forward = true;
}
if let Some(key_val) = keyboard.map_key(button) {
cpu.set_key(key_val, true);
}
}
if let Some(button) = e.release_args() {
if let Some(key_val) = keyboard.map_key(button) {
cpu.set_key(key_val, false);
}
}
// If debugging is enabled, only cycle on space bar presses
if arguments.step {
if step_forward {
cpu.cycle();
}
} else {
let elapsed = clock.elapsed().as_millis();
if elapsed >= cycle_time_millis {
cpu.cycle();
clock = Instant::now();
}
}
}
}
fn parse_args() -> Result<Arguments, String> {
let yaml = load_yaml!("../cli.yml");
let matches = App::from_yaml(yaml).get_matches();
let rom = String::from(matches.value_of("ROM").unwrap());
let debug = matches.is_present("debug");
let step = matches.is_present("step");
let speed_arg = matches.value_of("speed").unwrap_or("1");
let speed = match speed_arg {
"0.5" => EmulatorSpeed::Half,
"2" => EmulatorSpeed::Double,
_ => EmulatorSpeed::Normal,
};
let args = Arguments {
rom,
step,
debug,
speed,
};
return Ok(args);
}
fn create_window(width: u32, height: u32) -> PistonWindow {
let opengl = OpenGL::V3_2;
let mut window: PistonWindow = WindowSettings::new("chip8", (width, height))
.exit_on_esc(true)
.opengl(opengl)
.build()
.unwrap();
window.set_max_fps(60);
window
}
fn draw_screen(event: &Event, screen: &display::Screen, window: &mut PistonWindow) {
window.draw_2d(event, |context, graphics| {
piston_window::clear(color::BLACK, graphics);
for (i, row) in screen.iter().enumerate() {
for (j, val) in row.iter().enumerate() {
if *val {
let dimensions = [
(j * ENLARGEMENT_FACTOR as usize) as f64,
(i * ENLARGEMENT_FACTOR as usize) as f64,
ENLARGEMENT_FACTOR as f64,
ENLARGEMENT_FACTOR as f64,
];
Rectangle::new(color::WHITE).draw(
dimensions,
&context.draw_state,
context.transform,
graphics,
);
}
}
}
});
}
// TODO:
// 1) Fix display issues, it is currently not working at all
// a) This might (and probably should) involve rewriting the display logic
// 4) Keyboard scheme?
// 5) The display should really be separate from the cpu....not sure how to do this though
|
EmulatorSpeed
|
identifier_name
|
main.rs
|
extern crate image;
extern crate piston_window;
extern crate rand;
#[macro_use]
extern crate clap;
mod cpu;
mod display;
mod instruction;
mod keyboard;
use clap::App;
use cpu::Cpu;
use keyboard::{KeyMapping, Keyboard};
use piston_window::*;
use std::fs::File;
use std::io::Read;
use std::process;
use std::time::Instant;
const ENLARGEMENT_FACTOR: u32 = 8;
const WINDOW_WIDTH: u32 = 64;
const WINDOW_HEIGHT: u32 = 32;
const CLOCK_SPEED_HZ_HALF: u32 = 180;
const CLOCK_SPEED_HZ_DEFAULT: u32 = 360;
const CLOCK_SPEED_HZ_DOUBLE: u32 = 720;
enum EmulatorSpeed {
Half,
Normal,
Double,
}
struct Arguments {
rom: String,
step: bool,
debug: bool,
speed: EmulatorSpeed,
}
fn main()
|
let clock_speed = match arguments.speed {
EmulatorSpeed::Half => CLOCK_SPEED_HZ_HALF,
EmulatorSpeed::Normal => CLOCK_SPEED_HZ_DEFAULT,
EmulatorSpeed::Double => CLOCK_SPEED_HZ_DOUBLE,
};
let mut cpu = Cpu::new(game_data, clock_speed, arguments.debug);
let keyboard = Keyboard::new(KeyMapping::Improved);
let cycle_time_millis: u128 = (1000 / clock_speed).into();
let mut clock = Instant::now();
while let Some(e) = window.next() {
let mut step_forward = false;
if let Some(_) = e.render_args() {
if cpu.draw_flag {
draw_screen(&e, &cpu.get_screen(), &mut window);
cpu.draw_flag = false
}
}
if let Some(button) = e.press_args() {
if button == Button::Keyboard(Key::Space) && arguments.step {
step_forward = true;
}
if let Some(key_val) = keyboard.map_key(button) {
cpu.set_key(key_val, true);
}
}
if let Some(button) = e.release_args() {
if let Some(key_val) = keyboard.map_key(button) {
cpu.set_key(key_val, false);
}
}
// If debugging is enabled, only cycle on space bar presses
if arguments.step {
if step_forward {
cpu.cycle();
}
} else {
let elapsed = clock.elapsed().as_millis();
if elapsed >= cycle_time_millis {
cpu.cycle();
clock = Instant::now();
}
}
}
}
fn parse_args() -> Result<Arguments, String> {
let yaml = load_yaml!("../cli.yml");
let matches = App::from_yaml(yaml).get_matches();
let rom = String::from(matches.value_of("ROM").unwrap());
let debug = matches.is_present("debug");
let step = matches.is_present("step");
let speed_arg = matches.value_of("speed").unwrap_or("1");
let speed = match speed_arg {
"0.5" => EmulatorSpeed::Half,
"2" => EmulatorSpeed::Double,
_ => EmulatorSpeed::Normal,
};
let args = Arguments {
rom,
step,
debug,
speed,
};
return Ok(args);
}
fn create_window(width: u32, height: u32) -> PistonWindow {
let opengl = OpenGL::V3_2;
let mut window: PistonWindow = WindowSettings::new("chip8", (width, height))
.exit_on_esc(true)
.opengl(opengl)
.build()
.unwrap();
window.set_max_fps(60);
window
}
fn draw_screen(event: &Event, screen: &display::Screen, window: &mut PistonWindow) {
window.draw_2d(event, |context, graphics| {
piston_window::clear(color::BLACK, graphics);
for (i, row) in screen.iter().enumerate() {
for (j, val) in row.iter().enumerate() {
if *val {
let dimensions = [
(j * ENLARGEMENT_FACTOR as usize) as f64,
(i * ENLARGEMENT_FACTOR as usize) as f64,
ENLARGEMENT_FACTOR as f64,
ENLARGEMENT_FACTOR as f64,
];
Rectangle::new(color::WHITE).draw(
dimensions,
&context.draw_state,
context.transform,
graphics,
);
}
}
}
});
}
// TODO:
// 1) Fix display issues, it is currently not working at all
// a) This might (and probably should) involve rewriting the display logic
// 4) Keyboard scheme?
// 5) The display should really be separate from the cpu....not sure how to do this though
|
{
let arguments = match parse_args() {
Ok(args) => args,
Err(_) => {
println!("Usage: chip8 [rom] [--debug (optional)] [--step (optional)]");
process::exit(0);
}
};
let width = WINDOW_WIDTH * ENLARGEMENT_FACTOR;
let height = WINDOW_HEIGHT * ENLARGEMENT_FACTOR;
let mut window = create_window(width, height);
let mut file = File::open(arguments.rom).expect("Unable to open the ROM file.");
let mut game_data = Vec::new();
file.read_to_end(&mut game_data)
.expect("Unable to read the ROM file.");
|
identifier_body
|
main.rs
|
extern crate image;
extern crate piston_window;
extern crate rand;
#[macro_use]
extern crate clap;
mod cpu;
mod display;
mod instruction;
mod keyboard;
use clap::App;
use cpu::Cpu;
use keyboard::{KeyMapping, Keyboard};
use piston_window::*;
use std::fs::File;
use std::io::Read;
use std::process;
use std::time::Instant;
const ENLARGEMENT_FACTOR: u32 = 8;
const WINDOW_WIDTH: u32 = 64;
const WINDOW_HEIGHT: u32 = 32;
const CLOCK_SPEED_HZ_HALF: u32 = 180;
const CLOCK_SPEED_HZ_DEFAULT: u32 = 360;
const CLOCK_SPEED_HZ_DOUBLE: u32 = 720;
enum EmulatorSpeed {
Half,
Normal,
Double,
}
struct Arguments {
rom: String,
step: bool,
debug: bool,
speed: EmulatorSpeed,
}
fn main() {
let arguments = match parse_args() {
Ok(args) => args,
Err(_) => {
println!("Usage: chip8 [rom] [--debug (optional)] [--step (optional)]");
process::exit(0);
}
};
let width = WINDOW_WIDTH * ENLARGEMENT_FACTOR;
let height = WINDOW_HEIGHT * ENLARGEMENT_FACTOR;
let mut window = create_window(width, height);
let mut file = File::open(arguments.rom).expect("Unable to open the ROM file.");
let mut game_data = Vec::new();
file.read_to_end(&mut game_data)
.expect("Unable to read the ROM file.");
let clock_speed = match arguments.speed {
EmulatorSpeed::Half => CLOCK_SPEED_HZ_HALF,
EmulatorSpeed::Normal => CLOCK_SPEED_HZ_DEFAULT,
EmulatorSpeed::Double => CLOCK_SPEED_HZ_DOUBLE,
};
let mut cpu = Cpu::new(game_data, clock_speed, arguments.debug);
let keyboard = Keyboard::new(KeyMapping::Improved);
let cycle_time_millis: u128 = (1000 / clock_speed).into();
let mut clock = Instant::now();
while let Some(e) = window.next() {
let mut step_forward = false;
|
draw_screen(&e, &cpu.get_screen(), &mut window);
cpu.draw_flag = false
}
}
if let Some(button) = e.press_args() {
if button == Button::Keyboard(Key::Space) && arguments.step {
step_forward = true;
}
if let Some(key_val) = keyboard.map_key(button) {
cpu.set_key(key_val, true);
}
}
if let Some(button) = e.release_args() {
if let Some(key_val) = keyboard.map_key(button) {
cpu.set_key(key_val, false);
}
}
// If debugging is enabled, only cycle on space bar presses
if arguments.step {
if step_forward {
cpu.cycle();
}
} else {
let elapsed = clock.elapsed().as_millis();
if elapsed >= cycle_time_millis {
cpu.cycle();
clock = Instant::now();
}
}
}
}
fn parse_args() -> Result<Arguments, String> {
let yaml = load_yaml!("../cli.yml");
let matches = App::from_yaml(yaml).get_matches();
let rom = String::from(matches.value_of("ROM").unwrap());
let debug = matches.is_present("debug");
let step = matches.is_present("step");
let speed_arg = matches.value_of("speed").unwrap_or("1");
let speed = match speed_arg {
"0.5" => EmulatorSpeed::Half,
"2" => EmulatorSpeed::Double,
_ => EmulatorSpeed::Normal,
};
let args = Arguments {
rom,
step,
debug,
speed,
};
return Ok(args);
}
fn create_window(width: u32, height: u32) -> PistonWindow {
let opengl = OpenGL::V3_2;
let mut window: PistonWindow = WindowSettings::new("chip8", (width, height))
.exit_on_esc(true)
.opengl(opengl)
.build()
.unwrap();
window.set_max_fps(60);
window
}
fn draw_screen(event: &Event, screen: &display::Screen, window: &mut PistonWindow) {
window.draw_2d(event, |context, graphics| {
piston_window::clear(color::BLACK, graphics);
for (i, row) in screen.iter().enumerate() {
for (j, val) in row.iter().enumerate() {
if *val {
let dimensions = [
(j * ENLARGEMENT_FACTOR as usize) as f64,
(i * ENLARGEMENT_FACTOR as usize) as f64,
ENLARGEMENT_FACTOR as f64,
ENLARGEMENT_FACTOR as f64,
];
Rectangle::new(color::WHITE).draw(
dimensions,
&context.draw_state,
context.transform,
graphics,
);
}
}
}
});
}
// TODO:
// 1) Fix display issues, it is currently not working at all
// a) This might (and probably should) involve rewriting the display logic
// 4) Keyboard scheme?
// 5) The display should really be separate from the cpu....not sure how to do this though
|
if let Some(_) = e.render_args() {
if cpu.draw_flag {
|
random_line_split
|
lint-non-camel-case-types.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![forbid(non_camel_case_types)]
#![allow(dead_code)]
struct
|
{ //~ ERROR type `foo` should have a camel case name such as `Foo`
bar: isize,
}
enum foo2 { //~ ERROR type `foo2` should have a camel case name such as `Foo2`
Bar
}
struct foo3 { //~ ERROR type `foo3` should have a camel case name such as `Foo3`
bar: isize
}
type foo4 = isize; //~ ERROR type `foo4` should have a camel case name such as `Foo4`
enum Foo5 {
bar //~ ERROR variant `bar` should have a camel case name such as `Bar`
}
trait foo6 { //~ ERROR trait `foo6` should have a camel case name such as `Foo6`
}
fn f<ty>(_: ty) {} //~ ERROR type parameter `ty` should have a camel case name such as `Ty`
#[repr(C)]
struct foo7 {
bar: isize,
}
type __ = isize; //~ ERROR type `__` should have a camel case name such as `CamelCase`
fn main() { }
|
foo
|
identifier_name
|
lint-non-camel-case-types.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![forbid(non_camel_case_types)]
#![allow(dead_code)]
struct foo { //~ ERROR type `foo` should have a camel case name such as `Foo`
bar: isize,
}
enum foo2 { //~ ERROR type `foo2` should have a camel case name such as `Foo2`
Bar
}
struct foo3 { //~ ERROR type `foo3` should have a camel case name such as `Foo3`
bar: isize
}
type foo4 = isize; //~ ERROR type `foo4` should have a camel case name such as `Foo4`
enum Foo5 {
bar //~ ERROR variant `bar` should have a camel case name such as `Bar`
}
trait foo6 { //~ ERROR trait `foo6` should have a camel case name such as `Foo6`
}
fn f<ty>(_: ty) {} //~ ERROR type parameter `ty` should have a camel case name such as `Ty`
#[repr(C)]
struct foo7 {
bar: isize,
}
type __ = isize; //~ ERROR type `__` should have a camel case name such as `CamelCase`
|
fn main() { }
|
random_line_split
|
|
parallel.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements parallel traversal over the DOM tree.
//!
//! This code is highly unsafe. Keep this file small and easy to audit.
use dom::{OpaqueNode, StylingMode, TElement, TNode, UnsafeNode};
use rayon;
use std::sync::atomic::Ordering;
use traversal::{STYLE_SHARING_CACHE_HITS, STYLE_SHARING_CACHE_MISSES};
use traversal::DomTraversalContext;
use util::opts;
pub const CHUNK_SIZE: usize = 64;
pub fn traverse_dom<N, C>(root: N,
shared_context: &C::SharedContext,
queue: &rayon::ThreadPool)
where N: TNode,
C: DomTraversalContext<N>
{
debug_assert!(root.as_element().unwrap().styling_mode()!= StylingMode::Stop);
|
if opts::get().style_sharing_stats {
STYLE_SHARING_CACHE_HITS.store(0, Ordering::SeqCst);
STYLE_SHARING_CACHE_MISSES.store(0, Ordering::SeqCst);
}
let nodes = vec![root.to_unsafe()].into_boxed_slice();
let root = root.opaque();
queue.install(|| {
rayon::scope(|scope| {
let nodes = nodes;
top_down_dom::<N, C>(&nodes, root, scope, shared_context);
});
});
if opts::get().style_sharing_stats {
let hits = STYLE_SHARING_CACHE_HITS.load(Ordering::SeqCst);
let misses = STYLE_SHARING_CACHE_MISSES.load(Ordering::SeqCst);
println!("Style sharing stats:");
println!(" * Hits: {}", hits);
println!(" * Misses: {}", misses);
}
}
/// A parallel top-down DOM traversal.
#[inline(always)]
#[allow(unsafe_code)]
fn top_down_dom<'a,'scope, N, C>(unsafe_nodes: &'a [UnsafeNode],
root: OpaqueNode,
scope: &'a rayon::Scope<'scope>,
shared_context: &'scope C::SharedContext)
where N: TNode,
C: DomTraversalContext<N>,
{
let context = C::new(shared_context, root);
let mut discovered_child_nodes = vec![];
for unsafe_node in unsafe_nodes {
// Get a real layout node.
let node = unsafe { N::from_unsafe(&unsafe_node) };
// Perform the appropriate traversal.
let mut children_to_process = 0isize;
context.process_preorder(node);
if let Some(el) = node.as_element() {
C::traverse_children(el, |kid| {
children_to_process += 1;
discovered_child_nodes.push(kid.to_unsafe())
});
}
// Reset the count of children if we need to do a bottom-up traversal
// after the top up.
if context.needs_postorder_traversal() {
if children_to_process == 0 {
// If there were no more children, start walking back up.
bottom_up_dom::<N, C>(root, *unsafe_node, shared_context)
} else {
// Otherwise record the number of children to process when the
// time comes.
node.as_element().unwrap().store_children_to_process(children_to_process);
}
}
}
// NB: In parallel traversal mode we have to purge the LRU cache in order to
// be able to access it without races.
context.local_context().style_sharing_candidate_cache.borrow_mut().clear();
for chunk in discovered_child_nodes.chunks(CHUNK_SIZE) {
let nodes = chunk.iter().cloned().collect::<Vec<_>>().into_boxed_slice();
scope.spawn(move |scope| {
let nodes = nodes;
top_down_dom::<N, C>(&nodes, root, scope, shared_context)
})
}
}
/// Process current node and potentially traverse its ancestors.
///
/// If we are the last child that finished processing, recursively process
/// our parent. Else, stop. Also, stop at the root.
///
/// Thus, if we start with all the leaves of a tree, we end up traversing
/// the whole tree bottom-up because each parent will be processed exactly
/// once (by the last child that finishes processing).
///
/// The only communication between siblings is that they both
/// fetch-and-subtract the parent's children count.
#[allow(unsafe_code)]
fn bottom_up_dom<N, C>(root: OpaqueNode,
unsafe_node: UnsafeNode,
shared_context: &C::SharedContext)
where N: TNode,
C: DomTraversalContext<N>
{
let context = C::new(shared_context, root);
// Get a real layout node.
let mut node = unsafe { N::from_unsafe(&unsafe_node) };
loop {
// Perform the appropriate operation.
context.process_postorder(node);
let parent = match node.layout_parent_element(root) {
None => break,
Some(parent) => parent,
};
let remaining = parent.did_process_child();
if remaining!= 0 {
// Get out of here and find another node to work on.
break
}
// We were the last child of our parent. Construct flows for our parent.
node = parent.as_node();
}
}
|
random_line_split
|
|
parallel.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements parallel traversal over the DOM tree.
//!
//! This code is highly unsafe. Keep this file small and easy to audit.
use dom::{OpaqueNode, StylingMode, TElement, TNode, UnsafeNode};
use rayon;
use std::sync::atomic::Ordering;
use traversal::{STYLE_SHARING_CACHE_HITS, STYLE_SHARING_CACHE_MISSES};
use traversal::DomTraversalContext;
use util::opts;
pub const CHUNK_SIZE: usize = 64;
pub fn traverse_dom<N, C>(root: N,
shared_context: &C::SharedContext,
queue: &rayon::ThreadPool)
where N: TNode,
C: DomTraversalContext<N>
{
debug_assert!(root.as_element().unwrap().styling_mode()!= StylingMode::Stop);
if opts::get().style_sharing_stats {
STYLE_SHARING_CACHE_HITS.store(0, Ordering::SeqCst);
STYLE_SHARING_CACHE_MISSES.store(0, Ordering::SeqCst);
}
let nodes = vec![root.to_unsafe()].into_boxed_slice();
let root = root.opaque();
queue.install(|| {
rayon::scope(|scope| {
let nodes = nodes;
top_down_dom::<N, C>(&nodes, root, scope, shared_context);
});
});
if opts::get().style_sharing_stats {
let hits = STYLE_SHARING_CACHE_HITS.load(Ordering::SeqCst);
let misses = STYLE_SHARING_CACHE_MISSES.load(Ordering::SeqCst);
println!("Style sharing stats:");
println!(" * Hits: {}", hits);
println!(" * Misses: {}", misses);
}
}
/// A parallel top-down DOM traversal.
#[inline(always)]
#[allow(unsafe_code)]
fn top_down_dom<'a,'scope, N, C>(unsafe_nodes: &'a [UnsafeNode],
root: OpaqueNode,
scope: &'a rayon::Scope<'scope>,
shared_context: &'scope C::SharedContext)
where N: TNode,
C: DomTraversalContext<N>,
|
if context.needs_postorder_traversal() {
if children_to_process == 0 {
// If there were no more children, start walking back up.
bottom_up_dom::<N, C>(root, *unsafe_node, shared_context)
} else {
// Otherwise record the number of children to process when the
// time comes.
node.as_element().unwrap().store_children_to_process(children_to_process);
}
}
}
// NB: In parallel traversal mode we have to purge the LRU cache in order to
// be able to access it without races.
context.local_context().style_sharing_candidate_cache.borrow_mut().clear();
for chunk in discovered_child_nodes.chunks(CHUNK_SIZE) {
let nodes = chunk.iter().cloned().collect::<Vec<_>>().into_boxed_slice();
scope.spawn(move |scope| {
let nodes = nodes;
top_down_dom::<N, C>(&nodes, root, scope, shared_context)
})
}
}
/// Process current node and potentially traverse its ancestors.
///
/// If we are the last child that finished processing, recursively process
/// our parent. Else, stop. Also, stop at the root.
///
/// Thus, if we start with all the leaves of a tree, we end up traversing
/// the whole tree bottom-up because each parent will be processed exactly
/// once (by the last child that finishes processing).
///
/// The only communication between siblings is that they both
/// fetch-and-subtract the parent's children count.
#[allow(unsafe_code)]
fn bottom_up_dom<N, C>(root: OpaqueNode,
unsafe_node: UnsafeNode,
shared_context: &C::SharedContext)
where N: TNode,
C: DomTraversalContext<N>
{
let context = C::new(shared_context, root);
// Get a real layout node.
let mut node = unsafe { N::from_unsafe(&unsafe_node) };
loop {
// Perform the appropriate operation.
context.process_postorder(node);
let parent = match node.layout_parent_element(root) {
None => break,
Some(parent) => parent,
};
let remaining = parent.did_process_child();
if remaining!= 0 {
// Get out of here and find another node to work on.
break
}
// We were the last child of our parent. Construct flows for our parent.
node = parent.as_node();
}
}
|
{
let context = C::new(shared_context, root);
let mut discovered_child_nodes = vec![];
for unsafe_node in unsafe_nodes {
// Get a real layout node.
let node = unsafe { N::from_unsafe(&unsafe_node) };
// Perform the appropriate traversal.
let mut children_to_process = 0isize;
context.process_preorder(node);
if let Some(el) = node.as_element() {
C::traverse_children(el, |kid| {
children_to_process += 1;
discovered_child_nodes.push(kid.to_unsafe())
});
}
// Reset the count of children if we need to do a bottom-up traversal
// after the top up.
|
identifier_body
|
parallel.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements parallel traversal over the DOM tree.
//!
//! This code is highly unsafe. Keep this file small and easy to audit.
use dom::{OpaqueNode, StylingMode, TElement, TNode, UnsafeNode};
use rayon;
use std::sync::atomic::Ordering;
use traversal::{STYLE_SHARING_CACHE_HITS, STYLE_SHARING_CACHE_MISSES};
use traversal::DomTraversalContext;
use util::opts;
pub const CHUNK_SIZE: usize = 64;
pub fn
|
<N, C>(root: N,
shared_context: &C::SharedContext,
queue: &rayon::ThreadPool)
where N: TNode,
C: DomTraversalContext<N>
{
debug_assert!(root.as_element().unwrap().styling_mode()!= StylingMode::Stop);
if opts::get().style_sharing_stats {
STYLE_SHARING_CACHE_HITS.store(0, Ordering::SeqCst);
STYLE_SHARING_CACHE_MISSES.store(0, Ordering::SeqCst);
}
let nodes = vec![root.to_unsafe()].into_boxed_slice();
let root = root.opaque();
queue.install(|| {
rayon::scope(|scope| {
let nodes = nodes;
top_down_dom::<N, C>(&nodes, root, scope, shared_context);
});
});
if opts::get().style_sharing_stats {
let hits = STYLE_SHARING_CACHE_HITS.load(Ordering::SeqCst);
let misses = STYLE_SHARING_CACHE_MISSES.load(Ordering::SeqCst);
println!("Style sharing stats:");
println!(" * Hits: {}", hits);
println!(" * Misses: {}", misses);
}
}
/// A parallel top-down DOM traversal.
///
/// Processes every node in `unsafe_nodes` with the traversal context,
/// gathers the children that need work, and re-spawns the traversal on
/// rayon for those children in chunks of `CHUNK_SIZE`.
#[inline(always)]
#[allow(unsafe_code)]
fn top_down_dom<'a, 'scope, N, C>(unsafe_nodes: &'a [UnsafeNode],
                                  root: OpaqueNode,
                                  scope: &'a rayon::Scope<'scope>,
                                  shared_context: &'scope C::SharedContext)
    where N: TNode,
          C: DomTraversalContext<N>,
{
    let context = C::new(shared_context, root);
    let mut discovered_child_nodes = vec![];
    for unsafe_node in unsafe_nodes {
        // Get a real layout node back from its type-erased form.
        let node = unsafe { N::from_unsafe(&unsafe_node) };
        // Perform the appropriate traversal, counting the children we
        // discover along the way.
        let mut children_to_process = 0isize;
        context.process_preorder(node);
        if let Some(el) = node.as_element() {
            C::traverse_children(el, |kid| {
                children_to_process += 1;
                discovered_child_nodes.push(kid.to_unsafe())
            });
        }
        // Record the count of children if we need to do a bottom-up
        // traversal after the top-down one.
        if context.needs_postorder_traversal() {
            if children_to_process == 0 {
                // If there were no children, start walking back up right away.
                bottom_up_dom::<N, C>(root, *unsafe_node, shared_context)
            } else {
                // Otherwise record the number of children to process when the
                // time comes (see `bottom_up_dom`, which counts them back down).
                node.as_element().unwrap().store_children_to_process(children_to_process);
            }
        }
    }
    // NB: In parallel traversal mode we have to purge the LRU cache in order to
    // be able to access it without races.
    context.local_context().style_sharing_candidate_cache.borrow_mut().clear();
    // Fan out: each chunk of discovered children becomes its own rayon task.
    for chunk in discovered_child_nodes.chunks(CHUNK_SIZE) {
        let nodes = chunk.iter().cloned().collect::<Vec<_>>().into_boxed_slice();
        scope.spawn(move |scope| {
            let nodes = nodes;
            top_down_dom::<N, C>(&nodes, root, scope, shared_context)
        })
    }
}
/// Process current node and potentially traverse its ancestors.
///
/// If we are the last child that finished processing, recursively process
/// our parent. Else, stop. Also, stop at the root.
///
/// Thus, if we start with all the leaves of a tree, we end up traversing
/// the whole tree bottom-up because each parent will be processed exactly
/// once (by the last child that finishes processing).
///
/// The only communication between siblings is that they both
/// fetch-and-subtract the parent's children count.
#[allow(unsafe_code)]
fn bottom_up_dom<N, C>(root: OpaqueNode,
                       unsafe_node: UnsafeNode,
                       shared_context: &C::SharedContext)
    where N: TNode,
          C: DomTraversalContext<N>
{
    let context = C::new(shared_context, root);
    // Get a real layout node back from its type-erased form.
    let mut node = unsafe { N::from_unsafe(&unsafe_node) };
    loop {
        // Perform the appropriate postorder operation on this node.
        context.process_postorder(node);
        // Stop at the (layout) root: it has no parent to process.
        let parent = match node.layout_parent_element(root) {
            None => break,
            Some(parent) => parent,
        };
        // Fetch-and-subtract the parent's pending-children count; a
        // non-zero result means a sibling is still in flight and will
        // take over the parent when it finishes.
        let remaining = parent.did_process_child();
        if remaining != 0 {
            // Get out of here and find another node to work on.
            break
        }
        // We were the last child of our parent. Construct flows for our parent.
        node = parent.as_node();
    }
}
|
traverse_dom
|
identifier_name
|
parallel.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements parallel traversal over the DOM tree.
//!
//! This code is highly unsafe. Keep this file small and easy to audit.
use dom::{OpaqueNode, StylingMode, TElement, TNode, UnsafeNode};
use rayon;
use std::sync::atomic::Ordering;
use traversal::{STYLE_SHARING_CACHE_HITS, STYLE_SHARING_CACHE_MISSES};
use traversal::DomTraversalContext;
use util::opts;
/// Maximum number of nodes a single spawned work unit processes.
pub const CHUNK_SIZE: usize = 64;

/// Entry point of the parallel traversal: processes the tree rooted at
/// `root` on the rayon thread pool `queue`, blocking until the whole
/// traversal is done (`rayon::scope` joins all transitively spawned tasks).
///
/// When the `style_sharing_stats` option is enabled, the global
/// style-sharing cache counters are reset before the traversal and the
/// hit/miss totals are printed afterwards.
pub fn traverse_dom<N, C>(root: N,
                          shared_context: &C::SharedContext,
                          queue: &rayon::ThreadPool)
    where N: TNode,
          C: DomTraversalContext<N>
{
    // The caller is responsible for not invoking the traversal on a tree
    // that needs no work at all.
    debug_assert!(root.as_element().unwrap().styling_mode() != StylingMode::Stop);
    if opts::get().style_sharing_stats {
        STYLE_SHARING_CACHE_HITS.store(0, Ordering::SeqCst);
        STYLE_SHARING_CACHE_MISSES.store(0, Ordering::SeqCst);
    }
    // Seed the traversal with a single work unit holding only the root.
    let nodes = vec![root.to_unsafe()].into_boxed_slice();
    let root = root.opaque();
    queue.install(|| {
        rayon::scope(|scope| {
            let nodes = nodes;
            top_down_dom::<N, C>(&nodes, root, scope, shared_context);
        });
    });
    if opts::get().style_sharing_stats {
        let hits = STYLE_SHARING_CACHE_HITS.load(Ordering::SeqCst);
        let misses = STYLE_SHARING_CACHE_MISSES.load(Ordering::SeqCst);
        println!("Style sharing stats:");
        println!(" * Hits: {}", hits);
        println!(" * Misses: {}", misses);
    }
}
/// A parallel top-down DOM traversal.
#[inline(always)]
#[allow(unsafe_code)]
fn top_down_dom<'a,'scope, N, C>(unsafe_nodes: &'a [UnsafeNode],
root: OpaqueNode,
scope: &'a rayon::Scope<'scope>,
shared_context: &'scope C::SharedContext)
where N: TNode,
C: DomTraversalContext<N>,
{
let context = C::new(shared_context, root);
let mut discovered_child_nodes = vec![];
for unsafe_node in unsafe_nodes {
// Get a real layout node.
let node = unsafe { N::from_unsafe(&unsafe_node) };
// Perform the appropriate traversal.
let mut children_to_process = 0isize;
context.process_preorder(node);
if let Some(el) = node.as_element() {
C::traverse_children(el, |kid| {
children_to_process += 1;
discovered_child_nodes.push(kid.to_unsafe())
});
}
// Reset the count of children if we need to do a bottom-up traversal
// after the top up.
if context.needs_postorder_traversal()
|
}
// NB: In parallel traversal mode we have to purge the LRU cache in order to
// be able to access it without races.
context.local_context().style_sharing_candidate_cache.borrow_mut().clear();
for chunk in discovered_child_nodes.chunks(CHUNK_SIZE) {
let nodes = chunk.iter().cloned().collect::<Vec<_>>().into_boxed_slice();
scope.spawn(move |scope| {
let nodes = nodes;
top_down_dom::<N, C>(&nodes, root, scope, shared_context)
})
}
}
/// Process current node and potentially traverse its ancestors.
///
/// If we are the last child that finished processing, recursively process
/// our parent. Else, stop. Also, stop at the root.
///
/// Thus, if we start with all the leaves of a tree, we end up traversing
/// the whole tree bottom-up because each parent will be processed exactly
/// once (by the last child that finishes processing).
///
/// The only communication between siblings is that they both
/// fetch-and-subtract the parent's children count.
#[allow(unsafe_code)]
fn bottom_up_dom<N, C>(root: OpaqueNode,
unsafe_node: UnsafeNode,
shared_context: &C::SharedContext)
where N: TNode,
C: DomTraversalContext<N>
{
let context = C::new(shared_context, root);
// Get a real layout node.
let mut node = unsafe { N::from_unsafe(&unsafe_node) };
loop {
// Perform the appropriate operation.
context.process_postorder(node);
let parent = match node.layout_parent_element(root) {
None => break,
Some(parent) => parent,
};
let remaining = parent.did_process_child();
if remaining!= 0 {
// Get out of here and find another node to work on.
break
}
// We were the last child of our parent. Construct flows for our parent.
node = parent.as_node();
}
}
|
{
if children_to_process == 0 {
// If there were no more children, start walking back up.
bottom_up_dom::<N, C>(root, *unsafe_node, shared_context)
} else {
// Otherwise record the number of children to process when the
// time comes.
node.as_element().unwrap().store_children_to_process(children_to_process);
}
}
|
conditional_block
|
shared.rs
|
// Copyright 2015-2016, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//! `IMPL` Shared (reference counted) wrapper implementation.
use std::fmt;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use translate::*;
/// Wrapper implementations for shared types. See `glib_wrapper!`.
///
/// Expands to:
/// - a newtype `$name` wrapping `Shared<$ffi_name, MemoryManager>`,
/// - a `MemoryManager` unit struct whose `SharedMemoryManager` impl runs the
///   caller-supplied `@ref`/`@unref` expressions on the raw pointer,
/// - the glib pointer-translation traits (`GlibPtrDefault`, `ToGlibPtr`,
///   `FromGlibPtr`) for `$name`, all forwarding to the inner `Shared`.
#[macro_export]
macro_rules! glib_shared_wrapper {
    ([$($attr:meta)*] $name:ident, $ffi_name:path, @ref $ref_arg:ident $ref_expr:expr,
     @unref $unref_arg:ident $unref_expr:expr) => {
        $(#[$attr])*
        #[derive(Clone)]
        pub struct $name($crate::shared::Shared<$ffi_name, MemoryManager>);

        #[doc(hidden)]
        pub struct MemoryManager;

        impl $crate::shared::SharedMemoryManager<$ffi_name> for MemoryManager {
            // `$ref_arg`/`$unref_arg` name the pointer parameter so the
            // caller-provided expressions can refer to it.
            #[inline]
            unsafe fn ref_($ref_arg: *mut $ffi_name) {
                $ref_expr;
            }
            #[inline]
            unsafe fn unref($unref_arg: *mut $ffi_name) {
                $unref_expr
            }
        }

        #[doc(hidden)]
        impl $crate::translate::GlibPtrDefault for $name {
            type GlibType = *mut $ffi_name;
        }

        #[doc(hidden)]
        impl<'a> $crate::translate::ToGlibPtr<'a, *mut $ffi_name> for $name {
            type Storage = &'a $crate::shared::Shared<$ffi_name, MemoryManager>;
            // Transfer-none: re-wrap the inner Shared's stash under our own type.
            #[inline]
            fn to_glib_none(&'a self) -> $crate::translate::Stash<'a, *mut $ffi_name, Self> {
                let stash = self.0.to_glib_none();
                $crate::translate::Stash(stash.0, stash.1)
            }
            // Transfer-full: delegate to the inner Shared.
            #[inline]
            fn to_glib_full(&self) -> *mut $ffi_name {
                (&self.0).to_glib_full()
            }
        }

        #[doc(hidden)]
        impl $crate::translate::FromGlibPtr<*mut $ffi_name> for $name {
            #[inline]
            unsafe fn from_glib_none(ptr: *mut $ffi_name) -> Self {
                $name($crate::translate::from_glib_none(ptr))
            }
            #[inline]
            unsafe fn from_glib_full(ptr: *mut $ffi_name) -> Self {
                $name($crate::translate::from_glib_full(ptr))
            }
            #[inline]
            unsafe fn from_glib_borrow(ptr: *mut $ffi_name) -> Self {
                $name($crate::translate::from_glib_borrow(ptr))
            }
        }
    }
}
/// Reference-counting operations for the FFI type `T`, wired up by
/// `glib_shared_wrapper!` to the type's own ref/unref functions.
pub trait SharedMemoryManager<T> {
    /// Take an additional reference on `ptr`.
    ///
    /// # Safety
    /// `ptr` must point to a valid, live instance of `T`.
    unsafe fn ref_(ptr: *mut T);
    /// Release one reference on `ptr`.
    ///
    /// # Safety
    /// `ptr` must point to a valid instance of `T` on which a reference
    /// is currently held.
    unsafe fn unref(ptr: *mut T);
}
/// Encapsulates memory management logic for shared types.
pub struct Shared<T, MM: SharedMemoryManager<T>> {
    // Raw pointer to the reference-counted FFI object.
    inner: *mut T,
    // When true this wrapper does not own a reference and must not unref
    // on drop (set only by `from_glib_borrow`).
    borrowed: bool,
    // Zero-sized marker tying the wrapper to its memory manager; `*const MM`
    // keeps the type !Send/!Sync like the raw pointer it manages.
    mm: PhantomData<*const MM>,
}
impl<T, MM: SharedMemoryManager<T>> Drop for Shared<T, MM> {
fn drop(&mut self) {
if!self.borrowed
|
}
}
impl<T, MM: SharedMemoryManager<T>> Clone for Shared<T, MM> {
    // Take a new strong reference for the copy. The clone always owns its
    // reference (`borrowed: false`), even when `self` was only borrowed.
    fn clone(&self) -> Self {
        unsafe { MM::ref_(self.inner); }
        Shared {
            inner: self.inner,
            borrowed: false,
            mm: PhantomData,
        }
    }
}
impl<T, MM: SharedMemoryManager<T>> fmt::Debug for Shared<T, MM> {
    // Show the raw pointer value and ownership flag; `T` itself need not
    // be `Debug`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Shared {{ inner: {:?}, borrowed: {} }}", self.inner, self.borrowed)
    }
}
impl<T, MM: SharedMemoryManager<T>> PartialEq for Shared<T, MM> {
    // Identity semantics: two wrappers are equal exactly when they refer
    // to the same underlying FFI object.
    fn eq(&self, other: &Self) -> bool {
        let lhs: *mut T = self.inner;
        let rhs: *mut T = other.inner;
        lhs == rhs
    }
}
// Pointer comparison is a total equivalence relation, so `Eq` holds.
impl<T, MM: SharedMemoryManager<T>> Eq for Shared<T, MM> {}

impl<T, MM: SharedMemoryManager<T>> Hash for Shared<T, MM> {
    // Hash the pointer value, consistent with `PartialEq` comparing pointers.
    fn hash<H>(&self, state: &mut H) where H: Hasher {
        self.inner.hash(state)
    }
}
impl<'a, T: 'static, MM> ToGlibPtr<'a, *mut T> for Shared<T, MM>
    where MM: SharedMemoryManager<T> + 'static {
    type Storage = &'a Self;

    /// Transfer-none: hand out the raw pointer without touching the
    /// refcount; the returned `Stash` keeps `self` borrowed for `'a`.
    #[inline]
    fn to_glib_none(&'a self) -> Stash<'a, *mut T, Self> {
        Stash(self.inner, self)
    }

    /// Transfer-full: take an extra reference on behalf of the receiver,
    /// which becomes responsible for releasing the returned pointer.
    #[inline]
    fn to_glib_full(&self) -> *mut T {
        unsafe { MM::ref_(self.inner); }
        self.inner
    }
}
impl<T: 'static, MM: SharedMemoryManager<T>> FromGlibPtr<*mut T> for Shared<T, MM> {
    /// Transfer-none: the caller keeps its reference, so take one of our own.
    #[inline]
    unsafe fn from_glib_none(ptr: *mut T) -> Self {
        assert!(!ptr.is_null());
        MM::ref_(ptr);
        Shared {
            inner: ptr,
            borrowed: false,
            mm: PhantomData,
        }
    }

    /// Transfer-full: adopt the caller's reference without adding one.
    #[inline]
    unsafe fn from_glib_full(ptr: *mut T) -> Self {
        assert!(!ptr.is_null());
        Shared {
            inner: ptr,
            borrowed: false,
            mm: PhantomData,
        }
    }

    /// Borrow: wrap without taking a reference; the `borrowed` flag stops
    /// `Drop` from releasing a pointer this wrapper does not own.
    #[inline]
    unsafe fn from_glib_borrow(ptr: *mut T) -> Self {
        assert!(!ptr.is_null());
        Shared {
            inner: ptr,
            borrowed: true,
            mm: PhantomData,
        }
    }
}
|
{
unsafe { MM::unref(self.inner); }
}
|
conditional_block
|
shared.rs
|
// Copyright 2015-2016, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//! `IMPL` Shared (reference counted) wrapper implementation.
use std::fmt;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use translate::*;
/// Wrapper implementations for shared types. See `glib_wrapper!`.
#[macro_export]
macro_rules! glib_shared_wrapper {
([$($attr:meta)*] $name:ident, $ffi_name:path, @ref $ref_arg:ident $ref_expr:expr,
@unref $unref_arg:ident $unref_expr:expr) => {
$(#[$attr])*
#[derive(Clone)]
pub struct $name($crate::shared::Shared<$ffi_name, MemoryManager>);
#[doc(hidden)]
pub struct MemoryManager;
impl $crate::shared::SharedMemoryManager<$ffi_name> for MemoryManager {
#[inline]
unsafe fn ref_($ref_arg: *mut $ffi_name) {
$ref_expr;
}
#[inline]
unsafe fn unref($unref_arg: *mut $ffi_name) {
$unref_expr
}
}
#[doc(hidden)]
impl $crate::translate::GlibPtrDefault for $name {
type GlibType = *mut $ffi_name;
}
#[doc(hidden)]
impl<'a> $crate::translate::ToGlibPtr<'a, *mut $ffi_name> for $name {
type Storage = &'a $crate::shared::Shared<$ffi_name, MemoryManager>;
#[inline]
fn to_glib_none(&'a self) -> $crate::translate::Stash<'a, *mut $ffi_name, Self> {
let stash = self.0.to_glib_none();
$crate::translate::Stash(stash.0, stash.1)
}
#[inline]
fn to_glib_full(&self) -> *mut $ffi_name {
(&self.0).to_glib_full()
}
}
#[doc(hidden)]
impl $crate::translate::FromGlibPtr<*mut $ffi_name> for $name {
#[inline]
unsafe fn from_glib_none(ptr: *mut $ffi_name) -> Self {
$name($crate::translate::from_glib_none(ptr))
}
#[inline]
unsafe fn from_glib_full(ptr: *mut $ffi_name) -> Self {
$name($crate::translate::from_glib_full(ptr))
}
#[inline]
unsafe fn from_glib_borrow(ptr: *mut $ffi_name) -> Self {
$name($crate::translate::from_glib_borrow(ptr))
}
}
}
}
pub trait SharedMemoryManager<T> {
unsafe fn ref_(ptr: *mut T);
unsafe fn unref(ptr: *mut T);
}
/// Encapsulates memory management logic for shared types.
pub struct Shared<T, MM: SharedMemoryManager<T>> {
inner: *mut T,
borrowed: bool,
mm: PhantomData<*const MM>,
}
impl<T, MM: SharedMemoryManager<T>> Drop for Shared<T, MM> {
fn drop(&mut self) {
if!self.borrowed {
unsafe { MM::unref(self.inner); }
}
}
}
impl<T, MM: SharedMemoryManager<T>> Clone for Shared<T, MM> {
fn clone(&self) -> Self {
unsafe { MM::ref_(self.inner); }
Shared {
inner: self.inner,
borrowed: false,
mm: PhantomData,
}
}
}
impl<T, MM: SharedMemoryManager<T>> fmt::Debug for Shared<T, MM> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Shared {{ inner: {:?}, borrowed: {} }}", self.inner, self.borrowed)
}
}
impl<T, MM: SharedMemoryManager<T>> PartialEq for Shared<T, MM> {
fn eq(&self, other: &Self) -> bool {
self.inner == other.inner
}
}
impl<T, MM: SharedMemoryManager<T>> Eq for Shared<T, MM> {}
impl<T, MM: SharedMemoryManager<T>> Hash for Shared<T, MM> {
fn hash<H>(&self, state: &mut H) where H: Hasher {
self.inner.hash(state)
}
}
impl<'a, T:'static, MM> ToGlibPtr<'a, *mut T> for Shared<T, MM>
where MM: SharedMemoryManager<T> +'static {
type Storage = &'a Self;
#[inline]
fn to_glib_none(&'a self) -> Stash<'a, *mut T, Self> {
Stash(self.inner, self)
}
#[inline]
fn to_glib_full(&self) -> *mut T {
unsafe { MM::ref_(self.inner); }
self.inner
|
}
impl<T:'static, MM: SharedMemoryManager<T>> FromGlibPtr<*mut T> for Shared<T, MM> {
#[inline]
unsafe fn from_glib_none(ptr: *mut T) -> Self {
assert!(!ptr.is_null());
MM::ref_(ptr);
Shared {
inner: ptr,
borrowed: false,
mm: PhantomData,
}
}
#[inline]
unsafe fn from_glib_full(ptr: *mut T) -> Self {
assert!(!ptr.is_null());
Shared {
inner: ptr,
borrowed: false,
mm: PhantomData,
}
}
#[inline]
unsafe fn from_glib_borrow(ptr: *mut T) -> Self {
assert!(!ptr.is_null());
Shared {
inner: ptr,
borrowed: true,
mm: PhantomData,
}
}
}
|
}
|
random_line_split
|
shared.rs
|
// Copyright 2015-2016, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//! `IMPL` Shared (reference counted) wrapper implementation.
use std::fmt;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use translate::*;
/// Wrapper implementations for shared types. See `glib_wrapper!`.
#[macro_export]
macro_rules! glib_shared_wrapper {
([$($attr:meta)*] $name:ident, $ffi_name:path, @ref $ref_arg:ident $ref_expr:expr,
@unref $unref_arg:ident $unref_expr:expr) => {
$(#[$attr])*
#[derive(Clone)]
pub struct $name($crate::shared::Shared<$ffi_name, MemoryManager>);
#[doc(hidden)]
pub struct MemoryManager;
impl $crate::shared::SharedMemoryManager<$ffi_name> for MemoryManager {
#[inline]
unsafe fn ref_($ref_arg: *mut $ffi_name) {
$ref_expr;
}
#[inline]
unsafe fn unref($unref_arg: *mut $ffi_name) {
$unref_expr
}
}
#[doc(hidden)]
impl $crate::translate::GlibPtrDefault for $name {
type GlibType = *mut $ffi_name;
}
#[doc(hidden)]
impl<'a> $crate::translate::ToGlibPtr<'a, *mut $ffi_name> for $name {
type Storage = &'a $crate::shared::Shared<$ffi_name, MemoryManager>;
#[inline]
fn to_glib_none(&'a self) -> $crate::translate::Stash<'a, *mut $ffi_name, Self> {
let stash = self.0.to_glib_none();
$crate::translate::Stash(stash.0, stash.1)
}
#[inline]
fn to_glib_full(&self) -> *mut $ffi_name {
(&self.0).to_glib_full()
}
}
#[doc(hidden)]
impl $crate::translate::FromGlibPtr<*mut $ffi_name> for $name {
#[inline]
unsafe fn from_glib_none(ptr: *mut $ffi_name) -> Self {
$name($crate::translate::from_glib_none(ptr))
}
#[inline]
unsafe fn from_glib_full(ptr: *mut $ffi_name) -> Self {
$name($crate::translate::from_glib_full(ptr))
}
#[inline]
unsafe fn from_glib_borrow(ptr: *mut $ffi_name) -> Self {
$name($crate::translate::from_glib_borrow(ptr))
}
}
}
}
pub trait SharedMemoryManager<T> {
unsafe fn ref_(ptr: *mut T);
unsafe fn unref(ptr: *mut T);
}
/// Encapsulates memory management logic for shared types.
pub struct Shared<T, MM: SharedMemoryManager<T>> {
inner: *mut T,
borrowed: bool,
mm: PhantomData<*const MM>,
}
impl<T, MM: SharedMemoryManager<T>> Drop for Shared<T, MM> {
fn drop(&mut self) {
if!self.borrowed {
unsafe { MM::unref(self.inner); }
}
}
}
impl<T, MM: SharedMemoryManager<T>> Clone for Shared<T, MM> {
fn clone(&self) -> Self {
unsafe { MM::ref_(self.inner); }
Shared {
inner: self.inner,
borrowed: false,
mm: PhantomData,
}
}
}
impl<T, MM: SharedMemoryManager<T>> fmt::Debug for Shared<T, MM> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Shared {{ inner: {:?}, borrowed: {} }}", self.inner, self.borrowed)
}
}
impl<T, MM: SharedMemoryManager<T>> PartialEq for Shared<T, MM> {
fn eq(&self, other: &Self) -> bool {
self.inner == other.inner
}
}
impl<T, MM: SharedMemoryManager<T>> Eq for Shared<T, MM> {}
impl<T, MM: SharedMemoryManager<T>> Hash for Shared<T, MM> {
fn hash<H>(&self, state: &mut H) where H: Hasher
|
}
impl<'a, T:'static, MM> ToGlibPtr<'a, *mut T> for Shared<T, MM>
where MM: SharedMemoryManager<T> +'static {
type Storage = &'a Self;
#[inline]
fn to_glib_none(&'a self) -> Stash<'a, *mut T, Self> {
Stash(self.inner, self)
}
#[inline]
fn to_glib_full(&self) -> *mut T {
unsafe { MM::ref_(self.inner); }
self.inner
}
}
impl<T:'static, MM: SharedMemoryManager<T>> FromGlibPtr<*mut T> for Shared<T, MM> {
#[inline]
unsafe fn from_glib_none(ptr: *mut T) -> Self {
assert!(!ptr.is_null());
MM::ref_(ptr);
Shared {
inner: ptr,
borrowed: false,
mm: PhantomData,
}
}
#[inline]
unsafe fn from_glib_full(ptr: *mut T) -> Self {
assert!(!ptr.is_null());
Shared {
inner: ptr,
borrowed: false,
mm: PhantomData,
}
}
#[inline]
unsafe fn from_glib_borrow(ptr: *mut T) -> Self {
assert!(!ptr.is_null());
Shared {
inner: ptr,
borrowed: true,
mm: PhantomData,
}
}
}
|
{
self.inner.hash(state)
}
|
identifier_body
|
shared.rs
|
// Copyright 2015-2016, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//! `IMPL` Shared (reference counted) wrapper implementation.
use std::fmt;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use translate::*;
/// Wrapper implementations for shared types. See `glib_wrapper!`.
#[macro_export]
macro_rules! glib_shared_wrapper {
([$($attr:meta)*] $name:ident, $ffi_name:path, @ref $ref_arg:ident $ref_expr:expr,
@unref $unref_arg:ident $unref_expr:expr) => {
$(#[$attr])*
#[derive(Clone)]
pub struct $name($crate::shared::Shared<$ffi_name, MemoryManager>);
#[doc(hidden)]
pub struct MemoryManager;
impl $crate::shared::SharedMemoryManager<$ffi_name> for MemoryManager {
#[inline]
unsafe fn ref_($ref_arg: *mut $ffi_name) {
$ref_expr;
}
#[inline]
unsafe fn unref($unref_arg: *mut $ffi_name) {
$unref_expr
}
}
#[doc(hidden)]
impl $crate::translate::GlibPtrDefault for $name {
type GlibType = *mut $ffi_name;
}
#[doc(hidden)]
impl<'a> $crate::translate::ToGlibPtr<'a, *mut $ffi_name> for $name {
type Storage = &'a $crate::shared::Shared<$ffi_name, MemoryManager>;
#[inline]
fn to_glib_none(&'a self) -> $crate::translate::Stash<'a, *mut $ffi_name, Self> {
let stash = self.0.to_glib_none();
$crate::translate::Stash(stash.0, stash.1)
}
#[inline]
fn to_glib_full(&self) -> *mut $ffi_name {
(&self.0).to_glib_full()
}
}
#[doc(hidden)]
impl $crate::translate::FromGlibPtr<*mut $ffi_name> for $name {
#[inline]
unsafe fn from_glib_none(ptr: *mut $ffi_name) -> Self {
$name($crate::translate::from_glib_none(ptr))
}
#[inline]
unsafe fn from_glib_full(ptr: *mut $ffi_name) -> Self {
$name($crate::translate::from_glib_full(ptr))
}
#[inline]
unsafe fn from_glib_borrow(ptr: *mut $ffi_name) -> Self {
$name($crate::translate::from_glib_borrow(ptr))
}
}
}
}
pub trait SharedMemoryManager<T> {
unsafe fn ref_(ptr: *mut T);
unsafe fn unref(ptr: *mut T);
}
/// Encapsulates memory management logic for shared types.
pub struct Shared<T, MM: SharedMemoryManager<T>> {
inner: *mut T,
borrowed: bool,
mm: PhantomData<*const MM>,
}
impl<T, MM: SharedMemoryManager<T>> Drop for Shared<T, MM> {
fn drop(&mut self) {
if!self.borrowed {
unsafe { MM::unref(self.inner); }
}
}
}
impl<T, MM: SharedMemoryManager<T>> Clone for Shared<T, MM> {
fn clone(&self) -> Self {
unsafe { MM::ref_(self.inner); }
Shared {
inner: self.inner,
borrowed: false,
mm: PhantomData,
}
}
}
impl<T, MM: SharedMemoryManager<T>> fmt::Debug for Shared<T, MM> {
fn
|
(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Shared {{ inner: {:?}, borrowed: {} }}", self.inner, self.borrowed)
}
}
impl<T, MM: SharedMemoryManager<T>> PartialEq for Shared<T, MM> {
fn eq(&self, other: &Self) -> bool {
self.inner == other.inner
}
}
impl<T, MM: SharedMemoryManager<T>> Eq for Shared<T, MM> {}
impl<T, MM: SharedMemoryManager<T>> Hash for Shared<T, MM> {
fn hash<H>(&self, state: &mut H) where H: Hasher {
self.inner.hash(state)
}
}
impl<'a, T:'static, MM> ToGlibPtr<'a, *mut T> for Shared<T, MM>
where MM: SharedMemoryManager<T> +'static {
type Storage = &'a Self;
#[inline]
fn to_glib_none(&'a self) -> Stash<'a, *mut T, Self> {
Stash(self.inner, self)
}
#[inline]
fn to_glib_full(&self) -> *mut T {
unsafe { MM::ref_(self.inner); }
self.inner
}
}
impl<T:'static, MM: SharedMemoryManager<T>> FromGlibPtr<*mut T> for Shared<T, MM> {
#[inline]
unsafe fn from_glib_none(ptr: *mut T) -> Self {
assert!(!ptr.is_null());
MM::ref_(ptr);
Shared {
inner: ptr,
borrowed: false,
mm: PhantomData,
}
}
#[inline]
unsafe fn from_glib_full(ptr: *mut T) -> Self {
assert!(!ptr.is_null());
Shared {
inner: ptr,
borrowed: false,
mm: PhantomData,
}
}
#[inline]
unsafe fn from_glib_borrow(ptr: *mut T) -> Self {
assert!(!ptr.is_null());
Shared {
inner: ptr,
borrowed: true,
mm: PhantomData,
}
}
}
|
fmt
|
identifier_name
|
guard.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Machinery to conditionally expose things.
use js::jsapi::JSContext;
use js::rust::HandleObject;
use servo_config::prefs;
/// A container with a condition.
///
/// The wrapped `value` is only handed out (via [`Guard::expose`]) when the
/// `condition` is satisfied.
pub struct Guard<T: Clone + Copy> {
    condition: Condition,
    value: T,
}
impl<T: Clone + Copy> Guard<T> {
    /// Construct a new guarded value.
    pub const fn new(condition: Condition, value: T) -> Self {
        Guard { condition: condition, value: value }
    }

    /// Expose the value if the condition is satisfied.
    ///
    /// The passed handle is the object on which the value may be exposed;
    /// yields `None` when the condition does not hold.
    pub unsafe fn expose(&self, cx: *mut JSContext, obj: HandleObject) -> Option<T> {
        match self.condition.is_satisfied(cx, obj) {
            true => Some(self.value),
            false => None,
        }
    }
}
/// A condition to expose things.
pub enum
|
{
/// The condition is satisfied if the function returns true.
Func(unsafe fn(*mut JSContext, HandleObject) -> bool),
/// The condition is satisfied if the preference is set.
Pref(&'static str),
/// The condition is always satisfied.
Satisfied,
}
impl Condition {
    /// Evaluate this condition for the given JS context and object.
    unsafe fn is_satisfied(&self, cx: *mut JSContext, obj: HandleObject) -> bool {
        match *self {
            // A missing or non-boolean preference counts as unsatisfied.
            Condition::Pref(name) => prefs::pref_map().get(name).as_bool().unwrap_or(false),
            Condition::Func(f) => f(cx, obj),
            Condition::Satisfied => true,
        }
    }
}
|
Condition
|
identifier_name
|
guard.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Machinery to conditionally expose things.
use js::jsapi::JSContext;
use js::rust::HandleObject;
use servo_config::prefs;
/// A container with a condition.
pub struct Guard<T: Clone + Copy> {
condition: Condition,
value: T,
}
impl<T: Clone + Copy> Guard<T> {
/// Construct a new guarded value.
pub const fn new(condition: Condition, value: T) -> Self {
Guard {
condition: condition,
value: value,
}
}
/// Expose the value if the condition is satisfied.
///
/// The passed handle is the object on which the value may be exposed.
pub unsafe fn expose(&self, cx: *mut JSContext, obj: HandleObject) -> Option<T> {
if self.condition.is_satisfied(cx, obj) {
Some(self.value)
} else {
None
}
}
}
/// A condition to expose things.
///
/// Used by [`Guard`] to decide whether its value may be exposed.
pub enum Condition {
    /// The condition is satisfied if the function returns true.
    Func(unsafe fn(*mut JSContext, HandleObject) -> bool),
    /// The condition is satisfied if the preference is set (to true).
    Pref(&'static str),
    /// The condition is always satisfied.
    Satisfied,
}
impl Condition {
unsafe fn is_satisfied(&self, cx: *mut JSContext, obj: HandleObject) -> bool
|
}
|
{
match *self {
Condition::Pref(name) => prefs::pref_map().get(name).as_bool().unwrap_or(false),
Condition::Func(f) => f(cx, obj),
Condition::Satisfied => true,
}
}
|
identifier_body
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.