file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
raw.rs | extern crate libsqlite3_sys as ffi;
extern crate libc;
use std::ffi::{CString, CStr};
use std::io::{stderr, Write};
use std::{ptr, str};
use result::*;
use result::Error::DatabaseError;
#[allow(missing_debug_implementations, missing_copy_implementations)]
pub struct RawConnection {
pub internal_connection: *mut ffi::sqlite3,
}
impl RawConnection {
pub fn establish(database_url: &str) -> ConnectionResult<Self> {
let mut conn_pointer = ptr::null_mut();
let database_url = try!(CString::new(database_url));
let connection_status = unsafe {
ffi::sqlite3_open(database_url.as_ptr(), &mut conn_pointer)
};
match connection_status {
ffi::SQLITE_OK => Ok(RawConnection {
internal_connection: conn_pointer,
}),
err_code => {
let message = super::error_message(err_code);
Err(ConnectionError::BadConnection(message.into()))
}
}
}
pub fn exec(&self, query: &str) -> QueryResult<()> | Ok(())
}
}
pub fn rows_affected_by_last_query(&self) -> usize {
unsafe { ffi::sqlite3_changes(self.internal_connection) as usize }
}
pub fn last_error_message(&self) -> String {
let c_str = unsafe { CStr::from_ptr(ffi::sqlite3_errmsg(self.internal_connection)) };
c_str.to_string_lossy().into_owned()
}
pub fn last_error_code(&self) -> libc::c_int {
unsafe { ffi::sqlite3_extended_errcode(self.internal_connection) }
}
}
impl Drop for RawConnection {
fn drop(&mut self) {
use std::thread::panicking;
let close_result = unsafe { ffi::sqlite3_close(self.internal_connection) };
if close_result!= ffi::SQLITE_OK {
let error_message = super::error_message(close_result);
if panicking() {
write!(stderr(), "Error closing SQLite connection: {}", error_message).unwrap();
} else {
panic!("Error closing SQLite connection: {}", error_message);
}
}
}
}
fn convert_to_string_and_free(err_msg: *const libc::c_char) -> String {
let msg = unsafe {
let bytes = CStr::from_ptr(err_msg).to_bytes();
str::from_utf8_unchecked(bytes).into()
};
unsafe { ffi::sqlite3_free(err_msg as *mut libc::c_void) };
msg
}
| {
let mut err_msg = ptr::null_mut();
let query = try!(CString::new(query));
let callback_fn = None;
let callback_arg = ptr::null_mut();
unsafe {
ffi::sqlite3_exec(
self.internal_connection,
query.as_ptr(),
callback_fn,
callback_arg,
&mut err_msg,
);
}
if !err_msg.is_null() {
let msg = convert_to_string_and_free(err_msg);
let error_kind = DatabaseErrorKind::__Unknown;
Err(DatabaseError(error_kind, Box::new(msg)))
} else { | identifier_body |
raw.rs | extern crate libsqlite3_sys as ffi;
extern crate libc;
use std::ffi::{CString, CStr};
use std::io::{stderr, Write};
use std::{ptr, str};
use result::*;
use result::Error::DatabaseError;
#[allow(missing_debug_implementations, missing_copy_implementations)]
pub struct RawConnection {
pub internal_connection: *mut ffi::sqlite3,
}
impl RawConnection {
pub fn establish(database_url: &str) -> ConnectionResult<Self> {
let mut conn_pointer = ptr::null_mut(); | };
match connection_status {
ffi::SQLITE_OK => Ok(RawConnection {
internal_connection: conn_pointer,
}),
err_code => {
let message = super::error_message(err_code);
Err(ConnectionError::BadConnection(message.into()))
}
}
}
pub fn exec(&self, query: &str) -> QueryResult<()> {
let mut err_msg = ptr::null_mut();
let query = try!(CString::new(query));
let callback_fn = None;
let callback_arg = ptr::null_mut();
unsafe {
ffi::sqlite3_exec(
self.internal_connection,
query.as_ptr(),
callback_fn,
callback_arg,
&mut err_msg,
);
}
if!err_msg.is_null() {
let msg = convert_to_string_and_free(err_msg);
let error_kind = DatabaseErrorKind::__Unknown;
Err(DatabaseError(error_kind, Box::new(msg)))
} else {
Ok(())
}
}
pub fn rows_affected_by_last_query(&self) -> usize {
unsafe { ffi::sqlite3_changes(self.internal_connection) as usize }
}
pub fn last_error_message(&self) -> String {
let c_str = unsafe { CStr::from_ptr(ffi::sqlite3_errmsg(self.internal_connection)) };
c_str.to_string_lossy().into_owned()
}
pub fn last_error_code(&self) -> libc::c_int {
unsafe { ffi::sqlite3_extended_errcode(self.internal_connection) }
}
}
impl Drop for RawConnection {
fn drop(&mut self) {
use std::thread::panicking;
let close_result = unsafe { ffi::sqlite3_close(self.internal_connection) };
if close_result!= ffi::SQLITE_OK {
let error_message = super::error_message(close_result);
if panicking() {
write!(stderr(), "Error closing SQLite connection: {}", error_message).unwrap();
} else {
panic!("Error closing SQLite connection: {}", error_message);
}
}
}
}
fn convert_to_string_and_free(err_msg: *const libc::c_char) -> String {
let msg = unsafe {
let bytes = CStr::from_ptr(err_msg).to_bytes();
str::from_utf8_unchecked(bytes).into()
};
unsafe { ffi::sqlite3_free(err_msg as *mut libc::c_void) };
msg
} | let database_url = try!(CString::new(database_url));
let connection_status = unsafe {
ffi::sqlite3_open(database_url.as_ptr(), &mut conn_pointer) | random_line_split |
lib.rs | use std::path::{Path, PathBuf};
use std::fs::{self, ReadDir, DirEntry};
use std::io::Error;
pub struct DeepWalk {
root: PathBuf,
}
impl DeepWalk {
pub fn new<P: AsRef<Path>>(root: P) -> Self {
DeepWalk { root: root.as_ref().to_path_buf() }
}
} | type IntoIter = Iter;
fn into_iter(self) -> Iter {
Iter { root: Some(self.root), dirs: Vec::new() }
}
}
pub struct Iter {
root: Option<PathBuf>,
dirs: Vec<ReadDir>,
}
// TODO: Remove and implement Iterator for DeepWalk.
impl Iterator for Iter {
type Item = Result<DirEntry, Error>;
fn next(&mut self) -> Option<Result<DirEntry, Error>> {
if let Some(path) = self.root.take() {
match fs::read_dir(path) {
Ok(dir) => self.dirs.push(dir),
Err(err) => return Some(Err(err)),
}
}
while!self.dirs.is_empty() {
// TODO: FIXME.
break;
}
None
}
}
#[cfg(test)]
mod tests {
use super::DeepWalk;
use std::path::{Path, PathBuf};
fn get_test_roots() -> &'static[&'static str] {
const DATA: &'static[&'static str] = &["", "a", "test", "eee/aaa", "some/long/path"];
DATA
}
#[test]
fn deep_walk_new() {
for val in get_test_roots() {
assert_eq!(DeepWalk::new(val).root, Path::new(val));
}
}
#[test]
fn deep_walk_into_iterator() {
for val in get_test_roots() {
assert_eq!(DeepWalk::new(val).into_iter().root, Some(PathBuf::from(val)));
}
}
} |
impl IntoIterator for DeepWalk {
type Item = Result<DirEntry, Error>; | random_line_split |
lib.rs | use std::path::{Path, PathBuf};
use std::fs::{self, ReadDir, DirEntry};
use std::io::Error;
pub struct DeepWalk {
root: PathBuf,
}
impl DeepWalk {
pub fn new<P: AsRef<Path>>(root: P) -> Self {
DeepWalk { root: root.as_ref().to_path_buf() }
}
}
impl IntoIterator for DeepWalk {
type Item = Result<DirEntry, Error>;
type IntoIter = Iter;
fn into_iter(self) -> Iter {
Iter { root: Some(self.root), dirs: Vec::new() }
}
}
pub struct Iter {
root: Option<PathBuf>,
dirs: Vec<ReadDir>,
}
// TODO: Remove and implement Iterator for DeepWalk.
impl Iterator for Iter {
type Item = Result<DirEntry, Error>;
fn next(&mut self) -> Option<Result<DirEntry, Error>> {
if let Some(path) = self.root.take() {
match fs::read_dir(path) {
Ok(dir) => self.dirs.push(dir),
Err(err) => return Some(Err(err)),
}
}
while!self.dirs.is_empty() {
// TODO: FIXME.
break;
}
None
}
}
#[cfg(test)]
mod tests {
use super::DeepWalk;
use std::path::{Path, PathBuf};
fn get_test_roots() -> &'static[&'static str] {
const DATA: &'static[&'static str] = &["", "a", "test", "eee/aaa", "some/long/path"];
DATA
}
#[test]
fn | () {
for val in get_test_roots() {
assert_eq!(DeepWalk::new(val).root, Path::new(val));
}
}
#[test]
fn deep_walk_into_iterator() {
for val in get_test_roots() {
assert_eq!(DeepWalk::new(val).into_iter().root, Some(PathBuf::from(val)));
}
}
}
| deep_walk_new | identifier_name |
lib.rs | use std::path::{Path, PathBuf};
use std::fs::{self, ReadDir, DirEntry};
use std::io::Error;
pub struct DeepWalk {
root: PathBuf,
}
impl DeepWalk {
pub fn new<P: AsRef<Path>>(root: P) -> Self {
DeepWalk { root: root.as_ref().to_path_buf() }
}
}
impl IntoIterator for DeepWalk {
type Item = Result<DirEntry, Error>;
type IntoIter = Iter;
fn into_iter(self) -> Iter {
Iter { root: Some(self.root), dirs: Vec::new() }
}
}
pub struct Iter {
root: Option<PathBuf>,
dirs: Vec<ReadDir>,
}
// TODO: Remove and implement Iterator for DeepWalk.
impl Iterator for Iter {
type Item = Result<DirEntry, Error>;
fn next(&mut self) -> Option<Result<DirEntry, Error>> {
if let Some(path) = self.root.take() {
match fs::read_dir(path) {
Ok(dir) => self.dirs.push(dir),
Err(err) => return Some(Err(err)),
}
}
while!self.dirs.is_empty() {
// TODO: FIXME.
break;
}
None
}
}
#[cfg(test)]
mod tests {
use super::DeepWalk;
use std::path::{Path, PathBuf};
fn get_test_roots() -> &'static[&'static str] {
const DATA: &'static[&'static str] = &["", "a", "test", "eee/aaa", "some/long/path"];
DATA
}
#[test]
fn deep_walk_new() |
#[test]
fn deep_walk_into_iterator() {
for val in get_test_roots() {
assert_eq!(DeepWalk::new(val).into_iter().root, Some(PathBuf::from(val)));
}
}
}
| {
for val in get_test_roots() {
assert_eq!(DeepWalk::new(val).root, Path::new(val));
}
} | identifier_body |
udptransport.rs | #[feature(struct_variant)];
#[feature(macro_rules)];
use osc::{OscType, OscMessage, OscWriter, OscReader};
use rpc::{ServerId, LogEntry, AppendEntriesRpc, AppendEntriesResponseRpc,
RequestVoteRpc, RequestVoteResponseRpc, RaftRpc, AppendEntries,
AppendEntriesResponse, RequestVote, RequestVoteResponse};
use std::io::net::ip::{Ipv4Addr, SocketAddr};
use std::io::net::udp::{UdpSocket, UdpStream};
use std::io::timer;
use std::os;
use std::vec;
use std::rand;
use transport::RaftRpcTransport;
mod rpc;
mod transport;
static raftPort : u16 = 9000;
pub struct UdpTransport {
socket: UdpSocket,
incomingRpcsChan: Chan<RaftRpc>,
incomingRpcsPort: Port<RaftRpc>,
outgoingRpcsChan: Chan<RaftRpc>,
outgoingRpcsPort: Port<RaftRpc>,
}
impl UdpTransport {
pub fn new() -> UdpTransport {
let socket = UdpSocket::bind(SocketAddr {ip: Ipv4Addr(127,0,0,1), port:raftPort}).unwrap();
let (incomingRpcsPort, incomingRpcsChan) = Chan::new();
let (outgoingRpcsPort, outgoingRpcsChan) = Chan::new();
return UdpTransport {socket: socket, incomingRpcsChan: incomingRpcsChan,
incomingRpcsPort: incomingRpcsPort, outgoingRpcsChan: outgoingRpcsChan,
outgoingRpcsPort: outgoingRpcsPort};
}
pub fn run(&self) {
let readSocket = self.socket.clone();
let writeSocket = self.socket.clone();
//spawn(proc() {
// let mut udpStream = readSocket.connect(remoteAddr);
// loop {
// let msg = OscMessage::from_reader(&mut udpStream).unwrap();
// println!("recv {}: {:?}", msg.address, msg.arguments);
// let msgRpc = self.parseRpcMessage(msg);
// self.incomingRpcsChan.send(msgRpc);
// }
//});
//spawn(proc() {
// let mut udpStream = writeSocket.connect(remoteAddr);
// loop {
// let msgRpc = self.outgoingRpcsPort.recv();
// let msg = self.createRpcMessage(msgRpc);
// println!("send {}: {:?}", msg.address, msg.arguments);
// msg.write_to(&mut udpStream).unwrap();
// }
//});
}
fn parseRpcMessage(&self, sender: ServerId, msg: OscMessage) -> RaftRpc {
return match msg.address {
~"/appendEntries" => AppendEntries(self.parseAppendEntries(sender, msg.arguments)),
~"/requestVote" => RequestVote(self.parseRequestVote(sender, msg.arguments)),
_ => fail!("woops no implementation for {}", msg.address)
};
}
// AppendEntries {term: int, leaderId: ServerId, prevLogIndex: int,
// entries: ~[LogEntry], leaderCommitIndex: int},
fn parseAppendEntries(&self, sender: ServerId, argsVec: ~[OscType]) -> AppendEntriesRpc {
let mut args = argsVec.move_iter();
let term = args.next().unwrap().unwrap_int() as int;
let leaderId: ServerId = from_str::<ServerId>(args.next().unwrap().unwrap_string()).unwrap();
let prevLogIndex = args.next().unwrap().unwrap_int() as int;
let prevLogTerm = args.next().unwrap().unwrap_int() as int;
let entryCount = (args.len()-5)/2;
let mut entries: ~[LogEntry] = vec::with_capacity(entryCount);
for i in range(0,entryCount) {
let term = args.next().unwrap().unwrap_int() as int;
let entry = args.next().unwrap().unwrap_string();
entries[i] = LogEntry {entry: entry, term: term};
}
let leaderCommitIndex = args.next().unwrap().unwrap_int() as int;
return AppendEntriesRpc {sender: sender, term: term, leaderId: leaderId,
prevLogIndex: prevLogIndex, prevLogTerm: prevLogTerm,
entries: entries, leaderCommitIndex: leaderCommitIndex};
}
// RequestVote {term: int, candidateId: ServerId, lastLogIndex: int,
// lastLogTerm: int}
fn | (&self, sender: ServerId, argsVec: ~[OscType]) -> RequestVoteRpc {
let mut args = argsVec.move_iter();
let term: int = args.next().unwrap().unwrap_int() as int;
let candidateId: ServerId = from_str::<ServerId>(args.next().unwrap().unwrap_string()).unwrap();
let lastLogIndex: int = args.next().unwrap().unwrap_int() as int;
let lastLogTerm: int = args.next().unwrap().unwrap_int() as int;
return RequestVoteRpc {sender: sender, term: term, candidateId: candidateId,
lastLogIndex: lastLogIndex, lastLogTerm: lastLogTerm};
}
}
impl RaftRpcTransport for UdpTransport {
fn readIncoming(&self) -> Option<RaftRpc> {
return Some(self.incomingRpcsPort.recv());
}
fn sendRpc(&self, recipient: ServerId, rpc: &RaftRpc) {
}
}
| parseRequestVote | identifier_name |
udptransport.rs | #[feature(struct_variant)];
#[feature(macro_rules)];
use osc::{OscType, OscMessage, OscWriter, OscReader};
use rpc::{ServerId, LogEntry, AppendEntriesRpc, AppendEntriesResponseRpc,
RequestVoteRpc, RequestVoteResponseRpc, RaftRpc, AppendEntries,
AppendEntriesResponse, RequestVote, RequestVoteResponse};
use std::io::net::ip::{Ipv4Addr, SocketAddr};
use std::io::net::udp::{UdpSocket, UdpStream};
use std::io::timer;
use std::os;
use std::vec;
use std::rand;
use transport::RaftRpcTransport;
mod rpc;
mod transport;
static raftPort : u16 = 9000;
pub struct UdpTransport {
socket: UdpSocket,
incomingRpcsChan: Chan<RaftRpc>,
incomingRpcsPort: Port<RaftRpc>,
outgoingRpcsChan: Chan<RaftRpc>,
outgoingRpcsPort: Port<RaftRpc>,
}
impl UdpTransport {
pub fn new() -> UdpTransport {
let socket = UdpSocket::bind(SocketAddr {ip: Ipv4Addr(127,0,0,1), port:raftPort}).unwrap();
let (incomingRpcsPort, incomingRpcsChan) = Chan::new();
let (outgoingRpcsPort, outgoingRpcsChan) = Chan::new();
return UdpTransport {socket: socket, incomingRpcsChan: incomingRpcsChan,
incomingRpcsPort: incomingRpcsPort, outgoingRpcsChan: outgoingRpcsChan,
outgoingRpcsPort: outgoingRpcsPort};
}
pub fn run(&self) {
let readSocket = self.socket.clone();
let writeSocket = self.socket.clone();
//spawn(proc() {
// let mut udpStream = readSocket.connect(remoteAddr);
// loop {
// let msg = OscMessage::from_reader(&mut udpStream).unwrap();
// println!("recv {}: {:?}", msg.address, msg.arguments);
// let msgRpc = self.parseRpcMessage(msg);
// self.incomingRpcsChan.send(msgRpc);
// }
//});
//spawn(proc() {
// let mut udpStream = writeSocket.connect(remoteAddr);
// loop {
// let msgRpc = self.outgoingRpcsPort.recv();
// let msg = self.createRpcMessage(msgRpc);
// println!("send {}: {:?}", msg.address, msg.arguments);
// msg.write_to(&mut udpStream).unwrap();
// }
//});
}
fn parseRpcMessage(&self, sender: ServerId, msg: OscMessage) -> RaftRpc |
// AppendEntries {term: int, leaderId: ServerId, prevLogIndex: int,
// entries: ~[LogEntry], leaderCommitIndex: int},
fn parseAppendEntries(&self, sender: ServerId, argsVec: ~[OscType]) -> AppendEntriesRpc {
let mut args = argsVec.move_iter();
let term = args.next().unwrap().unwrap_int() as int;
let leaderId: ServerId = from_str::<ServerId>(args.next().unwrap().unwrap_string()).unwrap();
let prevLogIndex = args.next().unwrap().unwrap_int() as int;
let prevLogTerm = args.next().unwrap().unwrap_int() as int;
let entryCount = (args.len()-5)/2;
let mut entries: ~[LogEntry] = vec::with_capacity(entryCount);
for i in range(0,entryCount) {
let term = args.next().unwrap().unwrap_int() as int;
let entry = args.next().unwrap().unwrap_string();
entries[i] = LogEntry {entry: entry, term: term};
}
let leaderCommitIndex = args.next().unwrap().unwrap_int() as int;
return AppendEntriesRpc {sender: sender, term: term, leaderId: leaderId,
prevLogIndex: prevLogIndex, prevLogTerm: prevLogTerm,
entries: entries, leaderCommitIndex: leaderCommitIndex};
}
// RequestVote {term: int, candidateId: ServerId, lastLogIndex: int,
// lastLogTerm: int}
fn parseRequestVote(&self, sender: ServerId, argsVec: ~[OscType]) -> RequestVoteRpc {
let mut args = argsVec.move_iter();
let term: int = args.next().unwrap().unwrap_int() as int;
let candidateId: ServerId = from_str::<ServerId>(args.next().unwrap().unwrap_string()).unwrap();
let lastLogIndex: int = args.next().unwrap().unwrap_int() as int;
let lastLogTerm: int = args.next().unwrap().unwrap_int() as int;
return RequestVoteRpc {sender: sender, term: term, candidateId: candidateId,
lastLogIndex: lastLogIndex, lastLogTerm: lastLogTerm};
}
}
impl RaftRpcTransport for UdpTransport {
fn readIncoming(&self) -> Option<RaftRpc> {
return Some(self.incomingRpcsPort.recv());
}
fn sendRpc(&self, recipient: ServerId, rpc: &RaftRpc) {
}
}
| {
return match msg.address {
~"/appendEntries" => AppendEntries(self.parseAppendEntries(sender, msg.arguments)),
~"/requestVote" => RequestVote(self.parseRequestVote(sender, msg.arguments)),
_ => fail!("woops no implementation for {}", msg.address)
};
} | identifier_body |
udptransport.rs | #[feature(struct_variant)];
#[feature(macro_rules)];
use osc::{OscType, OscMessage, OscWriter, OscReader};
use rpc::{ServerId, LogEntry, AppendEntriesRpc, AppendEntriesResponseRpc,
RequestVoteRpc, RequestVoteResponseRpc, RaftRpc, AppendEntries,
AppendEntriesResponse, RequestVote, RequestVoteResponse};
use std::io::net::ip::{Ipv4Addr, SocketAddr};
use std::io::net::udp::{UdpSocket, UdpStream};
use std::io::timer;
use std::os;
use std::vec;
use std::rand;
use transport::RaftRpcTransport;
mod rpc;
mod transport;
static raftPort : u16 = 9000;
pub struct UdpTransport {
socket: UdpSocket,
incomingRpcsChan: Chan<RaftRpc>,
incomingRpcsPort: Port<RaftRpc>,
outgoingRpcsChan: Chan<RaftRpc>,
outgoingRpcsPort: Port<RaftRpc>,
}
impl UdpTransport {
pub fn new() -> UdpTransport {
let socket = UdpSocket::bind(SocketAddr {ip: Ipv4Addr(127,0,0,1), port:raftPort}).unwrap();
let (incomingRpcsPort, incomingRpcsChan) = Chan::new();
let (outgoingRpcsPort, outgoingRpcsChan) = Chan::new();
return UdpTransport {socket: socket, incomingRpcsChan: incomingRpcsChan,
incomingRpcsPort: incomingRpcsPort, outgoingRpcsChan: outgoingRpcsChan,
outgoingRpcsPort: outgoingRpcsPort};
}
pub fn run(&self) {
let readSocket = self.socket.clone();
let writeSocket = self.socket.clone();
//spawn(proc() {
// let mut udpStream = readSocket.connect(remoteAddr);
// loop {
// let msg = OscMessage::from_reader(&mut udpStream).unwrap();
// println!("recv {}: {:?}", msg.address, msg.arguments);
// let msgRpc = self.parseRpcMessage(msg);
// self.incomingRpcsChan.send(msgRpc);
// }
//});
//spawn(proc() {
// let mut udpStream = writeSocket.connect(remoteAddr);
// loop {
// let msgRpc = self.outgoingRpcsPort.recv();
// let msg = self.createRpcMessage(msgRpc);
// println!("send {}: {:?}", msg.address, msg.arguments);
// msg.write_to(&mut udpStream).unwrap();
// }
//});
}
fn parseRpcMessage(&self, sender: ServerId, msg: OscMessage) -> RaftRpc {
return match msg.address {
~"/appendEntries" => AppendEntries(self.parseAppendEntries(sender, msg.arguments)),
~"/requestVote" => RequestVote(self.parseRequestVote(sender, msg.arguments)),
_ => fail!("woops no implementation for {}", msg.address)
};
}
// AppendEntries {term: int, leaderId: ServerId, prevLogIndex: int,
// entries: ~[LogEntry], leaderCommitIndex: int},
fn parseAppendEntries(&self, sender: ServerId, argsVec: ~[OscType]) -> AppendEntriesRpc {
let mut args = argsVec.move_iter();
let term = args.next().unwrap().unwrap_int() as int;
let leaderId: ServerId = from_str::<ServerId>(args.next().unwrap().unwrap_string()).unwrap();
let prevLogIndex = args.next().unwrap().unwrap_int() as int;
let prevLogTerm = args.next().unwrap().unwrap_int() as int;
let entryCount = (args.len()-5)/2;
let mut entries: ~[LogEntry] = vec::with_capacity(entryCount);
for i in range(0,entryCount) {
let term = args.next().unwrap().unwrap_int() as int;
let entry = args.next().unwrap().unwrap_string();
entries[i] = LogEntry {entry: entry, term: term};
}
let leaderCommitIndex = args.next().unwrap().unwrap_int() as int;
return AppendEntriesRpc {sender: sender, term: term, leaderId: leaderId, | prevLogIndex: prevLogIndex, prevLogTerm: prevLogTerm,
entries: entries, leaderCommitIndex: leaderCommitIndex};
}
// RequestVote {term: int, candidateId: ServerId, lastLogIndex: int,
// lastLogTerm: int}
fn parseRequestVote(&self, sender: ServerId, argsVec: ~[OscType]) -> RequestVoteRpc {
let mut args = argsVec.move_iter();
let term: int = args.next().unwrap().unwrap_int() as int;
let candidateId: ServerId = from_str::<ServerId>(args.next().unwrap().unwrap_string()).unwrap();
let lastLogIndex: int = args.next().unwrap().unwrap_int() as int;
let lastLogTerm: int = args.next().unwrap().unwrap_int() as int;
return RequestVoteRpc {sender: sender, term: term, candidateId: candidateId,
lastLogIndex: lastLogIndex, lastLogTerm: lastLogTerm};
}
}
impl RaftRpcTransport for UdpTransport {
fn readIncoming(&self) -> Option<RaftRpc> {
return Some(self.incomingRpcsPort.recv());
}
fn sendRpc(&self, recipient: ServerId, rpc: &RaftRpc) {
}
} | random_line_split |
|
configure.rs | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Configuration interface used by the supervisor to change service configuration.
use exonum::runtime::{CommonError, ExecutionContext, ExecutionError, InstanceId, MethodId};
use exonum_merkledb::BinaryValue;
use exonum_rust_runtime::{GenericCallMut, Interface, MethodDescriptor};
/// Fully qualified name of the [`Configure`] interface.
///
/// [`Configure`]: trait.Configure.html
pub const CONFIGURE_INTERFACE_NAME: &str = "exonum.Configure";
/// Identifier of the [`Configure::verify_config`] method.
///
/// [`Configure::verify_config`]: trait.Configure.html#tymethod.verify_config
const VERIFY_CONFIG_METHOD_ID: MethodId = 0;
/// Identifier of the [`Configure::apply_config`] method.
///
/// [`Configure::apply_config`]: trait.Configure.html#tymethod.apply_config
const APPLY_CONFIG_METHOD_ID: MethodId = 1;
/// Describes a procedure for updating the configuration of a service instance.
pub trait Configure {
/// The specific type of parameters passed during the service instance configuration.
type Params: BinaryValue;
/// Verify a new configuration parameters before their actual application.
///
/// This method is called by the new configuration change proposal. If the proposed
/// parameters do not fit for this service instance, it should return a corresponding
/// error to discard this proposal. Thus only a configuration change proposal in which all
/// changes are correct can be applied later.
///
/// The proposal approval process details, and even the configuration proposal format, depends
/// on the particular runtime implementation.
///
/// # Execution policy
///
/// At the moment, this method can only be called on behalf of the supervisor service instance.
/// In other words, only a method with numeric ID 0 can call this method.
fn verify_config(
&self,
context: ExecutionContext<'_>,
params: Self::Params,
) -> Result<(), ExecutionError>;
/// Update service configuration with the given parameters.
///
/// The configuration parameters passed to the method are discarded immediately.
/// So the service instance should save them by itself if it is important for
/// the service business logic.
///
/// This method can be triggered at any time and does not follow the general transaction
/// execution workflow, so the errors returned might be ignored.
///
/// # Execution policy
///
/// At the moment, this method can only be called on behalf of the supervisor service instance.
/// In other words, only a method with numeric ID 0 can call this method.
fn apply_config(
&self,
context: ExecutionContext<'_>,
params: Self::Params,
) -> Result<(), ExecutionError>;
}
impl<'a, T: BinaryValue> Interface<'a> for dyn Configure<Params = T> {
const INTERFACE_NAME: &'static str = CONFIGURE_INTERFACE_NAME;
fn dispatch(
&self,
context: ExecutionContext<'a>,
method: MethodId,
payload: &[u8],
) -> Result<(), ExecutionError> {
match method {
VERIFY_CONFIG_METHOD_ID => {
let params =
T::from_bytes(payload.into()).map_err(CommonError::malformed_arguments)?;
self.verify_config(context, params)
}
APPLY_CONFIG_METHOD_ID => {
let params =
T::from_bytes(payload.into()).map_err(CommonError::malformed_arguments)?;
self.apply_config(context, params)
}
_ => Err(CommonError::NoSuchMethod.into()),
}
}
}
// Makeshift replacement for generic stubbing, which is made difficult by the existence
// of the type param.
pub(crate) trait ConfigureMut<Ctx> {
type Output;
fn verify_config(&mut self, context: Ctx, params: Vec<u8>) -> Self::Output;
fn apply_config(&mut self, context: Ctx, params: Vec<u8>) -> Self::Output;
}
impl ConfigureMut<InstanceId> for ExecutionContext<'_> {
type Output = Result<(), ExecutionError>;
fn | (&mut self, instance_id: InstanceId, params: Vec<u8>) -> Self::Output {
const METHOD_DESCRIPTOR: MethodDescriptor<'static> =
MethodDescriptor::new(CONFIGURE_INTERFACE_NAME, 0);
self.generic_call_mut(instance_id, METHOD_DESCRIPTOR, params)
}
fn apply_config(&mut self, instance_id: InstanceId, params: Vec<u8>) -> Self::Output {
const METHOD_DESCRIPTOR: MethodDescriptor<'static> =
MethodDescriptor::new(CONFIGURE_INTERFACE_NAME, 1);
self.generic_call_mut(instance_id, METHOD_DESCRIPTOR, params)
}
}
| verify_config | identifier_name |
configure.rs | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Configuration interface used by the supervisor to change service configuration.
use exonum::runtime::{CommonError, ExecutionContext, ExecutionError, InstanceId, MethodId};
use exonum_merkledb::BinaryValue;
use exonum_rust_runtime::{GenericCallMut, Interface, MethodDescriptor};
/// Fully qualified name of the [`Configure`] interface.
///
/// [`Configure`]: trait.Configure.html
pub const CONFIGURE_INTERFACE_NAME: &str = "exonum.Configure";
/// Identifier of the [`Configure::verify_config`] method.
///
/// [`Configure::verify_config`]: trait.Configure.html#tymethod.verify_config
const VERIFY_CONFIG_METHOD_ID: MethodId = 0;
/// Identifier of the [`Configure::apply_config`] method.
///
/// [`Configure::apply_config`]: trait.Configure.html#tymethod.apply_config
const APPLY_CONFIG_METHOD_ID: MethodId = 1;
/// Describes a procedure for updating the configuration of a service instance.
pub trait Configure {
/// The specific type of parameters passed during the service instance configuration.
type Params: BinaryValue;
/// Verify a new configuration parameters before their actual application.
///
/// This method is called by the new configuration change proposal. If the proposed
/// parameters do not fit for this service instance, it should return a corresponding
/// error to discard this proposal. Thus only a configuration change proposal in which all
/// changes are correct can be applied later.
///
/// The proposal approval process details, and even the configuration proposal format, depends
/// on the particular runtime implementation.
///
/// # Execution policy
///
/// At the moment, this method can only be called on behalf of the supervisor service instance.
/// In other words, only a method with numeric ID 0 can call this method.
fn verify_config(
&self,
context: ExecutionContext<'_>,
params: Self::Params,
) -> Result<(), ExecutionError>;
/// Update service configuration with the given parameters.
///
/// The configuration parameters passed to the method are discarded immediately.
/// So the service instance should save them by itself if it is important for
/// the service business logic.
///
/// This method can be triggered at any time and does not follow the general transaction
/// execution workflow, so the errors returned might be ignored.
///
/// # Execution policy
///
/// At the moment, this method can only be called on behalf of the supervisor service instance.
/// In other words, only a method with numeric ID 0 can call this method.
fn apply_config(
&self,
context: ExecutionContext<'_>,
params: Self::Params,
) -> Result<(), ExecutionError>;
}
impl<'a, T: BinaryValue> Interface<'a> for dyn Configure<Params = T> {
const INTERFACE_NAME: &'static str = CONFIGURE_INTERFACE_NAME;
fn dispatch(
&self,
context: ExecutionContext<'a>,
method: MethodId,
payload: &[u8],
) -> Result<(), ExecutionError> {
match method {
VERIFY_CONFIG_METHOD_ID => {
let params =
T::from_bytes(payload.into()).map_err(CommonError::malformed_arguments)?;
self.verify_config(context, params)
}
APPLY_CONFIG_METHOD_ID => {
let params =
T::from_bytes(payload.into()).map_err(CommonError::malformed_arguments)?;
self.apply_config(context, params)
}
_ => Err(CommonError::NoSuchMethod.into()),
}
}
}
// Makeshift replacement for generic stubbing, which is made difficult by the existence
// of the type param.
pub(crate) trait ConfigureMut<Ctx> {
type Output;
fn verify_config(&mut self, context: Ctx, params: Vec<u8>) -> Self::Output;
fn apply_config(&mut self, context: Ctx, params: Vec<u8>) -> Self::Output;
}
impl ConfigureMut<InstanceId> for ExecutionContext<'_> {
type Output = Result<(), ExecutionError>;
fn verify_config(&mut self, instance_id: InstanceId, params: Vec<u8>) -> Self::Output {
const METHOD_DESCRIPTOR: MethodDescriptor<'static> =
MethodDescriptor::new(CONFIGURE_INTERFACE_NAME, 0);
self.generic_call_mut(instance_id, METHOD_DESCRIPTOR, params)
}
fn apply_config(&mut self, instance_id: InstanceId, params: Vec<u8>) -> Self::Output |
}
| {
const METHOD_DESCRIPTOR: MethodDescriptor<'static> =
MethodDescriptor::new(CONFIGURE_INTERFACE_NAME, 1);
self.generic_call_mut(instance_id, METHOD_DESCRIPTOR, params)
} | identifier_body |
configure.rs | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Configuration interface used by the supervisor to change service configuration.
use exonum::runtime::{CommonError, ExecutionContext, ExecutionError, InstanceId, MethodId};
use exonum_merkledb::BinaryValue;
use exonum_rust_runtime::{GenericCallMut, Interface, MethodDescriptor};
/// Fully qualified name of the [`Configure`] interface.
///
/// [`Configure`]: trait.Configure.html
pub const CONFIGURE_INTERFACE_NAME: &str = "exonum.Configure";
/// Identifier of the [`Configure::verify_config`] method.
///
/// [`Configure::verify_config`]: trait.Configure.html#tymethod.verify_config
const VERIFY_CONFIG_METHOD_ID: MethodId = 0;
/// Identifier of the [`Configure::apply_config`] method.
///
/// [`Configure::apply_config`]: trait.Configure.html#tymethod.apply_config
const APPLY_CONFIG_METHOD_ID: MethodId = 1;
/// Describes a procedure for updating the configuration of a service instance.
pub trait Configure {
/// The specific type of parameters passed during the service instance configuration.
type Params: BinaryValue;
/// Verify a new configuration parameters before their actual application.
///
/// This method is called by the new configuration change proposal. If the proposed
/// parameters do not fit for this service instance, it should return a corresponding
/// error to discard this proposal. Thus only a configuration change proposal in which all
/// changes are correct can be applied later.
///
/// The proposal approval process details, and even the configuration proposal format, depends
/// on the particular runtime implementation.
///
/// # Execution policy
///
/// At the moment, this method can only be called on behalf of the supervisor service instance.
/// In other words, only a method with numeric ID 0 can call this method.
fn verify_config(
&self,
context: ExecutionContext<'_>,
params: Self::Params,
) -> Result<(), ExecutionError>;
/// Update service configuration with the given parameters.
///
/// The configuration parameters passed to the method are discarded immediately. | /// So the service instance should save them by itself if it is important for
/// the service business logic.
///
/// This method can be triggered at any time and does not follow the general transaction
/// execution workflow, so the errors returned might be ignored.
///
/// # Execution policy
///
/// At the moment, this method can only be called on behalf of the supervisor service instance.
/// In other words, only a method with numeric ID 0 can call this method.
fn apply_config(
&self,
context: ExecutionContext<'_>,
params: Self::Params,
) -> Result<(), ExecutionError>;
}
impl<'a, T: BinaryValue> Interface<'a> for dyn Configure<Params = T> {
const INTERFACE_NAME: &'static str = CONFIGURE_INTERFACE_NAME;
fn dispatch(
&self,
context: ExecutionContext<'a>,
method: MethodId,
payload: &[u8],
) -> Result<(), ExecutionError> {
match method {
VERIFY_CONFIG_METHOD_ID => {
let params =
T::from_bytes(payload.into()).map_err(CommonError::malformed_arguments)?;
self.verify_config(context, params)
}
APPLY_CONFIG_METHOD_ID => {
let params =
T::from_bytes(payload.into()).map_err(CommonError::malformed_arguments)?;
self.apply_config(context, params)
}
_ => Err(CommonError::NoSuchMethod.into()),
}
}
}
// Makeshift replacement for generic stubbing, which is made difficult by the existence
// of the type param.
pub(crate) trait ConfigureMut<Ctx> {
type Output;
fn verify_config(&mut self, context: Ctx, params: Vec<u8>) -> Self::Output;
fn apply_config(&mut self, context: Ctx, params: Vec<u8>) -> Self::Output;
}
impl ConfigureMut<InstanceId> for ExecutionContext<'_> {
type Output = Result<(), ExecutionError>;
fn verify_config(&mut self, instance_id: InstanceId, params: Vec<u8>) -> Self::Output {
const METHOD_DESCRIPTOR: MethodDescriptor<'static> =
MethodDescriptor::new(CONFIGURE_INTERFACE_NAME, 0);
self.generic_call_mut(instance_id, METHOD_DESCRIPTOR, params)
}
fn apply_config(&mut self, instance_id: InstanceId, params: Vec<u8>) -> Self::Output {
const METHOD_DESCRIPTOR: MethodDescriptor<'static> =
MethodDescriptor::new(CONFIGURE_INTERFACE_NAME, 1);
self.generic_call_mut(instance_id, METHOD_DESCRIPTOR, params)
}
} | random_line_split |
|
abstract-const-as-cast-4.rs | // check-pass
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]
trait Trait {}
pub struct EvaluatableU128<const N: u128>;
struct HasCastInTraitImpl<const N: usize, const M: u128>;
impl<const O: usize> Trait for HasCastInTraitImpl<O, { O as u128 }> {}
pub fn use_trait_impl<const N: usize>() where EvaluatableU128<{N as u128}>:, {
fn | <T: Trait>() {}
assert_impl::<HasCastInTraitImpl<N, { N as u128 }>>();
assert_impl::<HasCastInTraitImpl<N, { N as _ }>>();
assert_impl::<HasCastInTraitImpl<12, { 12 as u128 }>>();
assert_impl::<HasCastInTraitImpl<13, 13>>();
}
pub fn use_trait_impl_2<const N: usize>() where EvaluatableU128<{N as _}>:, {
fn assert_impl<T: Trait>() {}
assert_impl::<HasCastInTraitImpl<N, { N as u128 }>>();
assert_impl::<HasCastInTraitImpl<N, { N as _ }>>();
assert_impl::<HasCastInTraitImpl<12, { 12 as u128 }>>();
assert_impl::<HasCastInTraitImpl<13, 13>>();
}
fn main() {}
| assert_impl | identifier_name |
abstract-const-as-cast-4.rs | // check-pass
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]
trait Trait {}
pub struct EvaluatableU128<const N: u128>;
struct HasCastInTraitImpl<const N: usize, const M: u128>;
impl<const O: usize> Trait for HasCastInTraitImpl<O, { O as u128 }> {}
pub fn use_trait_impl<const N: usize>() where EvaluatableU128<{N as u128}>:, {
fn assert_impl<T: Trait>() {}
assert_impl::<HasCastInTraitImpl<N, { N as u128 }>>();
assert_impl::<HasCastInTraitImpl<N, { N as _ }>>();
assert_impl::<HasCastInTraitImpl<12, { 12 as u128 }>>();
assert_impl::<HasCastInTraitImpl<13, 13>>();
}
pub fn use_trait_impl_2<const N: usize>() where EvaluatableU128<{N as _}>:, {
fn assert_impl<T: Trait>() {}
assert_impl::<HasCastInTraitImpl<N, { N as u128 }>>(); |
fn main() {} | assert_impl::<HasCastInTraitImpl<N, { N as _ }>>();
assert_impl::<HasCastInTraitImpl<12, { 12 as u128 }>>();
assert_impl::<HasCastInTraitImpl<13, 13>>();
} | random_line_split |
abstract-const-as-cast-4.rs | // check-pass
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]
trait Trait {}
pub struct EvaluatableU128<const N: u128>;
struct HasCastInTraitImpl<const N: usize, const M: u128>;
impl<const O: usize> Trait for HasCastInTraitImpl<O, { O as u128 }> {}
pub fn use_trait_impl<const N: usize>() where EvaluatableU128<{N as u128}>:, {
fn assert_impl<T: Trait>() {}
assert_impl::<HasCastInTraitImpl<N, { N as u128 }>>();
assert_impl::<HasCastInTraitImpl<N, { N as _ }>>();
assert_impl::<HasCastInTraitImpl<12, { 12 as u128 }>>();
assert_impl::<HasCastInTraitImpl<13, 13>>();
}
pub fn use_trait_impl_2<const N: usize>() where EvaluatableU128<{N as _}>:, |
fn main() {}
| {
fn assert_impl<T: Trait>() {}
assert_impl::<HasCastInTraitImpl<N, { N as u128 }>>();
assert_impl::<HasCastInTraitImpl<N, { N as _ }>>();
assert_impl::<HasCastInTraitImpl<12, { 12 as u128 }>>();
assert_impl::<HasCastInTraitImpl<13, 13>>();
} | identifier_body |
static-reference-to-fn-2.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct StateMachineIter<'a> {
statefn: &'a fn(&mut StateMachineIter<'a>) -> Option<&'static str>
}
impl<'a> Iterator<&'static str> for StateMachineIter<'a> {
fn next(&mut self) -> Option<&'static str> {
return (*self.statefn)(self);
}
}
fn state1(self_: &mut StateMachineIter) -> Option<&'static str> {
self_.statefn = &state2;
//~^ ERROR borrowed value does not live long enough
return Some("state1");
}
fn state2(self_: &mut StateMachineIter) -> Option<(&'static str)> {
self_.statefn = &state3;
//~^ ERROR borrowed value does not live long enough
return Some("state2");
}
fn state3(self_: &mut StateMachineIter) -> Option<(&'static str)> |
fn finished(_: &mut StateMachineIter) -> Option<(&'static str)> {
return None;
}
fn state_iter() -> StateMachineIter<'static> {
StateMachineIter {
statefn: &state1 //~ ERROR borrowed value does not live long enough
}
}
fn main() {
let mut it = state_iter();
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
}
| {
self_.statefn = &finished;
//~^ ERROR borrowed value does not live long enough
return Some("state3");
} | identifier_body |
static-reference-to-fn-2.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct StateMachineIter<'a> {
statefn: &'a fn(&mut StateMachineIter<'a>) -> Option<&'static str>
}
impl<'a> Iterator<&'static str> for StateMachineIter<'a> {
fn next(&mut self) -> Option<&'static str> {
return (*self.statefn)(self);
}
}
fn state1(self_: &mut StateMachineIter) -> Option<&'static str> {
self_.statefn = &state2;
//~^ ERROR borrowed value does not live long enough
return Some("state1");
}
fn state2(self_: &mut StateMachineIter) -> Option<(&'static str)> {
self_.statefn = &state3;
//~^ ERROR borrowed value does not live long enough
return Some("state2");
}
fn state3(self_: &mut StateMachineIter) -> Option<(&'static str)> {
self_.statefn = &finished;
//~^ ERROR borrowed value does not live long enough
return Some("state3");
}
fn finished(_: &mut StateMachineIter) -> Option<(&'static str)> {
return None;
}
fn state_iter() -> StateMachineIter<'static> {
StateMachineIter {
statefn: &state1 //~ ERROR borrowed value does not live long enough
}
}
fn main() {
let mut it = state_iter();
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
} | // http://rust-lang.org/COPYRIGHT.
// | random_line_split |
static-reference-to-fn-2.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct StateMachineIter<'a> {
statefn: &'a fn(&mut StateMachineIter<'a>) -> Option<&'static str>
}
impl<'a> Iterator<&'static str> for StateMachineIter<'a> {
fn next(&mut self) -> Option<&'static str> {
return (*self.statefn)(self);
}
}
fn | (self_: &mut StateMachineIter) -> Option<&'static str> {
self_.statefn = &state2;
//~^ ERROR borrowed value does not live long enough
return Some("state1");
}
fn state2(self_: &mut StateMachineIter) -> Option<(&'static str)> {
self_.statefn = &state3;
//~^ ERROR borrowed value does not live long enough
return Some("state2");
}
fn state3(self_: &mut StateMachineIter) -> Option<(&'static str)> {
self_.statefn = &finished;
//~^ ERROR borrowed value does not live long enough
return Some("state3");
}
fn finished(_: &mut StateMachineIter) -> Option<(&'static str)> {
return None;
}
fn state_iter() -> StateMachineIter<'static> {
StateMachineIter {
statefn: &state1 //~ ERROR borrowed value does not live long enough
}
}
fn main() {
let mut it = state_iter();
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
}
| state1 | identifier_name |
scene.rs | // This is a part of Sonorous.
// Copyright (c) 2005, 2007, 2009, 2012, 2013, 2014, Kang Seonghoon.
// See README.md and LICENSE.txt for details.
//! Scene management.
use std::io::timer::sleep;
use std::time::Duration;
use sdl::get_ticks;
use ui::common::Ticker;
/// Options used by the scene to customize the scene loop.
#[deriving(Clone)]
pub struct | {
/// If specified, limits the number of `Scene::tick` calls per second to this value.
/// `run_scene` ensures this limitation by sleeping after each tick as needed.
pub tpslimit: Option<uint>,
/// If specified, limits the number of `Scene::render` calls per second to this value.
/// Due to the implementation strategy `tpslimit` takes precedence over this if specified.
pub fpslimit: Option<uint>,
}
impl SceneOptions {
/// Creates default options for the scene.
pub fn new() -> SceneOptions {
SceneOptions { tpslimit: None, fpslimit: None }
}
/// Replaces `tpslimit` field with given value.
pub fn tpslimit(self, tps: uint) -> SceneOptions {
SceneOptions { tpslimit: Some(tps),..self }
}
/// Replaces `fpslimit` field with given value.
pub fn fpslimit(self, fps: uint) -> SceneOptions {
SceneOptions { fpslimit: Some(fps),..self }
}
}
/// A command returned by `Scene`'s `tick` method.
pub enum SceneCommand {
/// Continues displaying this scene.
Continue,
/// Pushes a new `Scene` to the scene stack, making it the active scene. The current scene is
/// stopped (after calling `deactivate`) until the new scene returns `PopScene` command.
Push(Box<Scene+'static>),
/// Replaces the current scene with a new `Scene` that will be returned by `consume` method.
/// The command itself does not have a `Scene` argument since new scene may have to be
/// constructured out of the existing scene. Therefore the scene should be prepared for
/// multiple next scenes possible if any.
Replace,
/// Pops the current scene from the scene stack. The program exits if it was the only remaining
/// scene in the stack.
Pop,
/// Clears the scene stack, effectively ending the program.
Exit,
}
/// Scene interface.
pub trait Scene {
/// Called when the scene is to be activated, prior to the first `tick` call. May return
/// a non-`Continue` command to immediately deactivate the scene.
fn activate(&mut self) -> SceneCommand;
/// Returns the options for this scene. It is called *after* the `activate` call.
fn scene_options(&self) -> SceneOptions;
/// Does the event handling and internal logics, and returns a command to instruct the caller.
fn tick(&mut self) -> SceneCommand;
/// Does the rendering jobs. It may get called once after the `tick` call (but not mandatory,
/// for example, due to the frame drop).
fn render(&self);
/// Called when the scene is to be deactivated by the latest `tick` call. It is not called
/// when `activate` returns a non-`Continue` command and the scene becomes deactivated.
fn deactivate(&mut self);
/// Called when the scene is to be replaced by a new `Scene` due to the `ReplaceScene` command.
/// When called due to the `tick` call, this is called after `deactivate` call.
fn consume(self: Box<Self>) -> Box<Scene+'static>;
}
/// Runs given scene and other additionally spawned scenes.
pub fn run_scene(scene: Box<Scene+'static>) {
let mut current = scene;
let mut stack = Vec::new();
loop {
let mut result = current.activate();
match result {
SceneCommand::Continue => {
let opts = current.scene_options();
let mintickdelay = opts.tpslimit.map_or(0, |tps| 1000 / tps);
let interval = opts.fpslimit.map_or(0, |fps| 1000 / fps);
let mut ticker = Ticker::with_interval(interval);
loop {
let ticklimit = get_ticks() + mintickdelay;
result = current.tick();
match result {
SceneCommand::Continue => {
ticker.on_tick(get_ticks(), || { current.render(); });
}
_ => { break; }
}
let now = get_ticks();
if now < ticklimit { sleep(Duration::milliseconds((ticklimit - now) as i64)); }
}
current.deactivate();
}
_ => {}
}
match result {
SceneCommand::Continue => {
panic!("impossible");
}
SceneCommand::Push(newscene) => {
stack.push(current);
current = newscene;
}
SceneCommand::Replace => {
current = current.consume();
}
SceneCommand::Pop => {
if stack.is_empty() { break; }
current = stack.pop().unwrap();
}
SceneCommand::Exit => {
break;
}
}
}
}
| SceneOptions | identifier_name |
scene.rs | // This is a part of Sonorous.
// Copyright (c) 2005, 2007, 2009, 2012, 2013, 2014, Kang Seonghoon.
// See README.md and LICENSE.txt for details.
//! Scene management.
use std::io::timer::sleep;
use std::time::Duration;
use sdl::get_ticks;
use ui::common::Ticker;
/// Options used by the scene to customize the scene loop.
#[deriving(Clone)]
pub struct SceneOptions {
/// If specified, limits the number of `Scene::tick` calls per second to this value.
/// `run_scene` ensures this limitation by sleeping after each tick as needed.
pub tpslimit: Option<uint>,
/// If specified, limits the number of `Scene::render` calls per second to this value.
/// Due to the implementation strategy `tpslimit` takes precedence over this if specified.
pub fpslimit: Option<uint>,
}
impl SceneOptions {
/// Creates default options for the scene.
pub fn new() -> SceneOptions {
SceneOptions { tpslimit: None, fpslimit: None }
}
/// Replaces `tpslimit` field with given value.
pub fn tpslimit(self, tps: uint) -> SceneOptions {
SceneOptions { tpslimit: Some(tps),..self }
}
/// Replaces `fpslimit` field with given value.
pub fn fpslimit(self, fps: uint) -> SceneOptions |
}
/// A command returned by `Scene`'s `tick` method.
pub enum SceneCommand {
/// Continues displaying this scene.
Continue,
/// Pushes a new `Scene` to the scene stack, making it the active scene. The current scene is
/// stopped (after calling `deactivate`) until the new scene returns `PopScene` command.
Push(Box<Scene+'static>),
/// Replaces the current scene with a new `Scene` that will be returned by `consume` method.
/// The command itself does not have a `Scene` argument since new scene may have to be
/// constructured out of the existing scene. Therefore the scene should be prepared for
/// multiple next scenes possible if any.
Replace,
/// Pops the current scene from the scene stack. The program exits if it was the only remaining
/// scene in the stack.
Pop,
/// Clears the scene stack, effectively ending the program.
Exit,
}
/// Scene interface.
pub trait Scene {
/// Called when the scene is to be activated, prior to the first `tick` call. May return
/// a non-`Continue` command to immediately deactivate the scene.
fn activate(&mut self) -> SceneCommand;
/// Returns the options for this scene. It is called *after* the `activate` call.
fn scene_options(&self) -> SceneOptions;
/// Does the event handling and internal logics, and returns a command to instruct the caller.
fn tick(&mut self) -> SceneCommand;
/// Does the rendering jobs. It may get called once after the `tick` call (but not mandatory,
/// for example, due to the frame drop).
fn render(&self);
/// Called when the scene is to be deactivated by the latest `tick` call. It is not called
/// when `activate` returns a non-`Continue` command and the scene becomes deactivated.
fn deactivate(&mut self);
/// Called when the scene is to be replaced by a new `Scene` due to the `ReplaceScene` command.
/// When called due to the `tick` call, this is called after `deactivate` call.
fn consume(self: Box<Self>) -> Box<Scene+'static>;
}
/// Runs given scene and other additionally spawned scenes.
pub fn run_scene(scene: Box<Scene+'static>) {
let mut current = scene;
let mut stack = Vec::new();
loop {
let mut result = current.activate();
match result {
SceneCommand::Continue => {
let opts = current.scene_options();
let mintickdelay = opts.tpslimit.map_or(0, |tps| 1000 / tps);
let interval = opts.fpslimit.map_or(0, |fps| 1000 / fps);
let mut ticker = Ticker::with_interval(interval);
loop {
let ticklimit = get_ticks() + mintickdelay;
result = current.tick();
match result {
SceneCommand::Continue => {
ticker.on_tick(get_ticks(), || { current.render(); });
}
_ => { break; }
}
let now = get_ticks();
if now < ticklimit { sleep(Duration::milliseconds((ticklimit - now) as i64)); }
}
current.deactivate();
}
_ => {}
}
match result {
SceneCommand::Continue => {
panic!("impossible");
}
SceneCommand::Push(newscene) => {
stack.push(current);
current = newscene;
}
SceneCommand::Replace => {
current = current.consume();
}
SceneCommand::Pop => {
if stack.is_empty() { break; }
current = stack.pop().unwrap();
}
SceneCommand::Exit => {
break;
}
}
}
}
| {
SceneOptions { fpslimit: Some(fps), ..self }
} | identifier_body |
scene.rs | // This is a part of Sonorous.
// Copyright (c) 2005, 2007, 2009, 2012, 2013, 2014, Kang Seonghoon.
// See README.md and LICENSE.txt for details.
//! Scene management.
use std::io::timer::sleep;
use std::time::Duration;
use sdl::get_ticks;
use ui::common::Ticker;
/// Options used by the scene to customize the scene loop.
#[deriving(Clone)]
pub struct SceneOptions {
/// If specified, limits the number of `Scene::tick` calls per second to this value.
/// `run_scene` ensures this limitation by sleeping after each tick as needed.
pub tpslimit: Option<uint>,
/// If specified, limits the number of `Scene::render` calls per second to this value.
/// Due to the implementation strategy `tpslimit` takes precedence over this if specified.
pub fpslimit: Option<uint>, | impl SceneOptions {
/// Creates default options for the scene.
pub fn new() -> SceneOptions {
SceneOptions { tpslimit: None, fpslimit: None }
}
/// Replaces `tpslimit` field with given value.
pub fn tpslimit(self, tps: uint) -> SceneOptions {
SceneOptions { tpslimit: Some(tps),..self }
}
/// Replaces `fpslimit` field with given value.
pub fn fpslimit(self, fps: uint) -> SceneOptions {
SceneOptions { fpslimit: Some(fps),..self }
}
}
/// A command returned by `Scene`'s `tick` method.
pub enum SceneCommand {
/// Continues displaying this scene.
Continue,
/// Pushes a new `Scene` to the scene stack, making it the active scene. The current scene is
/// stopped (after calling `deactivate`) until the new scene returns `PopScene` command.
Push(Box<Scene+'static>),
/// Replaces the current scene with a new `Scene` that will be returned by `consume` method.
/// The command itself does not have a `Scene` argument since new scene may have to be
/// constructured out of the existing scene. Therefore the scene should be prepared for
/// multiple next scenes possible if any.
Replace,
/// Pops the current scene from the scene stack. The program exits if it was the only remaining
/// scene in the stack.
Pop,
/// Clears the scene stack, effectively ending the program.
Exit,
}
/// Scene interface.
pub trait Scene {
/// Called when the scene is to be activated, prior to the first `tick` call. May return
/// a non-`Continue` command to immediately deactivate the scene.
fn activate(&mut self) -> SceneCommand;
/// Returns the options for this scene. It is called *after* the `activate` call.
fn scene_options(&self) -> SceneOptions;
/// Does the event handling and internal logics, and returns a command to instruct the caller.
fn tick(&mut self) -> SceneCommand;
/// Does the rendering jobs. It may get called once after the `tick` call (but not mandatory,
/// for example, due to the frame drop).
fn render(&self);
/// Called when the scene is to be deactivated by the latest `tick` call. It is not called
/// when `activate` returns a non-`Continue` command and the scene becomes deactivated.
fn deactivate(&mut self);
/// Called when the scene is to be replaced by a new `Scene` due to the `ReplaceScene` command.
/// When called due to the `tick` call, this is called after `deactivate` call.
fn consume(self: Box<Self>) -> Box<Scene+'static>;
}
/// Runs given scene and other additionally spawned scenes.
pub fn run_scene(scene: Box<Scene+'static>) {
let mut current = scene;
let mut stack = Vec::new();
loop {
let mut result = current.activate();
match result {
SceneCommand::Continue => {
let opts = current.scene_options();
let mintickdelay = opts.tpslimit.map_or(0, |tps| 1000 / tps);
let interval = opts.fpslimit.map_or(0, |fps| 1000 / fps);
let mut ticker = Ticker::with_interval(interval);
loop {
let ticklimit = get_ticks() + mintickdelay;
result = current.tick();
match result {
SceneCommand::Continue => {
ticker.on_tick(get_ticks(), || { current.render(); });
}
_ => { break; }
}
let now = get_ticks();
if now < ticklimit { sleep(Duration::milliseconds((ticklimit - now) as i64)); }
}
current.deactivate();
}
_ => {}
}
match result {
SceneCommand::Continue => {
panic!("impossible");
}
SceneCommand::Push(newscene) => {
stack.push(current);
current = newscene;
}
SceneCommand::Replace => {
current = current.consume();
}
SceneCommand::Pop => {
if stack.is_empty() { break; }
current = stack.pop().unwrap();
}
SceneCommand::Exit => {
break;
}
}
}
} | }
| random_line_split |
scene.rs | // This is a part of Sonorous.
// Copyright (c) 2005, 2007, 2009, 2012, 2013, 2014, Kang Seonghoon.
// See README.md and LICENSE.txt for details.
//! Scene management.
use std::io::timer::sleep;
use std::time::Duration;
use sdl::get_ticks;
use ui::common::Ticker;
/// Options used by the scene to customize the scene loop.
#[deriving(Clone)]
pub struct SceneOptions {
/// If specified, limits the number of `Scene::tick` calls per second to this value.
/// `run_scene` ensures this limitation by sleeping after each tick as needed.
pub tpslimit: Option<uint>,
/// If specified, limits the number of `Scene::render` calls per second to this value.
/// Due to the implementation strategy `tpslimit` takes precedence over this if specified.
pub fpslimit: Option<uint>,
}
impl SceneOptions {
/// Creates default options for the scene.
pub fn new() -> SceneOptions {
SceneOptions { tpslimit: None, fpslimit: None }
}
/// Replaces `tpslimit` field with given value.
pub fn tpslimit(self, tps: uint) -> SceneOptions {
SceneOptions { tpslimit: Some(tps),..self }
}
/// Replaces `fpslimit` field with given value.
pub fn fpslimit(self, fps: uint) -> SceneOptions {
SceneOptions { fpslimit: Some(fps),..self }
}
}
/// A command returned by `Scene`'s `tick` method.
pub enum SceneCommand {
/// Continues displaying this scene.
Continue,
/// Pushes a new `Scene` to the scene stack, making it the active scene. The current scene is
/// stopped (after calling `deactivate`) until the new scene returns `PopScene` command.
Push(Box<Scene+'static>),
/// Replaces the current scene with a new `Scene` that will be returned by `consume` method.
/// The command itself does not have a `Scene` argument since new scene may have to be
/// constructured out of the existing scene. Therefore the scene should be prepared for
/// multiple next scenes possible if any.
Replace,
/// Pops the current scene from the scene stack. The program exits if it was the only remaining
/// scene in the stack.
Pop,
/// Clears the scene stack, effectively ending the program.
Exit,
}
/// Scene interface.
pub trait Scene {
/// Called when the scene is to be activated, prior to the first `tick` call. May return
/// a non-`Continue` command to immediately deactivate the scene.
fn activate(&mut self) -> SceneCommand;
/// Returns the options for this scene. It is called *after* the `activate` call.
fn scene_options(&self) -> SceneOptions;
/// Does the event handling and internal logics, and returns a command to instruct the caller.
fn tick(&mut self) -> SceneCommand;
/// Does the rendering jobs. It may get called once after the `tick` call (but not mandatory,
/// for example, due to the frame drop).
fn render(&self);
/// Called when the scene is to be deactivated by the latest `tick` call. It is not called
/// when `activate` returns a non-`Continue` command and the scene becomes deactivated.
fn deactivate(&mut self);
/// Called when the scene is to be replaced by a new `Scene` due to the `ReplaceScene` command.
/// When called due to the `tick` call, this is called after `deactivate` call.
fn consume(self: Box<Self>) -> Box<Scene+'static>;
}
/// Runs given scene and other additionally spawned scenes.
pub fn run_scene(scene: Box<Scene+'static>) {
let mut current = scene;
let mut stack = Vec::new();
loop {
let mut result = current.activate();
match result {
SceneCommand::Continue => |
_ => {}
}
match result {
SceneCommand::Continue => {
panic!("impossible");
}
SceneCommand::Push(newscene) => {
stack.push(current);
current = newscene;
}
SceneCommand::Replace => {
current = current.consume();
}
SceneCommand::Pop => {
if stack.is_empty() { break; }
current = stack.pop().unwrap();
}
SceneCommand::Exit => {
break;
}
}
}
}
| {
let opts = current.scene_options();
let mintickdelay = opts.tpslimit.map_or(0, |tps| 1000 / tps);
let interval = opts.fpslimit.map_or(0, |fps| 1000 / fps);
let mut ticker = Ticker::with_interval(interval);
loop {
let ticklimit = get_ticks() + mintickdelay;
result = current.tick();
match result {
SceneCommand::Continue => {
ticker.on_tick(get_ticks(), || { current.render(); });
}
_ => { break; }
}
let now = get_ticks();
if now < ticklimit { sleep(Duration::milliseconds((ticklimit - now) as i64)); }
}
current.deactivate();
} | conditional_block |
associated-types-eq-3.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test equality constraints on associated types. Check we get type errors
// where we should.
pub trait Foo {
type A;
fn boo(&self) -> <Self as Foo>::A;
}
struct Bar;
impl Foo for isize {
type A = usize;
fn boo(&self) -> usize {
42
}
}
fn foo1<I: Foo<A=Bar>>(x: I) {
let _: Bar = x.boo();
}
fn foo2<I: Foo>(x: I) {
let _: Bar = x.boo();
//~^ ERROR mismatched types
//~| expected `Bar`
//~| found `<I as Foo>::A`
//~| expected struct `Bar`
//~| found associated type
}
pub fn baz(x: &Foo<A=Bar>) {
let _: Bar = x.boo();
}
pub fn main() {
let a = 42;
foo1(a);
//~^ ERROR type mismatch resolving
//~| expected usize
//~| found struct `Bar` | baz(&a);
//~^ ERROR type mismatch resolving
//~| expected usize
//~| found struct `Bar`
} | random_line_split |
|
associated-types-eq-3.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test equality constraints on associated types. Check we get type errors
// where we should.
pub trait Foo {
type A;
fn boo(&self) -> <Self as Foo>::A;
}
struct Bar;
impl Foo for isize {
type A = usize;
fn boo(&self) -> usize {
42
}
}
fn foo1<I: Foo<A=Bar>>(x: I) {
let _: Bar = x.boo();
}
fn foo2<I: Foo>(x: I) {
let _: Bar = x.boo();
//~^ ERROR mismatched types
//~| expected `Bar`
//~| found `<I as Foo>::A`
//~| expected struct `Bar`
//~| found associated type
}
pub fn baz(x: &Foo<A=Bar>) {
let _: Bar = x.boo();
}
pub fn | () {
let a = 42;
foo1(a);
//~^ ERROR type mismatch resolving
//~| expected usize
//~| found struct `Bar`
baz(&a);
//~^ ERROR type mismatch resolving
//~| expected usize
//~| found struct `Bar`
}
| main | identifier_name |
trait-inheritance-auto.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
// Testing that this impl turns A into a Quux, because
// A is already a Foo Bar Baz
impl<T:Foo + Bar + Baz> Quux for T { }
trait Foo { fn f(&self) -> isize; }
trait Bar { fn g(&self) -> isize; }
trait Baz { fn h(&self) -> isize; }
trait Quux: Foo + Bar + Baz { }
struct A { x: isize }
impl Foo for A { fn f(&self) -> isize { 10 } }
impl Bar for A { fn g(&self) -> isize { 20 } }
impl Baz for A { fn h(&self) -> isize { 30 } }
fn | <T:Quux>(a: &T) {
assert_eq!(a.f(), 10);
assert_eq!(a.g(), 20);
assert_eq!(a.h(), 30);
}
pub fn main() {
let a = &A { x: 3 };
f(a);
}
| f | identifier_name |
trait-inheritance-auto.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
// Testing that this impl turns A into a Quux, because
// A is already a Foo Bar Baz
impl<T:Foo + Bar + Baz> Quux for T { }
trait Foo { fn f(&self) -> isize; }
trait Bar { fn g(&self) -> isize; } |
trait Quux: Foo + Bar + Baz { }
struct A { x: isize }
impl Foo for A { fn f(&self) -> isize { 10 } }
impl Bar for A { fn g(&self) -> isize { 20 } }
impl Baz for A { fn h(&self) -> isize { 30 } }
fn f<T:Quux>(a: &T) {
assert_eq!(a.f(), 10);
assert_eq!(a.g(), 20);
assert_eq!(a.h(), 30);
}
pub fn main() {
let a = &A { x: 3 };
f(a);
} | trait Baz { fn h(&self) -> isize; } | random_line_split |
trait-inheritance-auto.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(dead_code)]
// Testing that this impl turns A into a Quux, because
// A is already a Foo Bar Baz
impl<T:Foo + Bar + Baz> Quux for T { }
trait Foo { fn f(&self) -> isize; }
trait Bar { fn g(&self) -> isize; }
trait Baz { fn h(&self) -> isize; }
trait Quux: Foo + Bar + Baz { }
struct A { x: isize }
impl Foo for A { fn f(&self) -> isize { 10 } }
impl Bar for A { fn g(&self) -> isize { 20 } }
impl Baz for A { fn h(&self) -> isize | }
fn f<T:Quux>(a: &T) {
assert_eq!(a.f(), 10);
assert_eq!(a.g(), 20);
assert_eq!(a.h(), 30);
}
pub fn main() {
let a = &A { x: 3 };
f(a);
}
| { 30 } | identifier_body |
stylesheet.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use {Namespace, Prefix};
use context::QuirksMode;
use cssparser::{Parser, ParserInput, RuleListParser};
use error_reporting::{ContextualParseError, ParseErrorReporter};
use fallible::FallibleVec;
use fxhash::FxHashMap;
use invalidation::media_queries::{MediaListKey, ToMediaListKey};
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use media_queries::{Device, MediaList};
use parking_lot::RwLock;
use parser::ParserContext;
use servo_arc::Arc;
use shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked, SharedRwLock, SharedRwLockReadGuard};
use std::mem;
use std::sync::atomic::{AtomicBool, Ordering};
use style_traits::ParsingMode;
use stylesheets::{CssRule, CssRules, Origin, UrlExtraData};
use stylesheets::loader::StylesheetLoader;
use stylesheets::rule_parser::{State, TopLevelRuleParser};
use stylesheets::rules_iterator::{EffectiveRules, EffectiveRulesIterator};
use stylesheets::rules_iterator::{NestedRuleIterationCondition, RulesIterator};
use use_counters::UseCounters;
/// This structure holds the user-agent and user stylesheets.
pub struct | {
/// The lock used for user-agent stylesheets.
pub shared_lock: SharedRwLock,
/// The user or user agent stylesheets.
pub user_or_user_agent_stylesheets: Vec<DocumentStyleSheet>,
/// The quirks mode stylesheet.
pub quirks_mode_stylesheet: DocumentStyleSheet,
}
/// A set of namespaces applying to a given stylesheet.
///
/// The namespace id is used in gecko
#[derive(Clone, Debug, Default, MallocSizeOf)]
#[allow(missing_docs)]
pub struct Namespaces {
pub default: Option<Namespace>,
pub prefixes: FxHashMap<Prefix, Namespace>,
}
/// The contents of a given stylesheet. This effectively maps to a
/// StyleSheetInner in Gecko.
#[derive(Debug)]
pub struct StylesheetContents {
/// List of rules in the order they were found (important for
/// cascading order)
pub rules: Arc<Locked<CssRules>>,
/// The origin of this stylesheet.
pub origin: Origin,
/// The url data this stylesheet should use.
pub url_data: RwLock<UrlExtraData>,
/// The namespaces that apply to this stylesheet.
pub namespaces: RwLock<Namespaces>,
/// The quirks mode of this stylesheet.
pub quirks_mode: QuirksMode,
/// This stylesheet's source map URL.
pub source_map_url: RwLock<Option<String>>,
/// This stylesheet's source URL.
pub source_url: RwLock<Option<String>>,
}
impl StylesheetContents {
/// Parse a given CSS string, with a given url-data, origin, and
/// quirks mode.
pub fn from_str(
css: &str,
url_data: UrlExtraData,
origin: Origin,
shared_lock: &SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: Option<&ParseErrorReporter>,
quirks_mode: QuirksMode,
line_number_offset: u32,
use_counters: Option<&UseCounters>,
) -> Self {
let namespaces = RwLock::new(Namespaces::default());
let (rules, source_map_url, source_url) = Stylesheet::parse_rules(
css,
&url_data,
origin,
&mut *namespaces.write(),
&shared_lock,
stylesheet_loader,
error_reporter,
quirks_mode,
line_number_offset,
use_counters,
);
Self {
rules: CssRules::new(rules, &shared_lock),
origin: origin,
url_data: RwLock::new(url_data),
namespaces: namespaces,
quirks_mode: quirks_mode,
source_map_url: RwLock::new(source_map_url),
source_url: RwLock::new(source_url),
}
}
/// Returns a reference to the list of rules.
#[inline]
pub fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
&self.rules.read_with(guard).0
}
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
// Measurement of other fields may be added later.
self.rules.unconditional_shallow_size_of(ops) +
self.rules.read_with(guard).size_of(guard, ops)
}
}
impl DeepCloneWithLock for StylesheetContents {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
// Make a deep clone of the rules, using the new lock.
let rules = self
.rules
.read_with(guard)
.deep_clone_with_lock(lock, guard, params);
Self {
rules: Arc::new(lock.wrap(rules)),
quirks_mode: self.quirks_mode,
origin: self.origin,
url_data: RwLock::new((*self.url_data.read()).clone()),
namespaces: RwLock::new((*self.namespaces.read()).clone()),
source_map_url: RwLock::new((*self.source_map_url.read()).clone()),
source_url: RwLock::new((*self.source_url.read()).clone()),
}
}
}
/// The structure servo uses to represent a stylesheet.
#[derive(Debug)]
pub struct Stylesheet {
/// The contents of this stylesheet.
pub contents: StylesheetContents,
/// The lock used for objects inside this stylesheet
pub shared_lock: SharedRwLock,
/// List of media associated with the Stylesheet.
pub media: Arc<Locked<MediaList>>,
/// Whether this stylesheet should be disabled.
pub disabled: AtomicBool,
}
macro_rules! rule_filter {
($( $method: ident($variant:ident => $rule_type: ident), )+) => {
$(
#[allow(missing_docs)]
fn $method<F>(&self, device: &Device, guard: &SharedRwLockReadGuard, mut f: F)
where F: FnMut(&::stylesheets::$rule_type),
{
use stylesheets::CssRule;
for rule in self.effective_rules(device, guard) {
if let CssRule::$variant(ref lock) = *rule {
let rule = lock.read_with(guard);
f(&rule)
}
}
}
)+
}
}
/// A trait to represent a given stylesheet in a document.
pub trait StylesheetInDocument: ::std::fmt::Debug {
/// Get the stylesheet origin.
fn origin(&self, guard: &SharedRwLockReadGuard) -> Origin;
/// Get the stylesheet quirks mode.
fn quirks_mode(&self, guard: &SharedRwLockReadGuard) -> QuirksMode;
/// Get whether this stylesheet is enabled.
fn enabled(&self) -> bool;
/// Get the media associated with this stylesheet.
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList>;
/// Returns a reference to the list of rules in this stylesheet.
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule];
/// Return an iterator using the condition `C`.
#[inline]
fn iter_rules<'a, 'b, C>(
&'a self,
device: &'a Device,
guard: &'a SharedRwLockReadGuard<'b>,
) -> RulesIterator<'a, 'b, C>
where
C: NestedRuleIterationCondition,
{
RulesIterator::new(device, self.quirks_mode(guard), guard, self.rules(guard))
}
/// Returns whether the style-sheet applies for the current device.
fn is_effective_for_device(&self, device: &Device, guard: &SharedRwLockReadGuard) -> bool {
match self.media(guard) {
Some(medialist) => medialist.evaluate(device, self.quirks_mode(guard)),
None => true,
}
}
/// Return an iterator over the effective rules within the style-sheet, as
/// according to the supplied `Device`.
#[inline]
fn effective_rules<'a, 'b>(
&'a self,
device: &'a Device,
guard: &'a SharedRwLockReadGuard<'b>,
) -> EffectiveRulesIterator<'a, 'b> {
self.iter_rules::<EffectiveRules>(device, guard)
}
rule_filter! {
effective_style_rules(Style => StyleRule),
effective_media_rules(Media => MediaRule),
effective_font_face_rules(FontFace => FontFaceRule),
effective_font_face_feature_values_rules(FontFeatureValues => FontFeatureValuesRule),
effective_counter_style_rules(CounterStyle => CounterStyleRule),
effective_viewport_rules(Viewport => ViewportRule),
effective_keyframes_rules(Keyframes => KeyframesRule),
effective_supports_rules(Supports => SupportsRule),
effective_page_rules(Page => PageRule),
effective_document_rules(Document => DocumentRule),
}
}
impl StylesheetInDocument for Stylesheet {
fn origin(&self, _guard: &SharedRwLockReadGuard) -> Origin {
self.contents.origin
}
fn quirks_mode(&self, _guard: &SharedRwLockReadGuard) -> QuirksMode {
self.contents.quirks_mode
}
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList> {
Some(self.media.read_with(guard))
}
fn enabled(&self) -> bool {
!self.disabled()
}
#[inline]
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
self.contents.rules(guard)
}
}
/// A simple wrapper over an `Arc<Stylesheet>`, with pointer comparison, and
/// suitable for its use in a `StylesheetSet`.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub struct DocumentStyleSheet(
#[cfg_attr(feature = "servo", ignore_malloc_size_of = "Arc")] pub Arc<Stylesheet>,
);
impl PartialEq for DocumentStyleSheet {
fn eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.0, &other.0)
}
}
impl ToMediaListKey for DocumentStyleSheet {
fn to_media_list_key(&self) -> MediaListKey {
self.0.to_media_list_key()
}
}
impl StylesheetInDocument for DocumentStyleSheet {
fn origin(&self, guard: &SharedRwLockReadGuard) -> Origin {
self.0.origin(guard)
}
fn quirks_mode(&self, guard: &SharedRwLockReadGuard) -> QuirksMode {
self.0.quirks_mode(guard)
}
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList> {
self.0.media(guard)
}
fn enabled(&self) -> bool {
self.0.enabled()
}
#[inline]
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
self.0.rules(guard)
}
}
impl Stylesheet {
/// Updates an empty stylesheet from a given string of text.
pub fn update_from_str(
existing: &Stylesheet,
css: &str,
url_data: UrlExtraData,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: Option<&ParseErrorReporter>,
line_number_offset: u32,
) {
let namespaces = RwLock::new(Namespaces::default());
// FIXME: Consider adding use counters to Servo?
let (rules, source_map_url, source_url) = Self::parse_rules(
css,
&url_data,
existing.contents.origin,
&mut *namespaces.write(),
&existing.shared_lock,
stylesheet_loader,
error_reporter,
existing.contents.quirks_mode,
line_number_offset,
/* use_counters = */ None,
);
*existing.contents.url_data.write() = url_data;
mem::swap(
&mut *existing.contents.namespaces.write(),
&mut *namespaces.write(),
);
// Acquire the lock *after* parsing, to minimize the exclusive section.
let mut guard = existing.shared_lock.write();
*existing.contents.rules.write_with(&mut guard) = CssRules(rules);
*existing.contents.source_map_url.write() = source_map_url;
*existing.contents.source_url.write() = source_url;
}
fn parse_rules(
css: &str,
url_data: &UrlExtraData,
origin: Origin,
namespaces: &mut Namespaces,
shared_lock: &SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: Option<&ParseErrorReporter>,
quirks_mode: QuirksMode,
line_number_offset: u32,
use_counters: Option<&UseCounters>,
) -> (Vec<CssRule>, Option<String>, Option<String>) {
let mut rules = Vec::new();
let mut input = ParserInput::new_with_line_number_offset(css, line_number_offset);
let mut input = Parser::new(&mut input);
let context = ParserContext::new(
origin,
url_data,
None,
ParsingMode::DEFAULT,
quirks_mode,
error_reporter,
use_counters,
);
let rule_parser = TopLevelRuleParser {
shared_lock,
loader: stylesheet_loader,
context,
state: State::Start,
dom_error: None,
insert_rule_context: None,
namespaces,
};
{
let mut iter = RuleListParser::new_for_stylesheet(&mut input, rule_parser);
while let Some(result) = iter.next() {
match result {
Ok(rule) => {
// Use a fallible push here, and if it fails, just
// fall out of the loop. This will cause the page to
// be shown incorrectly, but it's better than OOMing.
if rules.try_push(rule).is_err() {
break;
}
},
Err((error, slice)) => {
let location = error.location;
let error = ContextualParseError::InvalidRule(slice, error);
iter.parser.context.log_css_error(location, error);
},
}
}
}
let source_map_url = input.current_source_map_url().map(String::from);
let source_url = input.current_source_url().map(String::from);
(rules, source_map_url, source_url)
}
/// Creates an empty stylesheet and parses it with a given base url, origin
/// and media.
///
/// Effectively creates a new stylesheet and forwards the hard work to
/// `Stylesheet::update_from_str`.
pub fn from_str(
css: &str,
url_data: UrlExtraData,
origin: Origin,
media: Arc<Locked<MediaList>>,
shared_lock: SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: Option<&ParseErrorReporter>,
quirks_mode: QuirksMode,
line_number_offset: u32,
) -> Self {
// FIXME: Consider adding use counters to Servo?
let contents = StylesheetContents::from_str(
css,
url_data,
origin,
&shared_lock,
stylesheet_loader,
error_reporter,
quirks_mode,
line_number_offset,
/* use_counters = */ None,
);
Stylesheet {
contents,
shared_lock,
media,
disabled: AtomicBool::new(false),
}
}
/// Returns whether the stylesheet has been explicitly disabled through the
/// CSSOM.
pub fn disabled(&self) -> bool {
self.disabled.load(Ordering::SeqCst)
}
/// Records that the stylesheet has been explicitly disabled through the
/// CSSOM.
///
/// Returns whether the the call resulted in a change in disabled state.
///
/// Disabled stylesheets remain in the document, but their rules are not
/// added to the Stylist.
pub fn set_disabled(&self, disabled: bool) -> bool {
self.disabled.swap(disabled, Ordering::SeqCst)!= disabled
}
}
#[cfg(feature = "servo")]
impl Clone for Stylesheet {
fn clone(&self) -> Self {
// Create a new lock for our clone.
let lock = self.shared_lock.clone();
let guard = self.shared_lock.read();
// Make a deep clone of the media, using the new lock.
let media = self.media.read_with(&guard).clone();
let media = Arc::new(lock.wrap(media));
let contents = self
.contents
.deep_clone_with_lock(&lock, &guard, &DeepCloneParams);
Stylesheet {
contents,
media: media,
shared_lock: lock,
disabled: AtomicBool::new(self.disabled.load(Ordering::SeqCst)),
}
}
}
| UserAgentStylesheets | identifier_name |
stylesheet.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use {Namespace, Prefix};
use context::QuirksMode;
use cssparser::{Parser, ParserInput, RuleListParser};
use error_reporting::{ContextualParseError, ParseErrorReporter};
use fallible::FallibleVec;
use fxhash::FxHashMap;
use invalidation::media_queries::{MediaListKey, ToMediaListKey};
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use media_queries::{Device, MediaList};
use parking_lot::RwLock;
use parser::ParserContext;
use servo_arc::Arc;
use shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked, SharedRwLock, SharedRwLockReadGuard};
use std::mem;
use std::sync::atomic::{AtomicBool, Ordering};
use style_traits::ParsingMode;
use stylesheets::{CssRule, CssRules, Origin, UrlExtraData};
use stylesheets::loader::StylesheetLoader;
use stylesheets::rule_parser::{State, TopLevelRuleParser};
use stylesheets::rules_iterator::{EffectiveRules, EffectiveRulesIterator};
use stylesheets::rules_iterator::{NestedRuleIterationCondition, RulesIterator};
use use_counters::UseCounters;
/// This structure holds the user-agent and user stylesheets.
pub struct UserAgentStylesheets {
/// The lock used for user-agent stylesheets.
pub shared_lock: SharedRwLock,
/// The user or user agent stylesheets.
pub user_or_user_agent_stylesheets: Vec<DocumentStyleSheet>,
/// The quirks mode stylesheet.
pub quirks_mode_stylesheet: DocumentStyleSheet,
}
/// A set of namespaces applying to a given stylesheet.
///
/// The namespace id is used in gecko
#[derive(Clone, Debug, Default, MallocSizeOf)]
#[allow(missing_docs)]
pub struct Namespaces {
pub default: Option<Namespace>,
pub prefixes: FxHashMap<Prefix, Namespace>,
}
/// The contents of a given stylesheet. This effectively maps to a
/// StyleSheetInner in Gecko.
#[derive(Debug)]
pub struct StylesheetContents {
/// List of rules in the order they were found (important for
/// cascading order)
pub rules: Arc<Locked<CssRules>>,
/// The origin of this stylesheet.
pub origin: Origin,
/// The url data this stylesheet should use.
pub url_data: RwLock<UrlExtraData>,
/// The namespaces that apply to this stylesheet.
pub namespaces: RwLock<Namespaces>,
/// The quirks mode of this stylesheet.
pub quirks_mode: QuirksMode,
/// This stylesheet's source map URL.
pub source_map_url: RwLock<Option<String>>,
/// This stylesheet's source URL.
pub source_url: RwLock<Option<String>>,
}
impl StylesheetContents {
/// Parse a given CSS string, with a given url-data, origin, and
/// quirks mode.
pub fn from_str(
css: &str,
url_data: UrlExtraData,
origin: Origin,
shared_lock: &SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: Option<&ParseErrorReporter>,
quirks_mode: QuirksMode,
line_number_offset: u32,
use_counters: Option<&UseCounters>,
) -> Self {
let namespaces = RwLock::new(Namespaces::default());
let (rules, source_map_url, source_url) = Stylesheet::parse_rules(
css,
&url_data,
origin,
&mut *namespaces.write(),
&shared_lock,
stylesheet_loader,
error_reporter,
quirks_mode,
line_number_offset,
use_counters,
);
Self {
rules: CssRules::new(rules, &shared_lock),
origin: origin,
url_data: RwLock::new(url_data),
namespaces: namespaces,
quirks_mode: quirks_mode,
source_map_url: RwLock::new(source_map_url),
source_url: RwLock::new(source_url),
}
}
/// Returns a reference to the list of rules.
#[inline]
pub fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
&self.rules.read_with(guard).0
}
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
// Measurement of other fields may be added later.
self.rules.unconditional_shallow_size_of(ops) +
self.rules.read_with(guard).size_of(guard, ops)
}
}
impl DeepCloneWithLock for StylesheetContents {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
// Make a deep clone of the rules, using the new lock.
let rules = self
.rules
.read_with(guard)
.deep_clone_with_lock(lock, guard, params);
Self {
rules: Arc::new(lock.wrap(rules)),
quirks_mode: self.quirks_mode,
origin: self.origin,
url_data: RwLock::new((*self.url_data.read()).clone()),
namespaces: RwLock::new((*self.namespaces.read()).clone()),
source_map_url: RwLock::new((*self.source_map_url.read()).clone()),
source_url: RwLock::new((*self.source_url.read()).clone()),
}
}
}
/// The structure servo uses to represent a stylesheet.
#[derive(Debug)]
pub struct Stylesheet {
/// The contents of this stylesheet.
pub contents: StylesheetContents,
/// The lock used for objects inside this stylesheet
pub shared_lock: SharedRwLock,
/// List of media associated with the Stylesheet.
pub media: Arc<Locked<MediaList>>,
/// Whether this stylesheet should be disabled.
pub disabled: AtomicBool,
}
macro_rules! rule_filter {
($( $method: ident($variant:ident => $rule_type: ident), )+) => {
$(
#[allow(missing_docs)]
fn $method<F>(&self, device: &Device, guard: &SharedRwLockReadGuard, mut f: F)
where F: FnMut(&::stylesheets::$rule_type),
{
use stylesheets::CssRule;
for rule in self.effective_rules(device, guard) {
if let CssRule::$variant(ref lock) = *rule {
let rule = lock.read_with(guard);
f(&rule)
}
}
}
)+
}
}
/// A trait to represent a given stylesheet in a document.
pub trait StylesheetInDocument: ::std::fmt::Debug {
/// Get the stylesheet origin.
fn origin(&self, guard: &SharedRwLockReadGuard) -> Origin;
/// Get the stylesheet quirks mode.
fn quirks_mode(&self, guard: &SharedRwLockReadGuard) -> QuirksMode;
/// Get whether this stylesheet is enabled.
fn enabled(&self) -> bool;
/// Get the media associated with this stylesheet.
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList>;
/// Returns a reference to the list of rules in this stylesheet.
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule];
/// Return an iterator using the condition `C`.
#[inline]
fn iter_rules<'a, 'b, C>(
&'a self,
device: &'a Device,
guard: &'a SharedRwLockReadGuard<'b>,
) -> RulesIterator<'a, 'b, C>
where
C: NestedRuleIterationCondition,
{
RulesIterator::new(device, self.quirks_mode(guard), guard, self.rules(guard))
}
/// Returns whether the style-sheet applies for the current device.
fn is_effective_for_device(&self, device: &Device, guard: &SharedRwLockReadGuard) -> bool {
match self.media(guard) {
Some(medialist) => medialist.evaluate(device, self.quirks_mode(guard)),
None => true,
}
}
/// Return an iterator over the effective rules within the style-sheet, as
/// according to the supplied `Device`.
#[inline]
fn effective_rules<'a, 'b>(
&'a self,
device: &'a Device,
guard: &'a SharedRwLockReadGuard<'b>,
) -> EffectiveRulesIterator<'a, 'b> {
self.iter_rules::<EffectiveRules>(device, guard)
}
rule_filter! {
effective_style_rules(Style => StyleRule),
effective_media_rules(Media => MediaRule),
effective_font_face_rules(FontFace => FontFaceRule),
effective_font_face_feature_values_rules(FontFeatureValues => FontFeatureValuesRule),
effective_counter_style_rules(CounterStyle => CounterStyleRule),
effective_viewport_rules(Viewport => ViewportRule),
effective_keyframes_rules(Keyframes => KeyframesRule),
effective_supports_rules(Supports => SupportsRule),
effective_page_rules(Page => PageRule),
effective_document_rules(Document => DocumentRule),
}
}
impl StylesheetInDocument for Stylesheet {
fn origin(&self, _guard: &SharedRwLockReadGuard) -> Origin {
self.contents.origin
}
fn quirks_mode(&self, _guard: &SharedRwLockReadGuard) -> QuirksMode {
self.contents.quirks_mode
}
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList> {
Some(self.media.read_with(guard))
}
fn enabled(&self) -> bool {
!self.disabled()
}
#[inline]
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
self.contents.rules(guard)
}
}
/// A simple wrapper over an `Arc<Stylesheet>`, with pointer comparison, and
/// suitable for its use in a `StylesheetSet`.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub struct DocumentStyleSheet(
#[cfg_attr(feature = "servo", ignore_malloc_size_of = "Arc")] pub Arc<Stylesheet>,
);
| Arc::ptr_eq(&self.0, &other.0)
}
}
impl ToMediaListKey for DocumentStyleSheet {
fn to_media_list_key(&self) -> MediaListKey {
self.0.to_media_list_key()
}
}
impl StylesheetInDocument for DocumentStyleSheet {
fn origin(&self, guard: &SharedRwLockReadGuard) -> Origin {
self.0.origin(guard)
}
fn quirks_mode(&self, guard: &SharedRwLockReadGuard) -> QuirksMode {
self.0.quirks_mode(guard)
}
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList> {
self.0.media(guard)
}
fn enabled(&self) -> bool {
self.0.enabled()
}
#[inline]
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
self.0.rules(guard)
}
}
impl Stylesheet {
/// Updates an empty stylesheet from a given string of text.
pub fn update_from_str(
existing: &Stylesheet,
css: &str,
url_data: UrlExtraData,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: Option<&ParseErrorReporter>,
line_number_offset: u32,
) {
let namespaces = RwLock::new(Namespaces::default());
// FIXME: Consider adding use counters to Servo?
let (rules, source_map_url, source_url) = Self::parse_rules(
css,
&url_data,
existing.contents.origin,
&mut *namespaces.write(),
&existing.shared_lock,
stylesheet_loader,
error_reporter,
existing.contents.quirks_mode,
line_number_offset,
/* use_counters = */ None,
);
*existing.contents.url_data.write() = url_data;
mem::swap(
&mut *existing.contents.namespaces.write(),
&mut *namespaces.write(),
);
// Acquire the lock *after* parsing, to minimize the exclusive section.
let mut guard = existing.shared_lock.write();
*existing.contents.rules.write_with(&mut guard) = CssRules(rules);
*existing.contents.source_map_url.write() = source_map_url;
*existing.contents.source_url.write() = source_url;
}
fn parse_rules(
css: &str,
url_data: &UrlExtraData,
origin: Origin,
namespaces: &mut Namespaces,
shared_lock: &SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: Option<&ParseErrorReporter>,
quirks_mode: QuirksMode,
line_number_offset: u32,
use_counters: Option<&UseCounters>,
) -> (Vec<CssRule>, Option<String>, Option<String>) {
let mut rules = Vec::new();
let mut input = ParserInput::new_with_line_number_offset(css, line_number_offset);
let mut input = Parser::new(&mut input);
let context = ParserContext::new(
origin,
url_data,
None,
ParsingMode::DEFAULT,
quirks_mode,
error_reporter,
use_counters,
);
let rule_parser = TopLevelRuleParser {
shared_lock,
loader: stylesheet_loader,
context,
state: State::Start,
dom_error: None,
insert_rule_context: None,
namespaces,
};
{
let mut iter = RuleListParser::new_for_stylesheet(&mut input, rule_parser);
while let Some(result) = iter.next() {
match result {
Ok(rule) => {
// Use a fallible push here, and if it fails, just
// fall out of the loop. This will cause the page to
// be shown incorrectly, but it's better than OOMing.
if rules.try_push(rule).is_err() {
break;
}
},
Err((error, slice)) => {
let location = error.location;
let error = ContextualParseError::InvalidRule(slice, error);
iter.parser.context.log_css_error(location, error);
},
}
}
}
let source_map_url = input.current_source_map_url().map(String::from);
let source_url = input.current_source_url().map(String::from);
(rules, source_map_url, source_url)
}
/// Creates an empty stylesheet and parses it with a given base url, origin
/// and media.
///
/// Effectively creates a new stylesheet and forwards the hard work to
/// `Stylesheet::update_from_str`.
pub fn from_str(
css: &str,
url_data: UrlExtraData,
origin: Origin,
media: Arc<Locked<MediaList>>,
shared_lock: SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: Option<&ParseErrorReporter>,
quirks_mode: QuirksMode,
line_number_offset: u32,
) -> Self {
// FIXME: Consider adding use counters to Servo?
let contents = StylesheetContents::from_str(
css,
url_data,
origin,
&shared_lock,
stylesheet_loader,
error_reporter,
quirks_mode,
line_number_offset,
/* use_counters = */ None,
);
Stylesheet {
contents,
shared_lock,
media,
disabled: AtomicBool::new(false),
}
}
/// Returns whether the stylesheet has been explicitly disabled through the
/// CSSOM.
pub fn disabled(&self) -> bool {
self.disabled.load(Ordering::SeqCst)
}
/// Records that the stylesheet has been explicitly disabled through the
/// CSSOM.
///
/// Returns whether the the call resulted in a change in disabled state.
///
/// Disabled stylesheets remain in the document, but their rules are not
/// added to the Stylist.
pub fn set_disabled(&self, disabled: bool) -> bool {
self.disabled.swap(disabled, Ordering::SeqCst)!= disabled
}
}
#[cfg(feature = "servo")]
impl Clone for Stylesheet {
fn clone(&self) -> Self {
// Create a new lock for our clone.
let lock = self.shared_lock.clone();
let guard = self.shared_lock.read();
// Make a deep clone of the media, using the new lock.
let media = self.media.read_with(&guard).clone();
let media = Arc::new(lock.wrap(media));
let contents = self
.contents
.deep_clone_with_lock(&lock, &guard, &DeepCloneParams);
Stylesheet {
contents,
media: media,
shared_lock: lock,
disabled: AtomicBool::new(self.disabled.load(Ordering::SeqCst)),
}
}
} | impl PartialEq for DocumentStyleSheet {
fn eq(&self, other: &Self) -> bool { | random_line_split |
stylesheet.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use {Namespace, Prefix};
use context::QuirksMode;
use cssparser::{Parser, ParserInput, RuleListParser};
use error_reporting::{ContextualParseError, ParseErrorReporter};
use fallible::FallibleVec;
use fxhash::FxHashMap;
use invalidation::media_queries::{MediaListKey, ToMediaListKey};
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use media_queries::{Device, MediaList};
use parking_lot::RwLock;
use parser::ParserContext;
use servo_arc::Arc;
use shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked, SharedRwLock, SharedRwLockReadGuard};
use std::mem;
use std::sync::atomic::{AtomicBool, Ordering};
use style_traits::ParsingMode;
use stylesheets::{CssRule, CssRules, Origin, UrlExtraData};
use stylesheets::loader::StylesheetLoader;
use stylesheets::rule_parser::{State, TopLevelRuleParser};
use stylesheets::rules_iterator::{EffectiveRules, EffectiveRulesIterator};
use stylesheets::rules_iterator::{NestedRuleIterationCondition, RulesIterator};
use use_counters::UseCounters;
/// This structure holds the user-agent and user stylesheets.
pub struct UserAgentStylesheets {
/// The lock used for user-agent stylesheets.
pub shared_lock: SharedRwLock,
/// The user or user agent stylesheets.
pub user_or_user_agent_stylesheets: Vec<DocumentStyleSheet>,
/// The quirks mode stylesheet.
pub quirks_mode_stylesheet: DocumentStyleSheet,
}
/// A set of namespaces applying to a given stylesheet.
///
/// The namespace id is used in gecko
#[derive(Clone, Debug, Default, MallocSizeOf)]
#[allow(missing_docs)]
pub struct Namespaces {
pub default: Option<Namespace>,
pub prefixes: FxHashMap<Prefix, Namespace>,
}
/// The contents of a given stylesheet. This effectively maps to a
/// StyleSheetInner in Gecko.
#[derive(Debug)]
pub struct StylesheetContents {
/// List of rules in the order they were found (important for
/// cascading order)
pub rules: Arc<Locked<CssRules>>,
/// The origin of this stylesheet.
pub origin: Origin,
/// The url data this stylesheet should use.
pub url_data: RwLock<UrlExtraData>,
/// The namespaces that apply to this stylesheet.
pub namespaces: RwLock<Namespaces>,
/// The quirks mode of this stylesheet.
pub quirks_mode: QuirksMode,
/// This stylesheet's source map URL.
pub source_map_url: RwLock<Option<String>>,
/// This stylesheet's source URL.
pub source_url: RwLock<Option<String>>,
}
impl StylesheetContents {
/// Parse a given CSS string, with a given url-data, origin, and
/// quirks mode.
pub fn from_str(
css: &str,
url_data: UrlExtraData,
origin: Origin,
shared_lock: &SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: Option<&ParseErrorReporter>,
quirks_mode: QuirksMode,
line_number_offset: u32,
use_counters: Option<&UseCounters>,
) -> Self {
let namespaces = RwLock::new(Namespaces::default());
let (rules, source_map_url, source_url) = Stylesheet::parse_rules(
css,
&url_data,
origin,
&mut *namespaces.write(),
&shared_lock,
stylesheet_loader,
error_reporter,
quirks_mode,
line_number_offset,
use_counters,
);
Self {
rules: CssRules::new(rules, &shared_lock),
origin: origin,
url_data: RwLock::new(url_data),
namespaces: namespaces,
quirks_mode: quirks_mode,
source_map_url: RwLock::new(source_map_url),
source_url: RwLock::new(source_url),
}
}
/// Returns a reference to the list of rules.
#[inline]
pub fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
&self.rules.read_with(guard).0
}
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
// Measurement of other fields may be added later.
self.rules.unconditional_shallow_size_of(ops) +
self.rules.read_with(guard).size_of(guard, ops)
}
}
impl DeepCloneWithLock for StylesheetContents {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
// Make a deep clone of the rules, using the new lock.
let rules = self
.rules
.read_with(guard)
.deep_clone_with_lock(lock, guard, params);
Self {
rules: Arc::new(lock.wrap(rules)),
quirks_mode: self.quirks_mode,
origin: self.origin,
url_data: RwLock::new((*self.url_data.read()).clone()),
namespaces: RwLock::new((*self.namespaces.read()).clone()),
source_map_url: RwLock::new((*self.source_map_url.read()).clone()),
source_url: RwLock::new((*self.source_url.read()).clone()),
}
}
}
/// The structure servo uses to represent a stylesheet.
#[derive(Debug)]
pub struct Stylesheet {
/// The contents of this stylesheet.
pub contents: StylesheetContents,
/// The lock used for objects inside this stylesheet
pub shared_lock: SharedRwLock,
/// List of media associated with the Stylesheet.
pub media: Arc<Locked<MediaList>>,
/// Whether this stylesheet should be disabled.
pub disabled: AtomicBool,
}
macro_rules! rule_filter {
($( $method: ident($variant:ident => $rule_type: ident), )+) => {
$(
#[allow(missing_docs)]
fn $method<F>(&self, device: &Device, guard: &SharedRwLockReadGuard, mut f: F)
where F: FnMut(&::stylesheets::$rule_type),
{
use stylesheets::CssRule;
for rule in self.effective_rules(device, guard) {
if let CssRule::$variant(ref lock) = *rule {
let rule = lock.read_with(guard);
f(&rule)
}
}
}
)+
}
}
/// A trait to represent a given stylesheet in a document.
pub trait StylesheetInDocument: ::std::fmt::Debug {
/// Get the stylesheet origin.
fn origin(&self, guard: &SharedRwLockReadGuard) -> Origin;
/// Get the stylesheet quirks mode.
fn quirks_mode(&self, guard: &SharedRwLockReadGuard) -> QuirksMode;
/// Get whether this stylesheet is enabled.
fn enabled(&self) -> bool;
/// Get the media associated with this stylesheet.
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList>;
/// Returns a reference to the list of rules in this stylesheet.
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule];
/// Return an iterator using the condition `C`.
#[inline]
fn iter_rules<'a, 'b, C>(
&'a self,
device: &'a Device,
guard: &'a SharedRwLockReadGuard<'b>,
) -> RulesIterator<'a, 'b, C>
where
C: NestedRuleIterationCondition,
{
RulesIterator::new(device, self.quirks_mode(guard), guard, self.rules(guard))
}
/// Returns whether the style-sheet applies for the current device.
fn is_effective_for_device(&self, device: &Device, guard: &SharedRwLockReadGuard) -> bool {
match self.media(guard) {
Some(medialist) => medialist.evaluate(device, self.quirks_mode(guard)),
None => true,
}
}
/// Return an iterator over the effective rules within the style-sheet, as
/// according to the supplied `Device`.
#[inline]
fn effective_rules<'a, 'b>(
&'a self,
device: &'a Device,
guard: &'a SharedRwLockReadGuard<'b>,
) -> EffectiveRulesIterator<'a, 'b> {
self.iter_rules::<EffectiveRules>(device, guard)
}
rule_filter! {
effective_style_rules(Style => StyleRule),
effective_media_rules(Media => MediaRule),
effective_font_face_rules(FontFace => FontFaceRule),
effective_font_face_feature_values_rules(FontFeatureValues => FontFeatureValuesRule),
effective_counter_style_rules(CounterStyle => CounterStyleRule),
effective_viewport_rules(Viewport => ViewportRule),
effective_keyframes_rules(Keyframes => KeyframesRule),
effective_supports_rules(Supports => SupportsRule),
effective_page_rules(Page => PageRule),
effective_document_rules(Document => DocumentRule),
}
}
impl StylesheetInDocument for Stylesheet {
fn origin(&self, _guard: &SharedRwLockReadGuard) -> Origin {
self.contents.origin
}
fn quirks_mode(&self, _guard: &SharedRwLockReadGuard) -> QuirksMode {
self.contents.quirks_mode
}
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList> {
Some(self.media.read_with(guard))
}
fn enabled(&self) -> bool {
!self.disabled()
}
#[inline]
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
self.contents.rules(guard)
}
}
/// A simple wrapper over an `Arc<Stylesheet>`, with pointer comparison, and
/// suitable for its use in a `StylesheetSet`.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub struct DocumentStyleSheet(
#[cfg_attr(feature = "servo", ignore_malloc_size_of = "Arc")] pub Arc<Stylesheet>,
);
impl PartialEq for DocumentStyleSheet {
fn eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.0, &other.0)
}
}
impl ToMediaListKey for DocumentStyleSheet {
fn to_media_list_key(&self) -> MediaListKey {
self.0.to_media_list_key()
}
}
impl StylesheetInDocument for DocumentStyleSheet {
fn origin(&self, guard: &SharedRwLockReadGuard) -> Origin {
self.0.origin(guard)
}
fn quirks_mode(&self, guard: &SharedRwLockReadGuard) -> QuirksMode {
self.0.quirks_mode(guard)
}
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList> {
self.0.media(guard)
}
fn enabled(&self) -> bool {
self.0.enabled()
}
#[inline]
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
self.0.rules(guard)
}
}
impl Stylesheet {
/// Updates an empty stylesheet from a given string of text.
pub fn update_from_str(
existing: &Stylesheet,
css: &str,
url_data: UrlExtraData,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: Option<&ParseErrorReporter>,
line_number_offset: u32,
) {
let namespaces = RwLock::new(Namespaces::default());
// FIXME: Consider adding use counters to Servo?
let (rules, source_map_url, source_url) = Self::parse_rules(
css,
&url_data,
existing.contents.origin,
&mut *namespaces.write(),
&existing.shared_lock,
stylesheet_loader,
error_reporter,
existing.contents.quirks_mode,
line_number_offset,
/* use_counters = */ None,
);
*existing.contents.url_data.write() = url_data;
mem::swap(
&mut *existing.contents.namespaces.write(),
&mut *namespaces.write(),
);
// Acquire the lock *after* parsing, to minimize the exclusive section.
let mut guard = existing.shared_lock.write();
*existing.contents.rules.write_with(&mut guard) = CssRules(rules);
*existing.contents.source_map_url.write() = source_map_url;
*existing.contents.source_url.write() = source_url;
}
fn parse_rules(
css: &str,
url_data: &UrlExtraData,
origin: Origin,
namespaces: &mut Namespaces,
shared_lock: &SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: Option<&ParseErrorReporter>,
quirks_mode: QuirksMode,
line_number_offset: u32,
use_counters: Option<&UseCounters>,
) -> (Vec<CssRule>, Option<String>, Option<String>) {
let mut rules = Vec::new();
let mut input = ParserInput::new_with_line_number_offset(css, line_number_offset);
let mut input = Parser::new(&mut input);
let context = ParserContext::new(
origin,
url_data,
None,
ParsingMode::DEFAULT,
quirks_mode,
error_reporter,
use_counters,
);
let rule_parser = TopLevelRuleParser {
shared_lock,
loader: stylesheet_loader,
context,
state: State::Start,
dom_error: None,
insert_rule_context: None,
namespaces,
};
{
let mut iter = RuleListParser::new_for_stylesheet(&mut input, rule_parser);
while let Some(result) = iter.next() {
match result {
Ok(rule) => {
// Use a fallible push here, and if it fails, just
// fall out of the loop. This will cause the page to
// be shown incorrectly, but it's better than OOMing.
if rules.try_push(rule).is_err() {
break;
}
},
Err((error, slice)) => {
let location = error.location;
let error = ContextualParseError::InvalidRule(slice, error);
iter.parser.context.log_css_error(location, error);
},
}
}
}
let source_map_url = input.current_source_map_url().map(String::from);
let source_url = input.current_source_url().map(String::from);
(rules, source_map_url, source_url)
}
/// Creates an empty stylesheet and parses it with a given base url, origin
/// and media.
///
/// Effectively creates a new stylesheet and forwards the hard work to
/// `Stylesheet::update_from_str`.
pub fn from_str(
css: &str,
url_data: UrlExtraData,
origin: Origin,
media: Arc<Locked<MediaList>>,
shared_lock: SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: Option<&ParseErrorReporter>,
quirks_mode: QuirksMode,
line_number_offset: u32,
) -> Self {
// FIXME: Consider adding use counters to Servo?
let contents = StylesheetContents::from_str(
css,
url_data,
origin,
&shared_lock,
stylesheet_loader,
error_reporter,
quirks_mode,
line_number_offset,
/* use_counters = */ None,
);
Stylesheet {
contents,
shared_lock,
media,
disabled: AtomicBool::new(false),
}
}
/// Returns whether the stylesheet has been explicitly disabled through the
/// CSSOM.
pub fn disabled(&self) -> bool |
/// Records that the stylesheet has been explicitly disabled through the
/// CSSOM.
///
/// Returns whether the the call resulted in a change in disabled state.
///
/// Disabled stylesheets remain in the document, but their rules are not
/// added to the Stylist.
pub fn set_disabled(&self, disabled: bool) -> bool {
self.disabled.swap(disabled, Ordering::SeqCst)!= disabled
}
}
#[cfg(feature = "servo")]
impl Clone for Stylesheet {
fn clone(&self) -> Self {
// Create a new lock for our clone.
let lock = self.shared_lock.clone();
let guard = self.shared_lock.read();
// Make a deep clone of the media, using the new lock.
let media = self.media.read_with(&guard).clone();
let media = Arc::new(lock.wrap(media));
let contents = self
.contents
.deep_clone_with_lock(&lock, &guard, &DeepCloneParams);
Stylesheet {
contents,
media: media,
shared_lock: lock,
disabled: AtomicBool::new(self.disabled.load(Ordering::SeqCst)),
}
}
}
| {
self.disabled.load(Ordering::SeqCst)
} | identifier_body |
test.rs | use quickcheck::{TestResult, quickcheck};
use rand::Rng;
use super::{integer_value, regex_value};
use expectest::prelude::*;
use pact_matching::s;
#[test]
fn validates_integer_value() {
fn prop(s: String) -> TestResult {
let mut rng = ::rand::thread_rng();
if rng.gen() && s.chars().any(|ch|!ch.is_numeric()) {
TestResult::discard()
} else {
let validation = integer_value(s.clone());
match validation {
Ok(_) => TestResult::from_bool(!s.is_empty() && s.chars().all(|ch| ch.is_numeric() )),
Err(_) => TestResult::from_bool(s.is_empty() || s.chars().find(|ch|!ch.is_numeric() ).is_some())
}
}
} | quickcheck(prop as fn(_) -> _);
expect!(integer_value(s!("1234"))).to(be_ok());
expect!(integer_value(s!("1234x"))).to(be_err());
}
#[test]
fn validates_regex_value() {
expect!(regex_value(s!("1234"))).to(be_ok());
expect!(regex_value(s!("["))).to(be_err());
} | random_line_split |
|
test.rs | use quickcheck::{TestResult, quickcheck};
use rand::Rng;
use super::{integer_value, regex_value};
use expectest::prelude::*;
use pact_matching::s;
#[test]
fn validates_integer_value() {
fn prop(s: String) -> TestResult {
let mut rng = ::rand::thread_rng();
if rng.gen() && s.chars().any(|ch|!ch.is_numeric()) {
TestResult::discard()
} else {
let validation = integer_value(s.clone());
match validation {
Ok(_) => TestResult::from_bool(!s.is_empty() && s.chars().all(|ch| ch.is_numeric() )),
Err(_) => TestResult::from_bool(s.is_empty() || s.chars().find(|ch|!ch.is_numeric() ).is_some())
}
}
}
quickcheck(prop as fn(_) -> _);
expect!(integer_value(s!("1234"))).to(be_ok());
expect!(integer_value(s!("1234x"))).to(be_err());
}
#[test]
fn | () {
expect!(regex_value(s!("1234"))).to(be_ok());
expect!(regex_value(s!("["))).to(be_err());
}
| validates_regex_value | identifier_name |
test.rs | use quickcheck::{TestResult, quickcheck};
use rand::Rng;
use super::{integer_value, regex_value};
use expectest::prelude::*;
use pact_matching::s;
#[test]
fn validates_integer_value() {
fn prop(s: String) -> TestResult {
let mut rng = ::rand::thread_rng();
if rng.gen() && s.chars().any(|ch|!ch.is_numeric()) {
TestResult::discard()
} else {
let validation = integer_value(s.clone());
match validation {
Ok(_) => TestResult::from_bool(!s.is_empty() && s.chars().all(|ch| ch.is_numeric() )),
Err(_) => TestResult::from_bool(s.is_empty() || s.chars().find(|ch|!ch.is_numeric() ).is_some())
}
}
}
quickcheck(prop as fn(_) -> _);
expect!(integer_value(s!("1234"))).to(be_ok());
expect!(integer_value(s!("1234x"))).to(be_err());
}
#[test]
fn validates_regex_value() | {
expect!(regex_value(s!("1234"))).to(be_ok());
expect!(regex_value(s!("["))).to(be_err());
} | identifier_body |
|
tables.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
//! Analyze tracing data for edenscm
//!
//! This is edenscm application specific. It's not a general purposed library.
use serde_json::Value;
// use std::borrow::Cow;
use std::collections::BTreeMap as Map;
use tracing_collector::model::{IndexMap, TreeSpan, TreeSpans};
type Row = Map<String, Value>;
type Rows = Vec<Row>;
type Tables = Map<String, Rows>;
type TidSpans<'a> = IndexMap<(u64, u64), TreeSpans<&'a str>>;
// TODO: Make things more configurable.
/// Extract rows from tracing data. Output format is similar to NoSQL tables:
///
/// ```plain,ignore
/// {table_name: [{column_name: column_data}]}
/// ```
pub fn extract_tables(tid_spans: &TidSpans) -> Tables {
let mut tables = Map::new();
extract_dev_command_timers(&mut tables, tid_spans);
extract_other_tables(&mut tables, tid_spans);
tables
}
fn extract_dev_command_timers<'a>(tables: &mut Tables, tid_spans: &TidSpans) {
let mut row = Row::new();
let toint = |value: &str| -> Value { value.parse::<i64>().unwrap_or_default().into() };
for spans in tid_spans.values() {
for span in spans.walk() {
match span.meta.get("name").cloned().unwrap_or("") {
// By hgcommands, run.rs
"Run Command" => {
let duration = span.duration_millis().unwrap_or(0);
row.insert("command_duration".into(), duration.into());
row.insert("elapsed".into(), duration.into());
for (&name, &value) in span.meta.iter() {
match name {
"nice" => {
row.insert("nice".into(), toint(value));
}
"version" => {
// Truncate the "version" string. This matches the old telemetry behavior.
row.insert("version".into(), value[..34.min(value.len())].into());
}
"max_rss" => {
row.insert("maxrss".into(), toint(value));
}
"exit_code" => {
row.insert("errorcode".into(), toint(value));
}
"parent_names" => |
"args" => {
if let Ok(args) = serde_json::from_str::<Vec<String>>(value) {
// Normalize the first argument to "hg".
let mut full = "hg".to_string();
for arg in args.into_iter().skip(1) {
// Keep the length bounded.
if full.len() + arg.len() >= 256 {
full += " (truncated)";
break;
}
full += &" ";
// TODO: Use shell_escape once in tp2.
// full += &shell_escape::unix::escape(Cow::Owned(arg));
full += &arg;
}
row.insert("fullcommand".into(), full.into());
}
}
_ => {}
}
}
}
// The "log:command-row" event is used by code that wants to
// log to columns of the main command row easily.
"log:command-row" if span.is_event => {
extract_span(&span, &mut row);
}
_ => {}
}
}
}
tables.insert("dev_command_timers".into(), vec![row]);
}
fn extract_other_tables<'a>(tables: &mut Tables, tid_spans: &TidSpans) {
for spans in tid_spans.values() {
for span in spans.walk() {
match span.meta.get("name").cloned().unwrap_or("") {
// The "log:create-row" event is used by code that wants to log
// to a entire new column in a specified table.
//
// The event is expected to have "table", and the rest of the
// metadata will be logged as-is.
"log:create-row" => {
let table_name = match span.meta.get("table") {
Some(&name) => name,
None => continue,
};
let mut row = Row::new();
extract_span(span, &mut row);
tables.entry(table_name.into()).or_default().push(row);
}
_ => {}
}
}
}
}
/// Parse a span, extract its metadata to a row.
fn extract_span(span: &TreeSpan<&str>, row: &mut Row) {
for (&name, &value) in span.meta.iter() {
match name {
// Those keys are likely generated. Skip them.
"module_path" | "cat" | "line" | "name" => {}
// Attempt to convert it to an integer (since tracing data is
// string only).
_ => match value.parse::<i64>() {
Ok(i) => {
row.insert(name.into(), i.into());
}
_ => {
row.insert(name.into(), value.into());
}
},
}
}
}
| {
if let Ok(names) = serde_json::from_str::<Vec<String>>(value) {
let name = names.get(0).cloned().unwrap_or_default();
row.insert("parent".into(), name.into());
}
} | conditional_block |
tables.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
//! Analyze tracing data for edenscm
//!
//! This is edenscm application specific. It's not a general purposed library.
use serde_json::Value;
// use std::borrow::Cow;
use std::collections::BTreeMap as Map;
use tracing_collector::model::{IndexMap, TreeSpan, TreeSpans};
type Row = Map<String, Value>;
type Rows = Vec<Row>;
type Tables = Map<String, Rows>;
type TidSpans<'a> = IndexMap<(u64, u64), TreeSpans<&'a str>>;
// TODO: Make things more configurable.
/// Extract rows from tracing data. Output format is similar to NoSQL tables:
///
/// ```plain,ignore
/// {table_name: [{column_name: column_data}]}
/// ```
pub fn extract_tables(tid_spans: &TidSpans) -> Tables {
let mut tables = Map::new();
extract_dev_command_timers(&mut tables, tid_spans);
extract_other_tables(&mut tables, tid_spans);
tables
}
fn extract_dev_command_timers<'a>(tables: &mut Tables, tid_spans: &TidSpans) {
let mut row = Row::new();
let toint = |value: &str| -> Value { value.parse::<i64>().unwrap_or_default().into() };
for spans in tid_spans.values() {
for span in spans.walk() {
match span.meta.get("name").cloned().unwrap_or("") {
// By hgcommands, run.rs
"Run Command" => {
let duration = span.duration_millis().unwrap_or(0);
row.insert("command_duration".into(), duration.into());
row.insert("elapsed".into(), duration.into());
for (&name, &value) in span.meta.iter() {
match name {
"nice" => {
row.insert("nice".into(), toint(value));
}
"version" => {
// Truncate the "version" string. This matches the old telemetry behavior.
row.insert("version".into(), value[..34.min(value.len())].into());
}
"max_rss" => {
row.insert("maxrss".into(), toint(value));
}
"exit_code" => {
row.insert("errorcode".into(), toint(value));
}
"parent_names" => {
if let Ok(names) = serde_json::from_str::<Vec<String>>(value) {
let name = names.get(0).cloned().unwrap_or_default();
row.insert("parent".into(), name.into());
}
}
"args" => {
if let Ok(args) = serde_json::from_str::<Vec<String>>(value) {
// Normalize the first argument to "hg".
let mut full = "hg".to_string();
for arg in args.into_iter().skip(1) {
// Keep the length bounded.
if full.len() + arg.len() >= 256 {
full += " (truncated)";
break;
}
full += &" ";
// TODO: Use shell_escape once in tp2.
// full += &shell_escape::unix::escape(Cow::Owned(arg));
full += &arg;
}
row.insert("fullcommand".into(), full.into());
}
}
_ => {}
}
}
}
// The "log:command-row" event is used by code that wants to
// log to columns of the main command row easily.
"log:command-row" if span.is_event => {
extract_span(&span, &mut row);
}
_ => {}
}
}
}
tables.insert("dev_command_timers".into(), vec![row]);
}
fn extract_other_tables<'a>(tables: &mut Tables, tid_spans: &TidSpans) {
for spans in tid_spans.values() {
for span in spans.walk() {
match span.meta.get("name").cloned().unwrap_or("") {
// The "log:create-row" event is used by code that wants to log
// to a entire new column in a specified table.
//
// The event is expected to have "table", and the rest of the
// metadata will be logged as-is.
"log:create-row" => {
let table_name = match span.meta.get("table") {
Some(&name) => name,
None => continue,
};
let mut row = Row::new();
extract_span(span, &mut row);
tables.entry(table_name.into()).or_default().push(row);
}
_ => {}
}
}
}
}
/// Parse a span, extract its metadata to a row.
fn | (span: &TreeSpan<&str>, row: &mut Row) {
for (&name, &value) in span.meta.iter() {
match name {
// Those keys are likely generated. Skip them.
"module_path" | "cat" | "line" | "name" => {}
// Attempt to convert it to an integer (since tracing data is
// string only).
_ => match value.parse::<i64>() {
Ok(i) => {
row.insert(name.into(), i.into());
}
_ => {
row.insert(name.into(), value.into());
}
},
}
}
}
| extract_span | identifier_name |
tables.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
//! Analyze tracing data for edenscm
//!
//! This is edenscm application specific. It's not a general purposed library.
use serde_json::Value;
// use std::borrow::Cow;
use std::collections::BTreeMap as Map;
use tracing_collector::model::{IndexMap, TreeSpan, TreeSpans};
type Row = Map<String, Value>;
type Rows = Vec<Row>;
type Tables = Map<String, Rows>;
type TidSpans<'a> = IndexMap<(u64, u64), TreeSpans<&'a str>>;
// TODO: Make things more configurable.
/// Extract rows from tracing data. Output format is similar to NoSQL tables:
///
/// ```plain,ignore
/// {table_name: [{column_name: column_data}]}
/// ```
pub fn extract_tables(tid_spans: &TidSpans) -> Tables {
let mut tables = Map::new();
extract_dev_command_timers(&mut tables, tid_spans);
extract_other_tables(&mut tables, tid_spans);
tables
}
fn extract_dev_command_timers<'a>(tables: &mut Tables, tid_spans: &TidSpans) {
let mut row = Row::new();
let toint = |value: &str| -> Value { value.parse::<i64>().unwrap_or_default().into() };
for spans in tid_spans.values() {
for span in spans.walk() {
match span.meta.get("name").cloned().unwrap_or("") {
// By hgcommands, run.rs
"Run Command" => {
let duration = span.duration_millis().unwrap_or(0);
row.insert("command_duration".into(), duration.into());
row.insert("elapsed".into(), duration.into());
for (&name, &value) in span.meta.iter() {
match name {
"nice" => {
row.insert("nice".into(), toint(value));
}
"version" => {
// Truncate the "version" string. This matches the old telemetry behavior.
row.insert("version".into(), value[..34.min(value.len())].into());
}
"max_rss" => {
row.insert("maxrss".into(), toint(value));
}
"exit_code" => {
row.insert("errorcode".into(), toint(value));
}
"parent_names" => {
if let Ok(names) = serde_json::from_str::<Vec<String>>(value) {
let name = names.get(0).cloned().unwrap_or_default();
row.insert("parent".into(), name.into());
}
}
"args" => {
if let Ok(args) = serde_json::from_str::<Vec<String>>(value) {
// Normalize the first argument to "hg".
let mut full = "hg".to_string();
for arg in args.into_iter().skip(1) {
// Keep the length bounded.
if full.len() + arg.len() >= 256 {
full += " (truncated)";
break;
}
full += &" ";
// TODO: Use shell_escape once in tp2.
// full += &shell_escape::unix::escape(Cow::Owned(arg));
full += &arg;
}
row.insert("fullcommand".into(), full.into());
}
}
_ => {}
}
}
}
// The "log:command-row" event is used by code that wants to
// log to columns of the main command row easily.
"log:command-row" if span.is_event => {
extract_span(&span, &mut row);
}
_ => {}
}
}
}
tables.insert("dev_command_timers".into(), vec![row]);
}
fn extract_other_tables<'a>(tables: &mut Tables, tid_spans: &TidSpans) {
for spans in tid_spans.values() {
for span in spans.walk() {
match span.meta.get("name").cloned().unwrap_or("") {
// The "log:create-row" event is used by code that wants to log
// to a entire new column in a specified table.
//
// The event is expected to have "table", and the rest of the
// metadata will be logged as-is.
"log:create-row" => {
let table_name = match span.meta.get("table") {
Some(&name) => name,
None => continue,
};
let mut row = Row::new();
extract_span(span, &mut row);
tables.entry(table_name.into()).or_default().push(row);
}
_ => {}
}
}
}
}
/// Parse a span, extract its metadata to a row.
fn extract_span(span: &TreeSpan<&str>, row: &mut Row) | {
for (&name, &value) in span.meta.iter() {
match name {
// Those keys are likely generated. Skip them.
"module_path" | "cat" | "line" | "name" => {}
// Attempt to convert it to an integer (since tracing data is
// string only).
_ => match value.parse::<i64>() {
Ok(i) => {
row.insert(name.into(), i.into());
}
_ => {
row.insert(name.into(), value.into());
}
},
}
}
} | identifier_body |
|
tables.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
//! Analyze tracing data for edenscm
//!
//! This is edenscm application specific. It's not a general purposed library.
use serde_json::Value;
// use std::borrow::Cow;
use std::collections::BTreeMap as Map;
use tracing_collector::model::{IndexMap, TreeSpan, TreeSpans};
type Row = Map<String, Value>;
type Rows = Vec<Row>;
type Tables = Map<String, Rows>;
type TidSpans<'a> = IndexMap<(u64, u64), TreeSpans<&'a str>>;
// TODO: Make things more configurable.
/// Extract rows from tracing data. Output format is similar to NoSQL tables:
///
/// ```plain,ignore
/// {table_name: [{column_name: column_data}]}
/// ```
pub fn extract_tables(tid_spans: &TidSpans) -> Tables {
let mut tables = Map::new();
extract_dev_command_timers(&mut tables, tid_spans);
extract_other_tables(&mut tables, tid_spans);
tables
}
fn extract_dev_command_timers<'a>(tables: &mut Tables, tid_spans: &TidSpans) {
let mut row = Row::new();
let toint = |value: &str| -> Value { value.parse::<i64>().unwrap_or_default().into() };
for spans in tid_spans.values() {
for span in spans.walk() {
match span.meta.get("name").cloned().unwrap_or("") {
// By hgcommands, run.rs
"Run Command" => {
let duration = span.duration_millis().unwrap_or(0);
row.insert("command_duration".into(), duration.into());
row.insert("elapsed".into(), duration.into());
for (&name, &value) in span.meta.iter() {
match name {
"nice" => {
row.insert("nice".into(), toint(value));
}
"version" => {
// Truncate the "version" string. This matches the old telemetry behavior.
row.insert("version".into(), value[..34.min(value.len())].into());
}
"max_rss" => {
row.insert("maxrss".into(), toint(value));
}
"exit_code" => {
row.insert("errorcode".into(), toint(value));
}
"parent_names" => {
if let Ok(names) = serde_json::from_str::<Vec<String>>(value) {
let name = names.get(0).cloned().unwrap_or_default();
row.insert("parent".into(), name.into());
}
}
"args" => {
if let Ok(args) = serde_json::from_str::<Vec<String>>(value) {
// Normalize the first argument to "hg".
let mut full = "hg".to_string();
for arg in args.into_iter().skip(1) {
// Keep the length bounded.
if full.len() + arg.len() >= 256 {
full += " (truncated)";
break;
}
full += &" ";
// TODO: Use shell_escape once in tp2.
// full += &shell_escape::unix::escape(Cow::Owned(arg));
full += &arg;
}
row.insert("fullcommand".into(), full.into()); | }
_ => {}
}
}
}
// The "log:command-row" event is used by code that wants to
// log to columns of the main command row easily.
"log:command-row" if span.is_event => {
extract_span(&span, &mut row);
}
_ => {}
}
}
}
tables.insert("dev_command_timers".into(), vec![row]);
}
fn extract_other_tables<'a>(tables: &mut Tables, tid_spans: &TidSpans) {
for spans in tid_spans.values() {
for span in spans.walk() {
match span.meta.get("name").cloned().unwrap_or("") {
// The "log:create-row" event is used by code that wants to log
// to a entire new column in a specified table.
//
// The event is expected to have "table", and the rest of the
// metadata will be logged as-is.
"log:create-row" => {
let table_name = match span.meta.get("table") {
Some(&name) => name,
None => continue,
};
let mut row = Row::new();
extract_span(span, &mut row);
tables.entry(table_name.into()).or_default().push(row);
}
_ => {}
}
}
}
}
/// Parse a span, extract its metadata to a row.
fn extract_span(span: &TreeSpan<&str>, row: &mut Row) {
for (&name, &value) in span.meta.iter() {
match name {
// Those keys are likely generated. Skip them.
"module_path" | "cat" | "line" | "name" => {}
// Attempt to convert it to an integer (since tracing data is
// string only).
_ => match value.parse::<i64>() {
Ok(i) => {
row.insert(name.into(), i.into());
}
_ => {
row.insert(name.into(), value.into());
}
},
}
}
} | } | random_line_split |
lib.rs | #![crate_name = "graphics"]
#![deny(missing_docs)]
#![deny(missing_copy_implementations)]
//! A library for 2D graphics that works with multiple back-ends.
//!
//! Piston-Graphics was started in 2014 by Sven Nilsen to test
//! back-end agnostic design for 2D in Rust.
//! This means generic code can be reused across projects and platforms.
//!
//! ### Design
//!
//! A graphics back-end implements the `Graphics` trait.
//!
//! This library uses immediate design for flexibility.
//! By default, triangles are generated from 2D shapes and passed in chunks
//! to the back-end. This behavior can be overridden by a back-end library.
//!
//! The structures used for drawing 2D shapes contains settings for rendering.
//! The separation of shapes and settings allows more reuse and flexibility.
//! For example, to render an image, you use an `Image` object.
//!
//! The `math` module contains useful methods for 2D geometry.
//!
//! `Context` stores settings that are commonly shared when rendering.
//! It can be copied and changed without affecting any global state.
//!
//! At top level, there are some shortcut methods for common operations.
//! For example, `ellipse` is a simplified version of `Ellipse`.
extern crate vecmath;
extern crate texture;
extern crate read_color;
extern crate interpolation;
extern crate viewport;
pub use texture::ImageSize;
pub use viewport::Viewport;
pub use graphics::Graphics;
pub use source_rectangled::SourceRectangled;
pub use rectangled::Rectangled;
pub use transformed::Transformed;
pub use colored::Colored;
pub use rectangle::Rectangle;
pub use line::Line;
pub use ellipse::Ellipse;
pub use circle_arc::CircleArc;
pub use image::Image;
pub use polygon::Polygon;
pub use text::Text;
pub use context::Context;
pub use draw_state::DrawState;
/// Any triangulation method called on the back-end
/// never exceeds this number of vertices.
/// This can be used to initialize buffers that fit the chunk size.
pub static BACK_END_MAX_VERTEX_COUNT: usize = 1024;
mod graphics;
mod source_rectangled;
mod rectangled;
mod transformed;
mod colored;
pub mod draw_state;
pub mod character;
pub mod context;
pub mod color;
pub mod polygon;
pub mod line;
pub mod circle_arc;
pub mod ellipse;
pub mod rectangle;
pub mod image;
pub mod types;
pub mod modular_index;
pub mod text;
pub mod triangulation;
pub mod math;
pub mod deform;
pub mod grid;
pub mod radians {
//! Reexport radians helper trait from vecmath
pub use vecmath::traits::Radians;
}
/// Clears the screen.
pub fn clear<G>(
color: types::Color, g: &mut G
)
where G: Graphics
{
g.clear_color(color);
g.clear_stencil(0);
}
/// Draws image.
pub fn | <G>(
image: &<G as Graphics>::Texture,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Image::new().draw(image, &Default::default(), transform, g);
}
/// Draws ellipse.
pub fn ellipse<R: Into<types::Rectangle>, G>(
color: types::Color,
rect: R,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Ellipse::new(color).draw(rect, &Default::default(), transform, g);
}
/// Draws arc
pub fn circle_arc<R: Into<types::Rectangle>, G>(
color: types::Color,
radius: types::Radius,
start: types::Scalar,
end: types::Scalar,
rect: R,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
CircleArc::new(color, radius, start, end)
.draw(rect, &Default::default(), transform, g);
}
/// Draws rectangle.
pub fn rectangle<R: Into<types::Rectangle>, G>(
color: types::Color,
rect: R,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Rectangle::new(color).draw(rect, &Default::default(), transform, g);
}
/// Draws polygon.
pub fn polygon<G>(
color: types::Color,
polygon: types::Polygon,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Polygon::new(color).draw(polygon, &Default::default(), transform, g);
}
/// Draws line.
pub fn line<L: Into<types::Line>, G>(
color: types::Color,
radius: types::Radius,
line: L,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Line::new(color, radius).draw(line, &Default::default(), transform, g)
}
/// Draws text.
pub fn text<C, G>(
color: types::Color,
font_size: types::FontSize,
text: &str,
cache: &mut C,
transform: math::Matrix2d,
g: &mut G
)
where
C: character::CharacterCache,
G: Graphics<Texture = <C as character::CharacterCache>::Texture>
{
Text::new_color(color, font_size)
.draw(text, cache, &Default::default(), transform, g)
}
| image | identifier_name |
lib.rs | #![crate_name = "graphics"]
#![deny(missing_docs)]
#![deny(missing_copy_implementations)]
//! A library for 2D graphics that works with multiple back-ends.
//!
//! Piston-Graphics was started in 2014 by Sven Nilsen to test
//! back-end agnostic design for 2D in Rust.
//! This means generic code can be reused across projects and platforms.
//!
//! ### Design
//!
//! A graphics back-end implements the `Graphics` trait.
//!
//! This library uses immediate design for flexibility.
//! By default, triangles are generated from 2D shapes and passed in chunks
//! to the back-end. This behavior can be overridden by a back-end library.
//!
//! The structures used for drawing 2D shapes contains settings for rendering.
//! The separation of shapes and settings allows more reuse and flexibility.
//! For example, to render an image, you use an `Image` object.
//!
//! The `math` module contains useful methods for 2D geometry.
//!
//! `Context` stores settings that are commonly shared when rendering.
//! It can be copied and changed without affecting any global state.
//!
//! At top level, there are some shortcut methods for common operations.
//! For example, `ellipse` is a simplified version of `Ellipse`.
extern crate vecmath;
extern crate texture;
extern crate read_color;
extern crate interpolation;
extern crate viewport;
pub use texture::ImageSize;
pub use viewport::Viewport;
pub use graphics::Graphics;
pub use source_rectangled::SourceRectangled;
pub use rectangled::Rectangled;
pub use transformed::Transformed;
pub use colored::Colored;
pub use rectangle::Rectangle;
pub use line::Line;
pub use ellipse::Ellipse;
pub use circle_arc::CircleArc;
pub use image::Image;
pub use polygon::Polygon;
pub use text::Text;
pub use context::Context;
pub use draw_state::DrawState;
/// Any triangulation method called on the back-end
/// never exceeds this number of vertices.
/// This can be used to initialize buffers that fit the chunk size.
pub static BACK_END_MAX_VERTEX_COUNT: usize = 1024;
mod graphics;
mod source_rectangled;
mod rectangled;
mod transformed;
mod colored;
pub mod draw_state;
pub mod character;
pub mod context;
pub mod color;
pub mod polygon;
pub mod line;
pub mod circle_arc;
pub mod ellipse;
pub mod rectangle;
pub mod image;
pub mod types;
pub mod modular_index;
pub mod text;
pub mod triangulation;
pub mod math;
pub mod deform;
pub mod grid;
pub mod radians {
//! Reexport radians helper trait from vecmath
pub use vecmath::traits::Radians;
}
/// Clears the screen.
pub fn clear<G>(
color: types::Color, g: &mut G
)
where G: Graphics
{
g.clear_color(color);
g.clear_stencil(0);
}
/// Draws image.
pub fn image<G>(
image: &<G as Graphics>::Texture,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Image::new().draw(image, &Default::default(), transform, g);
}
/// Draws ellipse.
pub fn ellipse<R: Into<types::Rectangle>, G>(
color: types::Color,
rect: R,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Ellipse::new(color).draw(rect, &Default::default(), transform, g);
}
/// Draws arc
pub fn circle_arc<R: Into<types::Rectangle>, G>(
color: types::Color,
radius: types::Radius,
start: types::Scalar,
end: types::Scalar,
rect: R,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
CircleArc::new(color, radius, start, end)
.draw(rect, &Default::default(), transform, g);
}
/// Draws rectangle.
pub fn rectangle<R: Into<types::Rectangle>, G>(
color: types::Color,
rect: R,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Rectangle::new(color).draw(rect, &Default::default(), transform, g);
}
/// Draws polygon.
pub fn polygon<G>(
color: types::Color,
polygon: types::Polygon,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Polygon::new(color).draw(polygon, &Default::default(), transform, g);
}
/// Draws line.
pub fn line<L: Into<types::Line>, G>(
color: types::Color,
radius: types::Radius,
line: L,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Line::new(color, radius).draw(line, &Default::default(), transform, g)
}
/// Draws text.
pub fn text<C, G>(
color: types::Color,
font_size: types::FontSize,
text: &str,
cache: &mut C,
transform: math::Matrix2d,
g: &mut G
)
where
C: character::CharacterCache,
G: Graphics<Texture = <C as character::CharacterCache>::Texture>
| {
Text::new_color(color, font_size)
.draw(text, cache, &Default::default(), transform, g)
} | identifier_body |
|
lib.rs | #![crate_name = "graphics"]
#![deny(missing_docs)]
#![deny(missing_copy_implementations)]
//! A library for 2D graphics that works with multiple back-ends.
//!
//! Piston-Graphics was started in 2014 by Sven Nilsen to test
//! back-end agnostic design for 2D in Rust.
//! This means generic code can be reused across projects and platforms.
//!
//! ### Design
//!
//! A graphics back-end implements the `Graphics` trait.
//!
//! This library uses immediate design for flexibility.
//! By default, triangles are generated from 2D shapes and passed in chunks
//! to the back-end. This behavior can be overridden by a back-end library.
//!
//! The structures used for drawing 2D shapes contains settings for rendering.
//! The separation of shapes and settings allows more reuse and flexibility.
//! For example, to render an image, you use an `Image` object.
//!
//! The `math` module contains useful methods for 2D geometry.
//!
//! `Context` stores settings that are commonly shared when rendering.
//! It can be copied and changed without affecting any global state.
//!
//! At top level, there are some shortcut methods for common operations.
//! For example, `ellipse` is a simplified version of `Ellipse`.
extern crate vecmath;
extern crate texture;
extern crate read_color;
extern crate interpolation;
extern crate viewport;
pub use texture::ImageSize;
pub use viewport::Viewport;
pub use graphics::Graphics;
pub use source_rectangled::SourceRectangled;
pub use rectangled::Rectangled;
pub use transformed::Transformed;
pub use colored::Colored;
pub use rectangle::Rectangle;
pub use line::Line;
pub use ellipse::Ellipse;
pub use circle_arc::CircleArc;
pub use image::Image;
pub use polygon::Polygon;
pub use text::Text;
pub use context::Context;
pub use draw_state::DrawState;
/// Any triangulation method called on the back-end
/// never exceeds this number of vertices.
/// This can be used to initialize buffers that fit the chunk size.
pub static BACK_END_MAX_VERTEX_COUNT: usize = 1024;
mod graphics;
mod source_rectangled;
mod rectangled;
mod transformed;
mod colored;
pub mod draw_state;
pub mod character;
pub mod context;
pub mod color;
pub mod polygon;
pub mod line;
pub mod circle_arc;
pub mod ellipse;
pub mod rectangle; | pub mod text;
pub mod triangulation;
pub mod math;
pub mod deform;
pub mod grid;
pub mod radians {
//! Reexport radians helper trait from vecmath
pub use vecmath::traits::Radians;
}
/// Clears the screen.
pub fn clear<G>(
color: types::Color, g: &mut G
)
where G: Graphics
{
g.clear_color(color);
g.clear_stencil(0);
}
/// Draws image.
pub fn image<G>(
image: &<G as Graphics>::Texture,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Image::new().draw(image, &Default::default(), transform, g);
}
/// Draws ellipse.
pub fn ellipse<R: Into<types::Rectangle>, G>(
color: types::Color,
rect: R,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Ellipse::new(color).draw(rect, &Default::default(), transform, g);
}
/// Draws arc
pub fn circle_arc<R: Into<types::Rectangle>, G>(
color: types::Color,
radius: types::Radius,
start: types::Scalar,
end: types::Scalar,
rect: R,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
CircleArc::new(color, radius, start, end)
.draw(rect, &Default::default(), transform, g);
}
/// Draws rectangle.
pub fn rectangle<R: Into<types::Rectangle>, G>(
color: types::Color,
rect: R,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Rectangle::new(color).draw(rect, &Default::default(), transform, g);
}
/// Draws polygon.
pub fn polygon<G>(
color: types::Color,
polygon: types::Polygon,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Polygon::new(color).draw(polygon, &Default::default(), transform, g);
}
/// Draws line.
pub fn line<L: Into<types::Line>, G>(
color: types::Color,
radius: types::Radius,
line: L,
transform: math::Matrix2d,
g: &mut G
)
where G: Graphics
{
Line::new(color, radius).draw(line, &Default::default(), transform, g)
}
/// Draws text.
pub fn text<C, G>(
color: types::Color,
font_size: types::FontSize,
text: &str,
cache: &mut C,
transform: math::Matrix2d,
g: &mut G
)
where
C: character::CharacterCache,
G: Graphics<Texture = <C as character::CharacterCache>::Texture>
{
Text::new_color(color, font_size)
.draw(text, cache, &Default::default(), transform, g)
} | pub mod image;
pub mod types;
pub mod modular_index; | random_line_split |
cluster.rs | //! This modules contains an implementation of [r2d2](https://github.com/sfackler/r2d2)
//! functionality of connection pools. To get more details about creating r2d2 pools
//! please refer to original documentation.
use std::iter::Iterator;
use query::QueryBuilder;
use client::{CDRS, Session};
use error::{Error as CError, Result as CResult};
use authenticators::Authenticator;
use compression::Compression;
use r2d2;
use transport::CDRSTransport;
use rand;
use std::sync::atomic::{AtomicUsize, Ordering};
/// Load balancing strategy
#[derive(PartialEq)]
pub enum LoadBalancingStrategy {
/// Round Robin balancing strategy
RoundRobin,
/// Random balancing strategy
Random,
}
impl LoadBalancingStrategy {
/// Returns next value for selected load balancing strategy
pub fn next<'a, N>(&'a self, nodes: &'a Vec<N>, i: usize) -> Option<&N> {
match *self {
LoadBalancingStrategy::Random => nodes.get(self.rnd_idx((0, Some(nodes.len())))),
LoadBalancingStrategy::RoundRobin => {
let mut cycle = nodes.iter().cycle().skip(i);
cycle.next()
}
}
}
/// Returns random number from a range
fn rnd_idx(&self, bounds: (usize, Option<usize>)) -> usize {
let min = bounds.0;
let max = bounds.1.unwrap_or(u8::max_value() as usize);
let rnd = rand::random::<usize>();
rnd % (max - min) + min
}
}
/// Load balancer
///
/// #Example
///
/// ```no_run
/// use cdrs::cluster::{LoadBalancingStrategy, LoadBalancer};
/// use cdrs::transport::TransportTcp;
/// let transports = vec![TransportTcp::new("127.0.0.1:9042"), TransportTcp::new("127.0.0.1:9042")];
/// let load_balancer = LoadBalancer::new(transports, LoadBalancingStrategy::RoundRobin);
/// let node = load_balancer.next().unwrap();
/// ```
pub struct LoadBalancer<T> {
strategy: LoadBalancingStrategy,
nodes: Vec<T>,
i: AtomicUsize,
}
impl<T> LoadBalancer<T> {
/// Factory function which creates new `LoadBalancer` with provided strategy.
pub fn new(nodes: Vec<T>, strategy: LoadBalancingStrategy) -> LoadBalancer<T> {
LoadBalancer {
nodes: nodes,
strategy: strategy,
i: AtomicUsize::new(0),
}
}
/// Returns next node basing on provided strategy.
pub fn next(&self) -> Option<&T> {
let next = self.strategy
.next(&self.nodes, self.i.load(Ordering::Relaxed) as usize);
if self.strategy == LoadBalancingStrategy::RoundRobin {
self.i.fetch_add(1, Ordering::Relaxed);
// prevent overflow
let i = self.i.load(Ordering::Relaxed);
match i.checked_rem(self.nodes.len() as usize) {
Some(rem) => self.i.store(rem, Ordering::Relaxed),
None => return None,
}
}
next
}
}
/// [r2d2](https://github.com/sfackler/r2d2) `ManageConnection`.
pub struct ClusterConnectionManager<T, X> {
load_balancer: LoadBalancer<X>,
authenticator: T,
compression: Compression,
}
impl<T, X> ClusterConnectionManager<T, X>
where T: Authenticator + Send + Sync +'static
{
/// Creates a new instance of `ConnectionManager`.
/// It requires transport, authenticator and compression as inputs.
pub fn new(load_balancer: LoadBalancer<X>,
authenticator: T,
compression: Compression)
-> ClusterConnectionManager<T, X> {
ClusterConnectionManager {
load_balancer: load_balancer,
authenticator: authenticator,
compression: compression,
}
}
}
impl<T: Authenticator + Send + Sync +'static,
X: CDRSTransport + Send + Sync +'static> r2d2::ManageConnection
for ClusterConnectionManager<T, X> {
type Connection = Session<T, X>;
type Error = CError;
fn connect(&self) -> Result<Self::Connection, Self::Error> {
let transport_res: CResult<X> = self.load_balancer
.next()
.ok_or_else(|| "Cannot get next node".into())
.and_then(|x| x.try_clone().map_err(|e| e.into()));
let transport = try!(transport_res);
let compression = self.compression;
let cdrs = CDRS::new(transport, self.authenticator.clone());
cdrs.start(compression)
}
fn | (&self, connection: &mut Self::Connection) -> Result<(), Self::Error> {
let query = QueryBuilder::new("SELECT * FROM system.peers;").finalize();
connection.query(query, false, false).map(|_| ())
}
fn has_broken(&self, _connection: &mut Self::Connection) -> bool {
false
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn round_robin() {
let nodes = vec!["a", "b", "c"];
let nodes_c = nodes.clone();
let load_balancer = LoadBalancer::new(nodes, LoadBalancingStrategy::RoundRobin);
for i in 0..10 {
assert_eq!(&nodes_c[i % 3], load_balancer.next().unwrap());
}
}
#[test]
fn lb_random() {
let nodes = vec!["a", "b", "c", "d", "e", "f", "g"];
let load_balancer = LoadBalancer::new(nodes, LoadBalancingStrategy::Random);
for _ in 0..100 {
let s = load_balancer.next();
assert!(s.is_some());
}
}
}
| is_valid | identifier_name |
cluster.rs | //! This modules contains an implementation of [r2d2](https://github.com/sfackler/r2d2)
//! functionality of connection pools. To get more details about creating r2d2 pools
//! please refer to original documentation.
use std::iter::Iterator;
use query::QueryBuilder;
use client::{CDRS, Session};
use error::{Error as CError, Result as CResult};
use authenticators::Authenticator;
use compression::Compression;
use r2d2;
use transport::CDRSTransport;
use rand;
use std::sync::atomic::{AtomicUsize, Ordering};
/// Load balancing strategy
#[derive(PartialEq)]
pub enum LoadBalancingStrategy {
/// Round Robin balancing strategy
RoundRobin,
/// Random balancing strategy
Random,
}
impl LoadBalancingStrategy {
/// Returns next value for selected load balancing strategy
pub fn next<'a, N>(&'a self, nodes: &'a Vec<N>, i: usize) -> Option<&N> {
match *self {
LoadBalancingStrategy::Random => nodes.get(self.rnd_idx((0, Some(nodes.len())))),
LoadBalancingStrategy::RoundRobin => {
let mut cycle = nodes.iter().cycle().skip(i);
cycle.next()
}
}
}
/// Returns random number from a range
fn rnd_idx(&self, bounds: (usize, Option<usize>)) -> usize {
let min = bounds.0;
let max = bounds.1.unwrap_or(u8::max_value() as usize);
let rnd = rand::random::<usize>();
rnd % (max - min) + min
}
}
/// Load balancer
///
/// #Example
///
/// ```no_run
/// use cdrs::cluster::{LoadBalancingStrategy, LoadBalancer};
/// use cdrs::transport::TransportTcp;
/// let transports = vec![TransportTcp::new("127.0.0.1:9042"), TransportTcp::new("127.0.0.1:9042")];
/// let load_balancer = LoadBalancer::new(transports, LoadBalancingStrategy::RoundRobin);
/// let node = load_balancer.next().unwrap();
/// ```
pub struct LoadBalancer<T> {
strategy: LoadBalancingStrategy,
nodes: Vec<T>,
i: AtomicUsize,
}
impl<T> LoadBalancer<T> {
/// Factory function which creates new `LoadBalancer` with provided strategy.
pub fn new(nodes: Vec<T>, strategy: LoadBalancingStrategy) -> LoadBalancer<T> {
LoadBalancer {
nodes: nodes,
strategy: strategy,
i: AtomicUsize::new(0),
}
}
/// Returns next node basing on provided strategy.
pub fn next(&self) -> Option<&T> {
let next = self.strategy
.next(&self.nodes, self.i.load(Ordering::Relaxed) as usize);
if self.strategy == LoadBalancingStrategy::RoundRobin {
self.i.fetch_add(1, Ordering::Relaxed);
// prevent overflow
let i = self.i.load(Ordering::Relaxed);
match i.checked_rem(self.nodes.len() as usize) {
Some(rem) => self.i.store(rem, Ordering::Relaxed),
None => return None,
}
}
next
}
}
/// [r2d2](https://github.com/sfackler/r2d2) `ManageConnection`.
pub struct ClusterConnectionManager<T, X> {
load_balancer: LoadBalancer<X>,
authenticator: T,
compression: Compression,
}
impl<T, X> ClusterConnectionManager<T, X>
where T: Authenticator + Send + Sync +'static
{
/// Creates a new instance of `ConnectionManager`.
/// It requires transport, authenticator and compression as inputs.
pub fn new(load_balancer: LoadBalancer<X>,
authenticator: T,
compression: Compression)
-> ClusterConnectionManager<T, X> |
}
impl<T: Authenticator + Send + Sync +'static,
X: CDRSTransport + Send + Sync +'static> r2d2::ManageConnection
for ClusterConnectionManager<T, X> {
type Connection = Session<T, X>;
type Error = CError;
fn connect(&self) -> Result<Self::Connection, Self::Error> {
let transport_res: CResult<X> = self.load_balancer
.next()
.ok_or_else(|| "Cannot get next node".into())
.and_then(|x| x.try_clone().map_err(|e| e.into()));
let transport = try!(transport_res);
let compression = self.compression;
let cdrs = CDRS::new(transport, self.authenticator.clone());
cdrs.start(compression)
}
fn is_valid(&self, connection: &mut Self::Connection) -> Result<(), Self::Error> {
let query = QueryBuilder::new("SELECT * FROM system.peers;").finalize();
connection.query(query, false, false).map(|_| ())
}
fn has_broken(&self, _connection: &mut Self::Connection) -> bool {
false
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn round_robin() {
let nodes = vec!["a", "b", "c"];
let nodes_c = nodes.clone();
let load_balancer = LoadBalancer::new(nodes, LoadBalancingStrategy::RoundRobin);
for i in 0..10 {
assert_eq!(&nodes_c[i % 3], load_balancer.next().unwrap());
}
}
#[test]
fn lb_random() {
let nodes = vec!["a", "b", "c", "d", "e", "f", "g"];
let load_balancer = LoadBalancer::new(nodes, LoadBalancingStrategy::Random);
for _ in 0..100 {
let s = load_balancer.next();
assert!(s.is_some());
}
}
}
| {
ClusterConnectionManager {
load_balancer: load_balancer,
authenticator: authenticator,
compression: compression,
}
} | identifier_body |
cluster.rs | //! This modules contains an implementation of [r2d2](https://github.com/sfackler/r2d2)
//! functionality of connection pools. To get more details about creating r2d2 pools
//! please refer to original documentation.
use std::iter::Iterator;
use query::QueryBuilder;
use client::{CDRS, Session};
use error::{Error as CError, Result as CResult};
use authenticators::Authenticator;
use compression::Compression;
use r2d2;
use transport::CDRSTransport;
use rand;
use std::sync::atomic::{AtomicUsize, Ordering};
/// Load balancing strategy
#[derive(PartialEq)]
pub enum LoadBalancingStrategy {
/// Round Robin balancing strategy
RoundRobin,
/// Random balancing strategy
Random,
}
impl LoadBalancingStrategy {
/// Returns next value for selected load balancing strategy
pub fn next<'a, N>(&'a self, nodes: &'a Vec<N>, i: usize) -> Option<&N> {
match *self {
LoadBalancingStrategy::Random => nodes.get(self.rnd_idx((0, Some(nodes.len())))),
LoadBalancingStrategy::RoundRobin => {
let mut cycle = nodes.iter().cycle().skip(i);
cycle.next()
}
}
}
/// Returns random number from a range
fn rnd_idx(&self, bounds: (usize, Option<usize>)) -> usize {
let min = bounds.0;
let max = bounds.1.unwrap_or(u8::max_value() as usize);
let rnd = rand::random::<usize>();
rnd % (max - min) + min
}
}
/// Load balancer
///
/// #Example
///
/// ```no_run
/// use cdrs::cluster::{LoadBalancingStrategy, LoadBalancer};
/// use cdrs::transport::TransportTcp;
/// let transports = vec![TransportTcp::new("127.0.0.1:9042"), TransportTcp::new("127.0.0.1:9042")];
/// let load_balancer = LoadBalancer::new(transports, LoadBalancingStrategy::RoundRobin);
/// let node = load_balancer.next().unwrap();
/// ```
pub struct LoadBalancer<T> {
strategy: LoadBalancingStrategy,
nodes: Vec<T>,
i: AtomicUsize,
}
impl<T> LoadBalancer<T> {
/// Factory function which creates new `LoadBalancer` with provided strategy.
pub fn new(nodes: Vec<T>, strategy: LoadBalancingStrategy) -> LoadBalancer<T> {
LoadBalancer {
nodes: nodes,
strategy: strategy,
i: AtomicUsize::new(0),
}
}
/// Returns next node basing on provided strategy.
pub fn next(&self) -> Option<&T> {
let next = self.strategy
.next(&self.nodes, self.i.load(Ordering::Relaxed) as usize);
if self.strategy == LoadBalancingStrategy::RoundRobin {
self.i.fetch_add(1, Ordering::Relaxed);
// prevent overflow
let i = self.i.load(Ordering::Relaxed);
match i.checked_rem(self.nodes.len() as usize) {
Some(rem) => self.i.store(rem, Ordering::Relaxed),
None => return None,
}
}
next
}
}
/// [r2d2](https://github.com/sfackler/r2d2) `ManageConnection`.
pub struct ClusterConnectionManager<T, X> {
load_balancer: LoadBalancer<X>,
authenticator: T,
compression: Compression,
}
impl<T, X> ClusterConnectionManager<T, X>
where T: Authenticator + Send + Sync +'static
{
/// Creates a new instance of `ConnectionManager`.
/// It requires transport, authenticator and compression as inputs.
pub fn new(load_balancer: LoadBalancer<X>,
authenticator: T,
compression: Compression)
-> ClusterConnectionManager<T, X> {
ClusterConnectionManager {
load_balancer: load_balancer,
authenticator: authenticator,
compression: compression,
}
} | X: CDRSTransport + Send + Sync +'static> r2d2::ManageConnection
for ClusterConnectionManager<T, X> {
type Connection = Session<T, X>;
type Error = CError;
fn connect(&self) -> Result<Self::Connection, Self::Error> {
let transport_res: CResult<X> = self.load_balancer
.next()
.ok_or_else(|| "Cannot get next node".into())
.and_then(|x| x.try_clone().map_err(|e| e.into()));
let transport = try!(transport_res);
let compression = self.compression;
let cdrs = CDRS::new(transport, self.authenticator.clone());
cdrs.start(compression)
}
fn is_valid(&self, connection: &mut Self::Connection) -> Result<(), Self::Error> {
let query = QueryBuilder::new("SELECT * FROM system.peers;").finalize();
connection.query(query, false, false).map(|_| ())
}
fn has_broken(&self, _connection: &mut Self::Connection) -> bool {
false
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn round_robin() {
let nodes = vec!["a", "b", "c"];
let nodes_c = nodes.clone();
let load_balancer = LoadBalancer::new(nodes, LoadBalancingStrategy::RoundRobin);
for i in 0..10 {
assert_eq!(&nodes_c[i % 3], load_balancer.next().unwrap());
}
}
#[test]
fn lb_random() {
let nodes = vec!["a", "b", "c", "d", "e", "f", "g"];
let load_balancer = LoadBalancer::new(nodes, LoadBalancingStrategy::Random);
for _ in 0..100 {
let s = load_balancer.next();
assert!(s.is_some());
}
}
} | }
impl<T: Authenticator + Send + Sync + 'static, | random_line_split |
str.rs | s: &str) -> Result<ByteString, ()> {
Ok(ByteString::new(s.to_owned().into_bytes()))
}
}
// Allow a `ByteString` to be used wherever a byte slice is expected.
impl ops::Deref for ByteString {
    type Target = [u8];
    fn deref(&self) -> &[u8] {
        self.0.as_slice()
    }
}
/// A string that is constructed from a UCS-2 buffer by replacing invalid code
/// points with the replacement character.
///
/// This corresponds to the [`USVString`](https://heycam.github.io/webidl/#idl-USVString)
/// type in WebIDL: its value space contains no unpaired surrogates, so it can
/// always be represented losslessly as a Rust `String` (unlike `DOMString`).
#[derive(Clone, Default, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd)]
pub struct USVString(pub String);
// Let a `USVString` be borrowed as `&str`, e.g. for keyed map lookups.
impl Borrow<str> for USVString {
    #[inline]
    fn borrow(&self) -> &str {
        self.0.as_str()
    }
}
// Dereference a `USVString` to the underlying string slice.
impl Deref for USVString {
    type Target = str;
    #[inline]
    fn deref(&self) -> &str {
        self.0.as_str()
    }
}
// Mutable counterpart of the `Deref` impl above (in-place `str` mutation).
impl DerefMut for USVString {
    #[inline]
    fn deref_mut(&mut self) -> &mut str {
        self.0.as_mut_str()
    }
}
// Cheap reference conversion for APIs that take `impl AsRef<str>`.
impl AsRef<str> for USVString {
    fn as_ref(&self) -> &str {
        self.0.as_str()
    }
}
// Delegate to the inner `String`'s `Display`, which honours padding/width
// flags (unlike a raw `write_str`).
impl fmt::Display for USVString {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&self.0, f)
    }
}
// Compare directly against a string slice via the inner `String`.
impl PartialEq<str> for USVString {
    fn eq(&self, other: &str) -> bool {
        self.0 == other
    }
}
impl<'a> PartialEq<&'a str> for USVString {
fn eq(&self, other: &&'a str) -> bool {
&**self == *other
}
}
// Take ownership of a `String` without copying.
impl From<String> for USVString {
    fn from(s: String) -> USVString {
        USVString(s)
    }
}
/// Returns whether `s` is a `token`, as defined by
/// [RFC 2616](http://tools.ietf.org/html/rfc2616#page-17).
///
/// A token is one or more `CHAR` octets, excluding CTLs and separators.
pub fn is_token(s: &[u8]) -> bool {
    if s.is_empty() {
        return false; // A token must be at least a single character
    }
    s.iter().all(|&x| {
        // http://tools.ietf.org/html/rfc2616#section-2.2
        match x {
            // CTLs; this range also covers HT (0x09), which is a separator.
            0..=31 | 127 => false,
            // separators
            b'(' | b')' | b'<' | b'>' | b'@' | b',' | b';' | b':' | b'\\' | b'"' | b'/' |
            b'[' | b']' | b'?' | b'=' | b'{' | b'}' | b' ' => false,
            // non-CHAR octets (CHAR is 0..=127)
            x if x > 127 => false,
            _ => true,
        }
    })
}
/// A DOMString.
///
/// This type corresponds to the [`DOMString`](idl) type in WebIDL.
///
/// [idl]: https://heycam.github.io/webidl/#idl-DOMString
///
/// Conceptually, a DOMString has the same value space as a JavaScript String,
/// i.e., an array of 16-bit *code units* representing UTF-16, potentially with
/// unpaired surrogates present (also sometimes called WTF-16).
///
/// Currently, this type stores a Rust `String`, in order to avoid issues when
/// integrating with the rest of the Rust ecosystem and even the rest of the
/// browser itself.
///
/// However, Rust `String`s are guaranteed to be valid UTF-8, and as such have
/// a *smaller value space* than WTF-16 (i.e., some JavaScript String values
/// can not be represented as a Rust `String`). This introduces the question of
/// what to do with values being passed from JavaScript to Rust that contain
/// unpaired surrogates.
///
/// The hypothesis is that it does not matter much how exactly those values are
/// transformed, because passing unpaired surrogates into the DOM is very rare.
/// In order to test this hypothesis, Servo will panic when encountering any
/// unpaired surrogates on conversion to `DOMString` by default. (The command
/// line option `-Z replace-surrogates` instead causes Servo to replace the
/// unpaired surrogate by a U+FFFD replacement character.)
///
/// Currently, the lack of crash reports about this issue provides some
/// evidence to support the hypothesis. This evidence will hopefully be used to
/// convince other browser vendors that it would be safe to replace unpaired
/// surrogates at the boundary between JavaScript and native code. (This would
/// unify the `DOMString` and `USVString` types, both in the WebIDL standard
/// and in Servo.)
///
/// This type is currently `!Send`, in order to help with an independent
/// experiment to store `JSString`s rather than Rust `String`s.
#[derive(Clone, Debug, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd)]
pub struct DOMS | ing, PhantomData<*const ()>);
impl DOMString {
/// Creates a new `DOMString`.
pub fn new() -> DOMString {
DOMString(String::new(), PhantomData)
}
    /// Creates a new `DOMString` from a `String`.
    ///
    /// Takes ownership of `s`; no copy of the contents is made.
    pub fn from_string(s: String) -> DOMString {
        DOMString(s, PhantomData)
    }
/// Appends a given string slice onto the end of this String.
pub fn push_str(&mut self, string: &str) {
self.0.push_str(string)
}
/// Clears this `DOMString`, removing all contents.
pub fn clear(&mut self) {
self.0.clear()
}
    /// Shortens this String to the specified length.
    ///
    /// A no-op when `new_len` is greater than the current length; panics if
    /// `new_len` does not lie on a `char` boundary (see [`String::truncate`]).
    pub fn truncate(&mut self, new_len: usize) {
        self.0.truncate(new_len);
    }
/// Removes newline characters according to <https://infra.spec.whatwg.org/#strip-newlines>.
pub fn strip_newlines(&mut self) {
self.0.retain(|c| c!= '\r' && c!= '\n');
}
/// Removes leading and trailing ASCII whitespaces according to
/// <https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace>.
pub fn strip_leading_and_trailing_ascii_whitespace(&mut self) {
if self.0.len() == 0 {
return;
}
let trailing_whitespace_len = self
.0
.trim_end_matches(|ref c| char::is_ascii_whitespace(c))
.len();
self.0.truncate(trailing_whitespace_len);
if self.0.is_empty() {
return;
}
let first_non_whitespace = self.0.find(|ref c|!char::is_ascii_whitespace(c)).unwrap();
let _ = self.0.replace_range(0..first_non_whitespace, "");
}
    /// Validates this `DOMString` is a time string according to
    /// <https://html.spec.whatwg.org/multipage/#valid-time-string>.
    ///
    /// Accepted shapes: "HH:MM", "HH:MM:SS" and "HH:MM:SS.F{1,3}".
    pub fn is_valid_time_string(&self) -> bool {
        // States of a hand-rolled DFA over the input characters; each state
        // names the character class expected next. `Error` is a sink state.
        enum State {
            HourHigh,
            HourLow09,
            HourLow03,
            MinuteColon,
            MinuteHigh,
            MinuteLow,
            SecondColon,
            SecondHigh,
            SecondLow,
            MilliStop,
            MilliHigh,
            MilliMiddle,
            MilliLow,
            Done,
            Error,
        }
        // Transition helper: advance to `next` when the current character was
        // valid, otherwise fall into the sink `Error` state.
        let next_state = |valid: bool, next: State| -> State {
            if valid {
                next
            } else {
                State::Error
            }
        };
        let state = self.chars().fold(State::HourHigh, |state, c| {
            match state {
                // Step 1 "HH"
                State::HourHigh => match c {
                    '0' | '1' => State::HourLow09,
                    '2' => State::HourLow03,
                    _ => State::Error,
                },
                // `is_digit(n)` is used as a compact range test here:
                // `is_digit(10)` accepts '0'..='9', `is_digit(4)` accepts
                // '0'..='3' (so hours starting with '2' are limited to 20-23).
                State::HourLow09 => next_state(c.is_digit(10), State::MinuteColon),
                State::HourLow03 => next_state(c.is_digit(4), State::MinuteColon),
                // Step 2 ":"
                State::MinuteColon => next_state(c == ':', State::MinuteHigh),
                // Step 3 "mm" — `is_digit(6)` accepts '0'..='5' (tens digit).
                State::MinuteHigh => next_state(c.is_digit(6), State::MinuteLow),
                State::MinuteLow => next_state(c.is_digit(10), State::SecondColon),
                // Step 4.1 ":"
                State::SecondColon => next_state(c == ':', State::SecondHigh),
                // Step 4.2 "ss"
                State::SecondHigh => next_state(c.is_digit(6), State::SecondLow),
                State::SecondLow => next_state(c.is_digit(10), State::MilliStop),
                // Step 4.3.1 "."
                State::MilliStop => next_state(c == '.', State::MilliHigh),
                // Step 4.3.2 "SSS"
                State::MilliHigh => next_state(c.is_digit(10), State::MilliMiddle),
                State::MilliMiddle => next_state(c.is_digit(10), State::MilliLow),
                State::MilliLow => next_state(c.is_digit(10), State::Done),
                // `Done` and `Error` absorb any further input as invalid.
                _ => State::Error,
            }
        });
        // Accepting states: fully consumed, or stopped at an optional suffix.
        match state {
            State::Done |
            // Step 4 (optional)
            State::SecondColon |
            // Step 4.3 (optional)
            State::MilliStop |
            // Step 4.3.2 (only 1 digit required)
            State::MilliMiddle | State::MilliLow => true,
            _ => false
        }
    }
    /// Whether this string is a valid date string ("YYYY-MM-DD").
    ///
    /// YYYY must be four or more digits; MM and DD must each be two digits.
    /// <https://html.spec.whatwg.org/multipage/#valid-date-string>
    pub fn is_valid_date_string(&self) -> bool {
        self.parse_date_string().is_ok()
    }
/// https://html.spec.whatwg.org/multipage/#parse-a-date-string
pub fn parse_date_string(&self) -> Result<(i32, u32, u32), ()> {
let value = &self.0;
// Step 1, 2, 3
let (year_int, month_int, day_int) = parse_date_component(value)?;
// Step 4
if value.split('-').nth(3).is_some() {
return Err(());
}
// Step 5, 6
Ok((year_int, month_int, day_int))
}
/// https://html.spec.whatwg.org/multipage/#parse-a-time-string
pub fn parse_time_string(&self) -> Result<(u32, u32, f64), ()> {
let value = &self.0;
// Step 1, 2, 3
let (hour_int, minute_int, second_float) = parse_time_component(value)?;
// Step 4
if value.split(':').nth(3).is_some() {
return Err(());
}
// Step 5, 6
Ok((hour_int, minute_int, second_float))
}
/// A valid month string should be "YYYY-MM"
/// YYYY must be four or more digits, MM both must be two digits
/// https://html.spec.whatwg.org/multipage/#valid-month-string
pub fn is_valid_month_string(&self) -> bool {
self.parse_month_string().is_ok()
}
/// https://html.spec.whatwg.org/multipage/#parse-a-month-string
pub fn parse_month_string(&self) -> Result<(i32, u32), ()> {
let value = &self;
// Step 1, 2, 3
let (year_int, month_int) = parse_month_component(value)?;
// Step 4
if value.split("-").nth(2).is_some() {
return Err(());
}
// Step 5
Ok((year_int, month_int))
}
/// A valid week string should be like {YYYY}-W{WW}, such as "2017-W52"
/// YYYY must be four or more digits, WW both must be two digits
/// https://html.spec.whatwg.org/multipage/#valid-week-string
pub fn is_valid_week_string(&self) -> bool {
self.parse_week_string().is_ok()
}
/// https://html.spec.whatwg.org/multipage/#parse-a-week-string
pub fn parse_week_string(&self) -> Result<(i32, u32), ()> {
let value = &self.0;
// Step 1, 2, 3
let mut iterator = value.split('-');
let year = iterator.next().ok_or(())?;
// Step 4
let year_int = year.parse::<i32>().map_err(|_| ())?;
if year.len() < 4 || year_int == 0 {
return Err(());
}
// Step 5, 6
let week = iterator.next().ok_or(())?;
let (week_first, week_last) = week.split_at(1);
if week_first!= "W" {
return Err(());
}
// Step 7
let week_int = week_last.parse::<u32>().map_err(|_| ())?;
if week_last.len()!= 2 {
return Err(());
}
// Step 8
let max_week = max_week_in_year(year_int);
// Step 9
if week_int < 1 || week_int > max_week {
return Err(());
}
// Step 10
if iterator.next().is_some() {
return Err(());
}
// Step 11
Ok((year_int, week_int))
}
    /// Checks this string against the spec grammar for floating-point
    /// numbers.
    /// https://html.spec.whatwg.org/multipage/#valid-floating-point-number
    pub fn is_valid_floating_point_number_string(&self) -> bool {
        // The regex is compiled once and cached for the process lifetime.
        lazy_static! {
            static ref RE: Regex =
                Regex::new(r"^-?(?:\d+\.\d+|\d+|\.\d+)(?:(e|E)(\+|\-)?\d+)?$").unwrap();
        }
        // The regex enforces the textual grammar; the parse double-checks
        // that the value is acceptable (parse_floating_point_number rejects
        // NaN and infinities).
        RE.is_match(&self.0) && self.parse_floating_point_number().is_ok()
    }
    /// https://html.spec.whatwg.org/multipage/#rules-for-parsing-floating-point-number-values
    ///
    /// Returns the parsed `f64`, or `Err(())` when the contents are not a
    /// finite number in the accepted shape.
    pub fn parse_floating_point_number(&self) -> Result<f64, ()> {
        // Steps 15-16 are telling us things about IEEE rounding modes
        // for floating-point significands; this code assumes the Rust
        // compiler already matches them in any cases where
        // that actually matters. They are not
        // related to f64::round(), which is for rounding to integers.
        let input = &self.0;
        match input.trim().parse::<f64>() {
            Ok(val)
                if!(
                    // A valid number is the same as what rust considers to be valid,
                    // except for +1., NaN, and Infinity.
                    // NOTE(review): these two string checks inspect the
                    // *untrimmed* input while the parse uses the trimmed
                    // one, so e.g. "1. " slips past the ends_with check —
                    // confirm whether surrounding whitespace is intended to
                    // be accepted here.
                    val.is_infinite() ||
                    val.is_nan() ||
                    input.ends_with(".") ||
                    input.starts_with("+")
                ) =>
            {
                Ok(val)
            },
            _ => Err(()),
        }
    }
/// https://html.spec.whatwg.org/multipage/#best-representation-of-the-number-as-a-floating-point-number
pub fn set_best_representation_of_the_floating_point_number(&mut self) {
if let Ok(val) = self.parse_floating_point_number() {
self.0 = val.to_string();
}
}
    /// A valid normalized local date and time string should be "{date}T{time}"
    /// where date and time are both valid, and the time string must be as short as possible
    /// https://html.spec.whatwg.org/multipage/#valid-normalised-local-date-and-time-string
    ///
    /// Rewrites `self` in place into that normalized form; returns `Err(())`
    /// (leaving `self` untouched) when the contents do not parse as a local
    /// date and time.
    pub fn convert_valid_normalized_local_date_and_time_string(&mut self) -> Result<(), ()> {
        let ((year, month, day), (hour, minute, second)) =
            self.parse_local_date_and_time_string()?;
        if second == 0.0 {
            // Whole-minute times omit the seconds component entirely.
            self.0 = format!(
                "{:04}-{:02}-{:02}T{:02}:{:02}",
                year, month, day, hour, minute
            );
        } else if second < 10.0 {
            // we need exactly one leading zero on the seconds,
            // whatever their total string length might be
            self.0 = format!(
                "{:04}-{:02}-{:02}T{:02}:{:02}:0{}",
                year, month, day, hour, minute, second
            );
        } else {
            // we need no leading zeroes on the seconds
            self.0 = format!(
                "{:04}-{:02}-{:02}T{:02}:{:02}:{}",
                year, month, day, hour, minute, second
            );
        }
        Ok(())
    }
/// https://html.spec.whatwg.org/multipage/#parse-a-local-date-and-time-string
pub fn parse_local_date_and_time_string(
&self,
) -> Result<((i32, u32, u32), (u32, u32, f64)), ()> {
let value = &self;
// Step 1, 2, 4
let mut iterator = if value.contains('T') {
value.split('T')
} else {
value.split(' ')
};
// Step 3
let date = iterator.next().ok_or(())?;
let date_tuple = parse_date_component(date)?;
// Step 5
let time = iterator.next().ok_or(())?;
let time_tuple = parse_time_component(time)?;
// Step 6
if iterator.next().is_some() {
return Err(());
}
// Step 7, 8, 9
Ok((date_tuple, time_tuple))
}
}
impl Borrow<str> for DOMString {
#[inline]
fn borrow(&self) -> &str {
&self.0
}
}
impl Default for DOMString {
fn default() -> Self {
DOMString(String::new(), PhantomData)
}
}
impl Deref for DOMString {
type Target = str;
#[inline]
fn deref(&self) -> &str {
&self.0
}
}
impl DerefMut for DOMString {
#[inline]
fn deref_mut(&mut self) -> &mut str {
&mut self.0
}
}
impl AsRef<str> for DOMString {
fn as_ref(&self) -> &str {
&self.0
}
}
impl fmt::Display for DOMString {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
impl PartialEq<str> for DOMString {
fn eq(&self, other: &str) -> bool {
&**self == other
}
}
impl<'a> PartialEq<&'a str> for DOMString {
fn eq(&self, other: &&'a str) -> bool {
&**self == *other
}
}
impl From<String> for DOMString {
fn from(contents: String) -> DOMString {
DOMString(contents, PhantomData)
}
}
impl<'a> From<&'a str> for DOMString {
fn from(contents: &str) -> DOMString {
DOMString::from(String::from(contents))
}
}
impl<'a> From<Cow<'a, str>> for DOMString {
fn from(contents: Cow<'a, str>) -> DOMString {
match contents {
Cow::Owned(s) => DOMString::from(s),
Cow::Borrowed(s) => DOMString::from(s),
}
}
}
impl From<DOMString> for LocalName {
fn from(contents: DOMString) -> LocalName {
LocalName::from(contents.0)
}
}
impl From<DOMString> for Namespace {
fn from(contents: DOMString) -> Namespace {
Namespace::from(contents.0)
}
}
impl From<DOMString> for Atom {
fn from(contents: DOMString) -> Atom {
Atom::from(contents.0)
}
}
impl From<DOMString> for String {
fn from(contents: DOMString) -> String {
contents.0
}
}
impl Into<Vec<u8>> for DOMString {
fn into(self) -> Vec<u8> {
self.0.into()
}
}
impl<'a> Into<Cow<'a, str>> for DOMString {
fn into(self) -> Cow<'a, str> {
self.0.into()
}
}
impl<'a> Into<CowRcStr<'a>> for DOMString {
fn into(self) -> CowRcStr<'a> {
self.0.into()
}
}
impl Extend<char> for DOMString {
fn extend<I>(&mut self, iterable: I)
where
I: IntoIterator<Item = char>,
{
self.0.extend(iterable)
}
}
/// https://html.spec.whatwg.org/multipage/#parse-a-month-component
fn parse_month_component(value: &str) -> Result<(i32, u32), ()> {
// Step 3
let mut iterator = value.split('-');
let year = iterator.next().ok_or(())?;
let month = iterator.next().ok_or(())?;
// Step 1, 2
let year_int = year.parse::<i32>().map_err(|_| ())?;
if year.len() < 4 || year_int == 0 {
return Err(());
}
// Step 4, 5
let month_int = month.parse::<u32>().map_err(|_| ())?;
if month.len()!= 2 || month_int > 12 || month_int < 1 {
| tring(Str | identifier_name |
str.rs | str(s: &str) -> Result<ByteString, ()> {
Ok(ByteString::new(s.to_owned().into_bytes()))
}
}
impl ops::Deref for ByteString {
    type Target = [u8];
    /// Lets a `ByteString` be used anywhere a byte slice is expected.
    fn deref(&self) -> &[u8] {
        &self.0
    }
}
/// A string that is constructed from a UCS-2 buffer by replacing invalid code
/// points with the replacement character.
#[derive(Clone, Default, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd)]
pub struct USVString(pub String);
impl Borrow<str> for USVString {
#[inline]
fn borrow(&self) -> &str {
&self.0
}
}
impl Deref for USVString {
type Target = str;
#[inline]
fn deref(&self) -> &str {
&self.0
}
}
impl DerefMut for USVString {
#[inline]
fn deref_mut(&mut self) -> &mut str {
&mut self.0
}
}
impl AsRef<str> for USVString {
fn as_ref(&self) -> &str {
&self.0
}
}
impl fmt::Display for USVString {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
impl PartialEq<str> for USVString {
fn eq(&self, other: &str) -> bool {
&**self == other
}
}
impl<'a> PartialEq<&'a str> for USVString {
fn eq(&self, other: &&'a str) -> bool {
&**self == *other
}
}
impl From<String> for USVString {
fn from(contents: String) -> USVString {
USVString(contents)
}
}
/// Returns whether `s` is a `token`, as defined by
/// [RFC 2616](http://tools.ietf.org/html/rfc2616#page-17):
/// one or more CHARs that are neither control characters nor separators.
pub fn is_token(s: &[u8]) -> bool {
    if s.is_empty() {
        return false; // A token must be at least a single character
    }
    s.iter().all(|&byte| {
        // http://tools.ietf.org/html/rfc2616#section-2.2
        match byte {
            // separators
            b'(' | b')' | b'<' | b'>' | b'@' | b',' | b';' | b':' | b'\\' | b'"' | b'/' |
            b'[' | b']' | b'?' | b'=' | b'{' | b'}' | b' ' => false,
            // CTLs
            0..=31 | 127 => false,
            // non-CHARs
            b if b > 127 => false,
            _ => true,
        }
    })
}
/// A DOMString.
///
/// This type corresponds to the [`DOMString`](idl) type in WebIDL.
///
/// [idl]: https://heycam.github.io/webidl/#idl-DOMString
///
/// Conceptually, a DOMString has the same value space as a JavaScript String,
/// i.e., an array of 16-bit *code units* representing UTF-16, potentially with
/// unpaired surrogates present (also sometimes called WTF-16).
///
/// Currently, this type stores a Rust `String`, in order to avoid issues when
/// integrating with the rest of the Rust ecosystem and even the rest of the
/// browser itself.
///
/// However, Rust `String`s are guaranteed to be valid UTF-8, and as such have
/// a *smaller value space* than WTF-16 (i.e., some JavaScript String values
/// can not be represented as a Rust `String`). This introduces the question of
/// what to do with values being passed from JavaScript to Rust that contain
/// unpaired surrogates.
///
/// The hypothesis is that it does not matter much how exactly those values are
/// transformed, because passing unpaired surrogates into the DOM is very rare.
/// In order to test this hypothesis, Servo will panic when encountering any
/// unpaired surrogates on conversion to `DOMString` by default. (The command
/// line option `-Z replace-surrogates` instead causes Servo to replace the
/// unpaired surrogate by a U+FFFD replacement character.)
///
/// Currently, the lack of crash reports about this issue provides some
/// evidence to support the hypothesis. This evidence will hopefully be used to
/// convince other browser vendors that it would be safe to replace unpaired
/// surrogates at the boundary between JavaScript and native code. (This would
/// unify the `DOMString` and `USVString` types, both in the WebIDL standard
/// and in Servo.)
///
/// This type is currently `!Send`, in order to help with an independent
/// experiment to store `JSString`s rather than Rust `String`s.
#[derive(Clone, Debug, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd)]
pub struct DOMString(String, PhantomData<*const ()>);
impl DOMString {
/// Creates a new `DOMString`.
pub fn new() -> DOMString {
DOMString(String::new(), PhantomData)
}
/// Creates a new `DOMString` from a `String`. |
/// Appends a given string slice onto the end of this String.
pub fn push_str(&mut self, string: &str) {
self.0.push_str(string)
}
/// Clears this `DOMString`, removing all contents.
pub fn clear(&mut self) {
self.0.clear()
}
/// Shortens this String to the specified length.
pub fn truncate(&mut self, new_len: usize) {
self.0.truncate(new_len);
}
/// Removes newline characters according to <https://infra.spec.whatwg.org/#strip-newlines>.
pub fn strip_newlines(&mut self) {
self.0.retain(|c| c!= '\r' && c!= '\n');
}
/// Removes leading and trailing ASCII whitespaces according to
/// <https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace>.
pub fn strip_leading_and_trailing_ascii_whitespace(&mut self) {
if self.0.len() == 0 {
return;
}
let trailing_whitespace_len = self
.0
.trim_end_matches(|ref c| char::is_ascii_whitespace(c))
.len();
self.0.truncate(trailing_whitespace_len);
if self.0.is_empty() {
return;
}
let first_non_whitespace = self.0.find(|ref c|!char::is_ascii_whitespace(c)).unwrap();
let _ = self.0.replace_range(0..first_non_whitespace, "");
}
/// Validates this `DOMString` is a time string according to
/// <https://html.spec.whatwg.org/multipage/#valid-time-string>.
pub fn is_valid_time_string(&self) -> bool {
enum State {
HourHigh,
HourLow09,
HourLow03,
MinuteColon,
MinuteHigh,
MinuteLow,
SecondColon,
SecondHigh,
SecondLow,
MilliStop,
MilliHigh,
MilliMiddle,
MilliLow,
Done,
Error,
}
let next_state = |valid: bool, next: State| -> State {
if valid {
next
} else {
State::Error
}
};
let state = self.chars().fold(State::HourHigh, |state, c| {
match state {
// Step 1 "HH"
State::HourHigh => match c {
'0' | '1' => State::HourLow09,
'2' => State::HourLow03,
_ => State::Error,
},
State::HourLow09 => next_state(c.is_digit(10), State::MinuteColon),
State::HourLow03 => next_state(c.is_digit(4), State::MinuteColon),
// Step 2 ":"
State::MinuteColon => next_state(c == ':', State::MinuteHigh),
// Step 3 "mm"
State::MinuteHigh => next_state(c.is_digit(6), State::MinuteLow),
State::MinuteLow => next_state(c.is_digit(10), State::SecondColon),
// Step 4.1 ":"
State::SecondColon => next_state(c == ':', State::SecondHigh),
// Step 4.2 "ss"
State::SecondHigh => next_state(c.is_digit(6), State::SecondLow),
State::SecondLow => next_state(c.is_digit(10), State::MilliStop),
// Step 4.3.1 "."
State::MilliStop => next_state(c == '.', State::MilliHigh),
// Step 4.3.2 "SSS"
State::MilliHigh => next_state(c.is_digit(10), State::MilliMiddle),
State::MilliMiddle => next_state(c.is_digit(10), State::MilliLow),
State::MilliLow => next_state(c.is_digit(10), State::Done),
_ => State::Error,
}
});
match state {
State::Done |
// Step 4 (optional)
State::SecondColon |
// Step 4.3 (optional)
State::MilliStop |
// Step 4.3.2 (only 1 digit required)
State::MilliMiddle | State::MilliLow => true,
_ => false
}
}
/// A valid date string should be "YYYY-MM-DD"
/// YYYY must be four or more digits, MM and DD both must be two digits
/// https://html.spec.whatwg.org/multipage/#valid-date-string
pub fn is_valid_date_string(&self) -> bool {
self.parse_date_string().is_ok()
}
/// https://html.spec.whatwg.org/multipage/#parse-a-date-string
pub fn parse_date_string(&self) -> Result<(i32, u32, u32), ()> {
let value = &self.0;
// Step 1, 2, 3
let (year_int, month_int, day_int) = parse_date_component(value)?;
// Step 4
if value.split('-').nth(3).is_some() {
return Err(());
}
// Step 5, 6
Ok((year_int, month_int, day_int))
}
/// https://html.spec.whatwg.org/multipage/#parse-a-time-string
pub fn parse_time_string(&self) -> Result<(u32, u32, f64), ()> {
let value = &self.0;
// Step 1, 2, 3
let (hour_int, minute_int, second_float) = parse_time_component(value)?;
// Step 4
if value.split(':').nth(3).is_some() {
return Err(());
}
// Step 5, 6
Ok((hour_int, minute_int, second_float))
}
/// A valid month string should be "YYYY-MM"
/// YYYY must be four or more digits, MM both must be two digits
/// https://html.spec.whatwg.org/multipage/#valid-month-string
pub fn is_valid_month_string(&self) -> bool {
self.parse_month_string().is_ok()
}
/// https://html.spec.whatwg.org/multipage/#parse-a-month-string
pub fn parse_month_string(&self) -> Result<(i32, u32), ()> {
let value = &self;
// Step 1, 2, 3
let (year_int, month_int) = parse_month_component(value)?;
// Step 4
if value.split("-").nth(2).is_some() {
return Err(());
}
// Step 5
Ok((year_int, month_int))
}
/// A valid week string should be like {YYYY}-W{WW}, such as "2017-W52"
/// YYYY must be four or more digits, WW both must be two digits
/// https://html.spec.whatwg.org/multipage/#valid-week-string
pub fn is_valid_week_string(&self) -> bool {
self.parse_week_string().is_ok()
}
/// https://html.spec.whatwg.org/multipage/#parse-a-week-string
pub fn parse_week_string(&self) -> Result<(i32, u32), ()> {
let value = &self.0;
// Step 1, 2, 3
let mut iterator = value.split('-');
let year = iterator.next().ok_or(())?;
// Step 4
let year_int = year.parse::<i32>().map_err(|_| ())?;
if year.len() < 4 || year_int == 0 {
return Err(());
}
// Step 5, 6
let week = iterator.next().ok_or(())?;
let (week_first, week_last) = week.split_at(1);
if week_first!= "W" {
return Err(());
}
// Step 7
let week_int = week_last.parse::<u32>().map_err(|_| ())?;
if week_last.len()!= 2 {
return Err(());
}
// Step 8
let max_week = max_week_in_year(year_int);
// Step 9
if week_int < 1 || week_int > max_week {
return Err(());
}
// Step 10
if iterator.next().is_some() {
return Err(());
}
// Step 11
Ok((year_int, week_int))
}
/// https://html.spec.whatwg.org/multipage/#valid-floating-point-number
pub fn is_valid_floating_point_number_string(&self) -> bool {
lazy_static! {
static ref RE: Regex =
Regex::new(r"^-?(?:\d+\.\d+|\d+|\.\d+)(?:(e|E)(\+|\-)?\d+)?$").unwrap();
}
RE.is_match(&self.0) && self.parse_floating_point_number().is_ok()
}
/// https://html.spec.whatwg.org/multipage/#rules-for-parsing-floating-point-number-values
pub fn parse_floating_point_number(&self) -> Result<f64, ()> {
// Steps 15-16 are telling us things about IEEE rounding modes
// for floating-point significands; this code assumes the Rust
// compiler already matches them in any cases where
// that actually matters. They are not
// related to f64::round(), which is for rounding to integers.
let input = &self.0;
match input.trim().parse::<f64>() {
Ok(val)
if!(
// A valid number is the same as what rust considers to be valid,
// except for +1., NaN, and Infinity.
val.is_infinite() ||
val.is_nan() ||
input.ends_with(".") ||
input.starts_with("+")
) =>
{
Ok(val)
},
_ => Err(()),
}
}
/// https://html.spec.whatwg.org/multipage/#best-representation-of-the-number-as-a-floating-point-number
pub fn set_best_representation_of_the_floating_point_number(&mut self) {
if let Ok(val) = self.parse_floating_point_number() {
self.0 = val.to_string();
}
}
/// A valid normalized local date and time string should be "{date}T{time}"
/// where date and time are both valid, and the time string must be as short as possible
/// https://html.spec.whatwg.org/multipage/#valid-normalised-local-date-and-time-string
pub fn convert_valid_normalized_local_date_and_time_string(&mut self) -> Result<(), ()> {
let ((year, month, day), (hour, minute, second)) =
self.parse_local_date_and_time_string()?;
if second == 0.0 {
self.0 = format!(
"{:04}-{:02}-{:02}T{:02}:{:02}",
year, month, day, hour, minute
);
} else if second < 10.0 {
// we need exactly one leading zero on the seconds,
// whatever their total string length might be
self.0 = format!(
"{:04}-{:02}-{:02}T{:02}:{:02}:0{}",
year, month, day, hour, minute, second
);
} else {
// we need no leading zeroes on the seconds
self.0 = format!(
"{:04}-{:02}-{:02}T{:02}:{:02}:{}",
year, month, day, hour, minute, second
);
}
Ok(())
}
/// https://html.spec.whatwg.org/multipage/#parse-a-local-date-and-time-string
pub fn parse_local_date_and_time_string(
&self,
) -> Result<((i32, u32, u32), (u32, u32, f64)), ()> {
let value = &self;
// Step 1, 2, 4
let mut iterator = if value.contains('T') {
value.split('T')
} else {
value.split(' ')
};
// Step 3
let date = iterator.next().ok_or(())?;
let date_tuple = parse_date_component(date)?;
// Step 5
let time = iterator.next().ok_or(())?;
let time_tuple = parse_time_component(time)?;
// Step 6
if iterator.next().is_some() {
return Err(());
}
// Step 7, 8, 9
Ok((date_tuple, time_tuple))
}
}
impl Borrow<str> for DOMString {
#[inline]
fn borrow(&self) -> &str {
&self.0
}
}
impl Default for DOMString {
fn default() -> Self {
DOMString(String::new(), PhantomData)
}
}
impl Deref for DOMString {
type Target = str;
#[inline]
fn deref(&self) -> &str {
&self.0
}
}
impl DerefMut for DOMString {
#[inline]
fn deref_mut(&mut self) -> &mut str {
&mut self.0
}
}
impl AsRef<str> for DOMString {
fn as_ref(&self) -> &str {
&self.0
}
}
impl fmt::Display for DOMString {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
impl PartialEq<str> for DOMString {
fn eq(&self, other: &str) -> bool {
&**self == other
}
}
impl<'a> PartialEq<&'a str> for DOMString {
fn eq(&self, other: &&'a str) -> bool {
&**self == *other
}
}
impl From<String> for DOMString {
fn from(contents: String) -> DOMString {
DOMString(contents, PhantomData)
}
}
impl<'a> From<&'a str> for DOMString {
fn from(contents: &str) -> DOMString {
DOMString::from(String::from(contents))
}
}
impl<'a> From<Cow<'a, str>> for DOMString {
fn from(contents: Cow<'a, str>) -> DOMString {
match contents {
Cow::Owned(s) => DOMString::from(s),
Cow::Borrowed(s) => DOMString::from(s),
}
}
}
impl From<DOMString> for LocalName {
fn from(contents: DOMString) -> LocalName {
LocalName::from(contents.0)
}
}
impl From<DOMString> for Namespace {
fn from(contents: DOMString) -> Namespace {
Namespace::from(contents.0)
}
}
impl From<DOMString> for Atom {
fn from(contents: DOMString) -> Atom {
Atom::from(contents.0)
}
}
impl From<DOMString> for String {
fn from(contents: DOMString) -> String {
contents.0
}
}
impl Into<Vec<u8>> for DOMString {
fn into(self) -> Vec<u8> {
self.0.into()
}
}
impl<'a> Into<Cow<'a, str>> for DOMString {
fn into(self) -> Cow<'a, str> {
self.0.into()
}
}
impl<'a> Into<CowRcStr<'a>> for DOMString {
fn into(self) -> CowRcStr<'a> {
self.0.into()
}
}
impl Extend<char> for DOMString {
fn extend<I>(&mut self, iterable: I)
where
I: IntoIterator<Item = char>,
{
self.0.extend(iterable)
}
}
/// https://html.spec.whatwg.org/multipage/#parse-a-month-component
fn parse_month_component(value: &str) -> Result<(i32, u32), ()> {
// Step 3
let mut iterator = value.split('-');
let year = iterator.next().ok_or(())?;
let month = iterator.next().ok_or(())?;
// Step 1, 2
let year_int = year.parse::<i32>().map_err(|_| ())?;
if year.len() < 4 || year_int == 0 {
return Err(());
}
// Step 4, 5
let month_int = month.parse::<u32>().map_err(|_| ())?;
if month.len()!= 2 || month_int > 12 || month_int < 1 {
| pub fn from_string(s: String) -> DOMString {
DOMString(s, PhantomData)
} | random_line_split |
macro_parser.rs | Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
//! - - - Advance over an `a`. - - - (this looks exactly like the last step)
//!
//! Remaining input: `b`
//! cur: [a $( a · )* a b] next: [a $( a )* a · b]
//! Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b]
//!
//! - - - Advance over a `b`. - - -
//!
//! Remaining input: ``
//! eof: [a $( a )* a b ·]
pub use self::NamedMatch::*;
pub use self::ParseResult::*;
use self::TokenTreeOrTokenTreeVec::*;
use ast;
use ast::{TokenTree, Ident};
use ast::{TtDelimited, TtSequence, TtToken};
use codemap::{BytePos, mk_sp, Span};
use codemap;
use parse::lexer::*; //resolve bug?
use parse::ParseSess;
use parse::attr::ParserAttr;
use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
use parse::token::{Eof, DocComment, MatchNt, SubstNt};
use parse::token::{Token, Nonterminal};
use parse::token;
use print::pprust;
use ptr::P;
use std::mem;
use std::rc::Rc;
use std::collections::HashMap;
use std::collections::hash_map::Entry::{Vacant, Occupied};
// To avoid costly uniqueness checks, we require that `MatchSeq` always has
// a nonempty body.
/// Either a single token tree or a shared list of them, so matcher
/// positions can walk both shapes uniformly (see `len`/`get_tt` below).
#[derive(Clone)]
enum TokenTreeOrTokenTreeVec {
    /// A single token tree (possibly a sequence or delimited group).
    Tt(ast::TokenTree),
    /// A shared, flat list of token trees (e.g. the top-level matcher).
    TtSeq(Rc<Vec<ast::TokenTree>>),
}
impl TokenTreeOrTokenTreeVec {
    /// Number of token trees directly contained in this matcher.
    fn len(&self) -> usize {
        match *self {
            TtSeq(ref v) => v.len(),
            Tt(ref tt) => tt.len(),
        }
    }

    /// Returns a clone of the `index`-th contained token tree.
    fn get_tt(&self, index: usize) -> TokenTree {
        match *self {
            TtSeq(ref v) => v[index].clone(),
            Tt(ref tt) => tt.get_tt(index),
        }
    }
}
/// an unzipping of `TokenTree`s
#[derive(Clone)]
struct MatcherTtFrame {
    /// The tree that was being walked when we descended into a child.
    elts: TokenTreeOrTokenTreeVec,
    /// Index of the element we descended into; on popping this frame,
    /// traversal resumes at `idx + 1`.
    idx: usize,
}
#[derive(Clone)]
pub struct MatcherPos {
stack: | erTtFrame>,
top_elts: TokenTreeOrTokenTreeVec,
sep: Option<Token>,
idx: usize,
up: Option<Box<MatcherPos>>,
matches: Vec<Vec<Rc<NamedMatch>>>,
match_lo: usize,
match_cur: usize,
match_hi: usize,
sp_lo: BytePos,
}
/// Total number of named nonterminal bindings (`MatchNt`) in the matcher,
/// recursing through delimited groups and using each sequence's
/// precomputed capture count.
pub fn count_names(ms: &[TokenTree]) -> usize {
    ms.iter()
      .map(|elt| match *elt {
          TtSequence(_, ref seq) => seq.num_captures,
          TtDelimited(_, ref delim) => count_names(&delim.tts),
          TtToken(_, MatchNt(..)) => 1,
          TtToken(_, _) => 0,
      })
      .sum()
}
/// Builds the starting matcher position for `parse`: positioned at the
/// first element of `ms`, with one empty accumulator per named binding.
pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos)
                           -> Box<MatcherPos> {
    let match_idx_hi = count_names(&ms[..]);
    // One (initially empty) match accumulator per named nonterminal.
    let matches = vec![Vec::new(); match_idx_hi];
    Box::new(MatcherPos {
        stack: Vec::new(),
        top_elts: TtSeq(ms),
        sep: sep,
        idx: 0,
        up: None,
        matches: matches,
        match_lo: 0,
        match_cur: 0,
        match_hi: match_idx_hi,
        sp_lo: lo
    })
}
/// NamedMatch is a pattern-match result for a single token::MATCH_NONTERMINAL:
/// so it is associated with a single ident in a parse, and all
/// `MatchedNonterminal`s in the NamedMatch have the same nonterminal type
/// (expr, item, etc). Each leaf in a single NamedMatch corresponds to a
/// single token::MATCH_NONTERMINAL in the TokenTree that produced it.
///
/// The in-memory structure of a particular NamedMatch represents the match
/// that occurred when a particular subset of a matcher was applied to a
/// particular token tree.
///
/// The width of each MatchedSeq in the NamedMatch, and the identity of the
/// `MatchedNonterminal`s, will depend on the token tree it was applied to:
/// each MatchedSeq corresponds to a single TTSeq in the originating
/// token tree. The depth of the NamedMatch structure will therefore depend
/// only on the nesting depth of `ast::TTSeq`s in the originating
/// token tree it was derived from.
pub enum NamedMatch {
MatchedSeq(Vec<Rc<NamedMatch>>, codemap::Span),
MatchedNonterminal(Nonterminal)
}
pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
-> HashMap<Ident, Rc<NamedMatch>> {
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize) {
match m {
&TtSequence(_, ref seq) => {
for next_m in &seq.tts {
n_rec(p_s, next_m, res, ret_val, idx)
}
}
&TtDelimited(_, ref delim) => {
for next_m in &delim.tts {
n_rec(p_s, next_m, res, ret_val, idx)
}
}
&TtToken(sp, MatchNt(bind_name, _, _, _)) => {
match ret_val.entry(bind_name) {
Vacant(spot) => {
spot.insert(res[*idx].clone());
*idx += 1;
}
Occupied(..) => {
let string = token::get_ident(bind_name);
panic!(p_s.span_diagnostic
.span_fatal(sp,
&format!("duplicated bind name: {}",
&string)))
}
}
}
&TtToken(_, SubstNt(..)) => panic!("Cannot fill in a NT"),
&TtToken(_, _) => (),
}
}
let mut ret_val = HashMap::new();
let mut idx = 0;
for m in ms { n_rec(p_s, m, res, &mut ret_val, &mut idx) }
ret_val
}
pub enum ParseResult<T> {
Success(T),
Failure(codemap::Span, String),
Error(codemap::Span, String)
}
pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
pub fn parse_or_else(sess: &ParseSess,
cfg: ast::CrateConfig,
rdr: TtReader,
ms: Vec<TokenTree> )
-> HashMap<Ident, Rc<NamedMatch>> {
match parse(sess, cfg, rdr, &ms[..]) {
Success(m) => m,
Failure(sp, str) => {
panic!(sess.span_diagnostic.span_fatal(sp, &str[..]))
}
Error(sp, str) => {
panic!(sess.span_diagnostic.span_fatal(sp, &str[..]))
}
}
}
/// Perform a token equality check, ignoring syntax context (that is, an
/// unhygienic comparison): identifiers and lifetimes compare by name only,
/// while every other token falls back to structural equality.
pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
    if let (&token::Ident(id1, _), &token::Ident(id2, _)) = (t1, t2) {
        return id1.name == id2.name;
    }
    if let (&token::Lifetime(id1), &token::Lifetime(id2)) = (t1, t2) {
        return id1.name == id2.name;
    }
    *t1 == *t2
}
pub fn parse(sess: &ParseSess,
cfg: ast::CrateConfig,
mut rdr: TtReader,
ms: &[TokenTree])
-> NamedParseResult {
let mut cur_eis = Vec::new();
cur_eis.push(initial_matcher_pos(Rc::new(ms.iter()
.cloned()
.collect()),
None,
rdr.peek().sp.lo));
loop {
let mut bb_eis = Vec::new(); // black-box parsed by parser.rs
let mut next_eis = Vec::new(); // or proceed normally
let mut eof_eis = Vec::new();
let TokenAndSpan { tok, sp } = rdr.peek();
/* we append new items to this while we go */
loop {
let mut ei = match cur_eis.pop() {
None => break, /* for each Earley Item */
Some(ei) => ei,
};
// When unzipped trees end, remove them
while ei.idx >= ei.top_elts.len() {
match ei.stack.pop() {
Some(MatcherTtFrame { elts, idx }) => {
ei.top_elts = elts;
ei.idx = idx + 1;
}
None => break
}
}
let idx = ei.idx;
let len = ei.top_elts.len();
/* at end of sequence */
if idx >= len {
// can't move out of `match`es, so:
if ei.up.is_some() {
// hack: a matcher sequence is repeating iff it has a
// parent (the top level is just a container)
// disregard separator, try to go up
// (remove this condition to make trailing seps ok)
if idx == len {
// pop from the matcher position
let mut new_pos = ei.up.clone().unwrap();
// update matches (the MBE "parse tree") by appending
// each tree as a subtree.
// I bet this is a perf problem: we're preemptively
// doing a lot of array work that will get thrown away
// most of the time.
// Only touch the binders we have actually bound
for idx in ei.match_lo..ei.match_hi {
let sub = (ei.matches[idx]).clone();
(&mut new_pos.matches[idx])
.push(Rc::new(MatchedSeq(sub, mk_sp(ei.sp_lo,
sp.hi))));
}
new_pos.match_cur = ei.match_hi;
new_pos.idx += 1;
cur_eis.push(new_pos);
}
// can we go around again?
// the *_t vars are workarounds for the lack of unary move
match ei.sep {
Some(ref t) if idx == len => { // we need a separator
// i'm conflicted about whether this should be hygienic....
// though in this case, if the separators are never legal
// idents, it shouldn't matter.
if token_name_eq(&tok, t) { //pass the separator
let mut ei_t = ei.clone();
// ei_t.match_cur = ei_t.match_lo;
ei_t.idx += 1;
next_eis.push(ei_t);
}
}
_ => { // we don't need a separator
let mut ei_t = ei;
ei_t.match_cur = ei_t.match_lo;
ei_t.idx = 0;
cur_eis.push(ei_t);
}
}
} else {
eof_eis.push(ei);
}
} else {
match ei.top_elts.get_tt(idx) {
/* need to descend into sequence */
TtSequence(sp, seq) => {
if seq.op == ast::ZeroOrMore {
let mut new_ei = ei.clone();
new_ei.match_cur += seq.num_captures;
new_ei.idx += 1;
//we specifically matched zero repeats.
for idx in ei.match_cur..ei.match_cur + seq.num_captures {
(&mut new_ei.matches[idx]).push(Rc::new(MatchedSeq(vec![], sp)));
}
cur_eis.push(new_ei);
}
let matches: Vec<_> = (0..ei.matches.len())
.map(|_| Vec::new()).collect();
let ei_t = ei;
cur_eis.push(Box::new(MatcherPos {
stack: vec![],
sep: seq.separator.clone(),
idx: 0,
matches: matches,
match_lo: ei_t.match_cur,
match_cur: ei_t.match_cur,
match_hi: ei_t.match_cur + seq.num_captures,
up: Some(ei_t),
sp_lo: sp.lo,
top_elts: Tt(TtSequence(sp, seq)),
}));
}
TtToken(_, MatchNt(..)) => {
// Built-in nonterminals never start with these tokens,
// so we can eliminate them from consideration.
match tok {
token::CloseDelim(_) => {},
_ => bb_eis.push(ei),
}
}
TtToken(sp, SubstNt(..)) => {
return Error(sp, "Cannot transcribe in macro LHS".to_string())
}
seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => {
let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
let idx = ei.idx;
ei.stack.push(MatcherTtFrame {
elts: lower_elts,
idx: idx,
});
ei.idx = 0;
cur_eis.push(ei);
}
TtToken(_, ref t) => {
let mut ei_t = ei.clone();
if token_name_eq(t,&tok) {
ei_t.idx += 1;
next_eis.push(ei_t);
}
}
}
}
}
/* error messages here could be improved with links to orig. rules */
if token_name_eq(&tok, &token::Eof) {
if eof_eis.len() == 1 {
let mut v = Vec::new();
for dv in &mut (&mut eof_eis[0]).matches {
v.push(dv.pop().unwrap());
}
return Success(nameize(sess, ms, &v[..]));
} else if eof_eis.len() > 1 {
return Error(sp, "ambiguity: multiple successful parses".to_string());
} else {
return Failure(sp, "unexpected end of macro invocation".to_string());
}
} else {
if (!bb_eis.is_empty() &&!next_eis.is_empty())
|| bb_eis.len() > 1 {
let nts = bb_eis.iter().map(|ei| {
match ei.top_elts.get_tt(ei.idx) {
TtToken(_, MatchNt(bind, name, _, _)) => {
(format!("{} ('{}')",
token:: | Vec<Match | identifier_name |
macro_parser.rs | Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
//! - - - Advance over an `a`. - - - (this looks exactly like the last step)
//!
//! Remaining input: `b`
//! cur: [a $( a · )* a b] next: [a $( a )* a · b]
//! Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b]
//!
//! - - - Advance over a `b`. - - -
//!
//! Remaining input: ``
//! eof: [a $( a )* a b ·]
pub use self::NamedMatch::*;
pub use self::ParseResult::*;
use self::TokenTreeOrTokenTreeVec::*;
use ast;
use ast::{TokenTree, Ident};
use ast::{TtDelimited, TtSequence, TtToken};
use codemap::{BytePos, mk_sp, Span};
use codemap;
use parse::lexer::*; //resolve bug?
use parse::ParseSess;
use parse::attr::ParserAttr;
use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
use parse::token::{Eof, DocComment, MatchNt, SubstNt};
use parse::token::{Token, Nonterminal};
use parse::token;
use print::pprust;
use ptr::P;
use std::mem;
use std::rc::Rc;
use std::collections::HashMap;
use std::collections::hash_map::Entry::{Vacant, Occupied};
// To avoid costly uniqueness checks, we require that `MatchSeq` always has
// a nonempty body.
#[derive(Clone)]
enum TokenTreeOrTokenTreeVec {
Tt(ast::TokenTree),
TtSeq(Rc<Vec<ast::TokenTree>>),
}
impl TokenTreeOrTokenTreeVec {
fn len(&self) -> usize {
match self {
&TtSeq(ref v) => v.len(),
&Tt(ref tt) => tt.len(),
}
}
fn get_tt(&self, index: usize) -> TokenTree {
match self {
&TtSeq(ref v) => v[index].clone(),
&Tt(ref tt) => tt.get_tt(index),
}
}
}
/// an unzipping of `TokenTree`s
#[derive(Clone)]
struct MatcherTtFrame {
elts: TokenTreeOrTokenTreeVec,
idx: usize,
}
#[derive(Clone)]
pub struct MatcherPos {
stack: Vec<MatcherTtFrame>,
top_elts: TokenTreeOrTokenTreeVec,
sep: Option<Token>,
idx: usize,
up: Option<Box<MatcherPos>>,
matches: Vec<Vec<Rc<NamedMatch>>>,
match_lo: usize,
match_cur: usize,
match_hi: usize,
sp_lo: BytePos,
}
pub fn count_names(ms: &[TokenTree]) -> usize {
ms.iter().fold(0, | r_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos)
-> Box<MatcherPos> {
let match_idx_hi = count_names(&ms[..]);
let matches: Vec<_> = (0..match_idx_hi).map(|_| Vec::new()).collect();
Box::new(MatcherPos {
stack: vec![],
top_elts: TtSeq(ms),
sep: sep,
idx: 0,
up: None,
matches: matches,
match_lo: 0,
match_cur: 0,
match_hi: match_idx_hi,
sp_lo: lo
})
}
/// NamedMatch is a pattern-match result for a single token::MATCH_NONTERMINAL:
/// so it is associated with a single ident in a parse, and all
/// `MatchedNonterminal`s in the NamedMatch have the same nonterminal type
/// (expr, item, etc). Each leaf in a single NamedMatch corresponds to a
/// single token::MATCH_NONTERMINAL in the TokenTree that produced it.
///
/// The in-memory structure of a particular NamedMatch represents the match
/// that occurred when a particular subset of a matcher was applied to a
/// particular token tree.
///
/// The width of each MatchedSeq in the NamedMatch, and the identity of the
/// `MatchedNonterminal`s, will depend on the token tree it was applied to:
/// each MatchedSeq corresponds to a single TTSeq in the originating
/// token tree. The depth of the NamedMatch structure will therefore depend
/// only on the nesting depth of `ast::TTSeq`s in the originating
/// token tree it was derived from.
pub enum NamedMatch {
MatchedSeq(Vec<Rc<NamedMatch>>, codemap::Span),
MatchedNonterminal(Nonterminal)
}
pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
-> HashMap<Ident, Rc<NamedMatch>> {
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize) {
match m {
&TtSequence(_, ref seq) => {
for next_m in &seq.tts {
n_rec(p_s, next_m, res, ret_val, idx)
}
}
&TtDelimited(_, ref delim) => {
for next_m in &delim.tts {
n_rec(p_s, next_m, res, ret_val, idx)
}
}
&TtToken(sp, MatchNt(bind_name, _, _, _)) => {
match ret_val.entry(bind_name) {
Vacant(spot) => {
spot.insert(res[*idx].clone());
*idx += 1;
}
Occupied(..) => {
let string = token::get_ident(bind_name);
panic!(p_s.span_diagnostic
.span_fatal(sp,
&format!("duplicated bind name: {}",
&string)))
}
}
}
&TtToken(_, SubstNt(..)) => panic!("Cannot fill in a NT"),
&TtToken(_, _) => (),
}
}
let mut ret_val = HashMap::new();
let mut idx = 0;
for m in ms { n_rec(p_s, m, res, &mut ret_val, &mut idx) }
ret_val
}
pub enum ParseResult<T> {
Success(T),
Failure(codemap::Span, String),
Error(codemap::Span, String)
}
pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
pub fn parse_or_else(sess: &ParseSess,
cfg: ast::CrateConfig,
rdr: TtReader,
ms: Vec<TokenTree> )
-> HashMap<Ident, Rc<NamedMatch>> {
match parse(sess, cfg, rdr, &ms[..]) {
Success(m) => m,
Failure(sp, str) => {
panic!(sess.span_diagnostic.span_fatal(sp, &str[..]))
}
Error(sp, str) => {
panic!(sess.span_diagnostic.span_fatal(sp, &str[..]))
}
}
}
/// Perform a token equality check, ignoring syntax context (that is, an
/// unhygienic comparison)
pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
match (t1,t2) {
(&token::Ident(id1,_),&token::Ident(id2,_))
| (&token::Lifetime(id1),&token::Lifetime(id2)) =>
id1.name == id2.name,
_ => *t1 == *t2
}
}
pub fn parse(sess: &ParseSess,
cfg: ast::CrateConfig,
mut rdr: TtReader,
ms: &[TokenTree])
-> NamedParseResult {
let mut cur_eis = Vec::new();
cur_eis.push(initial_matcher_pos(Rc::new(ms.iter()
.cloned()
.collect()),
None,
rdr.peek().sp.lo));
loop {
let mut bb_eis = Vec::new(); // black-box parsed by parser.rs
let mut next_eis = Vec::new(); // or proceed normally
let mut eof_eis = Vec::new();
let TokenAndSpan { tok, sp } = rdr.peek();
/* we append new items to this while we go */
loop {
let mut ei = match cur_eis.pop() {
None => break, /* for each Earley Item */
Some(ei) => ei,
};
// When unzipped trees end, remove them
while ei.idx >= ei.top_elts.len() {
match ei.stack.pop() {
Some(MatcherTtFrame { elts, idx }) => {
ei.top_elts = elts;
ei.idx = idx + 1;
}
None => break
}
}
let idx = ei.idx;
let len = ei.top_elts.len();
/* at end of sequence */
if idx >= len {
// can't move out of `match`es, so:
if ei.up.is_some() {
// hack: a matcher sequence is repeating iff it has a
// parent (the top level is just a container)
// disregard separator, try to go up
// (remove this condition to make trailing seps ok)
if idx == len {
// pop from the matcher position
let mut new_pos = ei.up.clone().unwrap();
// update matches (the MBE "parse tree") by appending
// each tree as a subtree.
// I bet this is a perf problem: we're preemptively
// doing a lot of array work that will get thrown away
// most of the time.
// Only touch the binders we have actually bound
for idx in ei.match_lo..ei.match_hi {
let sub = (ei.matches[idx]).clone();
(&mut new_pos.matches[idx])
.push(Rc::new(MatchedSeq(sub, mk_sp(ei.sp_lo,
sp.hi))));
}
new_pos.match_cur = ei.match_hi;
new_pos.idx += 1;
cur_eis.push(new_pos);
}
// can we go around again?
// the *_t vars are workarounds for the lack of unary move
match ei.sep {
Some(ref t) if idx == len => { // we need a separator
// i'm conflicted about whether this should be hygienic....
// though in this case, if the separators are never legal
// idents, it shouldn't matter.
if token_name_eq(&tok, t) { //pass the separator
let mut ei_t = ei.clone();
// ei_t.match_cur = ei_t.match_lo;
ei_t.idx += 1;
next_eis.push(ei_t);
}
}
_ => { // we don't need a separator
let mut ei_t = ei;
ei_t.match_cur = ei_t.match_lo;
ei_t.idx = 0;
cur_eis.push(ei_t);
}
}
} else {
eof_eis.push(ei);
}
} else {
match ei.top_elts.get_tt(idx) {
/* need to descend into sequence */
TtSequence(sp, seq) => {
if seq.op == ast::ZeroOrMore {
let mut new_ei = ei.clone();
new_ei.match_cur += seq.num_captures;
new_ei.idx += 1;
//we specifically matched zero repeats.
for idx in ei.match_cur..ei.match_cur + seq.num_captures {
(&mut new_ei.matches[idx]).push(Rc::new(MatchedSeq(vec![], sp)));
}
cur_eis.push(new_ei);
}
let matches: Vec<_> = (0..ei.matches.len())
.map(|_| Vec::new()).collect();
let ei_t = ei;
cur_eis.push(Box::new(MatcherPos {
stack: vec![],
sep: seq.separator.clone(),
idx: 0,
matches: matches,
match_lo: ei_t.match_cur,
match_cur: ei_t.match_cur,
match_hi: ei_t.match_cur + seq.num_captures,
up: Some(ei_t),
sp_lo: sp.lo,
top_elts: Tt(TtSequence(sp, seq)),
}));
}
TtToken(_, MatchNt(..)) => {
// Built-in nonterminals never start with these tokens,
// so we can eliminate them from consideration.
match tok {
token::CloseDelim(_) => {},
_ => bb_eis.push(ei),
}
}
TtToken(sp, SubstNt(..)) => {
return Error(sp, "Cannot transcribe in macro LHS".to_string())
}
seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => {
let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
let idx = ei.idx;
ei.stack.push(MatcherTtFrame {
elts: lower_elts,
idx: idx,
});
ei.idx = 0;
cur_eis.push(ei);
}
TtToken(_, ref t) => {
let mut ei_t = ei.clone();
if token_name_eq(t,&tok) {
ei_t.idx += 1;
next_eis.push(ei_t);
}
}
}
}
}
/* error messages here could be improved with links to orig. rules */
if token_name_eq(&tok, &token::Eof) {
if eof_eis.len() == 1 {
let mut v = Vec::new();
for dv in &mut (&mut eof_eis[0]).matches {
v.push(dv.pop().unwrap());
}
return Success(nameize(sess, ms, &v[..]));
} else if eof_eis.len() > 1 {
return Error(sp, "ambiguity: multiple successful parses".to_string());
} else {
return Failure(sp, "unexpected end of macro invocation".to_string());
}
} else {
if (!bb_eis.is_empty() &&!next_eis.is_empty())
|| bb_eis.len() > 1 {
let nts = bb_eis.iter().map(|ei| {
match ei.top_elts.get_tt(ei.idx) {
TtToken(_, MatchNt(bind, name, _, _)) => {
(format!("{} ('{}')",
token:: | |count, elt| {
count + match elt {
&TtSequence(_, ref seq) => {
seq.num_captures
}
&TtDelimited(_, ref delim) => {
count_names(&delim.tts)
}
&TtToken(_, MatchNt(..)) => {
1
}
&TtToken(_, _) => 0,
}
})
}
pub fn initial_matche | identifier_body |
macro_parser.rs |
//! Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
//! - - - Advance over an `a`. - - - (this looks exactly like the last step)
//!
//! Remaining input: `b`
//! cur: [a $( a · )* a b] next: [a $( a )* a · b]
//! Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b]
//!
//! - - - Advance over a `b`. - - -
//!
//! Remaining input: ``
//! eof: [a $( a )* a b ·]
pub use self::NamedMatch::*;
pub use self::ParseResult::*;
use self::TokenTreeOrTokenTreeVec::*;
use ast;
use ast::{TokenTree, Ident};
use ast::{TtDelimited, TtSequence, TtToken};
use codemap::{BytePos, mk_sp, Span};
use codemap;
use parse::lexer::*; //resolve bug?
use parse::ParseSess;
use parse::attr::ParserAttr;
use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
use parse::token::{Eof, DocComment, MatchNt, SubstNt};
use parse::token::{Token, Nonterminal};
use parse::token;
use print::pprust;
use ptr::P;
use std::mem;
use std::rc::Rc;
use std::collections::HashMap;
use std::collections::hash_map::Entry::{Vacant, Occupied};
// To avoid costly uniqueness checks, we require that `MatchSeq` always has
// a nonempty body.
#[derive(Clone)]
enum TokenTreeOrTokenTreeVec {
Tt(ast::TokenTree),
TtSeq(Rc<Vec<ast::TokenTree>>),
}
impl TokenTreeOrTokenTreeVec {
fn len(&self) -> usize {
match self {
&TtSeq(ref v) => v.len(),
&Tt(ref tt) => tt.len(),
}
}
fn get_tt(&self, index: usize) -> TokenTree {
match self {
&TtSeq(ref v) => v[index].clone(),
&Tt(ref tt) => tt.get_tt(index),
}
}
}
/// an unzipping of `TokenTree`s
#[derive(Clone)]
struct MatcherTtFrame {
elts: TokenTreeOrTokenTreeVec,
idx: usize,
}
#[derive(Clone)]
pub struct MatcherPos {
stack: Vec<MatcherTtFrame>,
top_elts: TokenTreeOrTokenTreeVec,
sep: Option<Token>,
idx: usize,
up: Option<Box<MatcherPos>>,
matches: Vec<Vec<Rc<NamedMatch>>>,
match_lo: usize,
match_cur: usize,
match_hi: usize,
sp_lo: BytePos,
}
pub fn count_names(ms: &[TokenTree]) -> usize {
ms.iter().fold(0, |count, elt| {
count + match elt {
&TtSequence(_, ref seq) => {
seq.num_captures
}
&TtDelimited(_, ref delim) => {
count_names(&delim.tts)
}
&TtToken(_, MatchNt(..)) => {
1
}
&TtToken(_, _) => 0,
}
})
}
pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos)
-> Box<MatcherPos> {
let match_idx_hi = count_names(&ms[..]);
let matches: Vec<_> = (0..match_idx_hi).map(|_| Vec::new()).collect();
Box::new(MatcherPos {
stack: vec![],
top_elts: TtSeq(ms),
sep: sep,
idx: 0,
up: None,
matches: matches,
match_lo: 0,
match_cur: 0,
match_hi: match_idx_hi,
sp_lo: lo
})
}
/// NamedMatch is a pattern-match result for a single token::MATCH_NONTERMINAL:
/// so it is associated with a single ident in a parse, and all
/// `MatchedNonterminal`s in the NamedMatch have the same nonterminal type
/// (expr, item, etc). Each leaf in a single NamedMatch corresponds to a
/// single token::MATCH_NONTERMINAL in the TokenTree that produced it.
///
/// The in-memory structure of a particular NamedMatch represents the match
/// that occurred when a particular subset of a matcher was applied to a
/// particular token tree.
///
/// The width of each MatchedSeq in the NamedMatch, and the identity of the
/// `MatchedNonterminal`s, will depend on the token tree it was applied to:
/// each MatchedSeq corresponds to a single TTSeq in the originating
/// token tree. The depth of the NamedMatch structure will therefore depend
/// only on the nesting depth of `ast::TTSeq`s in the originating
/// token tree it was derived from.
pub enum NamedMatch {
MatchedSeq(Vec<Rc<NamedMatch>>, codemap::Span),
MatchedNonterminal(Nonterminal)
}
pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
-> HashMap<Ident, Rc<NamedMatch>> {
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize) {
match m {
&TtSequence(_, ref seq) => {
for next_m in &seq.tts {
n_rec(p_s, next_m, res, ret_val, idx)
}
}
&TtDelimited(_, ref delim) => {
for next_m in &delim.tts {
n_rec(p_s, next_m, res, ret_val, idx)
}
}
&TtToken(sp, MatchNt(bind_name, _, _, _)) => {
match ret_val.entry(bind_name) {
Vacant(spot) => {
spot.insert(res[*idx].clone());
*idx += 1;
}
Occupied(..) => {
let string = token::get_ident(bind_name);
panic!(p_s.span_diagnostic
.span_fatal(sp,
&format!("duplicated bind name: {}",
&string)))
}
}
}
&TtToken(_, SubstNt(..)) => panic!("Cannot fill in a NT"),
&TtToken(_, _) => (),
}
}
let mut ret_val = HashMap::new();
let mut idx = 0;
for m in ms { n_rec(p_s, m, res, &mut ret_val, &mut idx) }
ret_val
}
pub enum ParseResult<T> {
Success(T),
Failure(codemap::Span, String),
Error(codemap::Span, String)
}
pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
pub fn parse_or_else(sess: &ParseSess,
cfg: ast::CrateConfig,
rdr: TtReader,
ms: Vec<TokenTree> )
-> HashMap<Ident, Rc<NamedMatch>> {
match parse(sess, cfg, rdr, &ms[..]) {
Success(m) => m,
Failure(sp, str) => {
panic!(sess.span_diagnostic.span_fatal(sp, &str[..]))
}
Error(sp, str) => {
panic!(sess.span_diagnostic.span_fatal(sp, &str[..]))
}
}
}
/// Perform a token equality check, ignoring syntax context (that is, an
/// unhygienic comparison)
pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
match (t1,t2) {
(&token::Ident(id1,_),&token::Ident(id2,_))
| (&token::Lifetime(id1),&token::Lifetime(id2)) =>
id1.name == id2.name,
_ => *t1 == *t2
}
}
pub fn parse(sess: &ParseSess,
cfg: ast::CrateConfig,
mut rdr: TtReader,
ms: &[TokenTree])
-> NamedParseResult {
let mut cur_eis = Vec::new();
cur_eis.push(initial_matcher_pos(Rc::new(ms.iter()
.cloned()
.collect()),
None,
rdr.peek().sp.lo));
loop {
let mut bb_eis = Vec::new(); // black-box parsed by parser.rs
let mut next_eis = Vec::new(); // or proceed normally
let mut eof_eis = Vec::new();
let TokenAndSpan { tok, sp } = rdr.peek();
/* we append new items to this while we go */
loop {
let mut ei = match cur_eis.pop() {
None => break, /* for each Earley Item */
Some(ei) => ei,
};
// When unzipped trees end, remove them
while ei.idx >= ei.top_elts.len() {
match ei.stack.pop() {
Some(MatcherTtFrame { elts, idx }) => {
ei.top_elts = elts;
ei.idx = idx + 1;
}
None => break
}
}
let idx = ei.idx;
let len = ei.top_elts.len();
/* at end of sequence */
if idx >= len {
// can't move out of `match`es, so:
if ei.up.is_some() {
// hack: a matcher sequence is repeating iff it has a
// parent (the top level is just a container)
// disregard separator, try to go up
// (remove this condition to make trailing seps ok)
if idx == len {
// pop from the matcher position
let mut new_pos = ei.up.clone().unwrap();
// update matches (the MBE "parse tree") by appending
// each tree as a subtree.
// I bet this is a perf problem: we're preemptively
// doing a lot of array work that will get thrown away
// most of the time.
// Only touch the binders we have actually bound
for idx in ei.match_lo..ei.match_hi {
let sub = (ei.matches[idx]).clone();
(&mut new_pos.matches[idx])
.push(Rc::new(MatchedSeq(sub, mk_sp(ei.sp_lo,
sp.hi))));
}
new_pos.match_cur = ei.match_hi;
new_pos.idx += 1;
cur_eis.push(new_pos);
}
// can we go around again?
// the *_t vars are workarounds for the lack of unary move
match ei.sep {
Some(ref t) if idx == len => { // we need a separator
// i'm conflicted about whether this should be hygienic....
// though in this case, if the separators are never legal
// idents, it shouldn't matter.
if token_name_eq(&tok, t) { //pass the separator
let mut ei_t = ei.clone();
// ei_t.match_cur = ei_t.match_lo;
ei_t.idx += 1;
next_eis.push(ei_t);
}
}
_ => { // we don't need a separator
let mut ei_t = ei;
ei_t.match_cur = ei_t.match_lo;
ei_t.idx = 0;
cur_eis.push(ei_t);
}
}
} else {
eof_eis.push(ei);
}
} else { | if seq.op == ast::ZeroOrMore {
let mut new_ei = ei.clone();
new_ei.match_cur += seq.num_captures;
new_ei.idx += 1;
//we specifically matched zero repeats.
for idx in ei.match_cur..ei.match_cur + seq.num_captures {
(&mut new_ei.matches[idx]).push(Rc::new(MatchedSeq(vec![], sp)));
}
cur_eis.push(new_ei);
}
let matches: Vec<_> = (0..ei.matches.len())
.map(|_| Vec::new()).collect();
let ei_t = ei;
cur_eis.push(Box::new(MatcherPos {
stack: vec![],
sep: seq.separator.clone(),
idx: 0,
matches: matches,
match_lo: ei_t.match_cur,
match_cur: ei_t.match_cur,
match_hi: ei_t.match_cur + seq.num_captures,
up: Some(ei_t),
sp_lo: sp.lo,
top_elts: Tt(TtSequence(sp, seq)),
}));
}
TtToken(_, MatchNt(..)) => {
// Built-in nonterminals never start with these tokens,
// so we can eliminate them from consideration.
match tok {
token::CloseDelim(_) => {},
_ => bb_eis.push(ei),
}
}
TtToken(sp, SubstNt(..)) => {
return Error(sp, "Cannot transcribe in macro LHS".to_string())
}
seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => {
let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
let idx = ei.idx;
ei.stack.push(MatcherTtFrame {
elts: lower_elts,
idx: idx,
});
ei.idx = 0;
cur_eis.push(ei);
}
TtToken(_, ref t) => {
let mut ei_t = ei.clone();
if token_name_eq(t,&tok) {
ei_t.idx += 1;
next_eis.push(ei_t);
}
}
}
}
}
/* error messages here could be improved with links to orig. rules */
if token_name_eq(&tok, &token::Eof) {
if eof_eis.len() == 1 {
let mut v = Vec::new();
for dv in &mut (&mut eof_eis[0]).matches {
v.push(dv.pop().unwrap());
}
return Success(nameize(sess, ms, &v[..]));
} else if eof_eis.len() > 1 {
return Error(sp, "ambiguity: multiple successful parses".to_string());
} else {
return Failure(sp, "unexpected end of macro invocation".to_string());
}
} else {
if (!bb_eis.is_empty() &&!next_eis.is_empty())
|| bb_eis.len() > 1 {
let nts = bb_eis.iter().map(|ei| {
match ei.top_elts.get_tt(ei.idx) {
TtToken(_, MatchNt(bind, name, _, _)) => {
(format!("{} ('{}')",
token::get_ | match ei.top_elts.get_tt(idx) {
/* need to descend into sequence */
TtSequence(sp, seq) => { | random_line_split |
variable.rs | //! A basic `Variable` implementation.
//!
//! `FunctionBuilderContext`, `FunctionBuilder`, and related types have a `Variable`
//! type parameter, to allow frontends that identify variables with
//! their own index types to use them directly. Frontends which don't
//! can use the `Variable` defined here.
use cretonne::entity::EntityRef;
use std::u32;
///! An opaque reference to a variable.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Variable(u32);
impl Variable {
/// Create a new Variable with the given index.
pub fn with_u32(index: u32) -> Self |
}
impl EntityRef for Variable {
fn new(index: usize) -> Self {
debug_assert!(index < (u32::MAX as usize));
Variable(index as u32)
}
fn index(self) -> usize {
self.0 as usize
}
}
| {
debug_assert!(index < u32::MAX);
Variable(index)
} | identifier_body |
variable.rs | //! A basic `Variable` implementation.
//!
//! `FunctionBuilderContext`, `FunctionBuilder`, and related types have a `Variable`
//! type parameter, to allow frontends that identify variables with
//! their own index types to use them directly. Frontends which don't
//! can use the `Variable` defined here.
use cretonne::entity::EntityRef;
use std::u32;
///! An opaque reference to a variable.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Variable(u32);
impl Variable {
/// Create a new Variable with the given index.
pub fn with_u32(index: u32) -> Self {
debug_assert!(index < u32::MAX);
Variable(index)
}
}
impl EntityRef for Variable {
fn new(index: usize) -> Self {
debug_assert!(index < (u32::MAX as usize));
Variable(index as u32)
} |
fn index(self) -> usize {
self.0 as usize
}
} | random_line_split |
|
variable.rs | //! A basic `Variable` implementation.
//!
//! `FunctionBuilderContext`, `FunctionBuilder`, and related types have a `Variable`
//! type parameter, to allow frontends that identify variables with
//! their own index types to use them directly. Frontends which don't
//! can use the `Variable` defined here.
use cretonne::entity::EntityRef;
use std::u32;
///! An opaque reference to a variable.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Variable(u32);
impl Variable {
/// Create a new Variable with the given index.
pub fn with_u32(index: u32) -> Self {
debug_assert!(index < u32::MAX);
Variable(index)
}
}
impl EntityRef for Variable {
fn new(index: usize) -> Self {
debug_assert!(index < (u32::MAX as usize));
Variable(index as u32)
}
fn | (self) -> usize {
self.0 as usize
}
}
| index | identifier_name |
enclosure_getters.rs | // This file is part of feed.
//
// Copyright © 2015-2017 Chris Palmer <[email protected]>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation; either version 3 of the License, or
// (at your option) any later version.
//! The fields under enclosure can be retrieved by using the methods under
//! `Enclosure`.
use EnclosureGetters;
use rss::Enclosure;
impl EnclosureGetters for Enclosure
{
/// Get the url that exists under `Enclosure`.
///
/// # Examples
///
/// ```
/// use feed::{EnclosureBuilder, EnclosureGetters};
///
/// let url = "http://www.podtrac.com/pts/redirect.ogg/".to_owned()
/// + "traffic.libsyn.com/jnite/linuxactionshowep408.ogg";
///
/// let enclosure = EnclosureBuilder::new()
/// .url(url.as_ref())
/// .mime_type("audio/ogg")
/// .finalize()
/// .unwrap();
///
/// assert_eq!(url.to_owned(), enclosure.url())
/// ```
fn url(&self) -> String
{
self.url.clone()
}
/// Get the length that exists under `Enclosure`.
///
/// # Examples
///
/// ```
/// use feed::{EnclosureBuilder, EnclosureGetters};
///
/// let length: i64 = 70772893;
///
/// let url = "http://www.podtrac.com/pts/redirect.ogg/".to_owned()
/// + "traffic.libsyn.com/jnite/linuxactionshowep408.ogg";
///
/// let enclosure = EnclosureBuilder::new()
/// .url(url.as_str())
/// .length(length)
/// .mime_type("audio/ogg")
/// .finalize()
/// .unwrap();
///
/// assert_eq!(length.to_string(), enclosure.length())
/// ```
fn length(&self) -> String
{
self.length.clone()
}
/// Get the enclosure type that exists under `Enclosure`.
///
/// # Examples
///
/// ```
/// use feed::{EnclosureBuilder, EnclosureGetters};
///
/// let enclosure_type = "audio/ogg";
///
/// let url = "http://www.podtrac.com/pts/redirect.ogg/".to_owned()
/// + "traffic.libsyn.com/jnite/linuxactionshowep408.ogg";
///
/// let enclosure = EnclosureBuilder::new()
/// .url(url.as_str())
/// .mime_type(enclosure_type)
/// .finalize()
/// .unwrap();
///
/// assert_eq!(enclosure_type.to_owned(), enclosure.mime_type())
/// ```
fn mime_type(&self) -> String
{ | }
|
self.mime_type.clone()
}
| identifier_body |
enclosure_getters.rs | // This file is part of feed.
//
// Copyright © 2015-2017 Chris Palmer <[email protected]>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation; either version 3 of the License, or
// (at your option) any later version.
//! The fields under enclosure can be retrieved by using the methods under
//! `Enclosure`.
use EnclosureGetters;
use rss::Enclosure;
impl EnclosureGetters for Enclosure
{
/// Get the url that exists under `Enclosure`.
///
/// # Examples | ///
/// let url = "http://www.podtrac.com/pts/redirect.ogg/".to_owned()
/// + "traffic.libsyn.com/jnite/linuxactionshowep408.ogg";
///
/// let enclosure = EnclosureBuilder::new()
/// .url(url.as_ref())
/// .mime_type("audio/ogg")
/// .finalize()
/// .unwrap();
///
/// assert_eq!(url.to_owned(), enclosure.url())
/// ```
fn url(&self) -> String
{
self.url.clone()
}
/// Get the length that exists under `Enclosure`.
///
/// # Examples
///
/// ```
/// use feed::{EnclosureBuilder, EnclosureGetters};
///
/// let length: i64 = 70772893;
///
/// let url = "http://www.podtrac.com/pts/redirect.ogg/".to_owned()
/// + "traffic.libsyn.com/jnite/linuxactionshowep408.ogg";
///
/// let enclosure = EnclosureBuilder::new()
/// .url(url.as_str())
/// .length(length)
/// .mime_type("audio/ogg")
/// .finalize()
/// .unwrap();
///
/// assert_eq!(length.to_string(), enclosure.length())
/// ```
fn length(&self) -> String
{
self.length.clone()
}
/// Get the enclosure type that exists under `Enclosure`.
///
/// # Examples
///
/// ```
/// use feed::{EnclosureBuilder, EnclosureGetters};
///
/// let enclosure_type = "audio/ogg";
///
/// let url = "http://www.podtrac.com/pts/redirect.ogg/".to_owned()
/// + "traffic.libsyn.com/jnite/linuxactionshowep408.ogg";
///
/// let enclosure = EnclosureBuilder::new()
/// .url(url.as_str())
/// .mime_type(enclosure_type)
/// .finalize()
/// .unwrap();
///
/// assert_eq!(enclosure_type.to_owned(), enclosure.mime_type())
/// ```
fn mime_type(&self) -> String
{
self.mime_type.clone()
}
} | ///
/// ```
/// use feed::{EnclosureBuilder, EnclosureGetters}; | random_line_split |
enclosure_getters.rs | // This file is part of feed.
//
// Copyright © 2015-2017 Chris Palmer <[email protected]>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation; either version 3 of the License, or
// (at your option) any later version.
//! The fields under enclosure can be retrieved by using the methods under
//! `Enclosure`.
use EnclosureGetters;
use rss::Enclosure;
impl EnclosureGetters for Enclosure
{
/// Get the url that exists under `Enclosure`.
///
/// # Examples
///
/// ```
/// use feed::{EnclosureBuilder, EnclosureGetters};
///
/// let url = "http://www.podtrac.com/pts/redirect.ogg/".to_owned()
/// + "traffic.libsyn.com/jnite/linuxactionshowep408.ogg";
///
/// let enclosure = EnclosureBuilder::new()
/// .url(url.as_ref())
/// .mime_type("audio/ogg")
/// .finalize()
/// .unwrap();
///
/// assert_eq!(url.to_owned(), enclosure.url())
/// ```
fn url(&self) -> String
{
self.url.clone()
}
/// Get the length that exists under `Enclosure`.
///
/// # Examples
///
/// ```
/// use feed::{EnclosureBuilder, EnclosureGetters};
///
/// let length: i64 = 70772893;
///
/// let url = "http://www.podtrac.com/pts/redirect.ogg/".to_owned()
/// + "traffic.libsyn.com/jnite/linuxactionshowep408.ogg";
///
/// let enclosure = EnclosureBuilder::new()
/// .url(url.as_str())
/// .length(length)
/// .mime_type("audio/ogg")
/// .finalize()
/// .unwrap();
///
/// assert_eq!(length.to_string(), enclosure.length())
/// ```
fn l | &self) -> String
{
self.length.clone()
}
/// Get the enclosure type that exists under `Enclosure`.
///
/// # Examples
///
/// ```
/// use feed::{EnclosureBuilder, EnclosureGetters};
///
/// let enclosure_type = "audio/ogg";
///
/// let url = "http://www.podtrac.com/pts/redirect.ogg/".to_owned()
/// + "traffic.libsyn.com/jnite/linuxactionshowep408.ogg";
///
/// let enclosure = EnclosureBuilder::new()
/// .url(url.as_str())
/// .mime_type(enclosure_type)
/// .finalize()
/// .unwrap();
///
/// assert_eq!(enclosure_type.to_owned(), enclosure.mime_type())
/// ```
fn mime_type(&self) -> String
{
self.mime_type.clone()
}
}
| ength( | identifier_name |
deref_mut.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::cell::RefCell;
use core::cell::RefMut;
use core::ops::DerefMut;
use core::ops::Deref;
// pub struct RefCell<T:?Sized> {
// borrow: Cell<BorrowFlag>,
// value: UnsafeCell<T>,
// }
// impl<T> RefCell<T> {
// /// Creates a new `RefCell` containing `value`.
// ///
// /// # Examples
// ///
// /// ```
// /// use std::cell::RefCell;
// ///
// /// let c = RefCell::new(5);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn new(value: T) -> RefCell<T> {
// RefCell {
// value: UnsafeCell::new(value),
// borrow: Cell::new(UNUSED),
// }
// }
//
// /// Consumes the `RefCell`, returning the wrapped value.
// ///
// /// # Examples
// ///
// /// ```
// /// use std::cell::RefCell;
// ///
// /// let c = RefCell::new(5);
// ///
// /// let five = c.into_inner();
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn into_inner(self) -> T {
// // Since this function takes `self` (the `RefCell`) by value, the
// // compiler statically verifies that it is not currently borrowed.
// // Therefore the following assertion is just a `debug_assert!`.
// debug_assert!(self.borrow.get() == UNUSED);
// unsafe { self.value.into_inner() }
// }
// }
// pub struct RefMut<'b, T:?Sized + 'b> {
// // FIXME #12808: strange name to try to avoid interfering with
// // field accesses of the contained type via Deref
// _value: &'b mut T,
// _borrow: BorrowRefMut<'b>,
// }
// impl<'b, T:?Sized> Deref for RefMut<'b, T> {
// type Target = T;
//
// #[inline]
// fn deref<'a>(&'a self) -> &'a T {
// self._value
// }
// }
// impl<'b, T:?Sized> DerefMut for RefMut<'b, T> {
// #[inline]
// fn deref_mut<'a>(&'a mut self) -> &'a mut T {
// self._value
// }
// }
type T = i32;
#[test]
fn deref_test1() |
}
| {
let value: T = 68;
let refcell: RefCell<T> = RefCell::<T>::new(value);
let mut value_refmut: RefMut<T> = refcell.borrow_mut();
{
let deref_mut: &mut T = value_refmut.deref_mut();
*deref_mut = 500;
}
let value_ref: &T = value_refmut.deref();
assert_eq!(*value_ref, 500);
} | identifier_body |
deref_mut.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::cell::RefCell;
use core::cell::RefMut;
use core::ops::DerefMut;
use core::ops::Deref;
// pub struct RefCell<T:?Sized> {
// borrow: Cell<BorrowFlag>,
// value: UnsafeCell<T>,
// }
// impl<T> RefCell<T> {
// /// Creates a new `RefCell` containing `value`.
// ///
// /// # Examples
// ///
// /// ```
// /// use std::cell::RefCell;
// ///
// /// let c = RefCell::new(5);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn new(value: T) -> RefCell<T> {
// RefCell {
// value: UnsafeCell::new(value),
// borrow: Cell::new(UNUSED),
// }
// }
//
// /// Consumes the `RefCell`, returning the wrapped value.
// ///
// /// # Examples
// ///
// /// ```
// /// use std::cell::RefCell;
// ///
// /// let c = RefCell::new(5);
// ///
// /// let five = c.into_inner();
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn into_inner(self) -> T {
// // Since this function takes `self` (the `RefCell`) by value, the
// // compiler statically verifies that it is not currently borrowed.
// // Therefore the following assertion is just a `debug_assert!`.
// debug_assert!(self.borrow.get() == UNUSED);
// unsafe { self.value.into_inner() }
// }
// }
// pub struct RefMut<'b, T:?Sized + 'b> {
// // FIXME #12808: strange name to try to avoid interfering with
// // field accesses of the contained type via Deref
// _value: &'b mut T,
// _borrow: BorrowRefMut<'b>,
// }
// impl<'b, T:?Sized> Deref for RefMut<'b, T> {
// type Target = T;
//
// #[inline]
// fn deref<'a>(&'a self) -> &'a T {
// self._value
// }
// }
// impl<'b, T:?Sized> DerefMut for RefMut<'b, T> {
// #[inline]
// fn deref_mut<'a>(&'a mut self) -> &'a mut T {
// self._value
// }
// }
type T = i32;
#[test]
fn | () {
let value: T = 68;
let refcell: RefCell<T> = RefCell::<T>::new(value);
let mut value_refmut: RefMut<T> = refcell.borrow_mut();
{
let deref_mut: &mut T = value_refmut.deref_mut();
*deref_mut = 500;
}
let value_ref: &T = value_refmut.deref();
assert_eq!(*value_ref, 500);
}
}
| deref_test1 | identifier_name |
deref_mut.rs | #![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::cell::RefCell;
use core::cell::RefMut;
use core::ops::DerefMut;
use core::ops::Deref;
// pub struct RefCell<T:?Sized> {
// borrow: Cell<BorrowFlag>,
// value: UnsafeCell<T>,
// }
// impl<T> RefCell<T> {
// /// Creates a new `RefCell` containing `value`.
// ///
// /// # Examples
// ///
// /// ```
// /// use std::cell::RefCell;
// ///
// /// let c = RefCell::new(5);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn new(value: T) -> RefCell<T> {
// RefCell {
// value: UnsafeCell::new(value),
// borrow: Cell::new(UNUSED),
// }
// }
//
// /// Consumes the `RefCell`, returning the wrapped value. | // /// ```
// /// use std::cell::RefCell;
// ///
// /// let c = RefCell::new(5);
// ///
// /// let five = c.into_inner();
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn into_inner(self) -> T {
// // Since this function takes `self` (the `RefCell`) by value, the
// // compiler statically verifies that it is not currently borrowed.
// // Therefore the following assertion is just a `debug_assert!`.
// debug_assert!(self.borrow.get() == UNUSED);
// unsafe { self.value.into_inner() }
// }
// }
// pub struct RefMut<'b, T:?Sized + 'b> {
// // FIXME #12808: strange name to try to avoid interfering with
// // field accesses of the contained type via Deref
// _value: &'b mut T,
// _borrow: BorrowRefMut<'b>,
// }
// impl<'b, T:?Sized> Deref for RefMut<'b, T> {
// type Target = T;
//
// #[inline]
// fn deref<'a>(&'a self) -> &'a T {
// self._value
// }
// }
// impl<'b, T:?Sized> DerefMut for RefMut<'b, T> {
// #[inline]
// fn deref_mut<'a>(&'a mut self) -> &'a mut T {
// self._value
// }
// }
type T = i32;
#[test]
fn deref_test1() {
let value: T = 68;
let refcell: RefCell<T> = RefCell::<T>::new(value);
let mut value_refmut: RefMut<T> = refcell.borrow_mut();
{
let deref_mut: &mut T = value_refmut.deref_mut();
*deref_mut = 500;
}
let value_ref: &T = value_refmut.deref();
assert_eq!(*value_ref, 500);
}
} | // ///
// /// # Examples
// /// | random_line_split |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
/// A random number generator which shares one instance of an `OsRng`.
///
/// A problem with `OsRng`, which is inherited by `StdRng` and so
/// `ThreadRng`, is that it reads from `/dev/random`, and so consumes
/// a file descriptor. For multi-threaded applications like Servo,
/// it is easy to exhaust the supply of file descriptors this way.
///
/// This crate fixes that, by only using one `OsRng`, which is just
/// used to seed and re-seed an `ServoRng`.
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[cfg(target_pointer_width = "64")]
use rand::isaac::Isaac64Rng as IsaacWordRng;
#[cfg(target_pointer_width = "32")]
use rand::isaac::IsaacRng as IsaacWordRng;
use rand::os::OsRng;
use rand::reseeding::{Reseeder, ReseedingRng};
pub use rand::{Rand, Rng, SeedableRng};
use std::cell::RefCell;
use std::mem;
use std::rc::Rc;
use std::sync::Mutex;
use std::u64;
use uuid::Uuid;
// Slightly annoying having to cast between sizes.
#[cfg(target_pointer_width = "64")]
fn as_isaac_seed(seed: &[usize]) -> &[u64] {
unsafe { mem::transmute(seed) }
}
#[cfg(target_pointer_width = "32")]
fn as_isaac_seed(seed: &[usize]) -> &[u32] {
unsafe { mem::transmute(seed) }
}
// The shared RNG which may hold on to a file descriptor
lazy_static! {
static ref OS_RNG: Mutex<OsRng> = match OsRng::new() {
Ok(r) => Mutex::new(r),
Err(e) => panic!("Failed to seed OsRng: {}", e),
};
}
// Generate 32K of data between reseedings
const RESEED_THRESHOLD: u64 = 32_768;
// An in-memory RNG that only uses the shared file descriptor for seeding and reseeding.
pub struct ServoRng {
rng: ReseedingRng<IsaacWordRng, ServoReseeder>,
}
impl Rng for ServoRng {
#[inline]
fn next_u32(&mut self) -> u32 {
self.rng.next_u32()
}
#[inline]
fn next_u64(&mut self) -> u64 {
self.rng.next_u64()
}
}
impl<'a> SeedableRng<&'a [usize]> for ServoRng {
/// Create a manually-reseeding instane of `ServoRng`.
///
/// Note that this RNG does not reseed itself, so care is needed to reseed the RNG
/// is required to be cryptographically sound.
fn from_seed(seed: &[usize]) -> ServoRng {
trace!("Creating new manually-reseeded ServoRng.");
let isaac_rng = IsaacWordRng::from_seed(as_isaac_seed(seed));
let reseeding_rng = ReseedingRng::new(isaac_rng, u64::MAX, ServoReseeder);
ServoRng { rng: reseeding_rng }
}
/// Reseed the RNG.
fn reseed(&mut self, seed: &'a [usize]) {
trace!("Manually reseeding ServoRng.");
self.rng.reseed((ServoReseeder, as_isaac_seed(seed)))
}
}
impl ServoRng {
/// Create an auto-reseeding instance of `ServoRng`.
///
/// This uses the shared `OsRng`, so avoids consuming
/// a file descriptor.
pub fn new() -> ServoRng {
trace!("Creating new ServoRng.");
let mut os_rng = OS_RNG.lock().expect("Poisoned lock.");
let isaac_rng = IsaacWordRng::rand(&mut *os_rng);
let reseeding_rng = ReseedingRng::new(isaac_rng, RESEED_THRESHOLD, ServoReseeder);
ServoRng { rng: reseeding_rng }
}
}
// The reseeder for the in-memory RNG.
struct ServoReseeder;
impl Reseeder<IsaacWordRng> for ServoReseeder {
fn reseed(&mut self, rng: &mut IsaacWordRng) {
trace!("Reseeding ServoRng.");
let mut os_rng = OS_RNG.lock().expect("Poisoned lock.");
*rng = IsaacWordRng::rand(&mut *os_rng);
}
}
impl Default for ServoReseeder {
fn default() -> ServoReseeder {
ServoReseeder
}
}
// A thread-local RNG, designed as a drop-in replacement for rand::ThreadRng.
#[derive(Clone)]
pub struct | {
rng: Rc<RefCell<ServoRng>>,
}
// A thread-local RNG, designed as a drop-in replacement for rand::thread_rng.
pub fn thread_rng() -> ServoThreadRng {
SERVO_THREAD_RNG.with(|t| t.clone())
}
thread_local! {
static SERVO_THREAD_RNG: ServoThreadRng = ServoThreadRng { rng: Rc::new(RefCell::new(ServoRng::new())) };
}
impl Rng for ServoThreadRng {
fn next_u32(&mut self) -> u32 {
self.rng.borrow_mut().next_u32()
}
fn next_u64(&mut self) -> u64 {
self.rng.borrow_mut().next_u64()
}
#[inline]
fn fill_bytes(&mut self, bytes: &mut [u8]) {
self.rng.borrow_mut().fill_bytes(bytes)
}
}
// Generates a random value using the thread-local random number generator.
// A drop-in replacement for rand::random.
#[inline]
pub fn random<T: Rand>() -> T {
thread_rng().gen()
}
// TODO(eijebong): Replace calls to this by random once `uuid::Uuid` implements `rand::Rand` again.
#[inline]
pub fn random_uuid() -> Uuid {
let mut bytes = [0; 16];
thread_rng().fill_bytes(&mut bytes);
Uuid::from_random_bytes(bytes)
}
| ServoThreadRng | identifier_name |
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
/// A random number generator which shares one instance of an `OsRng`.
///
/// A problem with `OsRng`, which is inherited by `StdRng` and so
/// `ThreadRng`, is that it reads from `/dev/random`, and so consumes
/// a file descriptor. For multi-threaded applications like Servo,
/// it is easy to exhaust the supply of file descriptors this way.
///
/// This crate fixes that, by only using one `OsRng`, which is just
/// used to seed and re-seed an `ServoRng`.
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[cfg(target_pointer_width = "64")]
use rand::isaac::Isaac64Rng as IsaacWordRng;
#[cfg(target_pointer_width = "32")]
use rand::isaac::IsaacRng as IsaacWordRng;
use rand::os::OsRng;
use rand::reseeding::{Reseeder, ReseedingRng};
pub use rand::{Rand, Rng, SeedableRng};
use std::cell::RefCell;
use std::mem;
use std::rc::Rc;
use std::sync::Mutex;
use std::u64;
use uuid::Uuid;
// Slightly annoying having to cast between sizes.
#[cfg(target_pointer_width = "64")]
fn as_isaac_seed(seed: &[usize]) -> &[u64] {
unsafe { mem::transmute(seed) }
}
#[cfg(target_pointer_width = "32")]
fn as_isaac_seed(seed: &[usize]) -> &[u32] {
unsafe { mem::transmute(seed) }
}
// The shared RNG which may hold on to a file descriptor
lazy_static! {
static ref OS_RNG: Mutex<OsRng> = match OsRng::new() {
Ok(r) => Mutex::new(r),
Err(e) => panic!("Failed to seed OsRng: {}", e),
};
}
// Generate 32K of data between reseedings
const RESEED_THRESHOLD: u64 = 32_768;
// An in-memory RNG that only uses the shared file descriptor for seeding and reseeding.
pub struct ServoRng {
rng: ReseedingRng<IsaacWordRng, ServoReseeder>,
}
impl Rng for ServoRng {
#[inline]
fn next_u32(&mut self) -> u32 {
self.rng.next_u32()
}
#[inline]
fn next_u64(&mut self) -> u64 {
self.rng.next_u64()
}
}
impl<'a> SeedableRng<&'a [usize]> for ServoRng {
/// Create a manually-reseeding instane of `ServoRng`.
///
/// Note that this RNG does not reseed itself, so care is needed to reseed the RNG
/// is required to be cryptographically sound.
fn from_seed(seed: &[usize]) -> ServoRng {
trace!("Creating new manually-reseeded ServoRng.");
let isaac_rng = IsaacWordRng::from_seed(as_isaac_seed(seed));
let reseeding_rng = ReseedingRng::new(isaac_rng, u64::MAX, ServoReseeder);
ServoRng { rng: reseeding_rng }
}
/// Reseed the RNG.
fn reseed(&mut self, seed: &'a [usize]) {
trace!("Manually reseeding ServoRng.");
self.rng.reseed((ServoReseeder, as_isaac_seed(seed)))
}
}
impl ServoRng {
/// Create an auto-reseeding instance of `ServoRng`.
///
/// This uses the shared `OsRng`, so avoids consuming
/// a file descriptor.
pub fn new() -> ServoRng {
trace!("Creating new ServoRng.");
let mut os_rng = OS_RNG.lock().expect("Poisoned lock.");
let isaac_rng = IsaacWordRng::rand(&mut *os_rng);
let reseeding_rng = ReseedingRng::new(isaac_rng, RESEED_THRESHOLD, ServoReseeder);
ServoRng { rng: reseeding_rng }
}
}
// The reseeder for the in-memory RNG.
struct ServoReseeder;
impl Reseeder<IsaacWordRng> for ServoReseeder {
fn reseed(&mut self, rng: &mut IsaacWordRng) {
trace!("Reseeding ServoRng.");
let mut os_rng = OS_RNG.lock().expect("Poisoned lock.");
*rng = IsaacWordRng::rand(&mut *os_rng);
}
}
impl Default for ServoReseeder {
fn default() -> ServoReseeder {
ServoReseeder
}
}
// A thread-local RNG, designed as a drop-in replacement for rand::ThreadRng.
#[derive(Clone)]
pub struct ServoThreadRng {
rng: Rc<RefCell<ServoRng>>,
}
// A thread-local RNG, designed as a drop-in replacement for rand::thread_rng.
pub fn thread_rng() -> ServoThreadRng {
SERVO_THREAD_RNG.with(|t| t.clone())
}
thread_local! {
static SERVO_THREAD_RNG: ServoThreadRng = ServoThreadRng { rng: Rc::new(RefCell::new(ServoRng::new())) };
}
impl Rng for ServoThreadRng {
fn next_u32(&mut self) -> u32 {
self.rng.borrow_mut().next_u32()
}
fn next_u64(&mut self) -> u64 {
self.rng.borrow_mut().next_u64()
}
#[inline]
fn fill_bytes(&mut self, bytes: &mut [u8]) {
self.rng.borrow_mut().fill_bytes(bytes)
}
}
// Generates a random value using the thread-local random number generator.
// A drop-in replacement for rand::random.
#[inline]
pub fn random<T: Rand>() -> T {
thread_rng().gen()
}
// TODO(eijebong): Replace calls to this by random once `uuid::Uuid` implements `rand::Rand` again.
#[inline]
pub fn random_uuid() -> Uuid | {
let mut bytes = [0; 16];
thread_rng().fill_bytes(&mut bytes);
Uuid::from_random_bytes(bytes)
} | identifier_body |
|
lib.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
/// A random number generator which shares one instance of an `OsRng`.
///
/// A problem with `OsRng`, which is inherited by `StdRng` and so
/// `ThreadRng`, is that it reads from `/dev/random`, and so consumes
/// a file descriptor. For multi-threaded applications like Servo,
/// it is easy to exhaust the supply of file descriptors this way.
///
/// This crate fixes that, by only using one `OsRng`, which is just
/// used to seed and re-seed an `ServoRng`.
| #[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[cfg(target_pointer_width = "64")]
use rand::isaac::Isaac64Rng as IsaacWordRng;
#[cfg(target_pointer_width = "32")]
use rand::isaac::IsaacRng as IsaacWordRng;
use rand::os::OsRng;
use rand::reseeding::{Reseeder, ReseedingRng};
pub use rand::{Rand, Rng, SeedableRng};
use std::cell::RefCell;
use std::mem;
use std::rc::Rc;
use std::sync::Mutex;
use std::u64;
use uuid::Uuid;
// Slightly annoying having to cast between sizes.
#[cfg(target_pointer_width = "64")]
fn as_isaac_seed(seed: &[usize]) -> &[u64] {
unsafe { mem::transmute(seed) }
}
#[cfg(target_pointer_width = "32")]
fn as_isaac_seed(seed: &[usize]) -> &[u32] {
unsafe { mem::transmute(seed) }
}
// The shared RNG which may hold on to a file descriptor
lazy_static! {
static ref OS_RNG: Mutex<OsRng> = match OsRng::new() {
Ok(r) => Mutex::new(r),
Err(e) => panic!("Failed to seed OsRng: {}", e),
};
}
// Generate 32K of data between reseedings
const RESEED_THRESHOLD: u64 = 32_768;
// An in-memory RNG that only uses the shared file descriptor for seeding and reseeding.
pub struct ServoRng {
rng: ReseedingRng<IsaacWordRng, ServoReseeder>,
}
impl Rng for ServoRng {
#[inline]
fn next_u32(&mut self) -> u32 {
self.rng.next_u32()
}
#[inline]
fn next_u64(&mut self) -> u64 {
self.rng.next_u64()
}
}
impl<'a> SeedableRng<&'a [usize]> for ServoRng {
/// Create a manually-reseeding instane of `ServoRng`.
///
/// Note that this RNG does not reseed itself, so care is needed to reseed the RNG
/// is required to be cryptographically sound.
fn from_seed(seed: &[usize]) -> ServoRng {
trace!("Creating new manually-reseeded ServoRng.");
let isaac_rng = IsaacWordRng::from_seed(as_isaac_seed(seed));
let reseeding_rng = ReseedingRng::new(isaac_rng, u64::MAX, ServoReseeder);
ServoRng { rng: reseeding_rng }
}
/// Reseed the RNG.
fn reseed(&mut self, seed: &'a [usize]) {
trace!("Manually reseeding ServoRng.");
self.rng.reseed((ServoReseeder, as_isaac_seed(seed)))
}
}
impl ServoRng {
/// Create an auto-reseeding instance of `ServoRng`.
///
/// This uses the shared `OsRng`, so avoids consuming
/// a file descriptor.
pub fn new() -> ServoRng {
trace!("Creating new ServoRng.");
let mut os_rng = OS_RNG.lock().expect("Poisoned lock.");
let isaac_rng = IsaacWordRng::rand(&mut *os_rng);
let reseeding_rng = ReseedingRng::new(isaac_rng, RESEED_THRESHOLD, ServoReseeder);
ServoRng { rng: reseeding_rng }
}
}
// The reseeder for the in-memory RNG.
struct ServoReseeder;
impl Reseeder<IsaacWordRng> for ServoReseeder {
fn reseed(&mut self, rng: &mut IsaacWordRng) {
trace!("Reseeding ServoRng.");
let mut os_rng = OS_RNG.lock().expect("Poisoned lock.");
*rng = IsaacWordRng::rand(&mut *os_rng);
}
}
impl Default for ServoReseeder {
fn default() -> ServoReseeder {
ServoReseeder
}
}
// A thread-local RNG, designed as a drop-in replacement for rand::ThreadRng.
#[derive(Clone)]
pub struct ServoThreadRng {
rng: Rc<RefCell<ServoRng>>,
}
// A thread-local RNG, designed as a drop-in replacement for rand::thread_rng.
pub fn thread_rng() -> ServoThreadRng {
SERVO_THREAD_RNG.with(|t| t.clone())
}
thread_local! {
static SERVO_THREAD_RNG: ServoThreadRng = ServoThreadRng { rng: Rc::new(RefCell::new(ServoRng::new())) };
}
impl Rng for ServoThreadRng {
fn next_u32(&mut self) -> u32 {
self.rng.borrow_mut().next_u32()
}
fn next_u64(&mut self) -> u64 {
self.rng.borrow_mut().next_u64()
}
#[inline]
fn fill_bytes(&mut self, bytes: &mut [u8]) {
self.rng.borrow_mut().fill_bytes(bytes)
}
}
// Generates a random value using the thread-local random number generator.
// A drop-in replacement for rand::random.
#[inline]
pub fn random<T: Rand>() -> T {
thread_rng().gen()
}
// TODO(eijebong): Replace calls to this by random once `uuid::Uuid` implements `rand::Rand` again.
#[inline]
pub fn random_uuid() -> Uuid {
let mut bytes = [0; 16];
thread_rng().fill_bytes(&mut bytes);
Uuid::from_random_bytes(bytes)
} | random_line_split |
|
channel.rs | use futures::channel::mpsc;
use futures::executor::block_on;
use futures::future::poll_fn;
use futures::sink::SinkExt;
use futures::stream::StreamExt;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;
#[test]
fn sequence() {
let (tx, rx) = mpsc::channel(1);
let amt = 20;
let t = thread::spawn(move || block_on(send_sequence(amt, tx)));
let list: Vec<_> = block_on(rx.collect());
let mut list = list.into_iter();
for i in (1..=amt).rev() {
assert_eq!(list.next(), Some(i));
}
assert_eq!(list.next(), None);
t.join().unwrap();
}
async fn send_sequence(n: u32, mut sender: mpsc::Sender<u32>) {
for x in 0..n {
sender.send(n - x).await.unwrap();
}
}
#[test]
fn drop_sender() |
#[test]
fn drop_rx() {
let (mut tx, rx) = mpsc::channel::<u32>(1);
block_on(tx.send(1)).unwrap();
drop(rx);
assert!(block_on(tx.send(1)).is_err());
}
#[test]
fn drop_order() {
static DROPS: AtomicUsize = AtomicUsize::new(0);
let (mut tx, rx) = mpsc::channel(1);
struct A;
impl Drop for A {
fn drop(&mut self) {
DROPS.fetch_add(1, Ordering::SeqCst);
}
}
block_on(tx.send(A)).unwrap();
assert_eq!(DROPS.load(Ordering::SeqCst), 0);
drop(rx);
assert_eq!(DROPS.load(Ordering::SeqCst), 1);
assert!(block_on(tx.send(A)).is_err());
assert_eq!(DROPS.load(Ordering::SeqCst), 2);
}
| {
let (tx, mut rx) = mpsc::channel::<u32>(1);
drop(tx);
let f = poll_fn(|cx| rx.poll_next_unpin(cx));
assert_eq!(block_on(f), None)
} | identifier_body |
channel.rs | use futures::channel::mpsc;
use futures::executor::block_on;
use futures::future::poll_fn;
use futures::sink::SinkExt;
use futures::stream::StreamExt;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;
#[test]
fn | () {
let (tx, rx) = mpsc::channel(1);
let amt = 20;
let t = thread::spawn(move || block_on(send_sequence(amt, tx)));
let list: Vec<_> = block_on(rx.collect());
let mut list = list.into_iter();
for i in (1..=amt).rev() {
assert_eq!(list.next(), Some(i));
}
assert_eq!(list.next(), None);
t.join().unwrap();
}
async fn send_sequence(n: u32, mut sender: mpsc::Sender<u32>) {
for x in 0..n {
sender.send(n - x).await.unwrap();
}
}
#[test]
fn drop_sender() {
let (tx, mut rx) = mpsc::channel::<u32>(1);
drop(tx);
let f = poll_fn(|cx| rx.poll_next_unpin(cx));
assert_eq!(block_on(f), None)
}
#[test]
fn drop_rx() {
let (mut tx, rx) = mpsc::channel::<u32>(1);
block_on(tx.send(1)).unwrap();
drop(rx);
assert!(block_on(tx.send(1)).is_err());
}
#[test]
fn drop_order() {
static DROPS: AtomicUsize = AtomicUsize::new(0);
let (mut tx, rx) = mpsc::channel(1);
struct A;
impl Drop for A {
fn drop(&mut self) {
DROPS.fetch_add(1, Ordering::SeqCst);
}
}
block_on(tx.send(A)).unwrap();
assert_eq!(DROPS.load(Ordering::SeqCst), 0);
drop(rx);
assert_eq!(DROPS.load(Ordering::SeqCst), 1);
assert!(block_on(tx.send(A)).is_err());
assert_eq!(DROPS.load(Ordering::SeqCst), 2);
}
| sequence | identifier_name |
channel.rs | use futures::channel::mpsc;
use futures::executor::block_on;
use futures::future::poll_fn;
use futures::sink::SinkExt;
use futures::stream::StreamExt;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;
#[test]
fn sequence() {
let (tx, rx) = mpsc::channel(1);
let amt = 20;
let t = thread::spawn(move || block_on(send_sequence(amt, tx)));
let list: Vec<_> = block_on(rx.collect());
let mut list = list.into_iter();
for i in (1..=amt).rev() {
assert_eq!(list.next(), Some(i));
}
assert_eq!(list.next(), None);
t.join().unwrap();
}
async fn send_sequence(n: u32, mut sender: mpsc::Sender<u32>) {
for x in 0..n {
sender.send(n - x).await.unwrap();
}
}
#[test]
fn drop_sender() {
let (tx, mut rx) = mpsc::channel::<u32>(1);
drop(tx);
let f = poll_fn(|cx| rx.poll_next_unpin(cx));
assert_eq!(block_on(f), None)
}
#[test]
fn drop_rx() {
let (mut tx, rx) = mpsc::channel::<u32>(1);
block_on(tx.send(1)).unwrap();
drop(rx);
assert!(block_on(tx.send(1)).is_err()); | static DROPS: AtomicUsize = AtomicUsize::new(0);
let (mut tx, rx) = mpsc::channel(1);
struct A;
impl Drop for A {
fn drop(&mut self) {
DROPS.fetch_add(1, Ordering::SeqCst);
}
}
block_on(tx.send(A)).unwrap();
assert_eq!(DROPS.load(Ordering::SeqCst), 0);
drop(rx);
assert_eq!(DROPS.load(Ordering::SeqCst), 1);
assert!(block_on(tx.send(A)).is_err());
assert_eq!(DROPS.load(Ordering::SeqCst), 2);
} | }
#[test]
fn drop_order() { | random_line_split |
env.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Runtime environment settings
use from_str::FromStr;
use option::{Some, None};
use os;
// Note that these are all accessed without any synchronization.
// They are expected to be initialized once then left alone.
static mut MIN_STACK: uint = 2 * 1024 * 1024;
static mut DEBUG_BORROW: bool = false;
static mut POISON_ON_FREE: bool = false;
pub fn init() {
unsafe {
match os::getenv("RUST_MIN_STACK") {
Some(s) => match FromStr::from_str(s) {
Some(i) => MIN_STACK = i,
None => ()
},
None => ()
}
match os::getenv("RUST_DEBUG_BORROW") {
Some(_) => DEBUG_BORROW = true,
None => ()
}
match os::getenv("RUST_POISON_ON_FREE") {
Some(_) => POISON_ON_FREE = true,
None => ()
}
}
}
pub fn | () -> uint {
unsafe { MIN_STACK }
}
pub fn debug_borrow() -> bool {
unsafe { DEBUG_BORROW }
}
pub fn poison_on_free() -> bool {
unsafe { POISON_ON_FREE }
}
| min_stack | identifier_name |
env.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Runtime environment settings
use from_str::FromStr;
use option::{Some, None};
use os;
// Note that these are all accessed without any synchronization.
// They are expected to be initialized once then left alone.
static mut MIN_STACK: uint = 2 * 1024 * 1024;
static mut DEBUG_BORROW: bool = false;
static mut POISON_ON_FREE: bool = false;
pub fn init() {
unsafe {
match os::getenv("RUST_MIN_STACK") {
Some(s) => match FromStr::from_str(s) {
Some(i) => MIN_STACK = i,
None => ()
},
None => ()
}
match os::getenv("RUST_DEBUG_BORROW") {
Some(_) => DEBUG_BORROW = true,
None => ()
}
match os::getenv("RUST_POISON_ON_FREE") {
Some(_) => POISON_ON_FREE = true,
None => ()
}
}
}
pub fn min_stack() -> uint {
unsafe { MIN_STACK }
}
pub fn debug_borrow() -> bool {
unsafe { DEBUG_BORROW }
}
pub fn poison_on_free() -> bool { | } | unsafe { POISON_ON_FREE } | random_line_split |
env.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Runtime environment settings
use from_str::FromStr;
use option::{Some, None};
use os;
// Note that these are all accessed without any synchronization.
// They are expected to be initialized once then left alone.
static mut MIN_STACK: uint = 2 * 1024 * 1024;
static mut DEBUG_BORROW: bool = false;
static mut POISON_ON_FREE: bool = false;
pub fn init() {
unsafe {
match os::getenv("RUST_MIN_STACK") {
Some(s) => match FromStr::from_str(s) {
Some(i) => MIN_STACK = i,
None => ()
},
None => ()
}
match os::getenv("RUST_DEBUG_BORROW") {
Some(_) => DEBUG_BORROW = true,
None => ()
}
match os::getenv("RUST_POISON_ON_FREE") {
Some(_) => POISON_ON_FREE = true,
None => ()
}
}
}
pub fn min_stack() -> uint {
unsafe { MIN_STACK }
}
pub fn debug_borrow() -> bool {
unsafe { DEBUG_BORROW }
}
pub fn poison_on_free() -> bool | {
unsafe { POISON_ON_FREE }
} | identifier_body |
|
tail.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::{Context, Error};
use bookmarks::{BookmarkUpdateLog, BookmarkUpdateLogEntry, Freshness};
use cloned::cloned;
use context::CoreContext;
use futures::{
future::{self, FutureExt},
stream::{self, StreamExt},
TryStreamExt,
};
use mononoke_types::RepositoryId;
use scuba_ext::MononokeScubaSampleBuilder;
use slog::debug;
use std::sync::Arc;
use std::time::Duration;
use crate::reporting::log_noop_iteration_to_scuba;
const SLEEP_SECS: u64 = 10;
const SIGNLE_DB_QUERY_ENTRIES_LIMIT: u64 = 10;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct QueueSize(pub usize);
/// Adds remaining number of changesets to sync to each entry
/// The idea is to indicate the number of changesets, left to sync
/// *after* a given changeset has been synced, therefore `n-i-1`
/// For example, `[c1, c2, c3]` will turn into `[(c1, 2), (c2, 1), (c3, 0)]`
fn add_queue_sizes<T>(
items: Vec<T>,
initial_queue_size: usize,
) -> impl Iterator<Item = (T, QueueSize)> {
items
.into_iter()
.enumerate()
.map(move |(i, item)| (item, QueueSize(initial_queue_size - i - 1)))
}
/// Run a queue size query, consume the produced `Result` and turn it
/// into an `Option`, suitable for `unfold`
async fn query_queue_size(
ctx: CoreContext,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
current_id: u64,
) -> Result<u64, Error> {
bookmark_update_log
.count_further_bookmark_log_entries(ctx.clone(), current_id, None)
.await
.map(|queue_size| {
debug!(ctx.logger(), "queue size query returned: {}", queue_size);
queue_size
})
}
/// Produce an infinite stream of `Result` from a fallible item factory
/// Two differences with the normal `unfold`:
/// - this one does not expect the item factory (`f`) to return an `Option`,
/// so there's no way to terminate a stream from within `f`
/// - this one expects `f` to return a `Result`, which is threaded downstream
/// and allows the consumer of the stream to terminate it on `Err`
/// The main motivation for this is to be able to use `?` in the item factory
fn unfold_forever<T, F, Fut, Item>(
init: T,
mut f: F,
) -> impl stream::Stream<Item = Result<Item, Error>>
where
T: Copy,
F: FnMut(T) -> Fut,
Fut: future::Future<Output = Result<(Item, T), Error>>,
{
stream::unfold(init, move |iteration_value| {
f(iteration_value).then(move |result| {
match result {
Ok((item, next_it_val)) => future::ready(Some((Ok(item), next_it_val))),
Err(e) => future::ready(Some((Err(e), iteration_value))),
}
})
})
}
pub(crate) fn tail_entries(
ctx: CoreContext,
start_id: u64,
repo_id: RepositoryId,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
scuba_sample: MononokeScubaSampleBuilder,
) -> impl stream::Stream<Item = Result<(BookmarkUpdateLogEntry, QueueSize), Error>> {
unfold_forever((0, start_id), move |(iteration, current_id)| {
cloned!(ctx, bookmark_update_log, scuba_sample);
async move {
let entries: Vec<Result<_, Error>> = bookmark_update_log
.read_next_bookmark_log_entries(
ctx.clone(),
current_id,
SIGNLE_DB_QUERY_ENTRIES_LIMIT,
Freshness::MaybeStale,
)
.collect()
.await;
let entries: Result<Vec<_>, Error> = entries.into_iter().collect();
let entries: Vec<_> = entries.context("While querying bookmarks_update_log")?;
let queue_size =
query_queue_size(ctx.clone(), bookmark_update_log.clone(), current_id).await?;
match entries.last().map(|last_item_ref| last_item_ref.id) {
Some(last_entry_id) => |
None => {
debug!(
ctx.logger(),
"tail_entries: no more entries during iteration {}. Sleeping.", iteration
);
log_noop_iteration_to_scuba(scuba_sample, repo_id);
tokio::time::sleep(Duration::new(SLEEP_SECS, 0)).await;
Ok((stream::empty().boxed(), (iteration + 1, current_id)))
}
}
}
})
.try_flatten()
}
| {
debug!(
ctx.logger(),
"tail_entries generating, iteration {}", iteration
);
let entries_with_queue_size: std::iter::Map<_, _> =
add_queue_sizes(entries, queue_size as usize).map(Ok);
Ok((
stream::iter(entries_with_queue_size).boxed(),
(iteration + 1, last_entry_id as u64),
))
} | conditional_block |
tail.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::{Context, Error};
use bookmarks::{BookmarkUpdateLog, BookmarkUpdateLogEntry, Freshness};
use cloned::cloned;
use context::CoreContext;
use futures::{
future::{self, FutureExt},
stream::{self, StreamExt},
TryStreamExt,
};
use mononoke_types::RepositoryId;
use scuba_ext::MononokeScubaSampleBuilder;
use slog::debug;
use std::sync::Arc;
use std::time::Duration;
use crate::reporting::log_noop_iteration_to_scuba;
const SLEEP_SECS: u64 = 10;
const SIGNLE_DB_QUERY_ENTRIES_LIMIT: u64 = 10;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct QueueSize(pub usize);
/// Adds remaining number of changesets to sync to each entry
/// The idea is to indicate the number of changesets, left to sync
/// *after* a given changeset has been synced, therefore `n-i-1`
/// For example, `[c1, c2, c3]` will turn into `[(c1, 2), (c2, 1), (c3, 0)]`
fn add_queue_sizes<T>(
items: Vec<T>,
initial_queue_size: usize,
) -> impl Iterator<Item = (T, QueueSize)> {
items
.into_iter()
.enumerate()
.map(move |(i, item)| (item, QueueSize(initial_queue_size - i - 1)))
}
/// Run a queue size query, consume the produced `Result` and turn it
/// into an `Option`, suitable for `unfold`
async fn query_queue_size(
ctx: CoreContext,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
current_id: u64,
) -> Result<u64, Error> {
bookmark_update_log
.count_further_bookmark_log_entries(ctx.clone(), current_id, None)
.await
.map(|queue_size| {
debug!(ctx.logger(), "queue size query returned: {}", queue_size);
queue_size
})
}
/// Produce an infinite stream of `Result` from a fallible item factory
/// Two differences with the normal `unfold`:
/// - this one does not expect the item factory (`f`) to return an `Option`,
/// so there's no way to terminate a stream from within `f`
/// - this one expects `f` to return a `Result`, which is threaded downstream
/// and allows the consumer of the stream to terminate it on `Err`
/// The main motivation for this is to be able to use `?` in the item factory
fn unfold_forever<T, F, Fut, Item>(
init: T,
mut f: F,
) -> impl stream::Stream<Item = Result<Item, Error>>
where
T: Copy,
F: FnMut(T) -> Fut,
Fut: future::Future<Output = Result<(Item, T), Error>>,
{
stream::unfold(init, move |iteration_value| {
f(iteration_value).then(move |result| {
match result {
Ok((item, next_it_val)) => future::ready(Some((Ok(item), next_it_val))),
Err(e) => future::ready(Some((Err(e), iteration_value))),
}
})
})
}
pub(crate) fn tail_entries(
ctx: CoreContext,
start_id: u64,
repo_id: RepositoryId,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
scuba_sample: MononokeScubaSampleBuilder,
) -> impl stream::Stream<Item = Result<(BookmarkUpdateLogEntry, QueueSize), Error>> | match entries.last().map(|last_item_ref| last_item_ref.id) {
Some(last_entry_id) => {
debug!(
ctx.logger(),
"tail_entries generating, iteration {}", iteration
);
let entries_with_queue_size: std::iter::Map<_, _> =
add_queue_sizes(entries, queue_size as usize).map(Ok);
Ok((
stream::iter(entries_with_queue_size).boxed(),
(iteration + 1, last_entry_id as u64),
))
}
None => {
debug!(
ctx.logger(),
"tail_entries: no more entries during iteration {}. Sleeping.", iteration
);
log_noop_iteration_to_scuba(scuba_sample, repo_id);
tokio::time::sleep(Duration::new(SLEEP_SECS, 0)).await;
Ok((stream::empty().boxed(), (iteration + 1, current_id)))
}
}
}
})
.try_flatten()
}
| {
unfold_forever((0, start_id), move |(iteration, current_id)| {
cloned!(ctx, bookmark_update_log, scuba_sample);
async move {
let entries: Vec<Result<_, Error>> = bookmark_update_log
.read_next_bookmark_log_entries(
ctx.clone(),
current_id,
SIGNLE_DB_QUERY_ENTRIES_LIMIT,
Freshness::MaybeStale,
)
.collect()
.await;
let entries: Result<Vec<_>, Error> = entries.into_iter().collect();
let entries: Vec<_> = entries.context("While querying bookmarks_update_log")?;
let queue_size =
query_queue_size(ctx.clone(), bookmark_update_log.clone(), current_id).await?;
| identifier_body |
tail.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::{Context, Error};
use bookmarks::{BookmarkUpdateLog, BookmarkUpdateLogEntry, Freshness};
use cloned::cloned;
use context::CoreContext;
use futures::{
future::{self, FutureExt},
stream::{self, StreamExt},
TryStreamExt,
};
use mononoke_types::RepositoryId;
use scuba_ext::MononokeScubaSampleBuilder;
use slog::debug;
use std::sync::Arc;
use std::time::Duration;
use crate::reporting::log_noop_iteration_to_scuba;
const SLEEP_SECS: u64 = 10;
const SIGNLE_DB_QUERY_ENTRIES_LIMIT: u64 = 10;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct QueueSize(pub usize);
/// Adds remaining number of changesets to sync to each entry
/// The idea is to indicate the number of changesets, left to sync
/// *after* a given changeset has been synced, therefore `n-i-1`
/// For example, `[c1, c2, c3]` will turn into `[(c1, 2), (c2, 1), (c3, 0)]`
fn add_queue_sizes<T>(
items: Vec<T>,
initial_queue_size: usize,
) -> impl Iterator<Item = (T, QueueSize)> {
items
.into_iter()
.enumerate()
.map(move |(i, item)| (item, QueueSize(initial_queue_size - i - 1)))
}
/// Run a queue size query, consume the produced `Result` and turn it
/// into an `Option`, suitable for `unfold`
async fn query_queue_size(
ctx: CoreContext,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
current_id: u64,
) -> Result<u64, Error> {
bookmark_update_log
.count_further_bookmark_log_entries(ctx.clone(), current_id, None)
.await
.map(|queue_size| {
debug!(ctx.logger(), "queue size query returned: {}", queue_size);
queue_size
})
}
/// Produce an infinite stream of `Result` from a fallible item factory
/// Two differences with the normal `unfold`:
/// - this one does not expect the item factory (`f`) to return an `Option`,
/// so there's no way to terminate a stream from within `f`
/// - this one expects `f` to return a `Result`, which is threaded downstream
/// and allows the consumer of the stream to terminate it on `Err`
/// The main motivation for this is to be able to use `?` in the item factory
fn | <T, F, Fut, Item>(
init: T,
mut f: F,
) -> impl stream::Stream<Item = Result<Item, Error>>
where
T: Copy,
F: FnMut(T) -> Fut,
Fut: future::Future<Output = Result<(Item, T), Error>>,
{
stream::unfold(init, move |iteration_value| {
f(iteration_value).then(move |result| {
match result {
Ok((item, next_it_val)) => future::ready(Some((Ok(item), next_it_val))),
Err(e) => future::ready(Some((Err(e), iteration_value))),
}
})
})
}
pub(crate) fn tail_entries(
ctx: CoreContext,
start_id: u64,
repo_id: RepositoryId,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
scuba_sample: MononokeScubaSampleBuilder,
) -> impl stream::Stream<Item = Result<(BookmarkUpdateLogEntry, QueueSize), Error>> {
unfold_forever((0, start_id), move |(iteration, current_id)| {
cloned!(ctx, bookmark_update_log, scuba_sample);
async move {
let entries: Vec<Result<_, Error>> = bookmark_update_log
.read_next_bookmark_log_entries(
ctx.clone(),
current_id,
SIGNLE_DB_QUERY_ENTRIES_LIMIT,
Freshness::MaybeStale,
)
.collect()
.await;
let entries: Result<Vec<_>, Error> = entries.into_iter().collect();
let entries: Vec<_> = entries.context("While querying bookmarks_update_log")?;
let queue_size =
query_queue_size(ctx.clone(), bookmark_update_log.clone(), current_id).await?;
match entries.last().map(|last_item_ref| last_item_ref.id) {
Some(last_entry_id) => {
debug!(
ctx.logger(),
"tail_entries generating, iteration {}", iteration
);
let entries_with_queue_size: std::iter::Map<_, _> =
add_queue_sizes(entries, queue_size as usize).map(Ok);
Ok((
stream::iter(entries_with_queue_size).boxed(),
(iteration + 1, last_entry_id as u64),
))
}
None => {
debug!(
ctx.logger(),
"tail_entries: no more entries during iteration {}. Sleeping.", iteration
);
log_noop_iteration_to_scuba(scuba_sample, repo_id);
tokio::time::sleep(Duration::new(SLEEP_SECS, 0)).await;
Ok((stream::empty().boxed(), (iteration + 1, current_id)))
}
}
}
})
.try_flatten()
}
| unfold_forever | identifier_name |
tail.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use anyhow::{Context, Error};
use bookmarks::{BookmarkUpdateLog, BookmarkUpdateLogEntry, Freshness};
use cloned::cloned;
use context::CoreContext;
use futures::{
future::{self, FutureExt},
stream::{self, StreamExt},
TryStreamExt,
};
use mononoke_types::RepositoryId;
use scuba_ext::MononokeScubaSampleBuilder;
use slog::debug;
use std::sync::Arc;
use std::time::Duration;
use crate::reporting::log_noop_iteration_to_scuba;
const SLEEP_SECS: u64 = 10;
const SIGNLE_DB_QUERY_ENTRIES_LIMIT: u64 = 10;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct QueueSize(pub usize);
/// Adds remaining number of changesets to sync to each entry
/// The idea is to indicate the number of changesets, left to sync
/// *after* a given changeset has been synced, therefore `n-i-1`
/// For example, `[c1, c2, c3]` will turn into `[(c1, 2), (c2, 1), (c3, 0)]`
fn add_queue_sizes<T>(
items: Vec<T>,
initial_queue_size: usize,
) -> impl Iterator<Item = (T, QueueSize)> {
items
.into_iter()
.enumerate()
.map(move |(i, item)| (item, QueueSize(initial_queue_size - i - 1)))
}
/// Run a queue size query, consume the produced `Result` and turn it
/// into an `Option`, suitable for `unfold`
async fn query_queue_size(
ctx: CoreContext,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
current_id: u64,
) -> Result<u64, Error> {
bookmark_update_log
.count_further_bookmark_log_entries(ctx.clone(), current_id, None)
.await
.map(|queue_size| {
debug!(ctx.logger(), "queue size query returned: {}", queue_size);
queue_size
})
}
/// Produce an infinite stream of `Result` from a fallible item factory
/// Two differences with the normal `unfold`:
/// - this one does not expect the item factory (`f`) to return an `Option`,
/// so there's no way to terminate a stream from within `f`
/// - this one expects `f` to return a `Result`, which is threaded downstream
/// and allows the consumer of the stream to terminate it on `Err`
/// The main motivation for this is to be able to use `?` in the item factory
fn unfold_forever<T, F, Fut, Item>(
init: T,
mut f: F,
) -> impl stream::Stream<Item = Result<Item, Error>>
where
T: Copy,
F: FnMut(T) -> Fut,
Fut: future::Future<Output = Result<(Item, T), Error>>,
{
stream::unfold(init, move |iteration_value| {
f(iteration_value).then(move |result| {
match result {
Ok((item, next_it_val)) => future::ready(Some((Ok(item), next_it_val))),
Err(e) => future::ready(Some((Err(e), iteration_value))),
}
})
}) |
pub(crate) fn tail_entries(
ctx: CoreContext,
start_id: u64,
repo_id: RepositoryId,
bookmark_update_log: Arc<dyn BookmarkUpdateLog>,
scuba_sample: MononokeScubaSampleBuilder,
) -> impl stream::Stream<Item = Result<(BookmarkUpdateLogEntry, QueueSize), Error>> {
unfold_forever((0, start_id), move |(iteration, current_id)| {
cloned!(ctx, bookmark_update_log, scuba_sample);
async move {
let entries: Vec<Result<_, Error>> = bookmark_update_log
.read_next_bookmark_log_entries(
ctx.clone(),
current_id,
SIGNLE_DB_QUERY_ENTRIES_LIMIT,
Freshness::MaybeStale,
)
.collect()
.await;
let entries: Result<Vec<_>, Error> = entries.into_iter().collect();
let entries: Vec<_> = entries.context("While querying bookmarks_update_log")?;
let queue_size =
query_queue_size(ctx.clone(), bookmark_update_log.clone(), current_id).await?;
match entries.last().map(|last_item_ref| last_item_ref.id) {
Some(last_entry_id) => {
debug!(
ctx.logger(),
"tail_entries generating, iteration {}", iteration
);
let entries_with_queue_size: std::iter::Map<_, _> =
add_queue_sizes(entries, queue_size as usize).map(Ok);
Ok((
stream::iter(entries_with_queue_size).boxed(),
(iteration + 1, last_entry_id as u64),
))
}
None => {
debug!(
ctx.logger(),
"tail_entries: no more entries during iteration {}. Sleeping.", iteration
);
log_noop_iteration_to_scuba(scuba_sample, repo_id);
tokio::time::sleep(Duration::new(SLEEP_SECS, 0)).await;
Ok((stream::empty().boxed(), (iteration + 1, current_id)))
}
}
}
})
.try_flatten()
} | } | random_line_split |
keycodes.rs | // https://stackoverflow.
// com/questions/3202629/where-can-i-find-a-list-of-mac-virtual-key-codes
/* keycodes for keys that are independent of keyboard layout */
#![allow(non_upper_case_globals)]
#![allow(dead_code)]
pub const kVK_Return: u16 = 0x24;
pub const kVK_Tab: u16 = 0x30;
pub const kVK_Space: u16 = 0x31;
pub const kVK_Delete: u16 = 0x33;
pub const kVK_Escape: u16 = 0x35;
pub const kVK_Command: u16 = 0x37;
pub const kVK_Shift: u16 = 0x38;
pub const kVK_CapsLock: u16 = 0x39;
pub const kVK_Option: u16 = 0x3A;
pub const kVK_Control: u16 = 0x3B;
pub const kVK_RightShift: u16 = 0x3C;
pub const kVK_RightOption: u16 = 0x3D;
pub const kVK_RightControl: u16 = 0x3E;
pub const kVK_Function: u16 = 0x3F;
pub const kVK_F17: u16 = 0x40;
pub const kVK_VolumeUp: u16 = 0x48;
pub const kVK_VolumeDown: u16 = 0x49; | pub const kVK_F5: u16 = 0x60;
pub const kVK_F6: u16 = 0x61;
pub const kVK_F7: u16 = 0x62;
pub const kVK_F3: u16 = 0x63;
pub const kVK_F8: u16 = 0x64;
pub const kVK_F9: u16 = 0x65;
pub const kVK_F11: u16 = 0x67;
pub const kVK_F13: u16 = 0x69;
pub const kVK_F16: u16 = 0x6A;
pub const kVK_F14: u16 = 0x6B;
pub const kVK_F10: u16 = 0x6D;
pub const kVK_F12: u16 = 0x6F;
pub const kVK_F15: u16 = 0x71;
pub const kVK_Help: u16 = 0x72;
pub const kVK_Home: u16 = 0x73;
pub const kVK_PageUp: u16 = 0x74;
pub const kVK_ForwardDelete: u16 = 0x75;
pub const kVK_F4: u16 = 0x76;
pub const kVK_End: u16 = 0x77;
pub const kVK_F2: u16 = 0x78;
pub const kVK_PageDown: u16 = 0x79;
pub const kVK_F1: u16 = 0x7A;
pub const kVK_LeftArrow: u16 = 0x7B;
pub const kVK_RightArrow: u16 = 0x7C;
pub const kVK_DownArrow: u16 = 0x7D;
pub const kVK_UpArrow: u16 = 0x7E; | pub const kVK_Mute: u16 = 0x4A;
pub const kVK_F18: u16 = 0x4F;
pub const kVK_F19: u16 = 0x50;
pub const kVK_F20: u16 = 0x5A; | random_line_split |
self_authentication.rs | // Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT
// https://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD
// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,
// modified, or distributed except according to those terms. Please review the Licences for the
// specific language governing permissions and limitations relating to use of the SAFE Network
// Software.
//! Self-authentication example.
// For explanation of lint checks, run `rustc -W help`.
#![deny(unsafe_code)]
#![warn(
missing_docs,
trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
#[macro_use]
extern crate unwrap;
use rand::thread_rng;
use safe_app::CoreError;
use safe_authenticator::{AuthError, Authenticator};
use safe_nd::{ClientFullId, Error as SndError};
fn main() {
unwrap!(safe_core::utils::logging::init(true));
let mut secret_0 = String::new();
let mut secret_1 = String::new();
println!("\nDo you already have an account created (enter Y for yes)?");
let mut user_option = String::new();
let _ = std::io::stdin().read_line(&mut user_option);
user_option = user_option.trim().to_string();
if user_option!= "Y" && user_option!= "y" {
println!("\n\tAccount Creation");
println!("\t================");
println!("\n------------ Enter account-locator ---------------");
let _ = std::io::stdin().read_line(&mut secret_0);
secret_0 = secret_0.trim().to_string();
| // FIXME - pass secret key of the wallet as an argument
let client_id = ClientFullId::new_bls(&mut thread_rng());
// Account Creation
println!("\nTrying to create an account...");
match Authenticator::create_acc(secret_0.as_str(), secret_1.as_str(), client_id, || ()) {
Ok(_) => (),
Err(AuthError::CoreError(CoreError::DataError(SndError::LoginPacketExists))) => {
println!(
"ERROR: This domain is already taken. Please retry with different \
locator and/or password"
);
return;
}
Err(err) => panic!("{:?}", err),
}
println!("Account created successfully!");
println!("\n\n\tAuto Account Login");
println!("\t==================");
// Log into the created account.
{
println!("\nTrying to log into the created account using supplied credentials...");
let _ = unwrap!(Authenticator::login(secret_0, secret_1, || ()));
println!("Account login successful!");
}
}
println!("\n\n\tManual Account Login");
println!("\t====================");
loop {
secret_0 = String::new();
secret_1 = String::new();
println!("\n------------ Enter account-locator ---------------");
let _ = std::io::stdin().read_line(&mut secret_0);
secret_0 = secret_0.trim().to_string();
println!("\n------------ Enter password ---------------");
let _ = std::io::stdin().read_line(&mut secret_1);
secret_1 = secret_1.trim().to_string();
// Log into the created account.
{
println!("\nTrying to log in...");
match Authenticator::login(secret_0, secret_1, || ()) {
Ok(_) => {
println!("Account login successful!");
break;
}
Err(error) => println!("ERROR: Account login failed!\nReason: {:?}\n\n", error),
}
}
}
} | println!("\n------------ Enter password ---------------");
let _ = std::io::stdin().read_line(&mut secret_1);
secret_1 = secret_1.trim().to_string();
| random_line_split |
self_authentication.rs | // Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT
// https://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD
// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,
// modified, or distributed except according to those terms. Please review the Licences for the
// specific language governing permissions and limitations relating to use of the SAFE Network
// Software.
//! Self-authentication example.
// For explanation of lint checks, run `rustc -W help`.
#![deny(unsafe_code)]
#![warn(
missing_docs,
trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
#[macro_use]
extern crate unwrap;
use rand::thread_rng;
use safe_app::CoreError;
use safe_authenticator::{AuthError, Authenticator};
use safe_nd::{ClientFullId, Error as SndError};
fn main() | println!("\n------------ Enter password ---------------");
let _ = std::io::stdin().read_line(&mut secret_1);
secret_1 = secret_1.trim().to_string();
// FIXME - pass secret key of the wallet as an argument
let client_id = ClientFullId::new_bls(&mut thread_rng());
// Account Creation
println!("\nTrying to create an account...");
match Authenticator::create_acc(secret_0.as_str(), secret_1.as_str(), client_id, || ()) {
Ok(_) => (),
Err(AuthError::CoreError(CoreError::DataError(SndError::LoginPacketExists))) => {
println!(
"ERROR: This domain is already taken. Please retry with different \
locator and/or password"
);
return;
}
Err(err) => panic!("{:?}", err),
}
println!("Account created successfully!");
println!("\n\n\tAuto Account Login");
println!("\t==================");
// Log into the created account.
{
println!("\nTrying to log into the created account using supplied credentials...");
let _ = unwrap!(Authenticator::login(secret_0, secret_1, || ()));
println!("Account login successful!");
}
}
println!("\n\n\tManual Account Login");
println!("\t====================");
loop {
secret_0 = String::new();
secret_1 = String::new();
println!("\n------------ Enter account-locator ---------------");
let _ = std::io::stdin().read_line(&mut secret_0);
secret_0 = secret_0.trim().to_string();
println!("\n------------ Enter password ---------------");
let _ = std::io::stdin().read_line(&mut secret_1);
secret_1 = secret_1.trim().to_string();
// Log into the created account.
{
println!("\nTrying to log in...");
match Authenticator::login(secret_0, secret_1, || ()) {
Ok(_) => {
println!("Account login successful!");
break;
}
Err(error) => println!("ERROR: Account login failed!\nReason: {:?}\n\n", error),
}
}
}
}
| {
unwrap!(safe_core::utils::logging::init(true));
let mut secret_0 = String::new();
let mut secret_1 = String::new();
println!("\nDo you already have an account created (enter Y for yes)?");
let mut user_option = String::new();
let _ = std::io::stdin().read_line(&mut user_option);
user_option = user_option.trim().to_string();
if user_option != "Y" && user_option != "y" {
println!("\n\tAccount Creation");
println!("\t================");
println!("\n------------ Enter account-locator ---------------");
let _ = std::io::stdin().read_line(&mut secret_0);
secret_0 = secret_0.trim().to_string();
| identifier_body |
self_authentication.rs | // Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT
// https://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD
// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,
// modified, or distributed except according to those terms. Please review the Licences for the
// specific language governing permissions and limitations relating to use of the SAFE Network
// Software.
//! Self-authentication example.
// For explanation of lint checks, run `rustc -W help`.
#![deny(unsafe_code)]
#![warn(
missing_docs,
trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results
)]
#[macro_use]
extern crate unwrap;
use rand::thread_rng;
use safe_app::CoreError;
use safe_authenticator::{AuthError, Authenticator};
use safe_nd::{ClientFullId, Error as SndError};
fn | () {
unwrap!(safe_core::utils::logging::init(true));
let mut secret_0 = String::new();
let mut secret_1 = String::new();
println!("\nDo you already have an account created (enter Y for yes)?");
let mut user_option = String::new();
let _ = std::io::stdin().read_line(&mut user_option);
user_option = user_option.trim().to_string();
if user_option!= "Y" && user_option!= "y" {
println!("\n\tAccount Creation");
println!("\t================");
println!("\n------------ Enter account-locator ---------------");
let _ = std::io::stdin().read_line(&mut secret_0);
secret_0 = secret_0.trim().to_string();
println!("\n------------ Enter password ---------------");
let _ = std::io::stdin().read_line(&mut secret_1);
secret_1 = secret_1.trim().to_string();
// FIXME - pass secret key of the wallet as an argument
let client_id = ClientFullId::new_bls(&mut thread_rng());
// Account Creation
println!("\nTrying to create an account...");
match Authenticator::create_acc(secret_0.as_str(), secret_1.as_str(), client_id, || ()) {
Ok(_) => (),
Err(AuthError::CoreError(CoreError::DataError(SndError::LoginPacketExists))) => {
println!(
"ERROR: This domain is already taken. Please retry with different \
locator and/or password"
);
return;
}
Err(err) => panic!("{:?}", err),
}
println!("Account created successfully!");
println!("\n\n\tAuto Account Login");
println!("\t==================");
// Log into the created account.
{
println!("\nTrying to log into the created account using supplied credentials...");
let _ = unwrap!(Authenticator::login(secret_0, secret_1, || ()));
println!("Account login successful!");
}
}
println!("\n\n\tManual Account Login");
println!("\t====================");
loop {
secret_0 = String::new();
secret_1 = String::new();
println!("\n------------ Enter account-locator ---------------");
let _ = std::io::stdin().read_line(&mut secret_0);
secret_0 = secret_0.trim().to_string();
println!("\n------------ Enter password ---------------");
let _ = std::io::stdin().read_line(&mut secret_1);
secret_1 = secret_1.trim().to_string();
// Log into the created account.
{
println!("\nTrying to log in...");
match Authenticator::login(secret_0, secret_1, || ()) {
Ok(_) => {
println!("Account login successful!");
break;
}
Err(error) => println!("ERROR: Account login failed!\nReason: {:?}\n\n", error),
}
}
}
}
| main | identifier_name |
database.rs | // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::err::Result;
use rusqlite::{params, Connection, OptionalExtension, Row, Statement, NO_PARAMS};
use std::collections::HashMap;
use std::path::Path;
fn trace(s: &str) {
println!("sql: {}", s)
}
pub(super) fn open_or_create<P: AsRef<Path>>(path: P) -> Result<Connection> {
let mut db = Connection::open(path)?;
if std::env::var("TRACESQL").is_ok() {
db.trace(Some(trace));
}
db.pragma_update(None, "page_size", &4096)?;
db.pragma_update(None, "legacy_file_format", &false)?;
db.pragma_update_and_check(None, "journal_mode", &"wal", |_| Ok(()))?;
initial_db_setup(&mut db)?;
Ok(db)
}
fn initial_db_setup(db: &mut Connection) -> Result<()> {
// tables already exist?
if db
.prepare("select null from sqlite_master where type = 'table' and name ='media'")?
.exists(NO_PARAMS)?
{
return Ok(());
}
db.execute("begin", NO_PARAMS)?;
db.execute_batch(include_str!("schema.sql"))?;
db.execute_batch("commit; vacuum; analyze;")?;
Ok(())
}
#[derive(Debug, PartialEq)]
pub struct MediaEntry {
pub fname: String,
/// If None, file has been deleted
pub sha1: Option<[u8; 20]>,
// Modification time; 0 if deleted
pub mtime: i64,
/// True if changed since last sync
pub sync_required: bool,
}
#[derive(Debug, PartialEq)]
pub struct MediaDatabaseMetadata {
pub folder_mtime: i64,
pub last_sync_usn: i32,
}
/// Helper to prepare a statement, or return a previously prepared one.
macro_rules! cached_sql {
( $label:expr, $db:expr, $sql:expr ) => {{
if $label.is_none() {
$label = Some($db.prepare($sql)?);
}
$label.as_mut().unwrap()
}};
}
pub struct MediaDatabaseContext<'a> {
db: &'a Connection,
get_entry_stmt: Option<Statement<'a>>,
update_entry_stmt: Option<Statement<'a>>,
remove_entry_stmt: Option<Statement<'a>>,
}
impl MediaDatabaseContext<'_> {
pub(super) fn new(db: &Connection) -> MediaDatabaseContext {
MediaDatabaseContext {
db,
get_entry_stmt: None,
update_entry_stmt: None,
remove_entry_stmt: None,
}
}
/// Execute the provided closure in a transaction, rolling back if
/// an error is returned.
pub(super) fn transact<F, R>(&mut self, func: F) -> Result<R>
where
F: FnOnce(&mut MediaDatabaseContext) -> Result<R>,
{
self.begin()?;
let mut res = func(self);
if res.is_ok() {
if let Err(e) = self.commit() {
res = Err(e);
}
}
if res.is_err() {
self.rollback()?;
}
res
}
fn begin(&mut self) -> Result<()> {
self.db.execute_batch("begin immediate").map_err(Into::into)
}
fn commit(&mut self) -> Result<()> {
self.db.execute_batch("commit").map_err(Into::into)
}
fn rollback(&mut self) -> Result<()> {
self.db.execute_batch("rollback").map_err(Into::into)
}
pub(super) fn get_entry(&mut self, fname: &str) -> Result<Option<MediaEntry>> {
let stmt = cached_sql!(
self.get_entry_stmt,
self.db,
"
select fname, csum, mtime, dirty from media where fname=?"
);
stmt.query_row(params![fname], row_to_entry)
.optional()
.map_err(Into::into)
}
pub(super) fn | (&mut self, entry: &MediaEntry) -> Result<()> {
let stmt = cached_sql!(
self.update_entry_stmt,
self.db,
"
insert or replace into media (fname, csum, mtime, dirty)
values (?,?,?,?)"
);
let sha1_str = entry.sha1.map(hex::encode);
stmt.execute(params![
entry.fname,
sha1_str,
entry.mtime,
entry.sync_required
])?;
Ok(())
}
pub(super) fn remove_entry(&mut self, fname: &str) -> Result<()> {
let stmt = cached_sql!(
self.remove_entry_stmt,
self.db,
"
delete from media where fname=?"
);
stmt.execute(params![fname])?;
Ok(())
}
pub(super) fn get_meta(&mut self) -> Result<MediaDatabaseMetadata> {
let mut stmt = self.db.prepare("select dirMod, lastUsn from meta")?;
stmt.query_row(NO_PARAMS, |row| {
Ok(MediaDatabaseMetadata {
folder_mtime: row.get(0)?,
last_sync_usn: row.get(1)?,
})
})
.map_err(Into::into)
}
pub(super) fn set_meta(&mut self, meta: &MediaDatabaseMetadata) -> Result<()> {
let mut stmt = self.db.prepare("update meta set dirMod =?, lastUsn =?")?;
stmt.execute(params![meta.folder_mtime, meta.last_sync_usn])?;
Ok(())
}
pub(super) fn count(&mut self) -> Result<u32> {
self.db
.query_row(
"select count(*) from media where csum is not null",
NO_PARAMS,
|row| Ok(row.get(0)?),
)
.map_err(Into::into)
}
pub(super) fn get_pending_uploads(&mut self, max_entries: u32) -> Result<Vec<MediaEntry>> {
let mut stmt = self
.db
.prepare("select fname from media where dirty=1 limit?")?;
let results: Result<Vec<_>> = stmt
.query_and_then(params![max_entries], |row| {
let fname = row.get_raw(0).as_str()?;
Ok(self.get_entry(fname)?.unwrap())
})?
.collect();
results
}
pub(super) fn all_mtimes(&mut self) -> Result<HashMap<String, i64>> {
let mut stmt = self
.db
.prepare("select fname, mtime from media where csum is not null")?;
let map: std::result::Result<HashMap<String, i64>, rusqlite::Error> = stmt
.query_map(NO_PARAMS, |row| Ok((row.get(0)?, row.get(1)?)))?
.collect();
Ok(map?)
}
pub(super) fn force_resync(&mut self) -> Result<()> {
self.db
.execute_batch("delete from media; update meta set lastUsn = 0, dirMod = 0")
.map_err(Into::into)
}
}
fn row_to_entry(row: &Row) -> rusqlite::Result<MediaEntry> {
// map the string checksum into bytes
let sha1_str: Option<String> = row.get(1)?;
let sha1_array = if let Some(s) = sha1_str {
let mut arr = [0; 20];
match hex::decode_to_slice(s, arr.as_mut()) {
Ok(_) => Some(arr),
_ => None,
}
} else {
None
};
// and return the entry
Ok(MediaEntry {
fname: row.get(0)?,
sha1: sha1_array,
mtime: row.get(2)?,
sync_required: row.get(3)?,
})
}
#[cfg(test)]
mod test {
use crate::err::Result;
use crate::media::database::MediaEntry;
use crate::media::files::sha1_of_data;
use crate::media::MediaManager;
use tempfile::NamedTempFile;
#[test]
fn database() -> Result<()> {
let db_file = NamedTempFile::new()?;
let db_file_path = db_file.path().to_str().unwrap();
let mut mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
ctx.transact(|ctx| {
// no entry exists yet
assert_eq!(ctx.get_entry("test.mp3")?, None);
// add one
let mut entry = MediaEntry {
fname: "test.mp3".into(),
sha1: None,
mtime: 0,
sync_required: false,
};
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
// update it
entry.sha1 = Some(sha1_of_data(b"hello"));
entry.mtime = 123;
entry.sync_required = true;
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
assert_eq!(ctx.get_pending_uploads(25)?, vec![entry]);
let mut meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 0);
assert_eq!(meta.last_sync_usn, 0);
meta.folder_mtime = 123;
meta.last_sync_usn = 321;
ctx.set_meta(&meta)?;
meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
assert_eq!(meta.last_sync_usn, 321);
Ok(())
})?;
// reopen database and ensure data was committed
drop(ctx);
drop(mgr);
mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
let meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
Ok(())
}
}
| set_entry | identifier_name |
database.rs | // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::err::Result;
use rusqlite::{params, Connection, OptionalExtension, Row, Statement, NO_PARAMS};
use std::collections::HashMap;
use std::path::Path;
fn trace(s: &str) {
println!("sql: {}", s)
}
pub(super) fn open_or_create<P: AsRef<Path>>(path: P) -> Result<Connection> {
let mut db = Connection::open(path)?;
if std::env::var("TRACESQL").is_ok() {
db.trace(Some(trace));
}
db.pragma_update(None, "page_size", &4096)?;
db.pragma_update(None, "legacy_file_format", &false)?;
db.pragma_update_and_check(None, "journal_mode", &"wal", |_| Ok(()))?;
initial_db_setup(&mut db)?;
Ok(db)
}
fn initial_db_setup(db: &mut Connection) -> Result<()> {
// tables already exist?
if db
.prepare("select null from sqlite_master where type = 'table' and name ='media'")?
.exists(NO_PARAMS)?
{
return Ok(());
}
db.execute("begin", NO_PARAMS)?;
db.execute_batch(include_str!("schema.sql"))?;
db.execute_batch("commit; vacuum; analyze;")?;
Ok(())
}
#[derive(Debug, PartialEq)]
pub struct MediaEntry {
pub fname: String,
/// If None, file has been deleted
pub sha1: Option<[u8; 20]>,
// Modification time; 0 if deleted
pub mtime: i64,
/// True if changed since last sync
pub sync_required: bool,
}
#[derive(Debug, PartialEq)]
pub struct MediaDatabaseMetadata {
pub folder_mtime: i64,
pub last_sync_usn: i32,
}
/// Helper to prepare a statement, or return a previously prepared one.
macro_rules! cached_sql {
( $label:expr, $db:expr, $sql:expr ) => {{
if $label.is_none() {
$label = Some($db.prepare($sql)?);
}
$label.as_mut().unwrap()
}};
}
pub struct MediaDatabaseContext<'a> {
db: &'a Connection,
get_entry_stmt: Option<Statement<'a>>,
update_entry_stmt: Option<Statement<'a>>,
remove_entry_stmt: Option<Statement<'a>>,
}
impl MediaDatabaseContext<'_> {
pub(super) fn new(db: &Connection) -> MediaDatabaseContext {
MediaDatabaseContext {
db,
get_entry_stmt: None,
update_entry_stmt: None,
remove_entry_stmt: None,
}
}
/// Execute the provided closure in a transaction, rolling back if
/// an error is returned.
pub(super) fn transact<F, R>(&mut self, func: F) -> Result<R>
where
F: FnOnce(&mut MediaDatabaseContext) -> Result<R>,
{
self.begin()?;
let mut res = func(self);
if res.is_ok() {
if let Err(e) = self.commit() |
}
if res.is_err() {
self.rollback()?;
}
res
}
fn begin(&mut self) -> Result<()> {
self.db.execute_batch("begin immediate").map_err(Into::into)
}
fn commit(&mut self) -> Result<()> {
self.db.execute_batch("commit").map_err(Into::into)
}
fn rollback(&mut self) -> Result<()> {
self.db.execute_batch("rollback").map_err(Into::into)
}
pub(super) fn get_entry(&mut self, fname: &str) -> Result<Option<MediaEntry>> {
let stmt = cached_sql!(
self.get_entry_stmt,
self.db,
"
select fname, csum, mtime, dirty from media where fname=?"
);
stmt.query_row(params![fname], row_to_entry)
.optional()
.map_err(Into::into)
}
pub(super) fn set_entry(&mut self, entry: &MediaEntry) -> Result<()> {
let stmt = cached_sql!(
self.update_entry_stmt,
self.db,
"
insert or replace into media (fname, csum, mtime, dirty)
values (?,?,?,?)"
);
let sha1_str = entry.sha1.map(hex::encode);
stmt.execute(params![
entry.fname,
sha1_str,
entry.mtime,
entry.sync_required
])?;
Ok(())
}
pub(super) fn remove_entry(&mut self, fname: &str) -> Result<()> {
let stmt = cached_sql!(
self.remove_entry_stmt,
self.db,
"
delete from media where fname=?"
);
stmt.execute(params![fname])?;
Ok(())
}
pub(super) fn get_meta(&mut self) -> Result<MediaDatabaseMetadata> {
let mut stmt = self.db.prepare("select dirMod, lastUsn from meta")?;
stmt.query_row(NO_PARAMS, |row| {
Ok(MediaDatabaseMetadata {
folder_mtime: row.get(0)?,
last_sync_usn: row.get(1)?,
})
})
.map_err(Into::into)
}
pub(super) fn set_meta(&mut self, meta: &MediaDatabaseMetadata) -> Result<()> {
let mut stmt = self.db.prepare("update meta set dirMod =?, lastUsn =?")?;
stmt.execute(params![meta.folder_mtime, meta.last_sync_usn])?;
Ok(())
}
pub(super) fn count(&mut self) -> Result<u32> {
self.db
.query_row(
"select count(*) from media where csum is not null",
NO_PARAMS,
|row| Ok(row.get(0)?),
)
.map_err(Into::into)
}
pub(super) fn get_pending_uploads(&mut self, max_entries: u32) -> Result<Vec<MediaEntry>> {
let mut stmt = self
.db
.prepare("select fname from media where dirty=1 limit?")?;
let results: Result<Vec<_>> = stmt
.query_and_then(params![max_entries], |row| {
let fname = row.get_raw(0).as_str()?;
Ok(self.get_entry(fname)?.unwrap())
})?
.collect();
results
}
pub(super) fn all_mtimes(&mut self) -> Result<HashMap<String, i64>> {
let mut stmt = self
.db
.prepare("select fname, mtime from media where csum is not null")?;
let map: std::result::Result<HashMap<String, i64>, rusqlite::Error> = stmt
.query_map(NO_PARAMS, |row| Ok((row.get(0)?, row.get(1)?)))?
.collect();
Ok(map?)
}
pub(super) fn force_resync(&mut self) -> Result<()> {
self.db
.execute_batch("delete from media; update meta set lastUsn = 0, dirMod = 0")
.map_err(Into::into)
}
}
fn row_to_entry(row: &Row) -> rusqlite::Result<MediaEntry> {
// map the string checksum into bytes
let sha1_str: Option<String> = row.get(1)?;
let sha1_array = if let Some(s) = sha1_str {
let mut arr = [0; 20];
match hex::decode_to_slice(s, arr.as_mut()) {
Ok(_) => Some(arr),
_ => None,
}
} else {
None
};
// and return the entry
Ok(MediaEntry {
fname: row.get(0)?,
sha1: sha1_array,
mtime: row.get(2)?,
sync_required: row.get(3)?,
})
}
#[cfg(test)]
mod test {
use crate::err::Result;
use crate::media::database::MediaEntry;
use crate::media::files::sha1_of_data;
use crate::media::MediaManager;
use tempfile::NamedTempFile;
#[test]
fn database() -> Result<()> {
let db_file = NamedTempFile::new()?;
let db_file_path = db_file.path().to_str().unwrap();
let mut mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
ctx.transact(|ctx| {
// no entry exists yet
assert_eq!(ctx.get_entry("test.mp3")?, None);
// add one
let mut entry = MediaEntry {
fname: "test.mp3".into(),
sha1: None,
mtime: 0,
sync_required: false,
};
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
// update it
entry.sha1 = Some(sha1_of_data(b"hello"));
entry.mtime = 123;
entry.sync_required = true;
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
assert_eq!(ctx.get_pending_uploads(25)?, vec![entry]);
let mut meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 0);
assert_eq!(meta.last_sync_usn, 0);
meta.folder_mtime = 123;
meta.last_sync_usn = 321;
ctx.set_meta(&meta)?;
meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
assert_eq!(meta.last_sync_usn, 321);
Ok(())
})?;
// reopen database and ensure data was committed
drop(ctx);
drop(mgr);
mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
let meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
Ok(())
}
}
| {
res = Err(e);
} | conditional_block |
database.rs | // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::err::Result;
use rusqlite::{params, Connection, OptionalExtension, Row, Statement, NO_PARAMS};
use std::collections::HashMap;
use std::path::Path;
fn trace(s: &str) {
println!("sql: {}", s)
}
pub(super) fn open_or_create<P: AsRef<Path>>(path: P) -> Result<Connection> {
let mut db = Connection::open(path)?; |
db.pragma_update(None, "page_size", &4096)?;
db.pragma_update(None, "legacy_file_format", &false)?;
db.pragma_update_and_check(None, "journal_mode", &"wal", |_| Ok(()))?;
initial_db_setup(&mut db)?;
Ok(db)
}
fn initial_db_setup(db: &mut Connection) -> Result<()> {
// tables already exist?
if db
.prepare("select null from sqlite_master where type = 'table' and name ='media'")?
.exists(NO_PARAMS)?
{
return Ok(());
}
db.execute("begin", NO_PARAMS)?;
db.execute_batch(include_str!("schema.sql"))?;
db.execute_batch("commit; vacuum; analyze;")?;
Ok(())
}
#[derive(Debug, PartialEq)]
pub struct MediaEntry {
pub fname: String,
/// If None, file has been deleted
pub sha1: Option<[u8; 20]>,
// Modification time; 0 if deleted
pub mtime: i64,
/// True if changed since last sync
pub sync_required: bool,
}
#[derive(Debug, PartialEq)]
pub struct MediaDatabaseMetadata {
pub folder_mtime: i64,
pub last_sync_usn: i32,
}
/// Helper to prepare a statement, or return a previously prepared one.
macro_rules! cached_sql {
( $label:expr, $db:expr, $sql:expr ) => {{
if $label.is_none() {
$label = Some($db.prepare($sql)?);
}
$label.as_mut().unwrap()
}};
}
pub struct MediaDatabaseContext<'a> {
db: &'a Connection,
get_entry_stmt: Option<Statement<'a>>,
update_entry_stmt: Option<Statement<'a>>,
remove_entry_stmt: Option<Statement<'a>>,
}
impl MediaDatabaseContext<'_> {
pub(super) fn new(db: &Connection) -> MediaDatabaseContext {
MediaDatabaseContext {
db,
get_entry_stmt: None,
update_entry_stmt: None,
remove_entry_stmt: None,
}
}
/// Execute the provided closure in a transaction, rolling back if
/// an error is returned.
pub(super) fn transact<F, R>(&mut self, func: F) -> Result<R>
where
F: FnOnce(&mut MediaDatabaseContext) -> Result<R>,
{
self.begin()?;
let mut res = func(self);
if res.is_ok() {
if let Err(e) = self.commit() {
res = Err(e);
}
}
if res.is_err() {
self.rollback()?;
}
res
}
fn begin(&mut self) -> Result<()> {
self.db.execute_batch("begin immediate").map_err(Into::into)
}
fn commit(&mut self) -> Result<()> {
self.db.execute_batch("commit").map_err(Into::into)
}
fn rollback(&mut self) -> Result<()> {
self.db.execute_batch("rollback").map_err(Into::into)
}
pub(super) fn get_entry(&mut self, fname: &str) -> Result<Option<MediaEntry>> {
let stmt = cached_sql!(
self.get_entry_stmt,
self.db,
"
select fname, csum, mtime, dirty from media where fname=?"
);
stmt.query_row(params![fname], row_to_entry)
.optional()
.map_err(Into::into)
}
pub(super) fn set_entry(&mut self, entry: &MediaEntry) -> Result<()> {
let stmt = cached_sql!(
self.update_entry_stmt,
self.db,
"
insert or replace into media (fname, csum, mtime, dirty)
values (?,?,?,?)"
);
let sha1_str = entry.sha1.map(hex::encode);
stmt.execute(params![
entry.fname,
sha1_str,
entry.mtime,
entry.sync_required
])?;
Ok(())
}
pub(super) fn remove_entry(&mut self, fname: &str) -> Result<()> {
let stmt = cached_sql!(
self.remove_entry_stmt,
self.db,
"
delete from media where fname=?"
);
stmt.execute(params![fname])?;
Ok(())
}
pub(super) fn get_meta(&mut self) -> Result<MediaDatabaseMetadata> {
let mut stmt = self.db.prepare("select dirMod, lastUsn from meta")?;
stmt.query_row(NO_PARAMS, |row| {
Ok(MediaDatabaseMetadata {
folder_mtime: row.get(0)?,
last_sync_usn: row.get(1)?,
})
})
.map_err(Into::into)
}
pub(super) fn set_meta(&mut self, meta: &MediaDatabaseMetadata) -> Result<()> {
let mut stmt = self.db.prepare("update meta set dirMod =?, lastUsn =?")?;
stmt.execute(params![meta.folder_mtime, meta.last_sync_usn])?;
Ok(())
}
pub(super) fn count(&mut self) -> Result<u32> {
self.db
.query_row(
"select count(*) from media where csum is not null",
NO_PARAMS,
|row| Ok(row.get(0)?),
)
.map_err(Into::into)
}
pub(super) fn get_pending_uploads(&mut self, max_entries: u32) -> Result<Vec<MediaEntry>> {
let mut stmt = self
.db
.prepare("select fname from media where dirty=1 limit?")?;
let results: Result<Vec<_>> = stmt
.query_and_then(params![max_entries], |row| {
let fname = row.get_raw(0).as_str()?;
Ok(self.get_entry(fname)?.unwrap())
})?
.collect();
results
}
pub(super) fn all_mtimes(&mut self) -> Result<HashMap<String, i64>> {
let mut stmt = self
.db
.prepare("select fname, mtime from media where csum is not null")?;
let map: std::result::Result<HashMap<String, i64>, rusqlite::Error> = stmt
.query_map(NO_PARAMS, |row| Ok((row.get(0)?, row.get(1)?)))?
.collect();
Ok(map?)
}
pub(super) fn force_resync(&mut self) -> Result<()> {
self.db
.execute_batch("delete from media; update meta set lastUsn = 0, dirMod = 0")
.map_err(Into::into)
}
}
fn row_to_entry(row: &Row) -> rusqlite::Result<MediaEntry> {
// map the string checksum into bytes
let sha1_str: Option<String> = row.get(1)?;
let sha1_array = if let Some(s) = sha1_str {
let mut arr = [0; 20];
match hex::decode_to_slice(s, arr.as_mut()) {
Ok(_) => Some(arr),
_ => None,
}
} else {
None
};
// and return the entry
Ok(MediaEntry {
fname: row.get(0)?,
sha1: sha1_array,
mtime: row.get(2)?,
sync_required: row.get(3)?,
})
}
#[cfg(test)]
mod test {
use crate::err::Result;
use crate::media::database::MediaEntry;
use crate::media::files::sha1_of_data;
use crate::media::MediaManager;
use tempfile::NamedTempFile;
#[test]
fn database() -> Result<()> {
let db_file = NamedTempFile::new()?;
let db_file_path = db_file.path().to_str().unwrap();
let mut mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
ctx.transact(|ctx| {
// no entry exists yet
assert_eq!(ctx.get_entry("test.mp3")?, None);
// add one
let mut entry = MediaEntry {
fname: "test.mp3".into(),
sha1: None,
mtime: 0,
sync_required: false,
};
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
// update it
entry.sha1 = Some(sha1_of_data(b"hello"));
entry.mtime = 123;
entry.sync_required = true;
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
assert_eq!(ctx.get_pending_uploads(25)?, vec![entry]);
let mut meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 0);
assert_eq!(meta.last_sync_usn, 0);
meta.folder_mtime = 123;
meta.last_sync_usn = 321;
ctx.set_meta(&meta)?;
meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
assert_eq!(meta.last_sync_usn, 321);
Ok(())
})?;
// reopen database and ensure data was committed
drop(ctx);
drop(mgr);
mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
let meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
Ok(())
}
} |
if std::env::var("TRACESQL").is_ok() {
db.trace(Some(trace));
} | random_line_split |
database.rs | // Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use crate::err::Result;
use rusqlite::{params, Connection, OptionalExtension, Row, Statement, NO_PARAMS};
use std::collections::HashMap;
use std::path::Path;
fn trace(s: &str) {
println!("sql: {}", s)
}
pub(super) fn open_or_create<P: AsRef<Path>>(path: P) -> Result<Connection> {
let mut db = Connection::open(path)?;
if std::env::var("TRACESQL").is_ok() {
db.trace(Some(trace));
}
db.pragma_update(None, "page_size", &4096)?;
db.pragma_update(None, "legacy_file_format", &false)?;
db.pragma_update_and_check(None, "journal_mode", &"wal", |_| Ok(()))?;
initial_db_setup(&mut db)?;
Ok(db)
}
fn initial_db_setup(db: &mut Connection) -> Result<()> {
// tables already exist?
if db
.prepare("select null from sqlite_master where type = 'table' and name ='media'")?
.exists(NO_PARAMS)?
{
return Ok(());
}
db.execute("begin", NO_PARAMS)?;
db.execute_batch(include_str!("schema.sql"))?;
db.execute_batch("commit; vacuum; analyze;")?;
Ok(())
}
#[derive(Debug, PartialEq)]
pub struct MediaEntry {
pub fname: String,
/// If None, file has been deleted
pub sha1: Option<[u8; 20]>,
// Modification time; 0 if deleted
pub mtime: i64,
/// True if changed since last sync
pub sync_required: bool,
}
#[derive(Debug, PartialEq)]
pub struct MediaDatabaseMetadata {
pub folder_mtime: i64,
pub last_sync_usn: i32,
}
/// Helper to prepare a statement, or return a previously prepared one.
macro_rules! cached_sql {
( $label:expr, $db:expr, $sql:expr ) => {{
if $label.is_none() {
$label = Some($db.prepare($sql)?);
}
$label.as_mut().unwrap()
}};
}
pub struct MediaDatabaseContext<'a> {
db: &'a Connection,
get_entry_stmt: Option<Statement<'a>>,
update_entry_stmt: Option<Statement<'a>>,
remove_entry_stmt: Option<Statement<'a>>,
}
impl MediaDatabaseContext<'_> {
pub(super) fn new(db: &Connection) -> MediaDatabaseContext {
MediaDatabaseContext {
db,
get_entry_stmt: None,
update_entry_stmt: None,
remove_entry_stmt: None,
}
}
/// Execute the provided closure in a transaction, rolling back if
/// an error is returned.
pub(super) fn transact<F, R>(&mut self, func: F) -> Result<R>
where
F: FnOnce(&mut MediaDatabaseContext) -> Result<R>,
{
self.begin()?;
let mut res = func(self);
if res.is_ok() {
if let Err(e) = self.commit() {
res = Err(e);
}
}
if res.is_err() {
self.rollback()?;
}
res
}
fn begin(&mut self) -> Result<()> {
self.db.execute_batch("begin immediate").map_err(Into::into)
}
fn commit(&mut self) -> Result<()> {
self.db.execute_batch("commit").map_err(Into::into)
}
fn rollback(&mut self) -> Result<()> {
self.db.execute_batch("rollback").map_err(Into::into)
}
pub(super) fn get_entry(&mut self, fname: &str) -> Result<Option<MediaEntry>> {
let stmt = cached_sql!(
self.get_entry_stmt,
self.db,
"
select fname, csum, mtime, dirty from media where fname=?"
);
stmt.query_row(params![fname], row_to_entry)
.optional()
.map_err(Into::into)
}
pub(super) fn set_entry(&mut self, entry: &MediaEntry) -> Result<()> {
let stmt = cached_sql!(
self.update_entry_stmt,
self.db,
"
insert or replace into media (fname, csum, mtime, dirty)
values (?,?,?,?)"
);
let sha1_str = entry.sha1.map(hex::encode);
stmt.execute(params![
entry.fname,
sha1_str,
entry.mtime,
entry.sync_required
])?;
Ok(())
}
pub(super) fn remove_entry(&mut self, fname: &str) -> Result<()> {
let stmt = cached_sql!(
self.remove_entry_stmt,
self.db,
"
delete from media where fname=?"
);
stmt.execute(params![fname])?;
Ok(())
}
pub(super) fn get_meta(&mut self) -> Result<MediaDatabaseMetadata> {
let mut stmt = self.db.prepare("select dirMod, lastUsn from meta")?;
stmt.query_row(NO_PARAMS, |row| {
Ok(MediaDatabaseMetadata {
folder_mtime: row.get(0)?,
last_sync_usn: row.get(1)?,
})
})
.map_err(Into::into)
}
pub(super) fn set_meta(&mut self, meta: &MediaDatabaseMetadata) -> Result<()> {
let mut stmt = self.db.prepare("update meta set dirMod =?, lastUsn =?")?;
stmt.execute(params![meta.folder_mtime, meta.last_sync_usn])?;
Ok(())
}
pub(super) fn count(&mut self) -> Result<u32> {
self.db
.query_row(
"select count(*) from media where csum is not null",
NO_PARAMS,
|row| Ok(row.get(0)?),
)
.map_err(Into::into)
}
pub(super) fn get_pending_uploads(&mut self, max_entries: u32) -> Result<Vec<MediaEntry>> {
let mut stmt = self
.db
.prepare("select fname from media where dirty=1 limit?")?;
let results: Result<Vec<_>> = stmt
.query_and_then(params![max_entries], |row| {
let fname = row.get_raw(0).as_str()?;
Ok(self.get_entry(fname)?.unwrap())
})?
.collect();
results
}
pub(super) fn all_mtimes(&mut self) -> Result<HashMap<String, i64>> {
let mut stmt = self
.db
.prepare("select fname, mtime from media where csum is not null")?;
let map: std::result::Result<HashMap<String, i64>, rusqlite::Error> = stmt
.query_map(NO_PARAMS, |row| Ok((row.get(0)?, row.get(1)?)))?
.collect();
Ok(map?)
}
pub(super) fn force_resync(&mut self) -> Result<()> |
}
fn row_to_entry(row: &Row) -> rusqlite::Result<MediaEntry> {
// map the string checksum into bytes
let sha1_str: Option<String> = row.get(1)?;
let sha1_array = if let Some(s) = sha1_str {
let mut arr = [0; 20];
match hex::decode_to_slice(s, arr.as_mut()) {
Ok(_) => Some(arr),
_ => None,
}
} else {
None
};
// and return the entry
Ok(MediaEntry {
fname: row.get(0)?,
sha1: sha1_array,
mtime: row.get(2)?,
sync_required: row.get(3)?,
})
}
#[cfg(test)]
mod test {
use crate::err::Result;
use crate::media::database::MediaEntry;
use crate::media::files::sha1_of_data;
use crate::media::MediaManager;
use tempfile::NamedTempFile;
#[test]
fn database() -> Result<()> {
let db_file = NamedTempFile::new()?;
let db_file_path = db_file.path().to_str().unwrap();
let mut mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
ctx.transact(|ctx| {
// no entry exists yet
assert_eq!(ctx.get_entry("test.mp3")?, None);
// add one
let mut entry = MediaEntry {
fname: "test.mp3".into(),
sha1: None,
mtime: 0,
sync_required: false,
};
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
// update it
entry.sha1 = Some(sha1_of_data(b"hello"));
entry.mtime = 123;
entry.sync_required = true;
ctx.set_entry(&entry)?;
assert_eq!(ctx.get_entry("test.mp3")?.unwrap(), entry);
assert_eq!(ctx.get_pending_uploads(25)?, vec![entry]);
let mut meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 0);
assert_eq!(meta.last_sync_usn, 0);
meta.folder_mtime = 123;
meta.last_sync_usn = 321;
ctx.set_meta(&meta)?;
meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
assert_eq!(meta.last_sync_usn, 321);
Ok(())
})?;
// reopen database and ensure data was committed
drop(ctx);
drop(mgr);
mgr = MediaManager::new("/dummy", db_file_path)?;
let mut ctx = mgr.dbctx();
let meta = ctx.get_meta()?;
assert_eq!(meta.folder_mtime, 123);
Ok(())
}
}
| {
self.db
.execute_batch("delete from media; update meta set lastUsn = 0, dirMod = 0")
.map_err(Into::into)
} | identifier_body |
lib.rs | // Copyright 2018 Developers of the Rand project.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The PCG random number generators.
//!
//! This is a native Rust implementation of a small selection of PCG generators.
//! The primary goal of this crate is simple, minimal, well-tested code; in
//! other words it is explicitly not a goal to re-implement all of PCG.
//!
//! This crate provides:
//!
//! - `Pcg32` aka `Lcg64Xsh32`, officially known as `pcg32`, a general
//! purpose RNG. This is a good choice on both 32-bit and 64-bit CPUs
//! (for 32-bit output).
//! - `Pcg64` aka `Lcg128Xsl64`, officially known as `pcg64`, a general
//! purpose RNG. This is a good choice on 64-bit CPUs.
//! - `Pcg64Mcg` aka `Mcg128Xsl64`, officially known as `pcg64_fast`,
//! a general purpose RNG using 128-bit multiplications. This has poor
//! performance on 32-bit CPUs but is a good choice on 64-bit CPUs for
//! both 32-bit and 64-bit output.
//!
//! Both of these use 16 bytes of state and 128-bit seeds, and are considered
//! value-stable (i.e. any change affecting the output given a fixed seed would
//! be considered a breaking change to the crate).
#![doc(
html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk.png", | html_root_url = "https://rust-random.github.io/rand/"
)]
#![deny(missing_docs)]
#![deny(missing_debug_implementations)]
#![no_std]
#[cfg(not(target_os = "emscripten"))] mod pcg128;
mod pcg64;
#[cfg(not(target_os = "emscripten"))]
pub use self::pcg128::{Lcg128Xsl64, Mcg128Xsl64, Pcg64, Pcg64Mcg};
pub use self::pcg64::{Lcg64Xsh32, Pcg32}; | html_favicon_url = "https://www.rust-lang.org/favicon.ico", | random_line_split |
core_sphere.rs | extern crate rustcmb;
use rustcmb::core::sphere;
const SIZE: usize = 4;
fn main() {
let default_field = sphere::Field::default();
let field = sphere::Field::new(SIZE);
let mut mut_field = sphere::Field::new(SIZE);
println!("default_field: {:?}", default_field);
println!("field: {:?}", field);
println!("mut_field: {:?}", mut_field);
println!("field size: {:?}", field.size());
println!("field at (2, 2): {:?}", field.at(2, 2));
*mut_field.at_mut(2, 2) = 1.;
println!("mut_field: {:?}", mut_field);
for i in mut_field.i_begin()..mut_field.i_end() {
for j in mut_field.j_begin()..mut_field.j_end() {
*mut_field.at_mut(i, j) = (i + j) as f64;
}
}
println!("mut_field after simple indexing: {:?}", mut_field); |
let mut foo_coef = sphere::Coef::new(SIZE);
for l in foo_coef.a_l_begin()..foo_coef.a_l_end() {
for m in foo_coef.a_m_begin()..foo_coef.a_m_end(l) {
*foo_coef.a_at_mut(l, m) = (l + m + 5) as f64;
}
}
for i in foo_coef.b_l_begin()..foo_coef.b_l_end() {
for j in foo_coef.b_m_begin()..foo_coef.b_m_end(i) {
*foo_coef.b_at_mut(i, j) = (i + j + 10) as f64;
}
}
println!("foo_coef after simple indexing: {:?}", foo_coef);
} | random_line_split |
|
core_sphere.rs | extern crate rustcmb;
use rustcmb::core::sphere;
const SIZE: usize = 4;
fn | () {
let default_field = sphere::Field::default();
let field = sphere::Field::new(SIZE);
let mut mut_field = sphere::Field::new(SIZE);
println!("default_field: {:?}", default_field);
println!("field: {:?}", field);
println!("mut_field: {:?}", mut_field);
println!("field size: {:?}", field.size());
println!("field at (2, 2): {:?}", field.at(2, 2));
*mut_field.at_mut(2, 2) = 1.;
println!("mut_field: {:?}", mut_field);
for i in mut_field.i_begin()..mut_field.i_end() {
for j in mut_field.j_begin()..mut_field.j_end() {
*mut_field.at_mut(i, j) = (i + j) as f64;
}
}
println!("mut_field after simple indexing: {:?}", mut_field);
let mut foo_coef = sphere::Coef::new(SIZE);
for l in foo_coef.a_l_begin()..foo_coef.a_l_end() {
for m in foo_coef.a_m_begin()..foo_coef.a_m_end(l) {
*foo_coef.a_at_mut(l, m) = (l + m + 5) as f64;
}
}
for i in foo_coef.b_l_begin()..foo_coef.b_l_end() {
for j in foo_coef.b_m_begin()..foo_coef.b_m_end(i) {
*foo_coef.b_at_mut(i, j) = (i + j + 10) as f64;
}
}
println!("foo_coef after simple indexing: {:?}", foo_coef);
}
| main | identifier_name |
core_sphere.rs | extern crate rustcmb;
use rustcmb::core::sphere;
const SIZE: usize = 4;
fn main() | }
println!("mut_field after simple indexing: {:?}", mut_field);
let mut foo_coef = sphere::Coef::new(SIZE);
for l in foo_coef.a_l_begin()..foo_coef.a_l_end() {
for m in foo_coef.a_m_begin()..foo_coef.a_m_end(l) {
*foo_coef.a_at_mut(l, m) = (l + m + 5) as f64;
}
}
for i in foo_coef.b_l_begin()..foo_coef.b_l_end() {
for j in foo_coef.b_m_begin()..foo_coef.b_m_end(i) {
*foo_coef.b_at_mut(i, j) = (i + j + 10) as f64;
}
}
println!("foo_coef after simple indexing: {:?}", foo_coef);
}
| {
let default_field = sphere::Field::default();
let field = sphere::Field::new(SIZE);
let mut mut_field = sphere::Field::new(SIZE);
println!("default_field: {:?}", default_field);
println!("field: {:?}", field);
println!("mut_field: {:?}", mut_field);
println!("field size: {:?}", field.size());
println!("field at (2, 2): {:?}", field.at(2, 2));
*mut_field.at_mut(2, 2) = 1.;
println!("mut_field: {:?}", mut_field);
for i in mut_field.i_begin()..mut_field.i_end() {
for j in mut_field.j_begin()..mut_field.j_end() {
*mut_field.at_mut(i, j) = (i + j) as f64;
} | identifier_body |
compiled.rs | if portable.
pub static boolfnames: &'static[&'static str] = &["auto_left_margin", "auto_right_margin",
"no_esc_ctlc", "ceol_standout_glitch", "eat_newline_glitch", "erase_overstrike", "generic_type",
"hard_copy", "has_meta_key", "has_status_line", "insert_null_glitch", "memory_above",
"memory_below", "move_insert_mode", "move_standout_mode", "over_strike", "status_line_esc_ok",
"dest_tabs_magic_smso", "tilde_glitch", "transparent_underline", "xon_xoff", "needs_xon_xoff",
"prtr_silent", "hard_cursor", "non_rev_rmcup", "no_pad_char", "non_dest_scroll_region",
"can_change", "back_color_erase", "hue_lightness_saturation", "col_addr_glitch",
"cr_cancels_micro_mode", "has_print_wheel", "row_addr_glitch", "semi_auto_right_margin",
"cpi_changes_res", "lpi_changes_res", "backspaces_with_bs", "crt_no_scrolling",
"no_correctly_working_cr", "gnu_has_meta_key", "linefeed_is_newline", "has_hardware_tabs",
"return_does_clr_eol"];
pub static boolnames: &'static[&'static str] = &["bw", "am", "xsb", "xhp", "xenl", "eo",
"gn", "hc", "km", "hs", "in", "db", "da", "mir", "msgr", "os", "eslok", "xt", "hz", "ul", "xon",
"nxon", "mc5i", "chts", "nrrmc", "npc", "ndscr", "ccc", "bce", "hls", "xhpa", "crxm", "daisy",
"xvpa", "sam", "cpix", "lpix", "OTbs", "OTns", "OTnc", "OTMT", "OTNL", "OTpt", "OTxr"];
pub static numfnames: &'static[&'static str] = &[ "columns", "init_tabs", "lines",
"lines_of_memory", "magic_cookie_glitch", "padding_baud_rate", "virtual_terminal",
"width_status_line", "num_labels", "label_height", "label_width", "max_attributes",
"maximum_windows", "max_colors", "max_pairs", "no_color_video", "buffer_capacity",
"dot_vert_spacing", "dot_horz_spacing", "max_micro_address", "max_micro_jump", "micro_col_size",
"micro_line_size", "number_of_pins", "output_res_char", "output_res_line",
"output_res_horz_inch", "output_res_vert_inch", "print_rate", "wide_char_size", "buttons",
"bit_image_entwining", "bit_image_type", "magic_cookie_glitch_ul", "carriage_return_delay",
"new_line_delay", "backspace_delay", "horizontal_tab_delay", "number_of_function_keys"];
pub static numnames: &'static[&'static str] = &[ "cols", "it", "lines", "lm", "xmc", "pb",
"vt", "wsl", "nlab", "lh", "lw", "ma", "wnum", "colors", "pairs", "ncv", "bufsz", "spinv",
"spinh", "maddr", "mjump", "mcs", "mls", "npins", "orc", "orl", "orhi", "orvi", "cps", "widcs",
"btns", "bitwin", "bitype", "UTug", "OTdC", "OTdN", "OTdB", "OTdT", "OTkn"];
pub static stringfnames: &'static[&'static str] = &[ "back_tab", "bell", "carriage_return",
"change_scroll_region", "clear_all_tabs", "clear_screen", "clr_eol", "clr_eos",
"column_address", "command_character", "cursor_address", "cursor_down", "cursor_home",
"cursor_invisible", "cursor_left", "cursor_mem_address", "cursor_normal", "cursor_right",
"cursor_to_ll", "cursor_up", "cursor_visible", "delete_character", "delete_line",
"dis_status_line", "down_half_line", "enter_alt_charset_mode", "enter_blink_mode",
"enter_bold_mode", "enter_ca_mode", "enter_delete_mode", "enter_dim_mode", "enter_insert_mode",
"enter_secure_mode", "enter_protected_mode", "enter_reverse_mode", "enter_standout_mode",
"enter_underline_mode", "erase_chars", "exit_alt_charset_mode", "exit_attribute_mode",
"exit_ca_mode", "exit_delete_mode", "exit_insert_mode", "exit_standout_mode",
"exit_underline_mode", "flash_screen", "form_feed", "from_status_line", "init_1string",
"init_2string", "init_3string", "init_file", "insert_character", "insert_line",
"insert_padding", "key_backspace", "key_catab", "key_clear", "key_ctab", "key_dc", "key_dl",
"key_down", "key_eic", "key_eol", "key_eos", "key_f0", "key_f1", "key_f10", "key_f2", "key_f3",
"key_f4", "key_f5", "key_f6", "key_f7", "key_f8", "key_f9", "key_home", "key_ic", "key_il",
"key_left", "key_ll", "key_npage", "key_ppage", "key_right", "key_sf", "key_sr", "key_stab",
"key_up", "keypad_local", "keypad_xmit", "lab_f0", "lab_f1", "lab_f10", "lab_f2", "lab_f3",
"lab_f4", "lab_f5", "lab_f6", "lab_f7", "lab_f8", "lab_f9", "meta_off", "meta_on", "newline",
"pad_char", "parm_dch", "parm_delete_line", "parm_down_cursor", "parm_ich", "parm_index",
"parm_insert_line", "parm_left_cursor", "parm_right_cursor", "parm_rindex", "parm_up_cursor",
"pkey_key", "pkey_local", "pkey_xmit", "print_screen", "prtr_off", "prtr_on", "repeat_char",
"reset_1string", "reset_2string", "reset_3string", "reset_file", "restore_cursor",
"row_address", "save_cursor", "scroll_forward", "scroll_reverse", "set_attributes", "set_tab",
"set_window", "tab", "to_status_line", "underline_char", "up_half_line", "init_prog", "key_a1",
"key_a3", "key_b2", "key_c1", "key_c3", "prtr_non", "char_padding", "acs_chars", "plab_norm",
"key_btab", "enter_xon_mode", "exit_xon_mode", "enter_am_mode", "exit_am_mode", "xon_character",
"xoff_character", "ena_acs", "label_on", "label_off", "key_beg", "key_cancel", "key_close",
"key_command", "key_copy", "key_create", "key_end", "key_enter", "key_exit", "key_find",
"key_help", "key_mark", "key_message", "key_move", "key_next", "key_open", "key_options",
"key_previous", "key_print", "key_redo", "key_reference", "key_refresh", "key_replace",
"key_restart", "key_resume", "key_save", "key_suspend", "key_undo", "key_sbeg", "key_scancel",
"key_scommand", "key_scopy", "key_screate", "key_sdc", "key_sdl", "key_select", "key_send",
"key_seol", "key_sexit", "key_sfind", "key_shelp", "key_shome", "key_sic", "key_sleft",
"key_smessage", "key_smove", "key_snext", "key_soptions", "key_sprevious", "key_sprint",
"key_sredo", "key_sreplace", "key_sright", "key_srsume", "key_ssave", "key_ssuspend",
"key_sundo", "req_for_input", "key_f11", "key_f12", "key_f13", "key_f14", "key_f15", "key_f16",
"key_f17", "key_f18", "key_f19", "key_f20", "key_f21", "key_f22", "key_f23", "key_f24",
"key_f25", "key_f26", "key_f27", "key_f28", "key_f29", "key_f30", "key_f31", "key_f32",
"key_f33", "key_f34", "key_f35", "key_f36", "key_f37", "key_f38", "key_f39", "key_f40",
"key_f41", "key_f42", "key_f43", "key_f44", "key_f45", "key_f46", "key_f47", "key_f48",
"key_f49", "key_f50", "key_f51", "key_f52", "key_f53", "key_f54", "key_f55", "key_f56",
"key_f57", "key_f58", "key_f59", "key_f60", "key_f61", "key_f62", "key_f63", "clr_bol",
"clear_margins", "set_left_margin", "set_right_margin", "label_format", "set_clock",
"display_clock", "remove_clock", "create_window", "goto_window", "hangup", "dial_phone",
"quick_dial", "tone", "pulse", "flash_hook", "fixed_pause", "wait_tone", "user0", "user1",
"user2", "user3", "user4", "user5", "user6", "user7", "user8", "user9", "orig_pair",
"orig_colors", "initialize_color", "initialize_pair", "set_color_pair", "set_foreground",
"set_background", "change_char_pitch", "change_line_pitch", "change_res_horz",
"change_res_vert", "define_char", "enter_doublewide_mode", "enter_draft_quality",
"enter_italics_mode", "enter_leftward_mode", "enter_micro_mode", "enter_near_letter_quality",
"enter_normal_quality", "enter_shadow_mode", "enter_subscript_mode", "enter_superscript_mode",
"enter_upward_mode", "exit_doublewide_mode", "exit_italics_mode", "exit_leftward_mode",
"exit_micro_mode", "exit_shadow_mode", "exit_subscript_mode", "exit_superscript_mode",
"exit_upward_mode", "micro_column_address", "micro_down", "micro_left", "micro_right",
"micro_row_address", "micro_up", "order_of_pins", "parm_down_micro", "parm_left_micro",
"parm_right_micro", "parm_up_micro", "select_char_set", "set_bottom_margin",
"set_bottom_margin_parm", "set_left_margin_parm", "set_right_margin_parm", "set_top_margin",
"set_top_margin_parm", "start_bit_image", "start_char_set_def", "stop_bit_image",
"stop_char_set_def", "subscript_characters", "superscript_characters", "these_cause_cr",
"zero_motion", "char_set_names", "key_mouse", "mouse_info", "req_mouse_pos", "get_mouse",
"set_a_foreground", "set_a_background", "pkey_plab", "device_type", "code_set_init",
"set0_des_seq", "set1_des_seq", "set2_des_seq", "set3_des_seq", "set_lr_margin",
"set_tb_margin", "bit_image_repeat", "bit_image_newline", "bit_image_carriage_return",
"color_names", "define_bit_image_region", "end_bit_image_region", "set_color_band",
"set_page_length", "display_pc_char", "enter_pc_charset_mode", "exit_pc_charset_mode",
"enter_scancode_mode", "exit_scancode_mode", "pc_term_options", "scancode_escape",
"alt_scancode_esc", "enter_horizontal_hl_mode", "enter_left_hl_mode", "enter_low_hl_mode",
"enter_right_hl_mode", "enter_top_hl_mode", "enter_vertical_hl_mode", "set_a_attributes",
"set_pglen_inch", "termcap_init2", "termcap_reset", "linefeed_if_not_lf", "backspace_if_not_bs",
"other_non_function_keys", "arrow_key_map", "acs_ulcorner", "acs_llcorner", "acs_urcorner",
"acs_lrcorner", "acs_ltee", "acs_rtee", "acs_btee", "acs_ttee", "acs_hline", "acs_vline",
"acs_plus", "memory_lock", "memory_unlock", "box_chars_1"];
pub static stringnames: &'static[&'static str] = &[ "cbt", "_", "cr", "csr", "tbc", "clear",
"_", "_", "hpa", "cmdch", "cup", "cud1", "home", "civis", "cub1", "mrcup", "cnorm", "cuf1",
"ll", "cuu1", "cvvis", "dch1", "dl1", "dsl", "hd", "smacs", "blink", "bold", "smcup", "smdc",
"dim", "smir", "invis", "prot", "rev", "smso", "smul", "ech", "rmacs", "sgr0", "rmcup", "rmdc",
"rmir", "rmso", "rmul", "flash", "ff", "fsl", "is1", "is2", "is3", "if", "ich1", "il1", "ip",
"kbs", "ktbc", "kclr", "kctab", "_", "_", "kcud1", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"_", "_", "_", "_", "_", "khome", "_", "_", "kcub1", "_", "knp", "kpp", "kcuf1", "_", "_",
"khts", "_", "rmkx", "smkx", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "rmm", "_",
"_", "pad", "dch", "dl", "cud", "ich", "indn", "il", "cub", "cuf", "rin", "cuu", "pfkey",
"pfloc", "pfx", "mc0", "mc4", "_", "rep", "rs1", "rs2", "rs3", "rf", "rc", "vpa", "sc", "ind",
"ri", "sgr", "_", "wind", "_", "tsl", "uc", "hu", "iprog", "_", "_", "_", "_", "_", "mc5p",
"rmp", "acsc", "pln", "kcbt", "smxon", "rmxon", "smam", "rmam", "xonc", "xoffc", "_", "smln",
"rmln", "_", "kcan", "kclo", "kcmd", "kcpy", "kcrt", "_", "kent", "kext", "kfnd", "khlp",
"kmrk", "kmsg", "kmov", "knxt", "kopn", "kopt", "kprv", "kprt", "krdo", "kref", "krfr", "krpl",
"krst", "kres", "ksav", "kspd", "kund", "kBEG", "kCAN", "kCMD", "kCPY", "kCRT", "_", "_",
"kslt", "kEND", "kEOL", "kEXT", "kFND", "kHLP", "kHOM", "_", "kLFT", "kMSG", "kMOV", "kNXT",
"kOPT", "kPRV", "kPRT", "kRDO", "kRPL", "kRIT", "kRES", "kSAV", "kSPD", "kUND", "rfi", "_", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"dclk", "rmclk", "cwin", "wingo", "_", "dial", "qdial", "_", "_", "hook", "pause", "wait", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "op", "oc", "initc", "initp", "scp", "setf",
"setb", "cpi", "lpi", "chr", "cvr", "defc", "swidm", "sdrfq", "sitm", "slm", "smicm", "snlq",
"snrmq", "sshm", "ssubm", "ssupm", "sum", "rwidm", "ritm", "rlm", "rmicm", "rshm", "rsubm",
"rsupm", "rum", "mhpa", "mcud1", "mcub1", "mcuf1", "mvpa", "mcuu1", "porder", "mcud", "mcub",
"mcuf", "mcuu", "scs", "smgb", "smgbp", "smglp", "smgrp", "smgt", "smgtp", "sbim", "scsd",
"rbim", "rcsd", "subcs", "supcs", "docr", "zerom", "csnm", "kmous", "minfo", "reqmp", "getm",
"setaf", "setab", "pfxl", "devt", "csin", "s0ds", "s1ds", "s2ds", "s3ds", "smglr", "smgtb",
"birep", "binel", "bicr", "colornm", "defbi", "endbi", "setcolor", "slines", "dispc", "smpch",
"rmpch", "smsc", "rmsc", "pctrm", "scesc", "scesa", "ehhlm", "elhlm", "elohlm", "erhlm",
"ethlm", "evhlm", "sgr1", "slength", "OTi2", "OTrs", "OTnl", "OTbs", "OTko", "OTma", "OTG2",
"OTG3", "OTG1", "OTG4", "OTGR", "OTGL", "OTGU", "OTGD", "OTGH", "OTGV", "OTGC", "meml", "memu",
"box1"];
fn read_le_u16(r: &mut io::Read) -> io::Result<u16> {
let mut b = [0; 2];
let mut amt = 0;
while amt < b.len() {
match try!(r.read(&mut b[amt..])) {
0 => return Err(io::Error::new(io::ErrorKind::Other, "end of file")),
n => amt += n,
}
}
Ok((b[0] as u16) | ((b[1] as u16) << 8))
}
/// Read exactly one byte from `r`, mapping a clean EOF (`None` from the
/// `bytes()` iterator) onto an `io::ErrorKind::Other` "end of file" error.
fn read_byte(r: &mut io::Read) -> io::Result<u8> {
    if let Some(result) = r.bytes().next() {
        result
    } else {
        Err(io::Error::new(io::ErrorKind::Other, "end of file"))
    }
}
/// Parse a compiled terminfo entry, using long capability names if `longnames`
/// is true
pub fn parse(file: &mut io::Read, longnames: bool) -> Result<TermInfo, String> |
// According to the spec, these fields must be >= -1 where -1 means that the feature is not
// supported. Using 0 instead of -1 works because we skip sections with length 0.
macro_rules! read_nonneg {
() => {{
match try!(read_le_u16(file)) as i16 {
n if n >= 0 => n as usize,
-1 => 0,
_ => return Err("incompatible file: length fields must be >= -1".to_string()),
}
}}
}
let names_bytes = read_nonneg!();
let bools_bytes = read_nonneg!();
let numbers_count = read_nonneg!();
let string_offsets_count = read_nonneg!();
let string_table_bytes = read_nonneg!();
if names_bytes == 0 {
return Err("incompatible file: names field must be \
at least 1 byte wide".to_string());
}
if bools_bytes > boolnames.len() {
return Err("incompatible file: more booleans than \
expected".to_string());
}
if numbers_count > numnames.len() {
return Err("incompatible file: more numbers than \
expected".to_string());
}
if string_offsets_count > stringnames.len() {
return Err("incompatible file: more string offsets than \
expected".to_string());
}
// don't read NUL
let mut bytes = Vec::new();
try!(file.take((names_bytes - 1) as u64).read_to_end(&mut bytes));
let names_str = match String::from_utf8(bytes) {
Ok(s) => s,
Err(_) => return Err("input not utf-8".to_string()),
};
let term_names: Vec<String> = names_str.split('|')
.map(|s| s.to_string())
.collect();
// consume NUL
if try!(read_byte(file))!= b'\0' {
return Err("incompatible file: missing null terminator \
for names section".to_string());
}
let bools_map: HashMap<String, bool> = try!(
(0..bools_bytes).filter_map(|i| match read_byte(file) {
Err(e) => Some(Err(e)),
Ok(1) => Some(Ok((bnames[i].to_string(), true))),
Ok(_) => None
}).collect());
if (bools_bytes + names_bytes) % 2 == 1 {
try!(read_byte(file)); // compensate for padding
}
let numbers_map: HashMap<String, u16> = try!(
(0..numbers_count).filter_map(|i| match read_le_u16(file) {
Ok(0xFFFF) => None,
Ok(n) => Some(Ok((nnames[i].to_string(), n))),
Err(e) => Some(Err(e))
}).collect());
let string_map: HashMap<String, Vec<u8>> = if string_offsets_count > 0 {
let string_offsets: Vec<u16> = try!((0..string_offsets_count).map(|_| {
read_le_u16(file)
}).collect());
let mut string_table = Vec::new();
try!(file.take(string_table_bytes as u64).read_to_end(&mut string_table));
try!(string_offsets.into_iter().enumerate().filter(|&(_, offset)| {
// non-entry
offset!= 0xFFFF
}).map(|(i, offset)| {
let offset = offset as usize;
let name = if snames[i] == "_" {
stringfnames[i]
} else {
snames[i]
};
if offset == 0xFFFE {
// undocumented: FFFE indicates cap@, which means the capability is not present
// unsure if the handling for this is correct
return Ok((name.to_string(), Vec::new()));
}
// Find the offset of the NUL we want to go to
let nulpos = string_table[offset..string_table_bytes].iter().position(|&b| b == 0);
match nulpos {
Some(len) => Ok((name.to_string(), string_table[offset..offset + len].to_vec())),
None => Err("invalid file: missing NUL in string_table".to_string()),
}
}).collect())
} else {
HashMap::new()
};
// And that's all there is to it
Ok(TermInfo {
names: term_names,
bools: bools_map,
numbers: numbers_map,
strings: string_map
})
}
/// Create a dummy TermInfo struct for msys terminals
pub fn msys_terminfo() -> TermInfo {
let mut strings = HashMap::new();
strings.insert("sgr0".to_string(), b"\x1B[0m".to_vec());
strings.insert("bold | {
macro_rules! try( ($e:expr) => (
match $e {
Ok(e) => e,
Err(e) => return Err(format!("{}", e))
}
) );
let (bnames, snames, nnames) = if longnames {
(boolfnames, stringfnames, numfnames)
} else {
(boolnames, stringnames, numnames)
};
// Check magic number
let magic = try!(read_le_u16(file));
if magic != 0x011A {
return Err(format!("invalid magic number: expected {:x}, found {:x}",
0x011A, magic));
} | identifier_body |
// Not sure if portable.
/// Long ("full") names for the boolean capabilities, in ncurses' compiled
/// order; index-aligned with `boolnames` (selected by `parse`'s `longnames`).
pub static boolfnames: &'static[&'static str] = &["auto_left_margin", "auto_right_margin",
"no_esc_ctlc", "ceol_standout_glitch", "eat_newline_glitch", "erase_overstrike", "generic_type",
"hard_copy", "has_meta_key", "has_status_line", "insert_null_glitch", "memory_above",
"memory_below", "move_insert_mode", "move_standout_mode", "over_strike", "status_line_esc_ok",
"dest_tabs_magic_smso", "tilde_glitch", "transparent_underline", "xon_xoff", "needs_xon_xoff",
"prtr_silent", "hard_cursor", "non_rev_rmcup", "no_pad_char", "non_dest_scroll_region",
"can_change", "back_color_erase", "hue_lightness_saturation", "col_addr_glitch",
"cr_cancels_micro_mode", "has_print_wheel", "row_addr_glitch", "semi_auto_right_margin",
"cpi_changes_res", "lpi_changes_res", "backspaces_with_bs", "crt_no_scrolling",
"no_correctly_working_cr", "gnu_has_meta_key", "linefeed_is_newline", "has_hardware_tabs",
"return_does_clr_eol"];
/// Short names for the boolean capabilities; same order as `boolfnames`.
pub static boolnames: &'static[&'static str] = &["bw", "am", "xsb", "xhp", "xenl", "eo",
"gn", "hc", "km", "hs", "in", "db", "da", "mir", "msgr", "os", "eslok", "xt", "hz", "ul", "xon",
"nxon", "mc5i", "chts", "nrrmc", "npc", "ndscr", "ccc", "bce", "hls", "xhpa", "crxm", "daisy",
"xvpa", "sam", "cpix", "lpix", "OTbs", "OTns", "OTnc", "OTMT", "OTNL", "OTpt", "OTxr"];
/// Long ("full") names for the numeric capabilities; index-aligned with `numnames`.
pub static numfnames: &'static[&'static str] = &[ "columns", "init_tabs", "lines",
"lines_of_memory", "magic_cookie_glitch", "padding_baud_rate", "virtual_terminal",
"width_status_line", "num_labels", "label_height", "label_width", "max_attributes",
"maximum_windows", "max_colors", "max_pairs", "no_color_video", "buffer_capacity",
"dot_vert_spacing", "dot_horz_spacing", "max_micro_address", "max_micro_jump", "micro_col_size",
"micro_line_size", "number_of_pins", "output_res_char", "output_res_line",
"output_res_horz_inch", "output_res_vert_inch", "print_rate", "wide_char_size", "buttons",
"bit_image_entwining", "bit_image_type", "magic_cookie_glitch_ul", "carriage_return_delay",
"new_line_delay", "backspace_delay", "horizontal_tab_delay", "number_of_function_keys"];
/// Short names for the numeric capabilities; same order as `numfnames`.
pub static numnames: &'static[&'static str] = &[ "cols", "it", "lines", "lm", "xmc", "pb",
"vt", "wsl", "nlab", "lh", "lw", "ma", "wnum", "colors", "pairs", "ncv", "bufsz", "spinv",
"spinh", "maddr", "mjump", "mcs", "mls", "npins", "orc", "orl", "orhi", "orvi", "cps", "widcs",
"btns", "bitwin", "bitype", "UTug", "OTdC", "OTdN", "OTdB", "OTdT", "OTkn"];
/// Long ("full") names for the string capabilities; index-aligned with
/// `stringnames`, and used by `parse` as a fallback when the short name is a
/// "_" placeholder.
pub static stringfnames: &'static[&'static str] = &[ "back_tab", "bell", "carriage_return",
"change_scroll_region", "clear_all_tabs", "clear_screen", "clr_eol", "clr_eos",
"column_address", "command_character", "cursor_address", "cursor_down", "cursor_home",
"cursor_invisible", "cursor_left", "cursor_mem_address", "cursor_normal", "cursor_right",
"cursor_to_ll", "cursor_up", "cursor_visible", "delete_character", "delete_line",
"dis_status_line", "down_half_line", "enter_alt_charset_mode", "enter_blink_mode",
"enter_bold_mode", "enter_ca_mode", "enter_delete_mode", "enter_dim_mode", "enter_insert_mode",
"enter_secure_mode", "enter_protected_mode", "enter_reverse_mode", "enter_standout_mode",
"enter_underline_mode", "erase_chars", "exit_alt_charset_mode", "exit_attribute_mode",
"exit_ca_mode", "exit_delete_mode", "exit_insert_mode", "exit_standout_mode",
"exit_underline_mode", "flash_screen", "form_feed", "from_status_line", "init_1string",
"init_2string", "init_3string", "init_file", "insert_character", "insert_line",
"insert_padding", "key_backspace", "key_catab", "key_clear", "key_ctab", "key_dc", "key_dl",
"key_down", "key_eic", "key_eol", "key_eos", "key_f0", "key_f1", "key_f10", "key_f2", "key_f3",
"key_f4", "key_f5", "key_f6", "key_f7", "key_f8", "key_f9", "key_home", "key_ic", "key_il",
"key_left", "key_ll", "key_npage", "key_ppage", "key_right", "key_sf", "key_sr", "key_stab",
"key_up", "keypad_local", "keypad_xmit", "lab_f0", "lab_f1", "lab_f10", "lab_f2", "lab_f3",
"lab_f4", "lab_f5", "lab_f6", "lab_f7", "lab_f8", "lab_f9", "meta_off", "meta_on", "newline",
"pad_char", "parm_dch", "parm_delete_line", "parm_down_cursor", "parm_ich", "parm_index",
"parm_insert_line", "parm_left_cursor", "parm_right_cursor", "parm_rindex", "parm_up_cursor",
"pkey_key", "pkey_local", "pkey_xmit", "print_screen", "prtr_off", "prtr_on", "repeat_char",
"reset_1string", "reset_2string", "reset_3string", "reset_file", "restore_cursor",
"row_address", "save_cursor", "scroll_forward", "scroll_reverse", "set_attributes", "set_tab",
"set_window", "tab", "to_status_line", "underline_char", "up_half_line", "init_prog", "key_a1",
"key_a3", "key_b2", "key_c1", "key_c3", "prtr_non", "char_padding", "acs_chars", "plab_norm",
"key_btab", "enter_xon_mode", "exit_xon_mode", "enter_am_mode", "exit_am_mode", "xon_character",
"xoff_character", "ena_acs", "label_on", "label_off", "key_beg", "key_cancel", "key_close",
"key_command", "key_copy", "key_create", "key_end", "key_enter", "key_exit", "key_find",
"key_help", "key_mark", "key_message", "key_move", "key_next", "key_open", "key_options",
"key_previous", "key_print", "key_redo", "key_reference", "key_refresh", "key_replace",
"key_restart", "key_resume", "key_save", "key_suspend", "key_undo", "key_sbeg", "key_scancel",
"key_scommand", "key_scopy", "key_screate", "key_sdc", "key_sdl", "key_select", "key_send",
"key_seol", "key_sexit", "key_sfind", "key_shelp", "key_shome", "key_sic", "key_sleft",
"key_smessage", "key_smove", "key_snext", "key_soptions", "key_sprevious", "key_sprint",
"key_sredo", "key_sreplace", "key_sright", "key_srsume", "key_ssave", "key_ssuspend",
"key_sundo", "req_for_input", "key_f11", "key_f12", "key_f13", "key_f14", "key_f15", "key_f16",
"key_f17", "key_f18", "key_f19", "key_f20", "key_f21", "key_f22", "key_f23", "key_f24",
"key_f25", "key_f26", "key_f27", "key_f28", "key_f29", "key_f30", "key_f31", "key_f32",
"key_f33", "key_f34", "key_f35", "key_f36", "key_f37", "key_f38", "key_f39", "key_f40",
"key_f41", "key_f42", "key_f43", "key_f44", "key_f45", "key_f46", "key_f47", "key_f48",
"key_f49", "key_f50", "key_f51", "key_f52", "key_f53", "key_f54", "key_f55", "key_f56",
"key_f57", "key_f58", "key_f59", "key_f60", "key_f61", "key_f62", "key_f63", "clr_bol",
"clear_margins", "set_left_margin", "set_right_margin", "label_format", "set_clock",
"display_clock", "remove_clock", "create_window", "goto_window", "hangup", "dial_phone",
"quick_dial", "tone", "pulse", "flash_hook", "fixed_pause", "wait_tone", "user0", "user1",
"user2", "user3", "user4", "user5", "user6", "user7", "user8", "user9", "orig_pair",
"orig_colors", "initialize_color", "initialize_pair", "set_color_pair", "set_foreground",
"set_background", "change_char_pitch", "change_line_pitch", "change_res_horz",
"change_res_vert", "define_char", "enter_doublewide_mode", "enter_draft_quality",
"enter_italics_mode", "enter_leftward_mode", "enter_micro_mode", "enter_near_letter_quality",
"enter_normal_quality", "enter_shadow_mode", "enter_subscript_mode", "enter_superscript_mode",
"enter_upward_mode", "exit_doublewide_mode", "exit_italics_mode", "exit_leftward_mode",
"exit_micro_mode", "exit_shadow_mode", "exit_subscript_mode", "exit_superscript_mode",
"exit_upward_mode", "micro_column_address", "micro_down", "micro_left", "micro_right",
"micro_row_address", "micro_up", "order_of_pins", "parm_down_micro", "parm_left_micro",
"parm_right_micro", "parm_up_micro", "select_char_set", "set_bottom_margin",
"set_bottom_margin_parm", "set_left_margin_parm", "set_right_margin_parm", "set_top_margin",
"set_top_margin_parm", "start_bit_image", "start_char_set_def", "stop_bit_image",
"stop_char_set_def", "subscript_characters", "superscript_characters", "these_cause_cr",
"zero_motion", "char_set_names", "key_mouse", "mouse_info", "req_mouse_pos", "get_mouse",
"set_a_foreground", "set_a_background", "pkey_plab", "device_type", "code_set_init",
"set0_des_seq", "set1_des_seq", "set2_des_seq", "set3_des_seq", "set_lr_margin",
"set_tb_margin", "bit_image_repeat", "bit_image_newline", "bit_image_carriage_return",
"color_names", "define_bit_image_region", "end_bit_image_region", "set_color_band",
"set_page_length", "display_pc_char", "enter_pc_charset_mode", "exit_pc_charset_mode",
"enter_scancode_mode", "exit_scancode_mode", "pc_term_options", "scancode_escape",
"alt_scancode_esc", "enter_horizontal_hl_mode", "enter_left_hl_mode", "enter_low_hl_mode",
"enter_right_hl_mode", "enter_top_hl_mode", "enter_vertical_hl_mode", "set_a_attributes",
"set_pglen_inch", "termcap_init2", "termcap_reset", "linefeed_if_not_lf", "backspace_if_not_bs",
"other_non_function_keys", "arrow_key_map", "acs_ulcorner", "acs_llcorner", "acs_urcorner",
"acs_lrcorner", "acs_ltee", "acs_rtee", "acs_btee", "acs_ttee", "acs_hline", "acs_vline",
"acs_plus", "memory_lock", "memory_unlock", "box_chars_1"];
/// Short names for the string capabilities; "_" entries are placeholders for
/// capabilities without a short name (`parse` falls back to `stringfnames`).
pub static stringnames: &'static[&'static str] = &[ "cbt", "_", "cr", "csr", "tbc", "clear",
"_", "_", "hpa", "cmdch", "cup", "cud1", "home", "civis", "cub1", "mrcup", "cnorm", "cuf1",
"ll", "cuu1", "cvvis", "dch1", "dl1", "dsl", "hd", "smacs", "blink", "bold", "smcup", "smdc",
"dim", "smir", "invis", "prot", "rev", "smso", "smul", "ech", "rmacs", "sgr0", "rmcup", "rmdc",
"rmir", "rmso", "rmul", "flash", "ff", "fsl", "is1", "is2", "is3", "if", "ich1", "il1", "ip",
"kbs", "ktbc", "kclr", "kctab", "_", "_", "kcud1", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"_", "_", "_", "_", "_", "khome", "_", "_", "kcub1", "_", "knp", "kpp", "kcuf1", "_", "_",
"khts", "_", "rmkx", "smkx", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "rmm", "_",
"_", "pad", "dch", "dl", "cud", "ich", "indn", "il", "cub", "cuf", "rin", "cuu", "pfkey",
"pfloc", "pfx", "mc0", "mc4", "_", "rep", "rs1", "rs2", "rs3", "rf", "rc", "vpa", "sc", "ind",
"ri", "sgr", "_", "wind", "_", "tsl", "uc", "hu", "iprog", "_", "_", "_", "_", "_", "mc5p",
"rmp", "acsc", "pln", "kcbt", "smxon", "rmxon", "smam", "rmam", "xonc", "xoffc", "_", "smln",
"rmln", "_", "kcan", "kclo", "kcmd", "kcpy", "kcrt", "_", "kent", "kext", "kfnd", "khlp",
"kmrk", "kmsg", "kmov", "knxt", "kopn", "kopt", "kprv", "kprt", "krdo", "kref", "krfr", "krpl",
"krst", "kres", "ksav", "kspd", "kund", "kBEG", "kCAN", "kCMD", "kCPY", "kCRT", "_", "_",
"kslt", "kEND", "kEOL", "kEXT", "kFND", "kHLP", "kHOM", "_", "kLFT", "kMSG", "kMOV", "kNXT",
"kOPT", "kPRV", "kPRT", "kRDO", "kRPL", "kRIT", "kRES", "kSAV", "kSPD", "kUND", "rfi", "_", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"dclk", "rmclk", "cwin", "wingo", "_", "dial", "qdial", "_", "_", "hook", "pause", "wait", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "op", "oc", "initc", "initp", "scp", "setf",
"setb", "cpi", "lpi", "chr", "cvr", "defc", "swidm", "sdrfq", "sitm", "slm", "smicm", "snlq",
"snrmq", "sshm", "ssubm", "ssupm", "sum", "rwidm", "ritm", "rlm", "rmicm", "rshm", "rsubm",
"rsupm", "rum", "mhpa", "mcud1", "mcub1", "mcuf1", "mvpa", "mcuu1", "porder", "mcud", "mcub",
"mcuf", "mcuu", "scs", "smgb", "smgbp", "smglp", "smgrp", "smgt", "smgtp", "sbim", "scsd",
"rbim", "rcsd", "subcs", "supcs", "docr", "zerom", "csnm", "kmous", "minfo", "reqmp", "getm",
"setaf", "setab", "pfxl", "devt", "csin", "s0ds", "s1ds", "s2ds", "s3ds", "smglr", "smgtb",
"birep", "binel", "bicr", "colornm", "defbi", "endbi", "setcolor", "slines", "dispc", "smpch",
"rmpch", "smsc", "rmsc", "pctrm", "scesc", "scesa", "ehhlm", "elhlm", "elohlm", "erhlm",
"ethlm", "evhlm", "sgr1", "slength", "OTi2", "OTrs", "OTnl", "OTbs", "OTko", "OTma", "OTG2",
"OTG3", "OTG1", "OTG4", "OTGR", "OTGL", "OTGU", "OTGD", "OTGH", "OTGV", "OTGC", "meml", "memu",
"box1"];
fn read_le_u16(r: &mut io::Read) -> io::Result<u16> {
let mut b = [0; 2];
let mut amt = 0;
while amt < b.len() {
match try!(r.read(&mut b[amt..])) {
0 => return Err(io::Error::new(io::ErrorKind::Other, "end of file")),
n => amt += n,
}
}
Ok((b[0] as u16) | ((b[1] as u16) << 8))
}
/// Read exactly one byte from `r`, mapping a clean EOF (`None` from the
/// `bytes()` iterator) onto an `io::ErrorKind::Other` "end of file" error.
fn read_byte(r: &mut io::Read) -> io::Result<u8> {
    if let Some(result) = r.bytes().next() {
        result
    } else {
        Err(io::Error::new(io::ErrorKind::Other, "end of file"))
    }
}
/// Parse a compiled terminfo entry, using long capability names if `longnames`
/// is true
pub fn | (file: &mut io::Read, longnames: bool) -> Result<TermInfo, String> {
macro_rules! try( ($e:expr) => (
match $e {
Ok(e) => e,
Err(e) => return Err(format!("{}", e))
}
) );
let (bnames, snames, nnames) = if longnames {
(boolfnames, stringfnames, numfnames)
} else {
(boolnames, stringnames, numnames)
};
// Check magic number
let magic = try!(read_le_u16(file));
if magic!= 0x011A {
return Err(format!("invalid magic number: expected {:x}, found {:x}",
0x011A, magic));
}
// According to the spec, these fields must be >= -1 where -1 means that the feature is not
// supported. Using 0 instead of -1 works because we skip sections with length 0.
macro_rules! read_nonneg {
() => {{
match try!(read_le_u16(file)) as i16 {
n if n >= 0 => n as usize,
-1 => 0,
_ => return Err("incompatible file: length fields must be >= -1".to_string()),
}
}}
}
let names_bytes = read_nonneg!();
let bools_bytes = read_nonneg!();
let numbers_count = read_nonneg!();
let string_offsets_count = read_nonneg!();
let string_table_bytes = read_nonneg!();
if names_bytes == 0 {
return Err("incompatible file: names field must be \
at least 1 byte wide".to_string());
}
if bools_bytes > boolnames.len() {
return Err("incompatible file: more booleans than \
expected".to_string());
}
if numbers_count > numnames.len() {
return Err("incompatible file: more numbers than \
expected".to_string());
}
if string_offsets_count > stringnames.len() {
return Err("incompatible file: more string offsets than \
expected".to_string());
}
// don't read NUL
let mut bytes = Vec::new();
try!(file.take((names_bytes - 1) as u64).read_to_end(&mut bytes));
let names_str = match String::from_utf8(bytes) {
Ok(s) => s,
Err(_) => return Err("input not utf-8".to_string()),
};
let term_names: Vec<String> = names_str.split('|')
.map(|s| s.to_string())
.collect();
// consume NUL
if try!(read_byte(file))!= b'\0' {
return Err("incompatible file: missing null terminator \
for names section".to_string());
}
let bools_map: HashMap<String, bool> = try!(
(0..bools_bytes).filter_map(|i| match read_byte(file) {
Err(e) => Some(Err(e)),
Ok(1) => Some(Ok((bnames[i].to_string(), true))),
Ok(_) => None
}).collect());
if (bools_bytes + names_bytes) % 2 == 1 {
try!(read_byte(file)); // compensate for padding
}
let numbers_map: HashMap<String, u16> = try!(
(0..numbers_count).filter_map(|i| match read_le_u16(file) {
Ok(0xFFFF) => None,
Ok(n) => Some(Ok((nnames[i].to_string(), n))),
Err(e) => Some(Err(e))
}).collect());
let string_map: HashMap<String, Vec<u8>> = if string_offsets_count > 0 {
let string_offsets: Vec<u16> = try!((0..string_offsets_count).map(|_| {
read_le_u16(file)
}).collect());
let mut string_table = Vec::new();
try!(file.take(string_table_bytes as u64).read_to_end(&mut string_table));
try!(string_offsets.into_iter().enumerate().filter(|&(_, offset)| {
// non-entry
offset!= 0xFFFF
}).map(|(i, offset)| {
let offset = offset as usize;
let name = if snames[i] == "_" {
stringfnames[i]
} else {
snames[i]
};
if offset == 0xFFFE {
// undocumented: FFFE indicates cap@, which means the capability is not present
// unsure if the handling for this is correct
return Ok((name.to_string(), Vec::new()));
}
// Find the offset of the NUL we want to go to
let nulpos = string_table[offset..string_table_bytes].iter().position(|&b| b == 0);
match nulpos {
Some(len) => Ok((name.to_string(), string_table[offset..offset + len].to_vec())),
None => Err("invalid file: missing NUL in string_table".to_string()),
}
}).collect())
} else {
HashMap::new()
};
// And that's all there is to it
Ok(TermInfo {
names: term_names,
bools: bools_map,
numbers: numbers_map,
strings: string_map
})
}
/// Create a dummy TermInfo struct for msys terminals
pub fn msys_terminfo() -> TermInfo {
let mut strings = HashMap::new();
strings.insert("sgr0".to_string(), b"\x1B[0m".to_vec());
strings.insert("bold | parse | identifier_name |
// Not sure if portable.
/// Long ("full") names for the boolean capabilities, in ncurses' compiled
/// order; index-aligned with `boolnames` (selected by `parse`'s `longnames`).
pub static boolfnames: &'static[&'static str] = &["auto_left_margin", "auto_right_margin",
"no_esc_ctlc", "ceol_standout_glitch", "eat_newline_glitch", "erase_overstrike", "generic_type",
"hard_copy", "has_meta_key", "has_status_line", "insert_null_glitch", "memory_above",
"memory_below", "move_insert_mode", "move_standout_mode", "over_strike", "status_line_esc_ok",
"dest_tabs_magic_smso", "tilde_glitch", "transparent_underline", "xon_xoff", "needs_xon_xoff",
"prtr_silent", "hard_cursor", "non_rev_rmcup", "no_pad_char", "non_dest_scroll_region",
"can_change", "back_color_erase", "hue_lightness_saturation", "col_addr_glitch",
"cr_cancels_micro_mode", "has_print_wheel", "row_addr_glitch", "semi_auto_right_margin",
"cpi_changes_res", "lpi_changes_res", "backspaces_with_bs", "crt_no_scrolling",
"no_correctly_working_cr", "gnu_has_meta_key", "linefeed_is_newline", "has_hardware_tabs",
"return_does_clr_eol"];
/// Short names for the boolean capabilities; same order as `boolfnames`.
pub static boolnames: &'static[&'static str] = &["bw", "am", "xsb", "xhp", "xenl", "eo",
"gn", "hc", "km", "hs", "in", "db", "da", "mir", "msgr", "os", "eslok", "xt", "hz", "ul", "xon",
"nxon", "mc5i", "chts", "nrrmc", "npc", "ndscr", "ccc", "bce", "hls", "xhpa", "crxm", "daisy",
"xvpa", "sam", "cpix", "lpix", "OTbs", "OTns", "OTnc", "OTMT", "OTNL", "OTpt", "OTxr"];
/// Long ("full") names for the numeric capabilities; index-aligned with `numnames`.
pub static numfnames: &'static[&'static str] = &[ "columns", "init_tabs", "lines",
"lines_of_memory", "magic_cookie_glitch", "padding_baud_rate", "virtual_terminal",
"width_status_line", "num_labels", "label_height", "label_width", "max_attributes",
"maximum_windows", "max_colors", "max_pairs", "no_color_video", "buffer_capacity",
"dot_vert_spacing", "dot_horz_spacing", "max_micro_address", "max_micro_jump", "micro_col_size",
"micro_line_size", "number_of_pins", "output_res_char", "output_res_line",
"output_res_horz_inch", "output_res_vert_inch", "print_rate", "wide_char_size", "buttons",
"bit_image_entwining", "bit_image_type", "magic_cookie_glitch_ul", "carriage_return_delay",
"new_line_delay", "backspace_delay", "horizontal_tab_delay", "number_of_function_keys"];
/// Short names for the numeric capabilities; same order as `numfnames`.
pub static numnames: &'static[&'static str] = &[ "cols", "it", "lines", "lm", "xmc", "pb",
"vt", "wsl", "nlab", "lh", "lw", "ma", "wnum", "colors", "pairs", "ncv", "bufsz", "spinv",
"spinh", "maddr", "mjump", "mcs", "mls", "npins", "orc", "orl", "orhi", "orvi", "cps", "widcs",
"btns", "bitwin", "bitype", "UTug", "OTdC", "OTdN", "OTdB", "OTdT", "OTkn"];
/// Long ("full") names for the string capabilities; index-aligned with
/// `stringnames`, and used by `parse` as a fallback when the short name is a
/// "_" placeholder.
pub static stringfnames: &'static[&'static str] = &[ "back_tab", "bell", "carriage_return",
"change_scroll_region", "clear_all_tabs", "clear_screen", "clr_eol", "clr_eos",
"column_address", "command_character", "cursor_address", "cursor_down", "cursor_home",
"cursor_invisible", "cursor_left", "cursor_mem_address", "cursor_normal", "cursor_right",
"cursor_to_ll", "cursor_up", "cursor_visible", "delete_character", "delete_line",
"dis_status_line", "down_half_line", "enter_alt_charset_mode", "enter_blink_mode",
"enter_bold_mode", "enter_ca_mode", "enter_delete_mode", "enter_dim_mode", "enter_insert_mode",
"enter_secure_mode", "enter_protected_mode", "enter_reverse_mode", "enter_standout_mode",
"enter_underline_mode", "erase_chars", "exit_alt_charset_mode", "exit_attribute_mode",
"exit_ca_mode", "exit_delete_mode", "exit_insert_mode", "exit_standout_mode",
"exit_underline_mode", "flash_screen", "form_feed", "from_status_line", "init_1string",
"init_2string", "init_3string", "init_file", "insert_character", "insert_line",
"insert_padding", "key_backspace", "key_catab", "key_clear", "key_ctab", "key_dc", "key_dl",
"key_down", "key_eic", "key_eol", "key_eos", "key_f0", "key_f1", "key_f10", "key_f2", "key_f3",
"key_f4", "key_f5", "key_f6", "key_f7", "key_f8", "key_f9", "key_home", "key_ic", "key_il",
"key_left", "key_ll", "key_npage", "key_ppage", "key_right", "key_sf", "key_sr", "key_stab",
"key_up", "keypad_local", "keypad_xmit", "lab_f0", "lab_f1", "lab_f10", "lab_f2", "lab_f3",
"lab_f4", "lab_f5", "lab_f6", "lab_f7", "lab_f8", "lab_f9", "meta_off", "meta_on", "newline",
"pad_char", "parm_dch", "parm_delete_line", "parm_down_cursor", "parm_ich", "parm_index",
"parm_insert_line", "parm_left_cursor", "parm_right_cursor", "parm_rindex", "parm_up_cursor",
"pkey_key", "pkey_local", "pkey_xmit", "print_screen", "prtr_off", "prtr_on", "repeat_char",
"reset_1string", "reset_2string", "reset_3string", "reset_file", "restore_cursor",
"row_address", "save_cursor", "scroll_forward", "scroll_reverse", "set_attributes", "set_tab",
"set_window", "tab", "to_status_line", "underline_char", "up_half_line", "init_prog", "key_a1",
"key_a3", "key_b2", "key_c1", "key_c3", "prtr_non", "char_padding", "acs_chars", "plab_norm",
"key_btab", "enter_xon_mode", "exit_xon_mode", "enter_am_mode", "exit_am_mode", "xon_character",
"xoff_character", "ena_acs", "label_on", "label_off", "key_beg", "key_cancel", "key_close",
"key_command", "key_copy", "key_create", "key_end", "key_enter", "key_exit", "key_find",
"key_help", "key_mark", "key_message", "key_move", "key_next", "key_open", "key_options",
"key_previous", "key_print", "key_redo", "key_reference", "key_refresh", "key_replace",
"key_restart", "key_resume", "key_save", "key_suspend", "key_undo", "key_sbeg", "key_scancel",
"key_scommand", "key_scopy", "key_screate", "key_sdc", "key_sdl", "key_select", "key_send",
"key_seol", "key_sexit", "key_sfind", "key_shelp", "key_shome", "key_sic", "key_sleft",
"key_smessage", "key_smove", "key_snext", "key_soptions", "key_sprevious", "key_sprint",
"key_sredo", "key_sreplace", "key_sright", "key_srsume", "key_ssave", "key_ssuspend",
"key_sundo", "req_for_input", "key_f11", "key_f12", "key_f13", "key_f14", "key_f15", "key_f16",
"key_f17", "key_f18", "key_f19", "key_f20", "key_f21", "key_f22", "key_f23", "key_f24",
"key_f25", "key_f26", "key_f27", "key_f28", "key_f29", "key_f30", "key_f31", "key_f32",
"key_f33", "key_f34", "key_f35", "key_f36", "key_f37", "key_f38", "key_f39", "key_f40",
"key_f41", "key_f42", "key_f43", "key_f44", "key_f45", "key_f46", "key_f47", "key_f48",
"key_f49", "key_f50", "key_f51", "key_f52", "key_f53", "key_f54", "key_f55", "key_f56",
"key_f57", "key_f58", "key_f59", "key_f60", "key_f61", "key_f62", "key_f63", "clr_bol",
"clear_margins", "set_left_margin", "set_right_margin", "label_format", "set_clock",
"display_clock", "remove_clock", "create_window", "goto_window", "hangup", "dial_phone",
"quick_dial", "tone", "pulse", "flash_hook", "fixed_pause", "wait_tone", "user0", "user1",
"user2", "user3", "user4", "user5", "user6", "user7", "user8", "user9", "orig_pair",
"orig_colors", "initialize_color", "initialize_pair", "set_color_pair", "set_foreground",
"set_background", "change_char_pitch", "change_line_pitch", "change_res_horz",
"change_res_vert", "define_char", "enter_doublewide_mode", "enter_draft_quality",
"enter_italics_mode", "enter_leftward_mode", "enter_micro_mode", "enter_near_letter_quality",
"enter_normal_quality", "enter_shadow_mode", "enter_subscript_mode", "enter_superscript_mode",
"enter_upward_mode", "exit_doublewide_mode", "exit_italics_mode", "exit_leftward_mode",
"exit_micro_mode", "exit_shadow_mode", "exit_subscript_mode", "exit_superscript_mode",
"exit_upward_mode", "micro_column_address", "micro_down", "micro_left", "micro_right",
"micro_row_address", "micro_up", "order_of_pins", "parm_down_micro", "parm_left_micro",
"parm_right_micro", "parm_up_micro", "select_char_set", "set_bottom_margin",
"set_bottom_margin_parm", "set_left_margin_parm", "set_right_margin_parm", "set_top_margin",
"set_top_margin_parm", "start_bit_image", "start_char_set_def", "stop_bit_image",
"stop_char_set_def", "subscript_characters", "superscript_characters", "these_cause_cr",
"zero_motion", "char_set_names", "key_mouse", "mouse_info", "req_mouse_pos", "get_mouse",
"set_a_foreground", "set_a_background", "pkey_plab", "device_type", "code_set_init",
"set0_des_seq", "set1_des_seq", "set2_des_seq", "set3_des_seq", "set_lr_margin",
"set_tb_margin", "bit_image_repeat", "bit_image_newline", "bit_image_carriage_return",
"color_names", "define_bit_image_region", "end_bit_image_region", "set_color_band",
"set_page_length", "display_pc_char", "enter_pc_charset_mode", "exit_pc_charset_mode",
"enter_scancode_mode", "exit_scancode_mode", "pc_term_options", "scancode_escape",
"alt_scancode_esc", "enter_horizontal_hl_mode", "enter_left_hl_mode", "enter_low_hl_mode",
"enter_right_hl_mode", "enter_top_hl_mode", "enter_vertical_hl_mode", "set_a_attributes",
"set_pglen_inch", "termcap_init2", "termcap_reset", "linefeed_if_not_lf", "backspace_if_not_bs",
"other_non_function_keys", "arrow_key_map", "acs_ulcorner", "acs_llcorner", "acs_urcorner",
"acs_lrcorner", "acs_ltee", "acs_rtee", "acs_btee", "acs_ttee", "acs_hline", "acs_vline",
"acs_plus", "memory_lock", "memory_unlock", "box_chars_1"];
/// Short names for the string capabilities; "_" entries are placeholders for
/// capabilities without a short name (`parse` falls back to `stringfnames`).
pub static stringnames: &'static[&'static str] = &[ "cbt", "_", "cr", "csr", "tbc", "clear",
"_", "_", "hpa", "cmdch", "cup", "cud1", "home", "civis", "cub1", "mrcup", "cnorm", "cuf1",
"ll", "cuu1", "cvvis", "dch1", "dl1", "dsl", "hd", "smacs", "blink", "bold", "smcup", "smdc",
"dim", "smir", "invis", "prot", "rev", "smso", "smul", "ech", "rmacs", "sgr0", "rmcup", "rmdc",
"rmir", "rmso", "rmul", "flash", "ff", "fsl", "is1", "is2", "is3", "if", "ich1", "il1", "ip",
"kbs", "ktbc", "kclr", "kctab", "_", "_", "kcud1", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"_", "_", "_", "_", "_", "khome", "_", "_", "kcub1", "_", "knp", "kpp", "kcuf1", "_", "_",
"khts", "_", "rmkx", "smkx", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "rmm", "_",
"_", "pad", "dch", "dl", "cud", "ich", "indn", "il", "cub", "cuf", "rin", "cuu", "pfkey",
"pfloc", "pfx", "mc0", "mc4", "_", "rep", "rs1", "rs2", "rs3", "rf", "rc", "vpa", "sc", "ind",
"ri", "sgr", "_", "wind", "_", "tsl", "uc", "hu", "iprog", "_", "_", "_", "_", "_", "mc5p",
"rmp", "acsc", "pln", "kcbt", "smxon", "rmxon", "smam", "rmam", "xonc", "xoffc", "_", "smln",
"rmln", "_", "kcan", "kclo", "kcmd", "kcpy", "kcrt", "_", "kent", "kext", "kfnd", "khlp",
"kmrk", "kmsg", "kmov", "knxt", "kopn", "kopt", "kprv", "kprt", "krdo", "kref", "krfr", "krpl",
"krst", "kres", "ksav", "kspd", "kund", "kBEG", "kCAN", "kCMD", "kCPY", "kCRT", "_", "_",
"kslt", "kEND", "kEOL", "kEXT", "kFND", "kHLP", "kHOM", "_", "kLFT", "kMSG", "kMOV", "kNXT",
"kOPT", "kPRV", "kPRT", "kRDO", "kRPL", "kRIT", "kRES", "kSAV", "kSPD", "kUND", "rfi", "_", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"dclk", "rmclk", "cwin", "wingo", "_", "dial", "qdial", "_", "_", "hook", "pause", "wait", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "op", "oc", "initc", "initp", "scp", "setf",
"setb", "cpi", "lpi", "chr", "cvr", "defc", "swidm", "sdrfq", "sitm", "slm", "smicm", "snlq",
"snrmq", "sshm", "ssubm", "ssupm", "sum", "rwidm", "ritm", "rlm", "rmicm", "rshm", "rsubm",
"rsupm", "rum", "mhpa", "mcud1", "mcub1", "mcuf1", "mvpa", "mcuu1", "porder", "mcud", "mcub",
"mcuf", "mcuu", "scs", "smgb", "smgbp", "smglp", "smgrp", "smgt", "smgtp", "sbim", "scsd",
"rbim", "rcsd", "subcs", "supcs", "docr", "zerom", "csnm", "kmous", "minfo", "reqmp", "getm",
"setaf", "setab", "pfxl", "devt", "csin", "s0ds", "s1ds", "s2ds", "s3ds", "smglr", "smgtb",
"birep", "binel", "bicr", "colornm", "defbi", "endbi", "setcolor", "slines", "dispc", "smpch",
"rmpch", "smsc", "rmsc", "pctrm", "scesc", "scesa", "ehhlm", "elhlm", "elohlm", "erhlm",
"ethlm", "evhlm", "sgr1", "slength", "OTi2", "OTrs", "OTnl", "OTbs", "OTko", "OTma", "OTG2",
"OTG3", "OTG1", "OTG4", "OTGR", "OTGL", "OTGU", "OTGD", "OTGH", "OTGV", "OTGC", "meml", "memu",
"box1"];
fn read_le_u16(r: &mut io::Read) -> io::Result<u16> {
let mut b = [0; 2];
let mut amt = 0;
while amt < b.len() {
match try!(r.read(&mut b[amt..])) {
0 => return Err(io::Error::new(io::ErrorKind::Other, "end of file")),
n => amt += n,
}
}
Ok((b[0] as u16) | ((b[1] as u16) << 8))
}
fn read_byte(r: &mut io::Read) -> io::Result<u8> {
match r.bytes().next() {
Some(s) => s,
None => Err(io::Error::new(io::ErrorKind::Other, "end of file"))
}
}
/// Parse a compiled terminfo entry, using long capability names if `longnames`
/// is true
pub fn parse(file: &mut io::Read, longnames: bool) -> Result<TermInfo, String> {
macro_rules! try( ($e:expr) => (
match $e {
Ok(e) => e,
Err(e) => return Err(format!("{}", e))
}
) );
let (bnames, snames, nnames) = if longnames {
(boolfnames, stringfnames, numfnames)
} else {
(boolnames, stringnames, numnames)
};
// Check magic number
let magic = try!(read_le_u16(file));
if magic!= 0x011A {
return Err(format!("invalid magic number: expected {:x}, found {:x}",
0x011A, magic));
}
// According to the spec, these fields must be >= -1 where -1 means that the feature is not
// supported. Using 0 instead of -1 works because we skip sections with length 0.
macro_rules! read_nonneg {
() => {{
match try!(read_le_u16(file)) as i16 {
n if n >= 0 => n as usize,
-1 => 0,
_ => return Err("incompatible file: length fields must be >= -1".to_string()),
}
}}
}
let names_bytes = read_nonneg!();
let bools_bytes = read_nonneg!();
let numbers_count = read_nonneg!();
let string_offsets_count = read_nonneg!();
let string_table_bytes = read_nonneg!();
if names_bytes == 0 {
return Err("incompatible file: names field must be \
at least 1 byte wide".to_string());
}
if bools_bytes > boolnames.len() {
return Err("incompatible file: more booleans than \
expected".to_string());
}
if numbers_count > numnames.len() {
return Err("incompatible file: more numbers than \
expected".to_string());
}
if string_offsets_count > stringnames.len() {
return Err("incompatible file: more string offsets than \
expected".to_string());
}
// don't read NUL
let mut bytes = Vec::new();
try!(file.take((names_bytes - 1) as u64).read_to_end(&mut bytes));
let names_str = match String::from_utf8(bytes) {
Ok(s) => s,
Err(_) => return Err("input not utf-8".to_string()),
};
let term_names: Vec<String> = names_str.split('|')
.map(|s| s.to_string())
.collect();
// consume NUL
if try!(read_byte(file))!= b'\0' {
return Err("incompatible file: missing null terminator \
for names section".to_string());
}
let bools_map: HashMap<String, bool> = try!(
(0..bools_bytes).filter_map(|i| match read_byte(file) {
Err(e) => Some(Err(e)),
Ok(1) => Some(Ok((bnames[i].to_string(), true))),
Ok(_) => None
}).collect());
if (bools_bytes + names_bytes) % 2 == 1 {
try!(read_byte(file)); // compensate for padding
} |
let numbers_map: HashMap<String, u16> = try!(
(0..numbers_count).filter_map(|i| match read_le_u16(file) {
Ok(0xFFFF) => None,
Ok(n) => Some(Ok((nnames[i].to_string(), n))),
Err(e) => Some(Err(e))
}).collect());
let string_map: HashMap<String, Vec<u8>> = if string_offsets_count > 0 {
let string_offsets: Vec<u16> = try!((0..string_offsets_count).map(|_| {
read_le_u16(file)
}).collect());
let mut string_table = Vec::new();
try!(file.take(string_table_bytes as u64).read_to_end(&mut string_table));
try!(string_offsets.into_iter().enumerate().filter(|&(_, offset)| {
// non-entry
offset!= 0xFFFF
}).map(|(i, offset)| {
let offset = offset as usize;
let name = if snames[i] == "_" {
stringfnames[i]
} else {
snames[i]
};
if offset == 0xFFFE {
// undocumented: FFFE indicates cap@, which means the capability is not present
// unsure if the handling for this is correct
return Ok((name.to_string(), Vec::new()));
}
// Find the offset of the NUL we want to go to
let nulpos = string_table[offset..string_table_bytes].iter().position(|&b| b == 0);
match nulpos {
Some(len) => Ok((name.to_string(), string_table[offset..offset + len].to_vec())),
None => Err("invalid file: missing NUL in string_table".to_string()),
}
}).collect())
} else {
HashMap::new()
};
// And that's all there is to it
Ok(TermInfo {
names: term_names,
bools: bools_map,
numbers: numbers_map,
strings: string_map
})
}
/// Create a dummy TermInfo struct for msys terminals
pub fn msys_terminfo() -> TermInfo {
let mut strings = HashMap::new();
strings.insert("sgr0".to_string(), b"\x1B[0m".to_vec());
strings.insert("bold". | random_line_split |
|
compiled.rs | if portable.
pub static boolfnames: &'static[&'static str] = &["auto_left_margin", "auto_right_margin",
"no_esc_ctlc", "ceol_standout_glitch", "eat_newline_glitch", "erase_overstrike", "generic_type",
"hard_copy", "has_meta_key", "has_status_line", "insert_null_glitch", "memory_above",
"memory_below", "move_insert_mode", "move_standout_mode", "over_strike", "status_line_esc_ok",
"dest_tabs_magic_smso", "tilde_glitch", "transparent_underline", "xon_xoff", "needs_xon_xoff",
"prtr_silent", "hard_cursor", "non_rev_rmcup", "no_pad_char", "non_dest_scroll_region",
"can_change", "back_color_erase", "hue_lightness_saturation", "col_addr_glitch",
"cr_cancels_micro_mode", "has_print_wheel", "row_addr_glitch", "semi_auto_right_margin",
"cpi_changes_res", "lpi_changes_res", "backspaces_with_bs", "crt_no_scrolling",
"no_correctly_working_cr", "gnu_has_meta_key", "linefeed_is_newline", "has_hardware_tabs",
"return_does_clr_eol"];
pub static boolnames: &'static[&'static str] = &["bw", "am", "xsb", "xhp", "xenl", "eo",
"gn", "hc", "km", "hs", "in", "db", "da", "mir", "msgr", "os", "eslok", "xt", "hz", "ul", "xon",
"nxon", "mc5i", "chts", "nrrmc", "npc", "ndscr", "ccc", "bce", "hls", "xhpa", "crxm", "daisy",
"xvpa", "sam", "cpix", "lpix", "OTbs", "OTns", "OTnc", "OTMT", "OTNL", "OTpt", "OTxr"];
pub static numfnames: &'static[&'static str] = &[ "columns", "init_tabs", "lines",
"lines_of_memory", "magic_cookie_glitch", "padding_baud_rate", "virtual_terminal",
"width_status_line", "num_labels", "label_height", "label_width", "max_attributes",
"maximum_windows", "max_colors", "max_pairs", "no_color_video", "buffer_capacity",
"dot_vert_spacing", "dot_horz_spacing", "max_micro_address", "max_micro_jump", "micro_col_size",
"micro_line_size", "number_of_pins", "output_res_char", "output_res_line",
"output_res_horz_inch", "output_res_vert_inch", "print_rate", "wide_char_size", "buttons",
"bit_image_entwining", "bit_image_type", "magic_cookie_glitch_ul", "carriage_return_delay",
"new_line_delay", "backspace_delay", "horizontal_tab_delay", "number_of_function_keys"];
pub static numnames: &'static[&'static str] = &[ "cols", "it", "lines", "lm", "xmc", "pb",
"vt", "wsl", "nlab", "lh", "lw", "ma", "wnum", "colors", "pairs", "ncv", "bufsz", "spinv",
"spinh", "maddr", "mjump", "mcs", "mls", "npins", "orc", "orl", "orhi", "orvi", "cps", "widcs",
"btns", "bitwin", "bitype", "UTug", "OTdC", "OTdN", "OTdB", "OTdT", "OTkn"];
pub static stringfnames: &'static[&'static str] = &[ "back_tab", "bell", "carriage_return",
"change_scroll_region", "clear_all_tabs", "clear_screen", "clr_eol", "clr_eos",
"column_address", "command_character", "cursor_address", "cursor_down", "cursor_home",
"cursor_invisible", "cursor_left", "cursor_mem_address", "cursor_normal", "cursor_right",
"cursor_to_ll", "cursor_up", "cursor_visible", "delete_character", "delete_line",
"dis_status_line", "down_half_line", "enter_alt_charset_mode", "enter_blink_mode",
"enter_bold_mode", "enter_ca_mode", "enter_delete_mode", "enter_dim_mode", "enter_insert_mode",
"enter_secure_mode", "enter_protected_mode", "enter_reverse_mode", "enter_standout_mode",
"enter_underline_mode", "erase_chars", "exit_alt_charset_mode", "exit_attribute_mode",
"exit_ca_mode", "exit_delete_mode", "exit_insert_mode", "exit_standout_mode",
"exit_underline_mode", "flash_screen", "form_feed", "from_status_line", "init_1string",
"init_2string", "init_3string", "init_file", "insert_character", "insert_line",
"insert_padding", "key_backspace", "key_catab", "key_clear", "key_ctab", "key_dc", "key_dl",
"key_down", "key_eic", "key_eol", "key_eos", "key_f0", "key_f1", "key_f10", "key_f2", "key_f3",
"key_f4", "key_f5", "key_f6", "key_f7", "key_f8", "key_f9", "key_home", "key_ic", "key_il",
"key_left", "key_ll", "key_npage", "key_ppage", "key_right", "key_sf", "key_sr", "key_stab",
"key_up", "keypad_local", "keypad_xmit", "lab_f0", "lab_f1", "lab_f10", "lab_f2", "lab_f3",
"lab_f4", "lab_f5", "lab_f6", "lab_f7", "lab_f8", "lab_f9", "meta_off", "meta_on", "newline",
"pad_char", "parm_dch", "parm_delete_line", "parm_down_cursor", "parm_ich", "parm_index",
"parm_insert_line", "parm_left_cursor", "parm_right_cursor", "parm_rindex", "parm_up_cursor",
"pkey_key", "pkey_local", "pkey_xmit", "print_screen", "prtr_off", "prtr_on", "repeat_char",
"reset_1string", "reset_2string", "reset_3string", "reset_file", "restore_cursor",
"row_address", "save_cursor", "scroll_forward", "scroll_reverse", "set_attributes", "set_tab",
"set_window", "tab", "to_status_line", "underline_char", "up_half_line", "init_prog", "key_a1",
"key_a3", "key_b2", "key_c1", "key_c3", "prtr_non", "char_padding", "acs_chars", "plab_norm",
"key_btab", "enter_xon_mode", "exit_xon_mode", "enter_am_mode", "exit_am_mode", "xon_character",
"xoff_character", "ena_acs", "label_on", "label_off", "key_beg", "key_cancel", "key_close",
"key_command", "key_copy", "key_create", "key_end", "key_enter", "key_exit", "key_find",
"key_help", "key_mark", "key_message", "key_move", "key_next", "key_open", "key_options",
"key_previous", "key_print", "key_redo", "key_reference", "key_refresh", "key_replace",
"key_restart", "key_resume", "key_save", "key_suspend", "key_undo", "key_sbeg", "key_scancel",
"key_scommand", "key_scopy", "key_screate", "key_sdc", "key_sdl", "key_select", "key_send",
"key_seol", "key_sexit", "key_sfind", "key_shelp", "key_shome", "key_sic", "key_sleft",
"key_smessage", "key_smove", "key_snext", "key_soptions", "key_sprevious", "key_sprint",
"key_sredo", "key_sreplace", "key_sright", "key_srsume", "key_ssave", "key_ssuspend",
"key_sundo", "req_for_input", "key_f11", "key_f12", "key_f13", "key_f14", "key_f15", "key_f16",
"key_f17", "key_f18", "key_f19", "key_f20", "key_f21", "key_f22", "key_f23", "key_f24",
"key_f25", "key_f26", "key_f27", "key_f28", "key_f29", "key_f30", "key_f31", "key_f32",
"key_f33", "key_f34", "key_f35", "key_f36", "key_f37", "key_f38", "key_f39", "key_f40",
"key_f41", "key_f42", "key_f43", "key_f44", "key_f45", "key_f46", "key_f47", "key_f48",
"key_f49", "key_f50", "key_f51", "key_f52", "key_f53", "key_f54", "key_f55", "key_f56",
"key_f57", "key_f58", "key_f59", "key_f60", "key_f61", "key_f62", "key_f63", "clr_bol",
"clear_margins", "set_left_margin", "set_right_margin", "label_format", "set_clock",
"display_clock", "remove_clock", "create_window", "goto_window", "hangup", "dial_phone",
"quick_dial", "tone", "pulse", "flash_hook", "fixed_pause", "wait_tone", "user0", "user1",
"user2", "user3", "user4", "user5", "user6", "user7", "user8", "user9", "orig_pair",
"orig_colors", "initialize_color", "initialize_pair", "set_color_pair", "set_foreground",
"set_background", "change_char_pitch", "change_line_pitch", "change_res_horz",
"change_res_vert", "define_char", "enter_doublewide_mode", "enter_draft_quality",
"enter_italics_mode", "enter_leftward_mode", "enter_micro_mode", "enter_near_letter_quality",
"enter_normal_quality", "enter_shadow_mode", "enter_subscript_mode", "enter_superscript_mode",
"enter_upward_mode", "exit_doublewide_mode", "exit_italics_mode", "exit_leftward_mode",
"exit_micro_mode", "exit_shadow_mode", "exit_subscript_mode", "exit_superscript_mode",
"exit_upward_mode", "micro_column_address", "micro_down", "micro_left", "micro_right",
"micro_row_address", "micro_up", "order_of_pins", "parm_down_micro", "parm_left_micro",
"parm_right_micro", "parm_up_micro", "select_char_set", "set_bottom_margin",
"set_bottom_margin_parm", "set_left_margin_parm", "set_right_margin_parm", "set_top_margin",
"set_top_margin_parm", "start_bit_image", "start_char_set_def", "stop_bit_image",
"stop_char_set_def", "subscript_characters", "superscript_characters", "these_cause_cr",
"zero_motion", "char_set_names", "key_mouse", "mouse_info", "req_mouse_pos", "get_mouse",
"set_a_foreground", "set_a_background", "pkey_plab", "device_type", "code_set_init",
"set0_des_seq", "set1_des_seq", "set2_des_seq", "set3_des_seq", "set_lr_margin",
"set_tb_margin", "bit_image_repeat", "bit_image_newline", "bit_image_carriage_return",
"color_names", "define_bit_image_region", "end_bit_image_region", "set_color_band",
"set_page_length", "display_pc_char", "enter_pc_charset_mode", "exit_pc_charset_mode",
"enter_scancode_mode", "exit_scancode_mode", "pc_term_options", "scancode_escape",
"alt_scancode_esc", "enter_horizontal_hl_mode", "enter_left_hl_mode", "enter_low_hl_mode",
"enter_right_hl_mode", "enter_top_hl_mode", "enter_vertical_hl_mode", "set_a_attributes",
"set_pglen_inch", "termcap_init2", "termcap_reset", "linefeed_if_not_lf", "backspace_if_not_bs",
"other_non_function_keys", "arrow_key_map", "acs_ulcorner", "acs_llcorner", "acs_urcorner",
"acs_lrcorner", "acs_ltee", "acs_rtee", "acs_btee", "acs_ttee", "acs_hline", "acs_vline",
"acs_plus", "memory_lock", "memory_unlock", "box_chars_1"];
pub static stringnames: &'static[&'static str] = &[ "cbt", "_", "cr", "csr", "tbc", "clear",
"_", "_", "hpa", "cmdch", "cup", "cud1", "home", "civis", "cub1", "mrcup", "cnorm", "cuf1",
"ll", "cuu1", "cvvis", "dch1", "dl1", "dsl", "hd", "smacs", "blink", "bold", "smcup", "smdc",
"dim", "smir", "invis", "prot", "rev", "smso", "smul", "ech", "rmacs", "sgr0", "rmcup", "rmdc",
"rmir", "rmso", "rmul", "flash", "ff", "fsl", "is1", "is2", "is3", "if", "ich1", "il1", "ip",
"kbs", "ktbc", "kclr", "kctab", "_", "_", "kcud1", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"_", "_", "_", "_", "_", "khome", "_", "_", "kcub1", "_", "knp", "kpp", "kcuf1", "_", "_",
"khts", "_", "rmkx", "smkx", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "rmm", "_",
"_", "pad", "dch", "dl", "cud", "ich", "indn", "il", "cub", "cuf", "rin", "cuu", "pfkey",
"pfloc", "pfx", "mc0", "mc4", "_", "rep", "rs1", "rs2", "rs3", "rf", "rc", "vpa", "sc", "ind",
"ri", "sgr", "_", "wind", "_", "tsl", "uc", "hu", "iprog", "_", "_", "_", "_", "_", "mc5p",
"rmp", "acsc", "pln", "kcbt", "smxon", "rmxon", "smam", "rmam", "xonc", "xoffc", "_", "smln",
"rmln", "_", "kcan", "kclo", "kcmd", "kcpy", "kcrt", "_", "kent", "kext", "kfnd", "khlp",
"kmrk", "kmsg", "kmov", "knxt", "kopn", "kopt", "kprv", "kprt", "krdo", "kref", "krfr", "krpl",
"krst", "kres", "ksav", "kspd", "kund", "kBEG", "kCAN", "kCMD", "kCPY", "kCRT", "_", "_",
"kslt", "kEND", "kEOL", "kEXT", "kFND", "kHLP", "kHOM", "_", "kLFT", "kMSG", "kMOV", "kNXT",
"kOPT", "kPRV", "kPRT", "kRDO", "kRPL", "kRIT", "kRES", "kSAV", "kSPD", "kUND", "rfi", "_", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_",
"dclk", "rmclk", "cwin", "wingo", "_", "dial", "qdial", "_", "_", "hook", "pause", "wait", "_",
"_", "_", "_", "_", "_", "_", "_", "_", "_", "op", "oc", "initc", "initp", "scp", "setf",
"setb", "cpi", "lpi", "chr", "cvr", "defc", "swidm", "sdrfq", "sitm", "slm", "smicm", "snlq",
"snrmq", "sshm", "ssubm", "ssupm", "sum", "rwidm", "ritm", "rlm", "rmicm", "rshm", "rsubm",
"rsupm", "rum", "mhpa", "mcud1", "mcub1", "mcuf1", "mvpa", "mcuu1", "porder", "mcud", "mcub",
"mcuf", "mcuu", "scs", "smgb", "smgbp", "smglp", "smgrp", "smgt", "smgtp", "sbim", "scsd",
"rbim", "rcsd", "subcs", "supcs", "docr", "zerom", "csnm", "kmous", "minfo", "reqmp", "getm",
"setaf", "setab", "pfxl", "devt", "csin", "s0ds", "s1ds", "s2ds", "s3ds", "smglr", "smgtb",
"birep", "binel", "bicr", "colornm", "defbi", "endbi", "setcolor", "slines", "dispc", "smpch",
"rmpch", "smsc", "rmsc", "pctrm", "scesc", "scesa", "ehhlm", "elhlm", "elohlm", "erhlm",
"ethlm", "evhlm", "sgr1", "slength", "OTi2", "OTrs", "OTnl", "OTbs", "OTko", "OTma", "OTG2",
"OTG3", "OTG1", "OTG4", "OTGR", "OTGL", "OTGU", "OTGD", "OTGH", "OTGV", "OTGC", "meml", "memu",
"box1"];
fn read_le_u16(r: &mut io::Read) -> io::Result<u16> {
let mut b = [0; 2];
let mut amt = 0;
while amt < b.len() {
match try!(r.read(&mut b[amt..])) {
0 => return Err(io::Error::new(io::ErrorKind::Other, "end of file")),
n => amt += n,
}
}
Ok((b[0] as u16) | ((b[1] as u16) << 8))
}
fn read_byte(r: &mut io::Read) -> io::Result<u8> {
match r.bytes().next() {
Some(s) => s,
None => Err(io::Error::new(io::ErrorKind::Other, "end of file"))
}
}
/// Parse a compiled terminfo entry, using long capability names if `longnames`
/// is true
pub fn parse(file: &mut io::Read, longnames: bool) -> Result<TermInfo, String> {
macro_rules! try( ($e:expr) => (
match $e {
Ok(e) => e,
Err(e) => return Err(format!("{}", e))
}
) );
let (bnames, snames, nnames) = if longnames {
(boolfnames, stringfnames, numfnames)
} else {
(boolnames, stringnames, numnames)
};
// Check magic number
let magic = try!(read_le_u16(file));
if magic!= 0x011A {
return Err(format!("invalid magic number: expected {:x}, found {:x}",
0x011A, magic));
}
// According to the spec, these fields must be >= -1 where -1 means that the feature is not
// supported. Using 0 instead of -1 works because we skip sections with length 0.
macro_rules! read_nonneg {
() => {{
match try!(read_le_u16(file)) as i16 {
n if n >= 0 => n as usize,
-1 => 0,
_ => return Err("incompatible file: length fields must be >= -1".to_string()),
}
}}
}
let names_bytes = read_nonneg!();
let bools_bytes = read_nonneg!();
let numbers_count = read_nonneg!();
let string_offsets_count = read_nonneg!();
let string_table_bytes = read_nonneg!();
if names_bytes == 0 {
return Err("incompatible file: names field must be \
at least 1 byte wide".to_string());
}
if bools_bytes > boolnames.len() {
return Err("incompatible file: more booleans than \
expected".to_string());
}
if numbers_count > numnames.len() {
return Err("incompatible file: more numbers than \
expected".to_string());
}
if string_offsets_count > stringnames.len() {
return Err("incompatible file: more string offsets than \
expected".to_string());
}
// don't read NUL
let mut bytes = Vec::new();
try!(file.take((names_bytes - 1) as u64).read_to_end(&mut bytes));
let names_str = match String::from_utf8(bytes) {
Ok(s) => s,
Err(_) => return Err("input not utf-8".to_string()),
};
let term_names: Vec<String> = names_str.split('|')
.map(|s| s.to_string())
.collect();
// consume NUL
if try!(read_byte(file))!= b'\0' {
return Err("incompatible file: missing null terminator \
for names section".to_string());
}
let bools_map: HashMap<String, bool> = try!(
(0..bools_bytes).filter_map(|i| match read_byte(file) {
Err(e) => Some(Err(e)),
Ok(1) => Some(Ok((bnames[i].to_string(), true))),
Ok(_) => None
}).collect());
if (bools_bytes + names_bytes) % 2 == 1 {
try!(read_byte(file)); // compensate for padding
}
let numbers_map: HashMap<String, u16> = try!(
(0..numbers_count).filter_map(|i| match read_le_u16(file) {
Ok(0xFFFF) => None,
Ok(n) => Some(Ok((nnames[i].to_string(), n))),
Err(e) => Some(Err(e))
}).collect());
let string_map: HashMap<String, Vec<u8>> = if string_offsets_count > 0 {
let string_offsets: Vec<u16> = try!((0..string_offsets_count).map(|_| {
read_le_u16(file)
}).collect());
let mut string_table = Vec::new();
try!(file.take(string_table_bytes as u64).read_to_end(&mut string_table));
try!(string_offsets.into_iter().enumerate().filter(|&(_, offset)| {
// non-entry
offset!= 0xFFFF
}).map(|(i, offset)| {
let offset = offset as usize;
let name = if snames[i] == "_" | else {
snames[i]
};
if offset == 0xFFFE {
// undocumented: FFFE indicates cap@, which means the capability is not present
// unsure if the handling for this is correct
return Ok((name.to_string(), Vec::new()));
}
// Find the offset of the NUL we want to go to
let nulpos = string_table[offset..string_table_bytes].iter().position(|&b| b == 0);
match nulpos {
Some(len) => Ok((name.to_string(), string_table[offset..offset + len].to_vec())),
None => Err("invalid file: missing NUL in string_table".to_string()),
}
}).collect())
} else {
HashMap::new()
};
// And that's all there is to it
Ok(TermInfo {
names: term_names,
bools: bools_map,
numbers: numbers_map,
strings: string_map
})
}
/// Create a dummy TermInfo struct for msys terminals
pub fn msys_terminfo() -> TermInfo {
let mut strings = HashMap::new();
strings.insert("sgr0".to_string(), b"\x1B[0m".to_vec());
strings.insert("bold | {
stringfnames[i]
} | conditional_block |
gstr.rs | // This file is part of Grust, GObject introspection bindings for Rust
//
// Copyright (C) 2013-2015 Mikhail Zabaluev <[email protected]>
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
use glib as ffi;
use types::{gchar,gpointer};
use libc;
use std::ffi::{CStr, CString, NulError};
use std::mem;
use std::ops::Deref;
use std::str;
pub struct OwnedGStr {
ptr: *const gchar,
}
impl OwnedGStr {
pub unsafe fn from_ptr(ptr: *mut gchar) -> OwnedGStr {
OwnedGStr { ptr: ptr }
}
}
impl Deref for OwnedGStr {
type Target = CStr;
fn deref(&self) -> &CStr {
unsafe { CStr::from_ptr(self.ptr) }
}
}
impl Drop for OwnedGStr {
fn | (&mut self) {
unsafe { ffi::g_free(self.ptr as gpointer) }
}
}
impl Clone for OwnedGStr {
fn clone(&self) -> OwnedGStr {
unsafe {
OwnedGStr::from_ptr(ffi::g_strdup(self.ptr))
}
}
}
impl PartialEq for OwnedGStr {
fn eq(&self, other: &OwnedGStr) -> bool {
unsafe { libc::strcmp(self.ptr, other.ptr) == 0 }
}
}
impl Eq for OwnedGStr { }
pub struct Utf8 {
inner: CStr
}
impl Utf8 {
#[inline]
pub fn as_ptr(&self) -> *const gchar {
self.inner.as_ptr()
}
#[inline]
pub fn to_str(&self) -> &str {
unsafe { str::from_utf8_unchecked(self.inner.to_bytes()) }
}
pub fn from_static_str(s: &'static str) -> &'static Utf8 {
assert!(s.ends_with("\0"),
"static string is not null-terminated: \"{}\"", s);
unsafe { Utf8::from_ptr(s.as_ptr() as *const gchar) }
}
pub unsafe fn from_ptr<'a>(ptr: *const gchar) -> &'a Utf8 {
mem::transmute(CStr::from_ptr(ptr))
}
}
impl AsRef<CStr> for Utf8 {
#[inline]
fn as_ref(&self) -> &CStr { &self.inner }
}
pub struct Utf8String {
inner: CString
}
impl Deref for Utf8String {
type Target = Utf8;
fn deref(&self) -> &Utf8 {
unsafe { Utf8::from_ptr(self.inner.as_ptr()) }
}
}
impl Utf8String {
pub fn new<T>(t: T) -> Result<Utf8String, NulError>
where T: Into<String>
{
let c_str = try!(CString::new(t.into()));
Ok(Utf8String { inner: c_str })
}
}
| drop | identifier_name |
gstr.rs | // This file is part of Grust, GObject introspection bindings for Rust
//
// Copyright (C) 2013-2015 Mikhail Zabaluev <[email protected]>
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
use glib as ffi;
use types::{gchar,gpointer};
use libc;
use std::ffi::{CStr, CString, NulError};
use std::mem;
use std::ops::Deref;
use std::str;
pub struct OwnedGStr {
ptr: *const gchar,
}
impl OwnedGStr {
pub unsafe fn from_ptr(ptr: *mut gchar) -> OwnedGStr {
OwnedGStr { ptr: ptr }
}
}
impl Deref for OwnedGStr {
type Target = CStr;
fn deref(&self) -> &CStr {
unsafe { CStr::from_ptr(self.ptr) }
}
}
impl Drop for OwnedGStr {
fn drop(&mut self) {
unsafe { ffi::g_free(self.ptr as gpointer) }
}
}
impl Clone for OwnedGStr {
fn clone(&self) -> OwnedGStr {
unsafe {
OwnedGStr::from_ptr(ffi::g_strdup(self.ptr))
}
}
}
impl PartialEq for OwnedGStr {
fn eq(&self, other: &OwnedGStr) -> bool {
unsafe { libc::strcmp(self.ptr, other.ptr) == 0 }
}
}
impl Eq for OwnedGStr { }
pub struct Utf8 {
inner: CStr
}
impl Utf8 {
#[inline]
pub fn as_ptr(&self) -> *const gchar {
self.inner.as_ptr()
}
#[inline]
pub fn to_str(&self) -> &str {
unsafe { str::from_utf8_unchecked(self.inner.to_bytes()) }
}
pub fn from_static_str(s: &'static str) -> &'static Utf8 {
assert!(s.ends_with("\0"),
"static string is not null-terminated: \"{}\"", s);
unsafe { Utf8::from_ptr(s.as_ptr() as *const gchar) }
}
pub unsafe fn from_ptr<'a>(ptr: *const gchar) -> &'a Utf8 {
mem::transmute(CStr::from_ptr(ptr))
}
}
impl AsRef<CStr> for Utf8 { | inner: CString
}
impl Deref for Utf8String {
type Target = Utf8;
fn deref(&self) -> &Utf8 {
unsafe { Utf8::from_ptr(self.inner.as_ptr()) }
}
}
impl Utf8String {
pub fn new<T>(t: T) -> Result<Utf8String, NulError>
where T: Into<String>
{
let c_str = try!(CString::new(t.into()));
Ok(Utf8String { inner: c_str })
}
} | #[inline]
fn as_ref(&self) -> &CStr { &self.inner }
}
pub struct Utf8String { | random_line_split |
gstr.rs | // This file is part of Grust, GObject introspection bindings for Rust
//
// Copyright (C) 2013-2015 Mikhail Zabaluev <[email protected]>
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
use glib as ffi;
use types::{gchar,gpointer};
use libc;
use std::ffi::{CStr, CString, NulError};
use std::mem;
use std::ops::Deref;
use std::str;
pub struct OwnedGStr {
ptr: *const gchar,
}
impl OwnedGStr {
pub unsafe fn from_ptr(ptr: *mut gchar) -> OwnedGStr {
OwnedGStr { ptr: ptr }
}
}
impl Deref for OwnedGStr {
type Target = CStr;
fn deref(&self) -> &CStr {
unsafe { CStr::from_ptr(self.ptr) }
}
}
impl Drop for OwnedGStr {
fn drop(&mut self) {
unsafe { ffi::g_free(self.ptr as gpointer) }
}
}
impl Clone for OwnedGStr {
fn clone(&self) -> OwnedGStr {
unsafe {
OwnedGStr::from_ptr(ffi::g_strdup(self.ptr))
}
}
}
impl PartialEq for OwnedGStr {
fn eq(&self, other: &OwnedGStr) -> bool {
unsafe { libc::strcmp(self.ptr, other.ptr) == 0 }
}
}
impl Eq for OwnedGStr { }
pub struct Utf8 {
inner: CStr
}
impl Utf8 {
#[inline]
pub fn as_ptr(&self) -> *const gchar {
self.inner.as_ptr()
}
#[inline]
pub fn to_str(&self) -> &str {
unsafe { str::from_utf8_unchecked(self.inner.to_bytes()) }
}
pub fn from_static_str(s: &'static str) -> &'static Utf8 |
pub unsafe fn from_ptr<'a>(ptr: *const gchar) -> &'a Utf8 {
mem::transmute(CStr::from_ptr(ptr))
}
}
impl AsRef<CStr> for Utf8 {
#[inline]
fn as_ref(&self) -> &CStr { &self.inner }
}
pub struct Utf8String {
inner: CString
}
impl Deref for Utf8String {
type Target = Utf8;
fn deref(&self) -> &Utf8 {
unsafe { Utf8::from_ptr(self.inner.as_ptr()) }
}
}
impl Utf8String {
pub fn new<T>(t: T) -> Result<Utf8String, NulError>
where T: Into<String>
{
let c_str = try!(CString::new(t.into()));
Ok(Utf8String { inner: c_str })
}
}
| {
assert!(s.ends_with("\0"),
"static string is not null-terminated: \"{}\"", s);
unsafe { Utf8::from_ptr(s.as_ptr() as *const gchar) }
} | identifier_body |
interval.rs | //! Support for creating futures that represent intervals.
//!
//! This module contains the `Interval` type which is a stream that will
//! resolve at a fixed intervals in future
use std::io;
use std::time::{Duration, Instant};
use futures::{Poll, Async};
use futures::stream::{Stream};
use reactor::{Remote, Handle};
use reactor::timeout_token::TimeoutToken;
/// A stream representing notifications at fixed interval
///
/// Intervals are created through the `Interval::new` or
/// `Interval::new_at` methods indicating when a first notification
/// should be triggered and when it will be repeated.
///
/// Note that timeouts are not intended for high resolution timers, but rather
/// they will likely fire some granularity after the exact instant that they're
/// otherwise indicated to fire at.
pub struct Interval {
token: TimeoutToken,
next: Instant,
interval: Duration,
handle: Remote,
}
impl Interval {
/// Creates a new interval which will fire at `dur` time into the future,
/// and will repeat every `dur` interval after
///
/// This function will return a future that will resolve to the actual
/// interval object. The interval object itself is then a stream which will
/// be set to fire at the specified intervals
pub fn new(dur: Duration, handle: &Handle) -> io::Result<Interval> {
Interval::new_at(Instant::now() + dur, dur, handle)
}
/// Creates a new interval which will fire at the time specified by `at`,
/// and then will repeat every `dur` interval after
///
/// This function will return a future that will resolve to the actual
/// timeout object. The timeout object itself is then a future which will be
/// set to fire at the specified point in the future.
pub fn new_at(at: Instant, dur: Duration, handle: &Handle)
-> io::Result<Interval>
{
Ok(Interval {
token: try!(TimeoutToken::new(at, &handle)),
next: at,
interval: dur,
handle: handle.remote().clone(),
})
}
}
impl Stream for Interval {
type Item = ();
type Error = io::Error;
fn poll(&mut self) -> Poll<Option<()>, io::Error> {
// TODO: is this fast enough?
let now = Instant::now();
if self.next <= now {
self.next = next_interval(self.next, now, self.interval);
self.token.reset_timeout(self.next, &self.handle);
Ok(Async::Ready(Some(())))
} else {
self.token.update_timeout(&self.handle);
Ok(Async::NotReady)
}
}
}
impl Drop for Interval {
fn drop(&mut self) {
self.token.cancel_timeout(&self.handle);
}
}
/// Converts Duration object to raw nanoseconds if possible
///
/// This is useful to divide intervals.
///
/// While technically for large duration it's impossible to represent any
/// duration as nanoseconds, the largest duration we can represent is about
/// 427_000 years. Large enough for any interval we would use or calculate in
/// tokio.
fn duration_to_nanos(dur: Duration) -> Option<u64> {
dur.as_secs()
.checked_mul(1_000_000_000)
.and_then(|v| v.checked_add(dur.subsec_nanos() as u64))
}
fn next_interval(prev: Instant, now: Instant, interval: Duration) -> Instant {
let new = prev + interval;
if new > now {
return new;
} else {
let spent_ns = duration_to_nanos(now.duration_since(prev))
.expect("interval should be expired");
let interval_ns = duration_to_nanos(interval)
.expect("interval is less that 427 thousand years");
let mult = spent_ns/interval_ns + 1;
assert!(mult < (1 << 32),
"can't skip more than 4 billion intervals of {:?} \
(trying to skip {})", interval, mult);
return prev + interval * (mult as u32);
}
}
#[cfg(test)]
mod test {
use std::time::{Instant, Duration};
use super::next_interval;
struct Timeline(Instant);
impl Timeline {
fn new() -> Timeline {
Timeline(Instant::now())
}
fn at(&self, millis: u64) -> Instant {
self.0 + Duration::from_millis(millis)
}
fn at_ns(&self, sec: u64, nanos: u32) -> Instant {
self.0 + Duration::new(sec, nanos)
}
}
fn dur(millis: u64) -> Duration {
Duration::from_millis(millis)
}
#[test]
fn norm_next() |
#[test]
fn fast_forward() {
let tm = Timeline::new();
assert_eq!(next_interval(tm.at(1), tm.at(1000), dur(10)),
tm.at(1001));
assert_eq!(next_interval(tm.at(7777), tm.at(8888), dur(100)),
tm.at(8977));
assert_eq!(next_interval(tm.at(1), tm.at(10000), dur(2100)),
tm.at(10501));
}
/// TODO: this test actually should be successful, but since we can't
/// multiply Duration on anything larger than u32 easily we decided
/// to allow thit to fail for now
#[test]
#[should_panic(expected = "can't skip more than 4 billion intervals")]
fn large_skip() {
let tm = Timeline::new();
assert_eq!(next_interval(
tm.at_ns(0, 1), tm.at_ns(25, 0), Duration::new(0, 2)),
tm.at_ns(25, 1));
}
}
| {
let tm = Timeline::new();
assert_eq!(next_interval(tm.at(1), tm.at(2), dur(10)), tm.at(11));
assert_eq!(next_interval(tm.at(7777), tm.at(7788), dur(100)),
tm.at(7877));
assert_eq!(next_interval(tm.at(1), tm.at(1000), dur(2100)),
tm.at(2101));
} | identifier_body |
interval.rs | //! Support for creating futures that represent intervals.
//!
//! This module contains the `Interval` type which is a stream that will
//! resolve at a fixed intervals in future
use std::io;
use std::time::{Duration, Instant};
use futures::{Poll, Async};
use futures::stream::{Stream};
use reactor::{Remote, Handle};
use reactor::timeout_token::TimeoutToken;
/// A stream representing notifications at fixed interval
///
/// Intervals are created through the `Interval::new` or
/// `Interval::new_at` methods indicating when a first notification
/// should be triggered and when it will be repeated.
///
/// Note that timeouts are not intended for high resolution timers, but rather
/// they will likely fire some granularity after the exact instant that they're
/// otherwise indicated to fire at.
pub struct Interval {
token: TimeoutToken,
next: Instant,
interval: Duration,
handle: Remote,
}
impl Interval {
/// Creates a new interval which will fire at `dur` time into the future,
/// and will repeat every `dur` interval after
///
/// This function will return a future that will resolve to the actual
/// interval object. The interval object itself is then a stream which will
/// be set to fire at the specified intervals
pub fn new(dur: Duration, handle: &Handle) -> io::Result<Interval> {
Interval::new_at(Instant::now() + dur, dur, handle)
}
/// Creates a new interval which will fire at the time specified by `at`,
/// and then will repeat every `dur` interval after
///
/// This function will return a future that will resolve to the actual
/// timeout object. The timeout object itself is then a future which will be
/// set to fire at the specified point in the future.
pub fn | (at: Instant, dur: Duration, handle: &Handle)
-> io::Result<Interval>
{
Ok(Interval {
token: try!(TimeoutToken::new(at, &handle)),
next: at,
interval: dur,
handle: handle.remote().clone(),
})
}
}
impl Stream for Interval {
type Item = ();
type Error = io::Error;
fn poll(&mut self) -> Poll<Option<()>, io::Error> {
// TODO: is this fast enough?
let now = Instant::now();
if self.next <= now {
self.next = next_interval(self.next, now, self.interval);
self.token.reset_timeout(self.next, &self.handle);
Ok(Async::Ready(Some(())))
} else {
self.token.update_timeout(&self.handle);
Ok(Async::NotReady)
}
}
}
impl Drop for Interval {
fn drop(&mut self) {
self.token.cancel_timeout(&self.handle);
}
}
/// Converts Duration object to raw nanoseconds if possible
///
/// This is useful to divide intervals.
///
/// While technically for large duration it's impossible to represent any
/// duration as nanoseconds, the largest duration we can represent is about
/// 427_000 years. Large enough for any interval we would use or calculate in
/// tokio.
fn duration_to_nanos(dur: Duration) -> Option<u64> {
dur.as_secs()
.checked_mul(1_000_000_000)
.and_then(|v| v.checked_add(dur.subsec_nanos() as u64))
}
fn next_interval(prev: Instant, now: Instant, interval: Duration) -> Instant {
let new = prev + interval;
if new > now {
return new;
} else {
let spent_ns = duration_to_nanos(now.duration_since(prev))
.expect("interval should be expired");
let interval_ns = duration_to_nanos(interval)
.expect("interval is less that 427 thousand years");
let mult = spent_ns/interval_ns + 1;
assert!(mult < (1 << 32),
"can't skip more than 4 billion intervals of {:?} \
(trying to skip {})", interval, mult);
return prev + interval * (mult as u32);
}
}
#[cfg(test)]
mod test {
use std::time::{Instant, Duration};
use super::next_interval;
struct Timeline(Instant);
impl Timeline {
fn new() -> Timeline {
Timeline(Instant::now())
}
fn at(&self, millis: u64) -> Instant {
self.0 + Duration::from_millis(millis)
}
fn at_ns(&self, sec: u64, nanos: u32) -> Instant {
self.0 + Duration::new(sec, nanos)
}
}
fn dur(millis: u64) -> Duration {
Duration::from_millis(millis)
}
#[test]
fn norm_next() {
let tm = Timeline::new();
assert_eq!(next_interval(tm.at(1), tm.at(2), dur(10)), tm.at(11));
assert_eq!(next_interval(tm.at(7777), tm.at(7788), dur(100)),
tm.at(7877));
assert_eq!(next_interval(tm.at(1), tm.at(1000), dur(2100)),
tm.at(2101));
}
#[test]
fn fast_forward() {
let tm = Timeline::new();
assert_eq!(next_interval(tm.at(1), tm.at(1000), dur(10)),
tm.at(1001));
assert_eq!(next_interval(tm.at(7777), tm.at(8888), dur(100)),
tm.at(8977));
assert_eq!(next_interval(tm.at(1), tm.at(10000), dur(2100)),
tm.at(10501));
}
/// TODO: this test actually should be successful, but since we can't
/// multiply Duration on anything larger than u32 easily we decided
/// to allow thit to fail for now
#[test]
#[should_panic(expected = "can't skip more than 4 billion intervals")]
fn large_skip() {
let tm = Timeline::new();
assert_eq!(next_interval(
tm.at_ns(0, 1), tm.at_ns(25, 0), Duration::new(0, 2)),
tm.at_ns(25, 1));
}
}
| new_at | identifier_name |
interval.rs | //! Support for creating futures that represent intervals.
//!
//! This module contains the `Interval` type which is a stream that will
//! resolve at a fixed intervals in future
use std::io;
use std::time::{Duration, Instant};
use futures::{Poll, Async};
use futures::stream::{Stream};
use reactor::{Remote, Handle};
use reactor::timeout_token::TimeoutToken;
/// A stream representing notifications at fixed interval
///
/// Intervals are created through the `Interval::new` or
/// `Interval::new_at` methods indicating when a first notification
/// should be triggered and when it will be repeated.
///
/// Note that timeouts are not intended for high resolution timers, but rather
/// they will likely fire some granularity after the exact instant that they're
/// otherwise indicated to fire at.
pub struct Interval {
token: TimeoutToken,
next: Instant,
interval: Duration,
handle: Remote,
}
impl Interval {
/// Creates a new interval which will fire at `dur` time into the future,
/// and will repeat every `dur` interval after
///
/// This function will return a future that will resolve to the actual
/// interval object. The interval object itself is then a stream which will
/// be set to fire at the specified intervals
pub fn new(dur: Duration, handle: &Handle) -> io::Result<Interval> {
Interval::new_at(Instant::now() + dur, dur, handle)
}
/// Creates a new interval which will fire at the time specified by `at`,
/// and then will repeat every `dur` interval after
///
/// This function will return a future that will resolve to the actual
/// timeout object. The timeout object itself is then a future which will be
/// set to fire at the specified point in the future.
pub fn new_at(at: Instant, dur: Duration, handle: &Handle)
-> io::Result<Interval>
{
Ok(Interval {
token: try!(TimeoutToken::new(at, &handle)),
next: at,
interval: dur,
handle: handle.remote().clone(),
})
}
}
impl Stream for Interval {
type Item = ();
type Error = io::Error;
fn poll(&mut self) -> Poll<Option<()>, io::Error> {
// TODO: is this fast enough?
let now = Instant::now();
if self.next <= now | else {
self.token.update_timeout(&self.handle);
Ok(Async::NotReady)
}
}
}
impl Drop for Interval {
fn drop(&mut self) {
self.token.cancel_timeout(&self.handle);
}
}
/// Converts Duration object to raw nanoseconds if possible
///
/// This is useful to divide intervals.
///
/// While technically for large duration it's impossible to represent any
/// duration as nanoseconds, the largest duration we can represent is about
/// 427_000 years. Large enough for any interval we would use or calculate in
/// tokio.
fn duration_to_nanos(dur: Duration) -> Option<u64> {
dur.as_secs()
.checked_mul(1_000_000_000)
.and_then(|v| v.checked_add(dur.subsec_nanos() as u64))
}
fn next_interval(prev: Instant, now: Instant, interval: Duration) -> Instant {
let new = prev + interval;
if new > now {
return new;
} else {
let spent_ns = duration_to_nanos(now.duration_since(prev))
.expect("interval should be expired");
let interval_ns = duration_to_nanos(interval)
.expect("interval is less that 427 thousand years");
let mult = spent_ns/interval_ns + 1;
assert!(mult < (1 << 32),
"can't skip more than 4 billion intervals of {:?} \
(trying to skip {})", interval, mult);
return prev + interval * (mult as u32);
}
}
#[cfg(test)]
mod test {
use std::time::{Instant, Duration};
use super::next_interval;
struct Timeline(Instant);
impl Timeline {
fn new() -> Timeline {
Timeline(Instant::now())
}
fn at(&self, millis: u64) -> Instant {
self.0 + Duration::from_millis(millis)
}
fn at_ns(&self, sec: u64, nanos: u32) -> Instant {
self.0 + Duration::new(sec, nanos)
}
}
fn dur(millis: u64) -> Duration {
Duration::from_millis(millis)
}
#[test]
fn norm_next() {
let tm = Timeline::new();
assert_eq!(next_interval(tm.at(1), tm.at(2), dur(10)), tm.at(11));
assert_eq!(next_interval(tm.at(7777), tm.at(7788), dur(100)),
tm.at(7877));
assert_eq!(next_interval(tm.at(1), tm.at(1000), dur(2100)),
tm.at(2101));
}
#[test]
fn fast_forward() {
let tm = Timeline::new();
assert_eq!(next_interval(tm.at(1), tm.at(1000), dur(10)),
tm.at(1001));
assert_eq!(next_interval(tm.at(7777), tm.at(8888), dur(100)),
tm.at(8977));
assert_eq!(next_interval(tm.at(1), tm.at(10000), dur(2100)),
tm.at(10501));
}
/// TODO: this test actually should be successful, but since we can't
/// multiply Duration on anything larger than u32 easily we decided
/// to allow thit to fail for now
#[test]
#[should_panic(expected = "can't skip more than 4 billion intervals")]
fn large_skip() {
let tm = Timeline::new();
assert_eq!(next_interval(
tm.at_ns(0, 1), tm.at_ns(25, 0), Duration::new(0, 2)),
tm.at_ns(25, 1));
}
}
| {
self.next = next_interval(self.next, now, self.interval);
self.token.reset_timeout(self.next, &self.handle);
Ok(Async::Ready(Some(())))
} | conditional_block |
interval.rs | //! Support for creating futures that represent intervals.
//!
//! This module contains the `Interval` type which is a stream that will
//! resolve at a fixed intervals in future
use std::io;
use std::time::{Duration, Instant};
use futures::{Poll, Async};
use futures::stream::{Stream};
use reactor::{Remote, Handle};
use reactor::timeout_token::TimeoutToken;
/// A stream representing notifications at fixed interval
///
/// Intervals are created through the `Interval::new` or
/// `Interval::new_at` methods indicating when a first notification
/// should be triggered and when it will be repeated.
///
/// Note that timeouts are not intended for high resolution timers, but rather | /// otherwise indicated to fire at.
pub struct Interval {
token: TimeoutToken,
next: Instant,
interval: Duration,
handle: Remote,
}
impl Interval {
/// Creates a new interval which will fire at `dur` time into the future,
/// and will repeat every `dur` interval after
///
/// This function will return a future that will resolve to the actual
/// interval object. The interval object itself is then a stream which will
/// be set to fire at the specified intervals
pub fn new(dur: Duration, handle: &Handle) -> io::Result<Interval> {
Interval::new_at(Instant::now() + dur, dur, handle)
}
/// Creates a new interval which will fire at the time specified by `at`,
/// and then will repeat every `dur` interval after
///
/// This function will return a future that will resolve to the actual
/// timeout object. The timeout object itself is then a future which will be
/// set to fire at the specified point in the future.
pub fn new_at(at: Instant, dur: Duration, handle: &Handle)
-> io::Result<Interval>
{
Ok(Interval {
token: try!(TimeoutToken::new(at, &handle)),
next: at,
interval: dur,
handle: handle.remote().clone(),
})
}
}
impl Stream for Interval {
type Item = ();
type Error = io::Error;
fn poll(&mut self) -> Poll<Option<()>, io::Error> {
// TODO: is this fast enough?
let now = Instant::now();
if self.next <= now {
self.next = next_interval(self.next, now, self.interval);
self.token.reset_timeout(self.next, &self.handle);
Ok(Async::Ready(Some(())))
} else {
self.token.update_timeout(&self.handle);
Ok(Async::NotReady)
}
}
}
impl Drop for Interval {
fn drop(&mut self) {
self.token.cancel_timeout(&self.handle);
}
}
/// Converts Duration object to raw nanoseconds if possible
///
/// This is useful to divide intervals.
///
/// While technically for large duration it's impossible to represent any
/// duration as nanoseconds, the largest duration we can represent is about
/// 427_000 years. Large enough for any interval we would use or calculate in
/// tokio.
fn duration_to_nanos(dur: Duration) -> Option<u64> {
dur.as_secs()
.checked_mul(1_000_000_000)
.and_then(|v| v.checked_add(dur.subsec_nanos() as u64))
}
fn next_interval(prev: Instant, now: Instant, interval: Duration) -> Instant {
let new = prev + interval;
if new > now {
return new;
} else {
let spent_ns = duration_to_nanos(now.duration_since(prev))
.expect("interval should be expired");
let interval_ns = duration_to_nanos(interval)
.expect("interval is less that 427 thousand years");
let mult = spent_ns/interval_ns + 1;
assert!(mult < (1 << 32),
"can't skip more than 4 billion intervals of {:?} \
(trying to skip {})", interval, mult);
return prev + interval * (mult as u32);
}
}
#[cfg(test)]
mod test {
use std::time::{Instant, Duration};
use super::next_interval;
struct Timeline(Instant);
impl Timeline {
fn new() -> Timeline {
Timeline(Instant::now())
}
fn at(&self, millis: u64) -> Instant {
self.0 + Duration::from_millis(millis)
}
fn at_ns(&self, sec: u64, nanos: u32) -> Instant {
self.0 + Duration::new(sec, nanos)
}
}
fn dur(millis: u64) -> Duration {
Duration::from_millis(millis)
}
#[test]
fn norm_next() {
let tm = Timeline::new();
assert_eq!(next_interval(tm.at(1), tm.at(2), dur(10)), tm.at(11));
assert_eq!(next_interval(tm.at(7777), tm.at(7788), dur(100)),
tm.at(7877));
assert_eq!(next_interval(tm.at(1), tm.at(1000), dur(2100)),
tm.at(2101));
}
#[test]
fn fast_forward() {
let tm = Timeline::new();
assert_eq!(next_interval(tm.at(1), tm.at(1000), dur(10)),
tm.at(1001));
assert_eq!(next_interval(tm.at(7777), tm.at(8888), dur(100)),
tm.at(8977));
assert_eq!(next_interval(tm.at(1), tm.at(10000), dur(2100)),
tm.at(10501));
}
/// TODO: this test actually should be successful, but since we can't
/// multiply Duration on anything larger than u32 easily we decided
/// to allow thit to fail for now
#[test]
#[should_panic(expected = "can't skip more than 4 billion intervals")]
fn large_skip() {
let tm = Timeline::new();
assert_eq!(next_interval(
tm.at_ns(0, 1), tm.at_ns(25, 0), Duration::new(0, 2)),
tm.at_ns(25, 1));
}
} | /// they will likely fire some granularity after the exact instant that they're | random_line_split |
location.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::LocationBinding;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::window::Window;
use page::Page;
use servo_util::str::DOMString;
use serialize::{Encoder, Encodable};
use std::rc::Rc;
use url::query_to_str;
#[deriving(Encodable)]
pub struct Location {
reflector_: Reflector, //XXXjdm cycle: window->Location->window
page: Rc<Page>,
}
impl Location {
pub fn new_inherited(page: Rc<Page>) -> Location {
Location {
reflector_: Reflector::new(),
page: page
}
}
pub fn new(window: &JSRef<Window>, page: Rc<Page>) -> Temporary<Location> {
reflect_dom_object(box Location::new_inherited(page),
window,
LocationBinding::Wrap)
}
}
pub trait LocationMethods {
fn Href(&self) -> DOMString;
fn Search(&self) -> DOMString;
}
impl<'a> LocationMethods for JSRef<'a, Location> {
fn Href(&self) -> DOMString {
self.page.get_url().to_str()
}
fn Search(&self) -> DOMString {
let query = query_to_str(&self.page.get_url().query);
if query.as_slice() == "" | else {
"?".to_string().append(query.as_slice())
}
}
}
impl Reflectable for Location {
fn reflector<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
}
| {
query
} | conditional_block |
location.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::LocationBinding;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::window::Window;
use page::Page;
use servo_util::str::DOMString;
use serialize::{Encoder, Encodable};
use std::rc::Rc;
use url::query_to_str;
#[deriving(Encodable)]
pub struct Location {
reflector_: Reflector, //XXXjdm cycle: window->Location->window
page: Rc<Page>,
}
impl Location {
pub fn new_inherited(page: Rc<Page>) -> Location {
Location {
reflector_: Reflector::new(),
page: page
}
}
pub fn new(window: &JSRef<Window>, page: Rc<Page>) -> Temporary<Location> {
reflect_dom_object(box Location::new_inherited(page),
window,
LocationBinding::Wrap)
}
}
pub trait LocationMethods {
fn Href(&self) -> DOMString;
fn Search(&self) -> DOMString;
}
impl<'a> LocationMethods for JSRef<'a, Location> {
fn Href(&self) -> DOMString {
self.page.get_url().to_str()
}
fn | (&self) -> DOMString {
let query = query_to_str(&self.page.get_url().query);
if query.as_slice() == "" {
query
} else {
"?".to_string().append(query.as_slice())
}
}
}
impl Reflectable for Location {
fn reflector<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
}
| Search | identifier_name |
location.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::LocationBinding;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::window::Window;
use page::Page;
use servo_util::str::DOMString;
use serialize::{Encoder, Encodable};
use std::rc::Rc;
use url::query_to_str;
#[deriving(Encodable)]
pub struct Location {
reflector_: Reflector, //XXXjdm cycle: window->Location->window
page: Rc<Page>,
}
impl Location {
pub fn new_inherited(page: Rc<Page>) -> Location { |
pub fn new(window: &JSRef<Window>, page: Rc<Page>) -> Temporary<Location> {
reflect_dom_object(box Location::new_inherited(page),
window,
LocationBinding::Wrap)
}
}
pub trait LocationMethods {
fn Href(&self) -> DOMString;
fn Search(&self) -> DOMString;
}
impl<'a> LocationMethods for JSRef<'a, Location> {
fn Href(&self) -> DOMString {
self.page.get_url().to_str()
}
fn Search(&self) -> DOMString {
let query = query_to_str(&self.page.get_url().query);
if query.as_slice() == "" {
query
} else {
"?".to_string().append(query.as_slice())
}
}
}
impl Reflectable for Location {
fn reflector<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
} | Location {
reflector_: Reflector::new(),
page: page
}
} | random_line_split |
location.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::LocationBinding;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::window::Window;
use page::Page;
use servo_util::str::DOMString;
use serialize::{Encoder, Encodable};
use std::rc::Rc;
use url::query_to_str;
#[deriving(Encodable)]
pub struct Location {
reflector_: Reflector, //XXXjdm cycle: window->Location->window
page: Rc<Page>,
}
impl Location {
pub fn new_inherited(page: Rc<Page>) -> Location {
Location {
reflector_: Reflector::new(),
page: page
}
}
pub fn new(window: &JSRef<Window>, page: Rc<Page>) -> Temporary<Location> {
reflect_dom_object(box Location::new_inherited(page),
window,
LocationBinding::Wrap)
}
}
pub trait LocationMethods {
fn Href(&self) -> DOMString;
fn Search(&self) -> DOMString;
}
impl<'a> LocationMethods for JSRef<'a, Location> {
fn Href(&self) -> DOMString |
fn Search(&self) -> DOMString {
let query = query_to_str(&self.page.get_url().query);
if query.as_slice() == "" {
query
} else {
"?".to_string().append(query.as_slice())
}
}
}
impl Reflectable for Location {
fn reflector<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
}
| {
self.page.get_url().to_str()
} | identifier_body |
main.rs | extern crate chrono;
#[macro_use]
extern crate derive_builder;
extern crate fern;
extern crate inflector;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate rand;
extern crate rayon;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate anyhow;
extern crate clap;
extern crate serde_json;
extern crate spade;
extern crate statrs;
extern crate toml;
mod config;
mod economy;
mod entities;
mod game;
mod generators;
mod resources;
mod simulator;
mod utils;
use anyhow::Result;
use clap::{AppSettings, Clap};
use log::LevelFilter;
use std::{fs::File, io, io::Read};
fn main() {
// Init logger
setup_logger().unwrap();
// Parse the command line.
let opts: Opts = Opts::parse();
match opts.subcmd {
SubCommand::NewGame(t) => {
// Display title.
let title = include_str!("../res/title.txt");
println!("{}", title);
// Gets a value for config if supplied by user, or defaults to "genconfig.toml"
let config = match parse_config(&t.config_path) {
Ok(config) => config,
Err(e) => {
warn!(
"Failed to get specified config at {}: due to {}. Using default",
&t.config_path, e
);
config::GameConfig::default()
}
};
// Start simulator
let mut simulator = simulator::Simulator::new(config);
simulator.new_game();
}
}
// TODO: Implement the rest of the program.
}
/// This doc string acts as a help message when the user runs '--help'
/// as do all doc strings on fields
#[derive(Clap)]
#[clap(version = "1.0", author = "Viktor H. <[email protected]>")]
#[clap(setting = AppSettings::ColoredHelp)]
struct Opts {
/// A level of verbosity, and can be used multiple times
#[clap(short, long, parse(from_occurrences))]
verbose: i32,
#[clap(subcommand)]
subcmd: SubCommand,
}
#[derive(Clap)]
enum SubCommand {
#[clap(version = "1.3", author = "Viktor H. <[email protected]>")]
NewGame(NewGame),
//TODO: Add additional subcommands; serve (for server) etc.
}
/// Subcommand for generating a new world.
#[derive(Clap)]
struct NewGame {
#[clap(short, long, default_value = "genconfig.toml")]
config_path: String,
}
/// Try parse the Generation Config at the specified path.
fn parse_config(path: &str) -> Result<config::GameConfig> {
let mut file = File::open(&path)?;
let mut file_content = String::new();
file.read_to_string(&mut file_content)?;
let config: config::GameConfig = toml::from_str(&file_content)?;
Ok(config)
}
pub fn setup_logger() -> Result<()> | {
fern::Dispatch::new()
.format(|out, message, record| {
out.finish(format_args!(
"{}[{}][{}] {}",
chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
record.target(),
record.level(),
message
))
})
.level(log::LevelFilter::Off)
.level_for("gemini", LevelFilter::Trace)
.chain(io::stdout())
.apply()?;
Ok(())
} | identifier_body |
|
main.rs | extern crate chrono;
#[macro_use]
extern crate derive_builder;
extern crate fern;
extern crate inflector;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate rand;
extern crate rayon;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate anyhow;
extern crate clap;
extern crate serde_json;
extern crate spade;
extern crate statrs;
extern crate toml;
mod config;
mod economy;
mod entities;
mod game;
mod generators;
mod resources;
mod simulator;
mod utils;
use anyhow::Result;
use clap::{AppSettings, Clap};
use log::LevelFilter;
use std::{fs::File, io, io::Read};
fn main() {
// Init logger
setup_logger().unwrap();
// Parse the command line.
let opts: Opts = Opts::parse();
match opts.subcmd {
SubCommand::NewGame(t) => {
// Display title.
let title = include_str!("../res/title.txt");
println!("{}", title);
// Gets a value for config if supplied by user, or defaults to "genconfig.toml"
let config = match parse_config(&t.config_path) {
Ok(config) => config,
Err(e) => {
warn!(
"Failed to get specified config at {}: due to {}. Using default",
&t.config_path, e
);
config::GameConfig::default()
}
};
// Start simulator
let mut simulator = simulator::Simulator::new(config);
| }
/// This doc string acts as a help message when the user runs '--help'
/// as do all doc strings on fields
#[derive(Clap)]
#[clap(version = "1.0", author = "Viktor H. <[email protected]>")]
#[clap(setting = AppSettings::ColoredHelp)]
struct Opts {
/// A level of verbosity, and can be used multiple times
#[clap(short, long, parse(from_occurrences))]
verbose: i32,
#[clap(subcommand)]
subcmd: SubCommand,
}
#[derive(Clap)]
enum SubCommand {
#[clap(version = "1.3", author = "Viktor H. <[email protected]>")]
NewGame(NewGame),
//TODO: Add additional subcommands; serve (for server) etc.
}
/// Subcommand for generating a new world.
#[derive(Clap)]
struct NewGame {
#[clap(short, long, default_value = "genconfig.toml")]
config_path: String,
}
/// Try parse the Generation Config at the specified path.
fn parse_config(path: &str) -> Result<config::GameConfig> {
let mut file = File::open(&path)?;
let mut file_content = String::new();
file.read_to_string(&mut file_content)?;
let config: config::GameConfig = toml::from_str(&file_content)?;
Ok(config)
}
pub fn setup_logger() -> Result<()> {
fern::Dispatch::new()
.format(|out, message, record| {
out.finish(format_args!(
"{}[{}][{}] {}",
chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
record.target(),
record.level(),
message
))
})
.level(log::LevelFilter::Off)
.level_for("gemini", LevelFilter::Trace)
.chain(io::stdout())
.apply()?;
Ok(())
} | simulator.new_game();
}
}
// TODO: Implement the rest of the program. | random_line_split |
main.rs | extern crate chrono;
#[macro_use]
extern crate derive_builder;
extern crate fern;
extern crate inflector;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate rand;
extern crate rayon;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate anyhow;
extern crate clap;
extern crate serde_json;
extern crate spade;
extern crate statrs;
extern crate toml;
mod config;
mod economy;
mod entities;
mod game;
mod generators;
mod resources;
mod simulator;
mod utils;
use anyhow::Result;
use clap::{AppSettings, Clap};
use log::LevelFilter;
use std::{fs::File, io, io::Read};
fn main() {
// Init logger
setup_logger().unwrap();
// Parse the command line.
let opts: Opts = Opts::parse();
match opts.subcmd {
SubCommand::NewGame(t) => {
// Display title.
let title = include_str!("../res/title.txt");
println!("{}", title);
// Gets a value for config if supplied by user, or defaults to "genconfig.toml"
let config = match parse_config(&t.config_path) {
Ok(config) => config,
Err(e) => {
warn!(
"Failed to get specified config at {}: due to {}. Using default",
&t.config_path, e
);
config::GameConfig::default()
}
};
// Start simulator
let mut simulator = simulator::Simulator::new(config);
simulator.new_game();
}
}
// TODO: Implement the rest of the program.
}
/// This doc string acts as a help message when the user runs '--help'
/// as do all doc strings on fields
#[derive(Clap)]
#[clap(version = "1.0", author = "Viktor H. <[email protected]>")]
#[clap(setting = AppSettings::ColoredHelp)]
struct Opts {
/// A level of verbosity, and can be used multiple times
#[clap(short, long, parse(from_occurrences))]
verbose: i32,
#[clap(subcommand)]
subcmd: SubCommand,
}
#[derive(Clap)]
enum | {
#[clap(version = "1.3", author = "Viktor H. <[email protected]>")]
NewGame(NewGame),
//TODO: Add additional subcommands; serve (for server) etc.
}
/// Subcommand for generating a new world.
#[derive(Clap)]
struct NewGame {
#[clap(short, long, default_value = "genconfig.toml")]
config_path: String,
}
/// Try parse the Generation Config at the specified path.
fn parse_config(path: &str) -> Result<config::GameConfig> {
let mut file = File::open(&path)?;
let mut file_content = String::new();
file.read_to_string(&mut file_content)?;
let config: config::GameConfig = toml::from_str(&file_content)?;
Ok(config)
}
pub fn setup_logger() -> Result<()> {
fern::Dispatch::new()
.format(|out, message, record| {
out.finish(format_args!(
"{}[{}][{}] {}",
chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
record.target(),
record.level(),
message
))
})
.level(log::LevelFilter::Off)
.level_for("gemini", LevelFilter::Trace)
.chain(io::stdout())
.apply()?;
Ok(())
}
| SubCommand | identifier_name |
main.rs | extern crate serenity;
use serenity::prelude::*;
use serenity::model::*;
use std::env;
// Serenity implements transparent sharding in a way that you do not need to
// manually handle separate processes or connections manually.
//
// Transparent sharding is useful for a shared cache. Instead of having caches
// with duplicated data, a shared cache means all your data can be easily
// accessible across all shards.
//
// If your bot is on many guilds - or over the maximum of 2500 - then you
// should/must use guild sharding.
//
// This is an example file showing how guild sharding works. For this to
// properly be able to be seen in effect, your bot should be in at least 2
// guilds.
//
// Taking a scenario of 2 guilds, try saying "!ping" in one guild. It should
// print either "0" or "1" in the console. Saying "!ping" in the other guild,
// it should cache the other number in the console. This confirms that guild
// sharding works.
struct Handler;
impl EventHandler for Handler {
fn | (&self, ctx: Context, msg: Message) {
if msg.content == "!ping" {
// The current shard needs to be unlocked so it can be read from, as
// multiple threads may otherwise attempt to read from or mutate it
// concurrently.
{
let shard = ctx.shard.lock();
let shard_info = shard.shard_info();
println!("Shard {}", shard_info[0]);
}
if let Err(why) = msg.channel_id.say("Pong!") {
println!("Error sending message: {:?}", why);
}
}
}
fn on_ready(&self, _: Context, ready: Ready) {
println!("{} is connected!", ready.user.name);
}
}
fn main() {
// Configure the client with your Discord bot token in the environment.
let token = env::var("DISCORD_TOKEN")
.expect("Expected a token in the environment");
let mut client = Client::new(&token, Handler);
// The total number of shards to use. The "current shard number" of a
// shard - that is, the shard it is assigned to - is indexed at 0,
// while the total shard count is indexed at 1.
//
// This means if you have 5 shards, your total shard count will be 5, while
// each shard will be assigned numbers 0 through 4.
if let Err(why) = client.start_shards(2) {
println!("Client error: {:?}", why);
}
}
| on_message | identifier_name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.