file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
raw.rs
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use crate::common::*;
use crate::user;
use chrono;
use serde::Deserialize;
use super::DMEntities;
#[derive(Debug, Deserialize)]
pub struct
|
{
///Numeric ID for this DM.
pub id: u64,
///UTC timestamp from when this DM was created.
#[serde(deserialize_with = "deserialize_datetime")]
pub created_at: chrono::DateTime<chrono::Utc>,
///The text of the DM.
pub text: String,
///Link, hashtag, and user mention information parsed out of the DM.
pub entities: DMEntities,
///The screen name of the user who sent the DM.
pub sender_screen_name: String,
///The ID of the user who sent the DM.
pub sender_id: u64,
///Full information of the user who sent the DM.
pub sender: Box<user::TwitterUser>,
///The screen name of the user who received the DM.
pub recipient_screen_name: String,
///The ID of the user who received the DM.
pub recipient_id: u64,
///Full information for the user who received the DM.
pub recipient: Box<user::TwitterUser>,
}
|
RawDirectMessage
|
identifier_name
|
mod.rs
|
pub type Tag = u32;
pub type Vec2 = (f64, f64);
pub type Vec3 = (f64, f64, f64);
pub type Vec4 = (f64, f64, f64, f64);
pub type Box2 = (Vec2, Vec2);
#[derive(PartialEq, Debug)]
pub enum
|
{
Tag(Tag),
Bool(bool),
BoolArray(Box<[bool]>),
Int(i32),
IntArray(Box<[i32]>),
Double(f64),
DoubleArray(Box<[f64]>),
Vec2(Vec2),
Vec2Array(Box<[Vec2]>),
Vec3(Vec3),
Vec3Array(Box<[Vec3]>),
Vec4(Vec4),
Vec4Array(Box<[Vec4]>),
Box2(Box2),
Box2Array(Box<[Box2]>),
String(Box<str>),
Blob(Box<[u8]>)
}
#[macro_use]
pub mod macros;
pub mod reader;
mod binary_reader;
mod text_reader;
pub mod writer;
mod binary_writer;
mod text_writer;
pub use self::reader::{Token, Reader};
pub use self::binary_reader::BinaryReader;
pub use self::text_reader::TextReader;
pub use self::writer::Writer;
pub use self::binary_writer::BinaryWriter;
pub use self::text_writer::TextWriter;
|
Value
|
identifier_name
|
mod.rs
|
pub type Tag = u32;
pub type Vec2 = (f64, f64);
pub type Vec3 = (f64, f64, f64);
pub type Vec4 = (f64, f64, f64, f64);
pub type Box2 = (Vec2, Vec2);
#[derive(PartialEq, Debug)]
pub enum Value {
Tag(Tag),
Bool(bool),
BoolArray(Box<[bool]>),
Int(i32),
IntArray(Box<[i32]>),
Double(f64),
DoubleArray(Box<[f64]>),
Vec2(Vec2),
Vec2Array(Box<[Vec2]>),
Vec3(Vec3),
Vec3Array(Box<[Vec3]>),
Vec4(Vec4),
|
Box2Array(Box<[Box2]>),
String(Box<str>),
Blob(Box<[u8]>)
}
#[macro_use]
pub mod macros;
pub mod reader;
mod binary_reader;
mod text_reader;
pub mod writer;
mod binary_writer;
mod text_writer;
pub use self::reader::{Token, Reader};
pub use self::binary_reader::BinaryReader;
pub use self::text_reader::TextReader;
pub use self::writer::Writer;
pub use self::binary_writer::BinaryWriter;
pub use self::text_writer::TextWriter;
|
Vec4Array(Box<[Vec4]>),
Box2(Box2),
|
random_line_split
|
pinger.rs
|
extern crate byteorder;
extern crate fcp;
extern crate fcp_cryptoauth;
extern crate hex;
extern crate rand;
use std::collections::{HashMap, VecDeque};
use std::iter::FromIterator;
use std::net::{IpAddr, Ipv6Addr, SocketAddr, UdpSocket};
use std::str::FromStr;
use fcp_cryptoauth::keys::ToBase32;
use fcp_cryptoauth::*;
use fcp::encoding_scheme::{EncodingScheme, EncodingSchemeForm};
use fcp::operation::{Director, ForwardPath};
use fcp::packets::control::ControlPacket;
use fcp::packets::data::DataPacket;
use fcp::packets::data::Payload as DataPayload;
use fcp::packets::route::RoutePacketBuilder;
use fcp::packets::switch::Payload as SwitchPayload;
use fcp::packets::switch::SwitchPacket;
use fcp::passive_switch::PassiveSwitch;
use fcp::plumbing::NetworkAdapterTrait;
use fcp::plumbing::Plumbing;
use fcp::session_manager::{MySessionHandle, SessionManager};
use fcp::udp_adapter::{UdpAdapter, UdpPeer};
use fcp::node::{Address, Node};
use fcp::router::Router;
use rand::Rng;
/// Main data structure of the switch.
struct Pinger {
plumbing: Plumbing<Router, UdpAdapter<String>>,
ping_targets: Vec<Address>,
ping_nodes: Vec<Node>,
address_to_my_handle: HashMap<Address, MySessionHandle>,
}
impl Pinger {
/// Instanciates a switch.
fn new(
sock: UdpSocket,
peers: HashMap<Director, UdpPeer<String>>,
my_pk: PublicKey,
my_sk: SecretKey,
allowed_peers: HashMap<Credentials, String>,
ping_targets: Vec<Address>,
) -> Pinger {
let udp_adapter = UdpAdapter::new(
sock,
my_pk.clone(),
my_sk.clone(),
allowed_peers.clone(),
peers,
);
let session_manager = SessionManager::new(my_pk.clone(), my_sk.clone());
let plumbing = Plumbing {
network_adapter: udp_adapter,
switch: PassiveSwitch::new(my_pk, my_sk, allowed_peers),
router: Router::new(my_pk),
session_manager: session_manager,
pongs: None,
rx_buffer: VecDeque::new(),
};
Pinger {
plumbing: plumbing,
ping_targets: ping_targets,
ping_nodes: Vec::new(),
address_to_my_handle: HashMap::new(),
}
}
/// Sometimes (random) sends a switch as a reply to the packet.
fn random_send_switch_ping(&mut self, _my_handle: MySessionHandle, path: ForwardPath) {
if rand::thread_rng().next_u32() > 0xafffffff {
let ping = ControlPacket::Ping {
version: 18,
opaque_data: vec![1, 2, 3, 4, 5, 6, 7, 8],
};
let packet_response = SwitchPacket::new(path, SwitchPayload::Control(ping));
self.plumbing.dispatch(packet_response, 0b001);
}
}
fn
|
(&mut self, node: &Node, message: DataPacket) {
let node_pk = PublicKey::from_slice(node.public_key()).unwrap();
let addr = publickey_to_ipv6addr(&node_pk).into();
let my_handle_opt = self.address_to_my_handle.get(&addr).map(|h| *h);
match my_handle_opt {
Some(my_handle) => self.send_message_to_my_handle(my_handle, message),
None => {
println!("Creating CA session for node {}", Ipv6Addr::from(&addr));
let credentials = Credentials::None;
let path = node.path().clone();
let handle = self
.plumbing
.session_manager
.add_outgoing(Some(path), node_pk);
self.address_to_my_handle.insert(addr.into(), handle);
self.send_message_to_my_handle(handle, message)
}
}
}
fn send_message_to_my_handle(&mut self, my_handle: MySessionHandle, message: DataPacket) {
let mut packets = Vec::new();
{
let session = self
.plumbing
.session_manager
.get_session(my_handle)
.unwrap();
let their_handle = session.their_handle().unwrap();
println!(
"Sending inner ca message to handle {:?} with path {:?}: {}",
my_handle, session.path, message
);
for packet_response in session.conn.wrap_message_immediately(message.raw()) {
let switch_packet = SwitchPacket::new(
session.path.unwrap(),
SwitchPayload::CryptoAuthData(their_handle.0, packet_response),
);
packets.push(switch_packet);
}
}
for packet in packets {
self.dispatch(packet, 0b001);
}
}
fn ping_node(&mut self, node: &Node) {
let node_pk = PublicKey::from_slice(node.public_key()).unwrap();
let addr = publickey_to_ipv6addr(&node_pk);
println!("Pinging node {}", Ipv6Addr::from(addr));
let encoding_scheme = EncodingScheme::from_iter(
vec![EncodingSchemeForm {
prefix: 0,
bit_count: 3,
prefix_length: 0,
}]
.iter(),
);
let route_packet = RoutePacketBuilder::new(18, b"blah".to_vec())
.query("pn".to_owned())
.encoding_index(0)
.encoding_scheme(encoding_scheme)
.target_address(vec![0, 0, 0, 0, 0, 0, 0, 0])
.finalize();
let ping_message = DataPacket::new(1, &DataPayload::RoutePacket(route_packet));
self.send_message_to_node(node, ping_message);
}
fn try_connect_ping_target(&mut self, address: &Address) {
println!("Trying to connect to {}", Ipv6Addr::from(address));
let (node_opt, messages) = {
let (node_opt, messages) = self.plumbing.router.get_node(address, 42);
let messages: Vec<_> = messages
.into_iter()
.map(|(node, msg)| (node.clone(), msg))
.collect();
(node_opt.cloned(), messages)
};
if let Some(node) = node_opt {
println!(
"Found node. pk: {}",
PublicKey(*node.public_key()).to_base32()
);
self.ping_nodes.push(node);
};
println!("{} router messages", messages.len());
for (query_node, message) in messages {
let message = DataPacket::new(1, &DataPayload::RoutePacket(message));
self.send_message_to_node(&query_node, message);
}
}
/// Sometimes (random) sends `pn` queries.
fn random_ping_node(&mut self) {
if rand::thread_rng().next_u32() > 0xafffffff || true {
println!("Pinging nodes.");
for address in self.ping_targets.clone() {
if!self.address_to_my_handle.contains_key(&address) {
self.try_connect_ping_target(&address)
}
}
for node in self.ping_nodes.clone() {
self.ping_node(&node);
}
}
}
fn dispatch(&mut self, packet: SwitchPacket, from_interface: Director) {
self.plumbing.dispatch(packet, from_interface);
}
fn loop_(&mut self) {
loop {
let packets = self.plumbing.session_manager.upkeep();
for packet in packets {
self.plumbing.dispatch(packet, 0b001);
}
let mut targets = Vec::new();
for (my_handle, ref mut session) in self.plumbing.session_manager.sessions.iter_mut() {
targets.push((*my_handle, session.path))
}
for (my_handle, path) in targets {
let path = path.unwrap();
self.random_send_switch_ping(my_handle, path);
}
self.random_ping_node();
if let Some((director, messages)) = self.plumbing.network_adapter.recv_from() {
for message in messages.into_iter() {
self.dispatch(message, director);
}
}
}
}
}
pub fn main() {
fcp_cryptoauth::init();
let my_sk =
SecretKey::from_hex(b"ac3e53b518e68449692b0b2f2926ef2fdc1eac5b9dbd10a48114263b8c8ed12e")
.unwrap();
let my_pk =
PublicKey::from_base32(b"2wrpv8p4tjwm532sjxcbqzkp7kdwfwzzbg7g0n5l6g3s8df4kvv0.k").unwrap();
let their_pk =
PublicKey::from_base32(b"g0pt6kwnwj8ndktjhs7pmcl14rg6uugn8kt4nykudtl96r27sch0.k").unwrap();
let login = "foo".to_owned().into_bytes();
let password = "bar".to_owned().into_bytes();
let credentials = Credentials::LoginPassword {
login: login,
password: password,
};
let mut allowed_peers = HashMap::new();
allowed_peers.insert(credentials.clone(), "my peer".to_owned());
let ping_targets = vec![
Address::from(&Ipv6Addr::from_str("fcd6:9c33:dd06:3320:8dbe:ab19:c87:f6e3").unwrap()),
Address::from(&Ipv6Addr::from_str("fcb9:326d:37d5:c57b:7ee5:28b5:7aa5:525").unwrap()),
];
let sock = UdpSocket::bind("[::1]:12345").unwrap();
let dest = SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 20984);
let conn = CAWrapper::new_outgoing_connection(
my_pk,
my_sk.clone(),
their_pk,
credentials,
Some(allowed_peers.clone()),
"my peer".to_owned(),
None,
);
let mut peers = HashMap::new();
peers.insert(
0b011,
UdpPeer {
ca_session: conn,
addr: dest,
},
);
let mut pinger = Pinger::new(sock, peers, my_pk, my_sk, allowed_peers, ping_targets);
pinger.loop_();
}
|
send_message_to_node
|
identifier_name
|
pinger.rs
|
extern crate byteorder;
extern crate fcp;
extern crate fcp_cryptoauth;
extern crate hex;
extern crate rand;
use std::collections::{HashMap, VecDeque};
use std::iter::FromIterator;
use std::net::{IpAddr, Ipv6Addr, SocketAddr, UdpSocket};
use std::str::FromStr;
use fcp_cryptoauth::keys::ToBase32;
use fcp_cryptoauth::*;
use fcp::encoding_scheme::{EncodingScheme, EncodingSchemeForm};
use fcp::operation::{Director, ForwardPath};
use fcp::packets::control::ControlPacket;
use fcp::packets::data::DataPacket;
use fcp::packets::data::Payload as DataPayload;
use fcp::packets::route::RoutePacketBuilder;
use fcp::packets::switch::Payload as SwitchPayload;
use fcp::packets::switch::SwitchPacket;
use fcp::passive_switch::PassiveSwitch;
use fcp::plumbing::NetworkAdapterTrait;
use fcp::plumbing::Plumbing;
use fcp::session_manager::{MySessionHandle, SessionManager};
use fcp::udp_adapter::{UdpAdapter, UdpPeer};
use fcp::node::{Address, Node};
use fcp::router::Router;
use rand::Rng;
/// Main data structure of the switch.
struct Pinger {
plumbing: Plumbing<Router, UdpAdapter<String>>,
ping_targets: Vec<Address>,
ping_nodes: Vec<Node>,
address_to_my_handle: HashMap<Address, MySessionHandle>,
}
impl Pinger {
/// Instanciates a switch.
fn new(
sock: UdpSocket,
peers: HashMap<Director, UdpPeer<String>>,
my_pk: PublicKey,
my_sk: SecretKey,
allowed_peers: HashMap<Credentials, String>,
ping_targets: Vec<Address>,
) -> Pinger {
let udp_adapter = UdpAdapter::new(
sock,
my_pk.clone(),
my_sk.clone(),
allowed_peers.clone(),
peers,
);
let session_manager = SessionManager::new(my_pk.clone(), my_sk.clone());
let plumbing = Plumbing {
network_adapter: udp_adapter,
switch: PassiveSwitch::new(my_pk, my_sk, allowed_peers),
router: Router::new(my_pk),
session_manager: session_manager,
pongs: None,
rx_buffer: VecDeque::new(),
};
Pinger {
plumbing: plumbing,
ping_targets: ping_targets,
ping_nodes: Vec::new(),
address_to_my_handle: HashMap::new(),
}
}
/// Sometimes (random) sends a switch as a reply to the packet.
fn random_send_switch_ping(&mut self, _my_handle: MySessionHandle, path: ForwardPath) {
if rand::thread_rng().next_u32() > 0xafffffff {
let ping = ControlPacket::Ping {
version: 18,
opaque_data: vec![1, 2, 3, 4, 5, 6, 7, 8],
};
let packet_response = SwitchPacket::new(path, SwitchPayload::Control(ping));
self.plumbing.dispatch(packet_response, 0b001);
}
}
fn send_message_to_node(&mut self, node: &Node, message: DataPacket) {
let node_pk = PublicKey::from_slice(node.public_key()).unwrap();
let addr = publickey_to_ipv6addr(&node_pk).into();
let my_handle_opt = self.address_to_my_handle.get(&addr).map(|h| *h);
match my_handle_opt {
Some(my_handle) => self.send_message_to_my_handle(my_handle, message),
None => {
println!("Creating CA session for node {}", Ipv6Addr::from(&addr));
let credentials = Credentials::None;
let path = node.path().clone();
let handle = self
.plumbing
.session_manager
.add_outgoing(Some(path), node_pk);
self.address_to_my_handle.insert(addr.into(), handle);
self.send_message_to_my_handle(handle, message)
}
}
}
fn send_message_to_my_handle(&mut self, my_handle: MySessionHandle, message: DataPacket) {
let mut packets = Vec::new();
{
let session = self
.plumbing
.session_manager
.get_session(my_handle)
.unwrap();
let their_handle = session.their_handle().unwrap();
println!(
"Sending inner ca message to handle {:?} with path {:?}: {}",
my_handle, session.path, message
);
for packet_response in session.conn.wrap_message_immediately(message.raw()) {
let switch_packet = SwitchPacket::new(
session.path.unwrap(),
SwitchPayload::CryptoAuthData(their_handle.0, packet_response),
);
packets.push(switch_packet);
}
}
for packet in packets {
self.dispatch(packet, 0b001);
}
}
fn ping_node(&mut self, node: &Node) {
let node_pk = PublicKey::from_slice(node.public_key()).unwrap();
let addr = publickey_to_ipv6addr(&node_pk);
println!("Pinging node {}", Ipv6Addr::from(addr));
let encoding_scheme = EncodingScheme::from_iter(
vec![EncodingSchemeForm {
prefix: 0,
bit_count: 3,
prefix_length: 0,
}]
.iter(),
);
let route_packet = RoutePacketBuilder::new(18, b"blah".to_vec())
.query("pn".to_owned())
.encoding_index(0)
.encoding_scheme(encoding_scheme)
.target_address(vec![0, 0, 0, 0, 0, 0, 0, 0])
.finalize();
let ping_message = DataPacket::new(1, &DataPayload::RoutePacket(route_packet));
self.send_message_to_node(node, ping_message);
}
fn try_connect_ping_target(&mut self, address: &Address) {
println!("Trying to connect to {}", Ipv6Addr::from(address));
let (node_opt, messages) = {
let (node_opt, messages) = self.plumbing.router.get_node(address, 42);
let messages: Vec<_> = messages
.into_iter()
.map(|(node, msg)| (node.clone(), msg))
.collect();
(node_opt.cloned(), messages)
};
if let Some(node) = node_opt {
println!(
"Found node. pk: {}",
PublicKey(*node.public_key()).to_base32()
);
self.ping_nodes.push(node);
};
println!("{} router messages", messages.len());
for (query_node, message) in messages {
let message = DataPacket::new(1, &DataPayload::RoutePacket(message));
self.send_message_to_node(&query_node, message);
}
}
/// Sometimes (random) sends `pn` queries.
fn random_ping_node(&mut self) {
if rand::thread_rng().next_u32() > 0xafffffff || true
|
}
fn dispatch(&mut self, packet: SwitchPacket, from_interface: Director) {
self.plumbing.dispatch(packet, from_interface);
}
fn loop_(&mut self) {
loop {
let packets = self.plumbing.session_manager.upkeep();
for packet in packets {
self.plumbing.dispatch(packet, 0b001);
}
let mut targets = Vec::new();
for (my_handle, ref mut session) in self.plumbing.session_manager.sessions.iter_mut() {
targets.push((*my_handle, session.path))
}
for (my_handle, path) in targets {
let path = path.unwrap();
self.random_send_switch_ping(my_handle, path);
}
self.random_ping_node();
if let Some((director, messages)) = self.plumbing.network_adapter.recv_from() {
for message in messages.into_iter() {
self.dispatch(message, director);
}
}
}
}
}
pub fn main() {
fcp_cryptoauth::init();
let my_sk =
SecretKey::from_hex(b"ac3e53b518e68449692b0b2f2926ef2fdc1eac5b9dbd10a48114263b8c8ed12e")
.unwrap();
let my_pk =
PublicKey::from_base32(b"2wrpv8p4tjwm532sjxcbqzkp7kdwfwzzbg7g0n5l6g3s8df4kvv0.k").unwrap();
let their_pk =
PublicKey::from_base32(b"g0pt6kwnwj8ndktjhs7pmcl14rg6uugn8kt4nykudtl96r27sch0.k").unwrap();
let login = "foo".to_owned().into_bytes();
let password = "bar".to_owned().into_bytes();
let credentials = Credentials::LoginPassword {
login: login,
password: password,
};
let mut allowed_peers = HashMap::new();
allowed_peers.insert(credentials.clone(), "my peer".to_owned());
let ping_targets = vec![
Address::from(&Ipv6Addr::from_str("fcd6:9c33:dd06:3320:8dbe:ab19:c87:f6e3").unwrap()),
Address::from(&Ipv6Addr::from_str("fcb9:326d:37d5:c57b:7ee5:28b5:7aa5:525").unwrap()),
];
let sock = UdpSocket::bind("[::1]:12345").unwrap();
let dest = SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 20984);
let conn = CAWrapper::new_outgoing_connection(
my_pk,
my_sk.clone(),
their_pk,
credentials,
Some(allowed_peers.clone()),
"my peer".to_owned(),
None,
);
let mut peers = HashMap::new();
peers.insert(
0b011,
UdpPeer {
ca_session: conn,
addr: dest,
},
);
let mut pinger = Pinger::new(sock, peers, my_pk, my_sk, allowed_peers, ping_targets);
pinger.loop_();
}
|
{
println!("Pinging nodes.");
for address in self.ping_targets.clone() {
if !self.address_to_my_handle.contains_key(&address) {
self.try_connect_ping_target(&address)
}
}
for node in self.ping_nodes.clone() {
self.ping_node(&node);
}
}
|
conditional_block
|
pinger.rs
|
extern crate byteorder;
extern crate fcp;
extern crate fcp_cryptoauth;
extern crate hex;
extern crate rand;
use std::collections::{HashMap, VecDeque};
use std::iter::FromIterator;
use std::net::{IpAddr, Ipv6Addr, SocketAddr, UdpSocket};
use std::str::FromStr;
use fcp_cryptoauth::keys::ToBase32;
use fcp_cryptoauth::*;
use fcp::encoding_scheme::{EncodingScheme, EncodingSchemeForm};
use fcp::operation::{Director, ForwardPath};
use fcp::packets::control::ControlPacket;
use fcp::packets::data::DataPacket;
use fcp::packets::data::Payload as DataPayload;
use fcp::packets::route::RoutePacketBuilder;
use fcp::packets::switch::Payload as SwitchPayload;
use fcp::packets::switch::SwitchPacket;
use fcp::passive_switch::PassiveSwitch;
use fcp::plumbing::NetworkAdapterTrait;
use fcp::plumbing::Plumbing;
use fcp::session_manager::{MySessionHandle, SessionManager};
use fcp::udp_adapter::{UdpAdapter, UdpPeer};
use fcp::node::{Address, Node};
use fcp::router::Router;
use rand::Rng;
/// Main data structure of the switch.
struct Pinger {
plumbing: Plumbing<Router, UdpAdapter<String>>,
ping_targets: Vec<Address>,
ping_nodes: Vec<Node>,
address_to_my_handle: HashMap<Address, MySessionHandle>,
}
impl Pinger {
/// Instanciates a switch.
fn new(
sock: UdpSocket,
peers: HashMap<Director, UdpPeer<String>>,
my_pk: PublicKey,
my_sk: SecretKey,
allowed_peers: HashMap<Credentials, String>,
ping_targets: Vec<Address>,
) -> Pinger {
let udp_adapter = UdpAdapter::new(
sock,
my_pk.clone(),
my_sk.clone(),
allowed_peers.clone(),
peers,
);
let session_manager = SessionManager::new(my_pk.clone(), my_sk.clone());
let plumbing = Plumbing {
network_adapter: udp_adapter,
switch: PassiveSwitch::new(my_pk, my_sk, allowed_peers),
router: Router::new(my_pk),
session_manager: session_manager,
pongs: None,
rx_buffer: VecDeque::new(),
};
Pinger {
plumbing: plumbing,
ping_targets: ping_targets,
ping_nodes: Vec::new(),
address_to_my_handle: HashMap::new(),
}
}
/// Sometimes (random) sends a switch as a reply to the packet.
fn random_send_switch_ping(&mut self, _my_handle: MySessionHandle, path: ForwardPath) {
if rand::thread_rng().next_u32() > 0xafffffff {
let ping = ControlPacket::Ping {
version: 18,
opaque_data: vec![1, 2, 3, 4, 5, 6, 7, 8],
};
let packet_response = SwitchPacket::new(path, SwitchPayload::Control(ping));
self.plumbing.dispatch(packet_response, 0b001);
}
}
fn send_message_to_node(&mut self, node: &Node, message: DataPacket) {
let node_pk = PublicKey::from_slice(node.public_key()).unwrap();
let addr = publickey_to_ipv6addr(&node_pk).into();
let my_handle_opt = self.address_to_my_handle.get(&addr).map(|h| *h);
match my_handle_opt {
Some(my_handle) => self.send_message_to_my_handle(my_handle, message),
None => {
println!("Creating CA session for node {}", Ipv6Addr::from(&addr));
let credentials = Credentials::None;
let path = node.path().clone();
let handle = self
.plumbing
.session_manager
.add_outgoing(Some(path), node_pk);
self.address_to_my_handle.insert(addr.into(), handle);
self.send_message_to_my_handle(handle, message)
}
}
}
fn send_message_to_my_handle(&mut self, my_handle: MySessionHandle, message: DataPacket) {
let mut packets = Vec::new();
{
let session = self
.plumbing
.session_manager
.get_session(my_handle)
.unwrap();
let their_handle = session.their_handle().unwrap();
println!(
"Sending inner ca message to handle {:?} with path {:?}: {}",
my_handle, session.path, message
);
for packet_response in session.conn.wrap_message_immediately(message.raw()) {
let switch_packet = SwitchPacket::new(
session.path.unwrap(),
SwitchPayload::CryptoAuthData(their_handle.0, packet_response),
);
packets.push(switch_packet);
}
}
for packet in packets {
self.dispatch(packet, 0b001);
}
}
fn ping_node(&mut self, node: &Node) {
let node_pk = PublicKey::from_slice(node.public_key()).unwrap();
let addr = publickey_to_ipv6addr(&node_pk);
println!("Pinging node {}", Ipv6Addr::from(addr));
let encoding_scheme = EncodingScheme::from_iter(
vec![EncodingSchemeForm {
prefix: 0,
bit_count: 3,
prefix_length: 0,
}]
.iter(),
);
let route_packet = RoutePacketBuilder::new(18, b"blah".to_vec())
.query("pn".to_owned())
.encoding_index(0)
.encoding_scheme(encoding_scheme)
.target_address(vec![0, 0, 0, 0, 0, 0, 0, 0])
.finalize();
let ping_message = DataPacket::new(1, &DataPayload::RoutePacket(route_packet));
self.send_message_to_node(node, ping_message);
}
fn try_connect_ping_target(&mut self, address: &Address) {
println!("Trying to connect to {}", Ipv6Addr::from(address));
let (node_opt, messages) = {
let (node_opt, messages) = self.plumbing.router.get_node(address, 42);
let messages: Vec<_> = messages
.into_iter()
.map(|(node, msg)| (node.clone(), msg))
.collect();
(node_opt.cloned(), messages)
};
if let Some(node) = node_opt {
println!(
"Found node. pk: {}",
PublicKey(*node.public_key()).to_base32()
);
self.ping_nodes.push(node);
};
println!("{} router messages", messages.len());
for (query_node, message) in messages {
let message = DataPacket::new(1, &DataPayload::RoutePacket(message));
self.send_message_to_node(&query_node, message);
}
}
/// Sometimes (random) sends `pn` queries.
fn random_ping_node(&mut self) {
if rand::thread_rng().next_u32() > 0xafffffff || true {
println!("Pinging nodes.");
for address in self.ping_targets.clone() {
if!self.address_to_my_handle.contains_key(&address) {
self.try_connect_ping_target(&address)
}
}
for node in self.ping_nodes.clone() {
self.ping_node(&node);
}
}
}
fn dispatch(&mut self, packet: SwitchPacket, from_interface: Director)
|
fn loop_(&mut self) {
loop {
let packets = self.plumbing.session_manager.upkeep();
for packet in packets {
self.plumbing.dispatch(packet, 0b001);
}
let mut targets = Vec::new();
for (my_handle, ref mut session) in self.plumbing.session_manager.sessions.iter_mut() {
targets.push((*my_handle, session.path))
}
for (my_handle, path) in targets {
let path = path.unwrap();
self.random_send_switch_ping(my_handle, path);
}
self.random_ping_node();
if let Some((director, messages)) = self.plumbing.network_adapter.recv_from() {
for message in messages.into_iter() {
self.dispatch(message, director);
}
}
}
}
}
pub fn main() {
fcp_cryptoauth::init();
let my_sk =
SecretKey::from_hex(b"ac3e53b518e68449692b0b2f2926ef2fdc1eac5b9dbd10a48114263b8c8ed12e")
.unwrap();
let my_pk =
PublicKey::from_base32(b"2wrpv8p4tjwm532sjxcbqzkp7kdwfwzzbg7g0n5l6g3s8df4kvv0.k").unwrap();
let their_pk =
PublicKey::from_base32(b"g0pt6kwnwj8ndktjhs7pmcl14rg6uugn8kt4nykudtl96r27sch0.k").unwrap();
let login = "foo".to_owned().into_bytes();
let password = "bar".to_owned().into_bytes();
let credentials = Credentials::LoginPassword {
login: login,
password: password,
};
let mut allowed_peers = HashMap::new();
allowed_peers.insert(credentials.clone(), "my peer".to_owned());
let ping_targets = vec![
Address::from(&Ipv6Addr::from_str("fcd6:9c33:dd06:3320:8dbe:ab19:c87:f6e3").unwrap()),
Address::from(&Ipv6Addr::from_str("fcb9:326d:37d5:c57b:7ee5:28b5:7aa5:525").unwrap()),
];
let sock = UdpSocket::bind("[::1]:12345").unwrap();
let dest = SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 20984);
let conn = CAWrapper::new_outgoing_connection(
my_pk,
my_sk.clone(),
their_pk,
credentials,
Some(allowed_peers.clone()),
"my peer".to_owned(),
None,
);
let mut peers = HashMap::new();
peers.insert(
0b011,
UdpPeer {
ca_session: conn,
addr: dest,
},
);
let mut pinger = Pinger::new(sock, peers, my_pk, my_sk, allowed_peers, ping_targets);
pinger.loop_();
}
|
{
self.plumbing.dispatch(packet, from_interface);
}
|
identifier_body
|
pinger.rs
|
extern crate byteorder;
extern crate fcp;
extern crate fcp_cryptoauth;
extern crate hex;
extern crate rand;
use std::collections::{HashMap, VecDeque};
use std::iter::FromIterator;
use std::net::{IpAddr, Ipv6Addr, SocketAddr, UdpSocket};
use std::str::FromStr;
use fcp_cryptoauth::keys::ToBase32;
use fcp_cryptoauth::*;
use fcp::encoding_scheme::{EncodingScheme, EncodingSchemeForm};
use fcp::operation::{Director, ForwardPath};
use fcp::packets::control::ControlPacket;
use fcp::packets::data::DataPacket;
use fcp::packets::data::Payload as DataPayload;
use fcp::packets::route::RoutePacketBuilder;
use fcp::packets::switch::Payload as SwitchPayload;
use fcp::packets::switch::SwitchPacket;
use fcp::passive_switch::PassiveSwitch;
use fcp::plumbing::NetworkAdapterTrait;
use fcp::plumbing::Plumbing;
use fcp::session_manager::{MySessionHandle, SessionManager};
use fcp::udp_adapter::{UdpAdapter, UdpPeer};
use fcp::node::{Address, Node};
use fcp::router::Router;
use rand::Rng;
/// Main data structure of the switch.
struct Pinger {
plumbing: Plumbing<Router, UdpAdapter<String>>,
ping_targets: Vec<Address>,
ping_nodes: Vec<Node>,
address_to_my_handle: HashMap<Address, MySessionHandle>,
}
impl Pinger {
/// Instanciates a switch.
fn new(
sock: UdpSocket,
peers: HashMap<Director, UdpPeer<String>>,
my_pk: PublicKey,
|
allowed_peers: HashMap<Credentials, String>,
ping_targets: Vec<Address>,
) -> Pinger {
let udp_adapter = UdpAdapter::new(
sock,
my_pk.clone(),
my_sk.clone(),
allowed_peers.clone(),
peers,
);
let session_manager = SessionManager::new(my_pk.clone(), my_sk.clone());
let plumbing = Plumbing {
network_adapter: udp_adapter,
switch: PassiveSwitch::new(my_pk, my_sk, allowed_peers),
router: Router::new(my_pk),
session_manager: session_manager,
pongs: None,
rx_buffer: VecDeque::new(),
};
Pinger {
plumbing: plumbing,
ping_targets: ping_targets,
ping_nodes: Vec::new(),
address_to_my_handle: HashMap::new(),
}
}
/// Sometimes (random) sends a switch as a reply to the packet.
fn random_send_switch_ping(&mut self, _my_handle: MySessionHandle, path: ForwardPath) {
if rand::thread_rng().next_u32() > 0xafffffff {
let ping = ControlPacket::Ping {
version: 18,
opaque_data: vec![1, 2, 3, 4, 5, 6, 7, 8],
};
let packet_response = SwitchPacket::new(path, SwitchPayload::Control(ping));
self.plumbing.dispatch(packet_response, 0b001);
}
}
fn send_message_to_node(&mut self, node: &Node, message: DataPacket) {
let node_pk = PublicKey::from_slice(node.public_key()).unwrap();
let addr = publickey_to_ipv6addr(&node_pk).into();
let my_handle_opt = self.address_to_my_handle.get(&addr).map(|h| *h);
match my_handle_opt {
Some(my_handle) => self.send_message_to_my_handle(my_handle, message),
None => {
println!("Creating CA session for node {}", Ipv6Addr::from(&addr));
let credentials = Credentials::None;
let path = node.path().clone();
let handle = self
.plumbing
.session_manager
.add_outgoing(Some(path), node_pk);
self.address_to_my_handle.insert(addr.into(), handle);
self.send_message_to_my_handle(handle, message)
}
}
}
fn send_message_to_my_handle(&mut self, my_handle: MySessionHandle, message: DataPacket) {
let mut packets = Vec::new();
{
let session = self
.plumbing
.session_manager
.get_session(my_handle)
.unwrap();
let their_handle = session.their_handle().unwrap();
println!(
"Sending inner ca message to handle {:?} with path {:?}: {}",
my_handle, session.path, message
);
for packet_response in session.conn.wrap_message_immediately(message.raw()) {
let switch_packet = SwitchPacket::new(
session.path.unwrap(),
SwitchPayload::CryptoAuthData(their_handle.0, packet_response),
);
packets.push(switch_packet);
}
}
for packet in packets {
self.dispatch(packet, 0b001);
}
}
fn ping_node(&mut self, node: &Node) {
let node_pk = PublicKey::from_slice(node.public_key()).unwrap();
let addr = publickey_to_ipv6addr(&node_pk);
println!("Pinging node {}", Ipv6Addr::from(addr));
let encoding_scheme = EncodingScheme::from_iter(
vec![EncodingSchemeForm {
prefix: 0,
bit_count: 3,
prefix_length: 0,
}]
.iter(),
);
let route_packet = RoutePacketBuilder::new(18, b"blah".to_vec())
.query("pn".to_owned())
.encoding_index(0)
.encoding_scheme(encoding_scheme)
.target_address(vec![0, 0, 0, 0, 0, 0, 0, 0])
.finalize();
let ping_message = DataPacket::new(1, &DataPayload::RoutePacket(route_packet));
self.send_message_to_node(node, ping_message);
}
fn try_connect_ping_target(&mut self, address: &Address) {
println!("Trying to connect to {}", Ipv6Addr::from(address));
let (node_opt, messages) = {
let (node_opt, messages) = self.plumbing.router.get_node(address, 42);
let messages: Vec<_> = messages
.into_iter()
.map(|(node, msg)| (node.clone(), msg))
.collect();
(node_opt.cloned(), messages)
};
if let Some(node) = node_opt {
println!(
"Found node. pk: {}",
PublicKey(*node.public_key()).to_base32()
);
self.ping_nodes.push(node);
};
println!("{} router messages", messages.len());
for (query_node, message) in messages {
let message = DataPacket::new(1, &DataPayload::RoutePacket(message));
self.send_message_to_node(&query_node, message);
}
}
/// Sometimes (random) sends `pn` queries.
fn random_ping_node(&mut self) {
if rand::thread_rng().next_u32() > 0xafffffff || true {
println!("Pinging nodes.");
for address in self.ping_targets.clone() {
if!self.address_to_my_handle.contains_key(&address) {
self.try_connect_ping_target(&address)
}
}
for node in self.ping_nodes.clone() {
self.ping_node(&node);
}
}
}
fn dispatch(&mut self, packet: SwitchPacket, from_interface: Director) {
self.plumbing.dispatch(packet, from_interface);
}
fn loop_(&mut self) {
loop {
let packets = self.plumbing.session_manager.upkeep();
for packet in packets {
self.plumbing.dispatch(packet, 0b001);
}
let mut targets = Vec::new();
for (my_handle, ref mut session) in self.plumbing.session_manager.sessions.iter_mut() {
targets.push((*my_handle, session.path))
}
for (my_handle, path) in targets {
let path = path.unwrap();
self.random_send_switch_ping(my_handle, path);
}
self.random_ping_node();
if let Some((director, messages)) = self.plumbing.network_adapter.recv_from() {
for message in messages.into_iter() {
self.dispatch(message, director);
}
}
}
}
}
pub fn main() {
fcp_cryptoauth::init();
let my_sk =
SecretKey::from_hex(b"ac3e53b518e68449692b0b2f2926ef2fdc1eac5b9dbd10a48114263b8c8ed12e")
.unwrap();
let my_pk =
PublicKey::from_base32(b"2wrpv8p4tjwm532sjxcbqzkp7kdwfwzzbg7g0n5l6g3s8df4kvv0.k").unwrap();
let their_pk =
PublicKey::from_base32(b"g0pt6kwnwj8ndktjhs7pmcl14rg6uugn8kt4nykudtl96r27sch0.k").unwrap();
let login = "foo".to_owned().into_bytes();
let password = "bar".to_owned().into_bytes();
let credentials = Credentials::LoginPassword {
login: login,
password: password,
};
let mut allowed_peers = HashMap::new();
allowed_peers.insert(credentials.clone(), "my peer".to_owned());
let ping_targets = vec![
Address::from(&Ipv6Addr::from_str("fcd6:9c33:dd06:3320:8dbe:ab19:c87:f6e3").unwrap()),
Address::from(&Ipv6Addr::from_str("fcb9:326d:37d5:c57b:7ee5:28b5:7aa5:525").unwrap()),
];
let sock = UdpSocket::bind("[::1]:12345").unwrap();
let dest = SocketAddr::new(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)), 20984);
let conn = CAWrapper::new_outgoing_connection(
my_pk,
my_sk.clone(),
their_pk,
credentials,
Some(allowed_peers.clone()),
"my peer".to_owned(),
None,
);
let mut peers = HashMap::new();
peers.insert(
0b011,
UdpPeer {
ca_session: conn,
addr: dest,
},
);
let mut pinger = Pinger::new(sock, peers, my_pk, my_sk, allowed_peers, ping_targets);
pinger.loop_();
}
|
my_sk: SecretKey,
|
random_line_split
|
module.rs
|
use std::ffi::CStr;
use std::marker::PhantomData;
use llvm_sys::prelude::*;
use llvm_sys::core::*;
use llvm_sys::target::LLVMSetModuleDataLayout;
use id::{Id, IdRef};
use inheritance::{upcast, DerivesFrom};
use opaque::Opaque;
use owned::{Owned, DropInPlace};
use llvm::{Context, FunctionType, Type, Global, Function, FunctionLabel, DataLayout};
pub struct Module<'cid: 'context, 'context,'mid> {
_id: Id<'mid>,
_context: PhantomData<&'context Context<'cid>>,
_opaque: Opaque
}
impl<'cid, 'context,'mid> DropInPlace for Module<'cid, 'context,'mid> {
unsafe fn drop_in_place(&mut self) {
LLVMDisposeModule(self.as_raw());
}
}
impl<'cid, 'context,'mid> Module<'cid, 'context,'mid> {
pub fn new(_id: Id<'mid>, name: &CStr, context: &'context Context<'cid>) -> Owned<Module<'cid, 'context,'mid>> {
unsafe {
Owned::from_raw(
LLVMModuleCreateWithNameInContext(name.as_ptr(), context.as_raw()) as *mut Module
)
}
}
pub fn set_data_layout(&mut self, layout: &DataLayout) {
unsafe {
LLVMSetModuleDataLayout(self.as_raw(), layout.as_raw());
}
}
pub fn set_target_triple(&mut self, triple: &CStr) {
unsafe {
LLVMSetTarget(self.as_raw(), triple.as_ptr());
}
}
pub fn builder<'module>(&'module mut self) -> &'module mut ModuleBuilder<'cid,'mid,'module> {
unsafe {
&mut *(self as *mut Module as *mut ModuleBuilder)
}
}
pub fn as_raw(&self) -> LLVMModuleRef {
self as *const Module as *mut Module as LLVMModuleRef
}
}
pub struct ModuleBuilder<'cid:'module,'mid:'module,'module> {
_marker: PhantomData<&'module mut ()>,
_module_id: IdRef<'mid>,
_context_id: IdRef<'cid>,
_opaque: Opaque
}
impl<'cid,'mid,'module> ModuleBuilder<'cid,'mid,'module> {
pub fn add_global<Ty: DerivesFrom<Type<'cid>> +?Sized>(&mut self, name: &CStr, ty: &Ty) -> &'module mut Global<'cid,'mid, Ty> {
unsafe {
&mut *(LLVMAddGlobal(self.as_raw(), upcast(ty).as_raw(), name.as_ptr()) as *mut Global<Ty>)
}
}
pub fn add_function(&mut self, name: &CStr, ty: &FunctionType<'cid>) -> &'module mut Function<'cid,'mid> {
unsafe {
&mut *(LLVMAddFunction(self.as_raw(), name.as_ptr(), upcast::<_,Type>(ty).as_raw()) as *mut Function)
}
}
pub fn get_named_function(&self, name: &CStr) -> Option<&'module FunctionLabel<'cid,'mid>> {
unsafe {
let old = LLVMGetNamedFunction(self.as_raw(), name.as_ptr());
if old.is_null() {
None
} else {
Some(&*(old as *mut FunctionLabel))
}
}
}
pub fn reborrow<'a>(&'a mut self) -> &'a mut ModuleBuilder<'cid,'mid, 'a> {
unsafe {
&mut *(self.as_raw() as *mut ModuleBuilder)
}
}
pub fn as_raw(&self) -> LLVMModuleRef
|
}
|
{
self as *const ModuleBuilder as *mut ModuleBuilder as LLVMModuleRef
}
|
identifier_body
|
module.rs
|
use std::ffi::CStr;
use std::marker::PhantomData;
use llvm_sys::prelude::*;
use llvm_sys::core::*;
use llvm_sys::target::LLVMSetModuleDataLayout;
use id::{Id, IdRef};
use inheritance::{upcast, DerivesFrom};
use opaque::Opaque;
use owned::{Owned, DropInPlace};
use llvm::{Context, FunctionType, Type, Global, Function, FunctionLabel, DataLayout};
pub struct Module<'cid: 'context, 'context,'mid> {
_id: Id<'mid>,
_context: PhantomData<&'context Context<'cid>>,
_opaque: Opaque
}
impl<'cid, 'context,'mid> DropInPlace for Module<'cid, 'context,'mid> {
unsafe fn drop_in_place(&mut self) {
LLVMDisposeModule(self.as_raw());
}
}
impl<'cid, 'context,'mid> Module<'cid, 'context,'mid> {
pub fn new(_id: Id<'mid>, name: &CStr, context: &'context Context<'cid>) -> Owned<Module<'cid, 'context,'mid>> {
unsafe {
|
LLVMModuleCreateWithNameInContext(name.as_ptr(), context.as_raw()) as *mut Module
)
}
}
pub fn set_data_layout(&mut self, layout: &DataLayout) {
unsafe {
LLVMSetModuleDataLayout(self.as_raw(), layout.as_raw());
}
}
pub fn set_target_triple(&mut self, triple: &CStr) {
unsafe {
LLVMSetTarget(self.as_raw(), triple.as_ptr());
}
}
pub fn builder<'module>(&'module mut self) -> &'module mut ModuleBuilder<'cid,'mid,'module> {
unsafe {
&mut *(self as *mut Module as *mut ModuleBuilder)
}
}
pub fn as_raw(&self) -> LLVMModuleRef {
self as *const Module as *mut Module as LLVMModuleRef
}
}
pub struct ModuleBuilder<'cid:'module,'mid:'module,'module> {
_marker: PhantomData<&'module mut ()>,
_module_id: IdRef<'mid>,
_context_id: IdRef<'cid>,
_opaque: Opaque
}
impl<'cid,'mid,'module> ModuleBuilder<'cid,'mid,'module> {
pub fn add_global<Ty: DerivesFrom<Type<'cid>> +?Sized>(&mut self, name: &CStr, ty: &Ty) -> &'module mut Global<'cid,'mid, Ty> {
unsafe {
&mut *(LLVMAddGlobal(self.as_raw(), upcast(ty).as_raw(), name.as_ptr()) as *mut Global<Ty>)
}
}
pub fn add_function(&mut self, name: &CStr, ty: &FunctionType<'cid>) -> &'module mut Function<'cid,'mid> {
unsafe {
&mut *(LLVMAddFunction(self.as_raw(), name.as_ptr(), upcast::<_,Type>(ty).as_raw()) as *mut Function)
}
}
pub fn get_named_function(&self, name: &CStr) -> Option<&'module FunctionLabel<'cid,'mid>> {
unsafe {
let old = LLVMGetNamedFunction(self.as_raw(), name.as_ptr());
if old.is_null() {
None
} else {
Some(&*(old as *mut FunctionLabel))
}
}
}
pub fn reborrow<'a>(&'a mut self) -> &'a mut ModuleBuilder<'cid,'mid, 'a> {
unsafe {
&mut *(self.as_raw() as *mut ModuleBuilder)
}
}
pub fn as_raw(&self) -> LLVMModuleRef {
self as *const ModuleBuilder as *mut ModuleBuilder as LLVMModuleRef
}
}
|
Owned::from_raw(
|
random_line_split
|
module.rs
|
use std::ffi::CStr;
use std::marker::PhantomData;
use llvm_sys::prelude::*;
use llvm_sys::core::*;
use llvm_sys::target::LLVMSetModuleDataLayout;
use id::{Id, IdRef};
use inheritance::{upcast, DerivesFrom};
use opaque::Opaque;
use owned::{Owned, DropInPlace};
use llvm::{Context, FunctionType, Type, Global, Function, FunctionLabel, DataLayout};
pub struct
|
<'cid: 'context, 'context,'mid> {
_id: Id<'mid>,
_context: PhantomData<&'context Context<'cid>>,
_opaque: Opaque
}
impl<'cid, 'context,'mid> DropInPlace for Module<'cid, 'context,'mid> {
unsafe fn drop_in_place(&mut self) {
LLVMDisposeModule(self.as_raw());
}
}
impl<'cid, 'context,'mid> Module<'cid, 'context,'mid> {
pub fn new(_id: Id<'mid>, name: &CStr, context: &'context Context<'cid>) -> Owned<Module<'cid, 'context,'mid>> {
unsafe {
Owned::from_raw(
LLVMModuleCreateWithNameInContext(name.as_ptr(), context.as_raw()) as *mut Module
)
}
}
pub fn set_data_layout(&mut self, layout: &DataLayout) {
unsafe {
LLVMSetModuleDataLayout(self.as_raw(), layout.as_raw());
}
}
pub fn set_target_triple(&mut self, triple: &CStr) {
unsafe {
LLVMSetTarget(self.as_raw(), triple.as_ptr());
}
}
pub fn builder<'module>(&'module mut self) -> &'module mut ModuleBuilder<'cid,'mid,'module> {
unsafe {
&mut *(self as *mut Module as *mut ModuleBuilder)
}
}
pub fn as_raw(&self) -> LLVMModuleRef {
self as *const Module as *mut Module as LLVMModuleRef
}
}
pub struct ModuleBuilder<'cid:'module,'mid:'module,'module> {
_marker: PhantomData<&'module mut ()>,
_module_id: IdRef<'mid>,
_context_id: IdRef<'cid>,
_opaque: Opaque
}
impl<'cid,'mid,'module> ModuleBuilder<'cid,'mid,'module> {
pub fn add_global<Ty: DerivesFrom<Type<'cid>> +?Sized>(&mut self, name: &CStr, ty: &Ty) -> &'module mut Global<'cid,'mid, Ty> {
unsafe {
&mut *(LLVMAddGlobal(self.as_raw(), upcast(ty).as_raw(), name.as_ptr()) as *mut Global<Ty>)
}
}
pub fn add_function(&mut self, name: &CStr, ty: &FunctionType<'cid>) -> &'module mut Function<'cid,'mid> {
unsafe {
&mut *(LLVMAddFunction(self.as_raw(), name.as_ptr(), upcast::<_,Type>(ty).as_raw()) as *mut Function)
}
}
pub fn get_named_function(&self, name: &CStr) -> Option<&'module FunctionLabel<'cid,'mid>> {
unsafe {
let old = LLVMGetNamedFunction(self.as_raw(), name.as_ptr());
if old.is_null() {
None
} else {
Some(&*(old as *mut FunctionLabel))
}
}
}
pub fn reborrow<'a>(&'a mut self) -> &'a mut ModuleBuilder<'cid,'mid, 'a> {
unsafe {
&mut *(self.as_raw() as *mut ModuleBuilder)
}
}
pub fn as_raw(&self) -> LLVMModuleRef {
self as *const ModuleBuilder as *mut ModuleBuilder as LLVMModuleRef
}
}
|
Module
|
identifier_name
|
module.rs
|
use std::ffi::CStr;
use std::marker::PhantomData;
use llvm_sys::prelude::*;
use llvm_sys::core::*;
use llvm_sys::target::LLVMSetModuleDataLayout;
use id::{Id, IdRef};
use inheritance::{upcast, DerivesFrom};
use opaque::Opaque;
use owned::{Owned, DropInPlace};
use llvm::{Context, FunctionType, Type, Global, Function, FunctionLabel, DataLayout};
pub struct Module<'cid: 'context, 'context,'mid> {
_id: Id<'mid>,
_context: PhantomData<&'context Context<'cid>>,
_opaque: Opaque
}
impl<'cid, 'context,'mid> DropInPlace for Module<'cid, 'context,'mid> {
unsafe fn drop_in_place(&mut self) {
LLVMDisposeModule(self.as_raw());
}
}
impl<'cid, 'context,'mid> Module<'cid, 'context,'mid> {
pub fn new(_id: Id<'mid>, name: &CStr, context: &'context Context<'cid>) -> Owned<Module<'cid, 'context,'mid>> {
unsafe {
Owned::from_raw(
LLVMModuleCreateWithNameInContext(name.as_ptr(), context.as_raw()) as *mut Module
)
}
}
pub fn set_data_layout(&mut self, layout: &DataLayout) {
unsafe {
LLVMSetModuleDataLayout(self.as_raw(), layout.as_raw());
}
}
pub fn set_target_triple(&mut self, triple: &CStr) {
unsafe {
LLVMSetTarget(self.as_raw(), triple.as_ptr());
}
}
pub fn builder<'module>(&'module mut self) -> &'module mut ModuleBuilder<'cid,'mid,'module> {
unsafe {
&mut *(self as *mut Module as *mut ModuleBuilder)
}
}
pub fn as_raw(&self) -> LLVMModuleRef {
self as *const Module as *mut Module as LLVMModuleRef
}
}
pub struct ModuleBuilder<'cid:'module,'mid:'module,'module> {
_marker: PhantomData<&'module mut ()>,
_module_id: IdRef<'mid>,
_context_id: IdRef<'cid>,
_opaque: Opaque
}
impl<'cid,'mid,'module> ModuleBuilder<'cid,'mid,'module> {
pub fn add_global<Ty: DerivesFrom<Type<'cid>> +?Sized>(&mut self, name: &CStr, ty: &Ty) -> &'module mut Global<'cid,'mid, Ty> {
unsafe {
&mut *(LLVMAddGlobal(self.as_raw(), upcast(ty).as_raw(), name.as_ptr()) as *mut Global<Ty>)
}
}
pub fn add_function(&mut self, name: &CStr, ty: &FunctionType<'cid>) -> &'module mut Function<'cid,'mid> {
unsafe {
&mut *(LLVMAddFunction(self.as_raw(), name.as_ptr(), upcast::<_,Type>(ty).as_raw()) as *mut Function)
}
}
pub fn get_named_function(&self, name: &CStr) -> Option<&'module FunctionLabel<'cid,'mid>> {
unsafe {
let old = LLVMGetNamedFunction(self.as_raw(), name.as_ptr());
if old.is_null() {
None
} else
|
}
}
pub fn reborrow<'a>(&'a mut self) -> &'a mut ModuleBuilder<'cid,'mid, 'a> {
unsafe {
&mut *(self.as_raw() as *mut ModuleBuilder)
}
}
pub fn as_raw(&self) -> LLVMModuleRef {
self as *const ModuleBuilder as *mut ModuleBuilder as LLVMModuleRef
}
}
|
{
Some(&*(old as *mut FunctionLabel))
}
|
conditional_block
|
template.rs
|
use std::fs::{self, File};
use std::io::{self, Error, ErrorKind, Read};
use std::path::{Path, PathBuf};
use super::{Name, Statement};
// A binding of template source file information and the parsed AST.
#[derive(Debug)]
pub struct Template {
pub tree: Statement,
pub path: PathBuf,
pub name: String,
id: String,
}
impl Template {
/// Parses each template file in the directory tree.
pub fn parse<P>(directory: P) -> io::Result<Vec<Template>>
where
P: AsRef<Path>,
{
let base = directory.as_ref();
parse_dir(base, base)
}
/// Creates a template from file name and root AST node.
///
/// The file name is used as an identifier in compiled function names
/// to ensure uniqueness when linked with other templates. It provides
/// a stable name to be referenced as a partial in other templates.
pub fn new(base: &Path, path: PathBuf, tree: Statement) -> Self {
let name = name(base, &path);
let id = Name::new(&name).id();
Template {
tree: tree,
path: path,
name: name,
id: id,
}
}
pub fn name(&self) -> Name {
Name::new(&self.name)
}
}
/// Creates a shortened path name for a template file name. The base directory
/// being compiled and the file extension is stripped off to create the short
/// name: `app/templates/include/header.mustache -> include/header`.
fn
|
(base: &Path, path: &Path) -> String {
let base = path.strip_prefix(base).unwrap();
let stem = base.file_stem().unwrap();
let name = base.with_file_name(stem);
String::from(name.to_str().unwrap())
}
fn parse_dir(base: &Path, dir: &Path) -> io::Result<Vec<Template>> {
let mut templates = Vec::new();
if dir.is_dir() {
for entry in fs::read_dir(dir)? {
let path = entry?.path();
if path.is_dir() {
templates.append(&mut parse_dir(base, &path)?);
} else {
let tree = parse(&path)?;
let template = Template::new(base, path, tree);
templates.push(template);
}
}
}
Ok(templates)
}
fn parse(path: &Path) -> io::Result<Statement> {
let mut file = File::open(path)?;
let mut template = String::new();
file.read_to_string(&mut template)?;
match Statement::parse(&template) {
Ok(tree) => Ok(tree),
Err(e) => {
let message = format!("Error parsing {:?}\n{}", path, e);
Err(Error::new(ErrorKind::Other, message))
}
}
}
#[cfg(test)]
mod tests {
use super::super::Statement;
use super::Template;
use std::path::PathBuf;
#[test]
fn name() {
let base = PathBuf::from("app/templates");
let path = PathBuf::from("app/templates/include/header.mustache");
let tree = Statement::Content(String::from("test"));
let template = Template::new(&base, path, tree);
assert_eq!("include/header", template.name);
assert_eq!("include_header", template.id);
}
}
|
name
|
identifier_name
|
template.rs
|
use std::fs::{self, File};
use std::io::{self, Error, ErrorKind, Read};
use std::path::{Path, PathBuf};
use super::{Name, Statement};
// A binding of template source file information and the parsed AST.
#[derive(Debug)]
pub struct Template {
pub tree: Statement,
pub path: PathBuf,
pub name: String,
id: String,
}
impl Template {
/// Parses each template file in the directory tree.
pub fn parse<P>(directory: P) -> io::Result<Vec<Template>>
where
P: AsRef<Path>,
{
let base = directory.as_ref();
parse_dir(base, base)
}
/// Creates a template from file name and root AST node.
///
/// The file name is used as an identifier in compiled function names
/// to ensure uniqueness when linked with other templates. It provides
/// a stable name to be referenced as a partial in other templates.
pub fn new(base: &Path, path: PathBuf, tree: Statement) -> Self {
let name = name(base, &path);
let id = Name::new(&name).id();
Template {
tree: tree,
path: path,
name: name,
id: id,
}
}
pub fn name(&self) -> Name {
Name::new(&self.name)
}
|
}
/// Creates a shortened path name for a template file name. The base directory
/// being compiled and the file extension is stripped off to create the short
/// name: `app/templates/include/header.mustache -> include/header`.
fn name(base: &Path, path: &Path) -> String {
let base = path.strip_prefix(base).unwrap();
let stem = base.file_stem().unwrap();
let name = base.with_file_name(stem);
String::from(name.to_str().unwrap())
}
fn parse_dir(base: &Path, dir: &Path) -> io::Result<Vec<Template>> {
let mut templates = Vec::new();
if dir.is_dir() {
for entry in fs::read_dir(dir)? {
let path = entry?.path();
if path.is_dir() {
templates.append(&mut parse_dir(base, &path)?);
} else {
let tree = parse(&path)?;
let template = Template::new(base, path, tree);
templates.push(template);
}
}
}
Ok(templates)
}
fn parse(path: &Path) -> io::Result<Statement> {
let mut file = File::open(path)?;
let mut template = String::new();
file.read_to_string(&mut template)?;
match Statement::parse(&template) {
Ok(tree) => Ok(tree),
Err(e) => {
let message = format!("Error parsing {:?}\n{}", path, e);
Err(Error::new(ErrorKind::Other, message))
}
}
}
#[cfg(test)]
mod tests {
use super::super::Statement;
use super::Template;
use std::path::PathBuf;
#[test]
fn name() {
let base = PathBuf::from("app/templates");
let path = PathBuf::from("app/templates/include/header.mustache");
let tree = Statement::Content(String::from("test"));
let template = Template::new(&base, path, tree);
assert_eq!("include/header", template.name);
assert_eq!("include_header", template.id);
}
}
|
random_line_split
|
|
template.rs
|
use std::fs::{self, File};
use std::io::{self, Error, ErrorKind, Read};
use std::path::{Path, PathBuf};
use super::{Name, Statement};
// A binding of template source file information and the parsed AST.
#[derive(Debug)]
pub struct Template {
pub tree: Statement,
pub path: PathBuf,
pub name: String,
id: String,
}
impl Template {
/// Parses each template file in the directory tree.
pub fn parse<P>(directory: P) -> io::Result<Vec<Template>>
where
P: AsRef<Path>,
{
let base = directory.as_ref();
parse_dir(base, base)
}
/// Creates a template from file name and root AST node.
///
/// The file name is used as an identifier in compiled function names
/// to ensure uniqueness when linked with other templates. It provides
/// a stable name to be referenced as a partial in other templates.
pub fn new(base: &Path, path: PathBuf, tree: Statement) -> Self {
let name = name(base, &path);
let id = Name::new(&name).id();
Template {
tree: tree,
path: path,
name: name,
id: id,
}
}
pub fn name(&self) -> Name
|
}
/// Creates a shortened path name for a template file name. The base directory
/// being compiled and the file extension is stripped off to create the short
/// name: `app/templates/include/header.mustache -> include/header`.
fn name(base: &Path, path: &Path) -> String {
let base = path.strip_prefix(base).unwrap();
let stem = base.file_stem().unwrap();
let name = base.with_file_name(stem);
String::from(name.to_str().unwrap())
}
fn parse_dir(base: &Path, dir: &Path) -> io::Result<Vec<Template>> {
let mut templates = Vec::new();
if dir.is_dir() {
for entry in fs::read_dir(dir)? {
let path = entry?.path();
if path.is_dir() {
templates.append(&mut parse_dir(base, &path)?);
} else {
let tree = parse(&path)?;
let template = Template::new(base, path, tree);
templates.push(template);
}
}
}
Ok(templates)
}
fn parse(path: &Path) -> io::Result<Statement> {
let mut file = File::open(path)?;
let mut template = String::new();
file.read_to_string(&mut template)?;
match Statement::parse(&template) {
Ok(tree) => Ok(tree),
Err(e) => {
let message = format!("Error parsing {:?}\n{}", path, e);
Err(Error::new(ErrorKind::Other, message))
}
}
}
#[cfg(test)]
mod tests {
use super::super::Statement;
use super::Template;
use std::path::PathBuf;
#[test]
fn name() {
let base = PathBuf::from("app/templates");
let path = PathBuf::from("app/templates/include/header.mustache");
let tree = Statement::Content(String::from("test"));
let template = Template::new(&base, path, tree);
assert_eq!("include/header", template.name);
assert_eq!("include_header", template.id);
}
}
|
{
Name::new(&self.name)
}
|
identifier_body
|
lib.rs
|
#![allow(dead_code)]
use std::collections::{BTreeMap, HashMap};
use std::iter::FromIterator;
type DocId = i32;
const MIN_DOC_ID: DocId = std::i32::MIN;
const MAX_DOC_ID: DocId = std::i32::MAX;
#[derive(Clone, Debug, PartialEq)]
struct ScoredDoc {
id: DocId,
score: f32,
}
struct Term<'a> {
field: &'a str,
token: &'a str,
weight: f32,
}
// used to populate an index in a simple way
type IndexEntry<'a> = (&'a str, &'a str, DocId, f32);
type DensePostingList = Vec<ScoredDoc>;
type WeightedPostingListIterator<'a> = (f32, std::slice::Iter<'a, ScoredDoc>);
#[derive(Debug)]
struct InvertedIndexNaive<'a> {
// TODO: dictionaries and IDF's, etc as well
posting_lists: HashMap<&'a str, HashMap<&'a str, DensePostingList>>,
}
impl<'a> InvertedIndexNaive<'a> {
// TODO: figure out the lifetime stuff to make this return an iterator and can early terminate on
fn get_lists_for_terms(&self, terms: &[Term<'a>]) -> Vec<Option<(f32, &DensePostingList)>> {
terms
.iter()
.map(|term| {
self.posting_lists.get(term.field).and_then(|field_result| {
field_result.get(term.token).map(|list| (term.weight, list))
})
})
.collect()
}
}
impl<'a> FromIterator<IndexEntry<'a>> for InvertedIndexNaive<'a> {
fn from_iter<T: IntoIterator<Item = IndexEntry<'a>>>(iter: T) -> Self {
// insert field -> (token -> (doc id -> score))
let mut nested_maps = HashMap::new();
for (field, token, doc_id, payload) in iter {
let field_entry = nested_maps.entry(field).or_insert_with(HashMap::new);
let token_entry = field_entry.entry(token).or_insert_with(BTreeMap::new);
token_entry.insert(doc_id, payload);
}
let mut posting_lists = HashMap::new();
// turn BTreeMaps into DesnPostingLists
for (field, token_map) in nested_maps {
let field_entry = posting_lists.entry(field).or_insert_with(HashMap::new);
for (token, score_map) in token_map {
let posting_list = score_map
.iter()
.map(|x| ScoredDoc {
id: *x.0,
score: *x.1,
})
.collect();
field_entry.insert(token, posting_list);
}
}
InvertedIndexNaive { posting_lists }
}
}
#[derive(Clone, Copy, Debug, PartialEq)]
enum AndState {
Searching,
IterationsAfterEmptyList(usize),
Done,
}
impl AndState {
fn next_state(&self, frontier_length: usize) -> Self {
match self {
AndState::Searching => AndState::IterationsAfterEmptyList(0),
AndState::IterationsAfterEmptyList(i) => {
if *i == frontier_length - 1 {
AndState::Done
} else {
AndState::IterationsAfterEmptyList(i + 1)
}
}
AndState::Done => AndState::Done,
}
}
}
impl Default for AndState {
fn default() -> Self {
AndState::Done
}
}
#[derive(Default)]
struct And<'a> {
state: AndState,
frontier: Vec<WeightedPostingListIterator<'a>>,
last_viable_id: i32,
}
// TODO: ctor that takes in existing iterators
impl<'a> And<'a> {
fn new(index: &'a InvertedIndexNaive, terms: &[Term<'a>]) -> Self {
let mut frontier = Vec::with_capacity(terms.len());
for lookup in index.get_lists_for_terms(terms) {
match lookup {
None => return And::default(),
Some((weight, list)) => frontier.push((weight, list.iter())),
}
}
And {
state: AndState::Searching,
frontier,
last_viable_id: MIN_DOC_ID,
}
}
}
impl Iterator for And<'_> {
type Item = ScoredDoc;
fn next(&mut self) -> Option<ScoredDoc> {
let frontier_length = self.frontier.len();
let mut matched_doc_count = 0;
let mut score = 0.0;
while self.state!= AndState::Done {
for (weight, posting_list) in self.frontier.iter_mut() {
let mut current_state = self.state;
let mut last_viable_id = self.last_viable_id;
let mut list_iter = posting_list.skip_while(|doc| doc.id < last_viable_id);
match list_iter.next() {
None => current_state = current_state.next_state(frontier_length),
Some(doc) =>
|
}
if matched_doc_count == frontier_length - 1 {
return Some(ScoredDoc {
id: last_viable_id,
score,
});
}
self.last_viable_id = last_viable_id;
self.state = current_state;
if self.state == AndState::Done {
break;
}
}
}
None
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn empty_and() {
let inverted_index = &InvertedIndexNaive {
posting_lists: HashMap::new(),
};
let term = &vec![Term {
field: "your",
token: "mom",
weight: 9000.1,
}];
let and = And::new(inverted_index, term);
assert_eq!(and.count(), 0);
}
#[test]
fn nonempty_and() {
let inverted_index: InvertedIndexNaive = [
("description", "very", 1, 1.0),
("description", "very", 5, 1.0),
("description", "very", 6, 1.0),
("description", "human", 2, 1.0),
("description", "human", 5, 2.0),
("description", "human", 6, 1.0),
("description", "like", 3, 1.0),
("description", "like", 5, 3.0),
("description", "eyes", 4, 1.0),
("description", "eyes", 5, 4.0),
("description", "eyes", 6, 1.0),
("title", "manul", 1, 1.0),
("title", "manul", 5, 5.0),
("title", "manul", 6, 1.0),
("title", "cat", 2, 1.0),
("title", "cat", 5, 6.0),
("title", "cat", 6, 1.0),
("title", "facial", 3, 1.0),
("title", "facial", 5, 7.0),
("title", "facial", 6, 1.0),
("title", "expression", 4, 1.0),
("title", "expression", 5, 8.0),
("title", "expression", 6, 1.0),
]
.iter()
.cloned()
.collect();
let terms = [
Term {
field: "description",
token: "very",
weight: 1.0,
},
Term {
field: "description",
token: "human",
weight: 10.0,
},
Term {
field: "description",
token: "like",
weight: 100.0,
},
Term {
field: "description",
token: "eyes",
weight: 1_000.0,
},
Term {
field: "title",
token: "manul",
weight: 10_000.0,
},
Term {
field: "title",
token: "cat",
weight: 100_000.0,
},
Term {
field: "title",
token: "facial",
weight: 1_000_000.0,
},
Term {
field: "title",
token: "expression",
weight: 10_000_000.0,
},
];
let and = And::new(&inverted_index, &terms);
let actual: Vec<ScoredDoc> = and.collect();
assert_eq!(
actual,
vec![ScoredDoc {
id: 5,
score: 87_654_321.0
}]
)
}
}
|
{
let score_contribution = *weight * doc.score;
if doc.id == last_viable_id {
score += score_contribution;
matched_doc_count += 1;
} else {
score = score_contribution;
matched_doc_count = 0;
last_viable_id = doc.id;
}
}
|
conditional_block
|
lib.rs
|
#![allow(dead_code)]
use std::collections::{BTreeMap, HashMap};
use std::iter::FromIterator;
type DocId = i32;
const MIN_DOC_ID: DocId = std::i32::MIN;
const MAX_DOC_ID: DocId = std::i32::MAX;
#[derive(Clone, Debug, PartialEq)]
struct ScoredDoc {
id: DocId,
score: f32,
|
struct Term<'a> {
field: &'a str,
token: &'a str,
weight: f32,
}
// used to populate an index in a simple way
type IndexEntry<'a> = (&'a str, &'a str, DocId, f32);
type DensePostingList = Vec<ScoredDoc>;
type WeightedPostingListIterator<'a> = (f32, std::slice::Iter<'a, ScoredDoc>);
#[derive(Debug)]
struct InvertedIndexNaive<'a> {
// TODO: dictionaries and IDF's, etc as well
posting_lists: HashMap<&'a str, HashMap<&'a str, DensePostingList>>,
}
impl<'a> InvertedIndexNaive<'a> {
// TODO: figure out the lifetime stuff to make this return an iterator and can early terminate on
fn get_lists_for_terms(&self, terms: &[Term<'a>]) -> Vec<Option<(f32, &DensePostingList)>> {
terms
.iter()
.map(|term| {
self.posting_lists.get(term.field).and_then(|field_result| {
field_result.get(term.token).map(|list| (term.weight, list))
})
})
.collect()
}
}
impl<'a> FromIterator<IndexEntry<'a>> for InvertedIndexNaive<'a> {
fn from_iter<T: IntoIterator<Item = IndexEntry<'a>>>(iter: T) -> Self {
// insert field -> (token -> (doc id -> score))
let mut nested_maps = HashMap::new();
for (field, token, doc_id, payload) in iter {
let field_entry = nested_maps.entry(field).or_insert_with(HashMap::new);
let token_entry = field_entry.entry(token).or_insert_with(BTreeMap::new);
token_entry.insert(doc_id, payload);
}
let mut posting_lists = HashMap::new();
// turn BTreeMaps into DesnPostingLists
for (field, token_map) in nested_maps {
let field_entry = posting_lists.entry(field).or_insert_with(HashMap::new);
for (token, score_map) in token_map {
let posting_list = score_map
.iter()
.map(|x| ScoredDoc {
id: *x.0,
score: *x.1,
})
.collect();
field_entry.insert(token, posting_list);
}
}
InvertedIndexNaive { posting_lists }
}
}
#[derive(Clone, Copy, Debug, PartialEq)]
enum AndState {
Searching,
IterationsAfterEmptyList(usize),
Done,
}
impl AndState {
fn next_state(&self, frontier_length: usize) -> Self {
match self {
AndState::Searching => AndState::IterationsAfterEmptyList(0),
AndState::IterationsAfterEmptyList(i) => {
if *i == frontier_length - 1 {
AndState::Done
} else {
AndState::IterationsAfterEmptyList(i + 1)
}
}
AndState::Done => AndState::Done,
}
}
}
impl Default for AndState {
fn default() -> Self {
AndState::Done
}
}
#[derive(Default)]
struct And<'a> {
state: AndState,
frontier: Vec<WeightedPostingListIterator<'a>>,
last_viable_id: i32,
}
// TODO: ctor that takes in existing iterators
impl<'a> And<'a> {
fn new(index: &'a InvertedIndexNaive, terms: &[Term<'a>]) -> Self {
let mut frontier = Vec::with_capacity(terms.len());
for lookup in index.get_lists_for_terms(terms) {
match lookup {
None => return And::default(),
Some((weight, list)) => frontier.push((weight, list.iter())),
}
}
And {
state: AndState::Searching,
frontier,
last_viable_id: MIN_DOC_ID,
}
}
}
impl Iterator for And<'_> {
type Item = ScoredDoc;
fn next(&mut self) -> Option<ScoredDoc> {
let frontier_length = self.frontier.len();
let mut matched_doc_count = 0;
let mut score = 0.0;
while self.state!= AndState::Done {
for (weight, posting_list) in self.frontier.iter_mut() {
let mut current_state = self.state;
let mut last_viable_id = self.last_viable_id;
let mut list_iter = posting_list.skip_while(|doc| doc.id < last_viable_id);
match list_iter.next() {
None => current_state = current_state.next_state(frontier_length),
Some(doc) => {
let score_contribution = *weight * doc.score;
if doc.id == last_viable_id {
score += score_contribution;
matched_doc_count += 1;
} else {
score = score_contribution;
matched_doc_count = 0;
last_viable_id = doc.id;
}
}
}
if matched_doc_count == frontier_length - 1 {
return Some(ScoredDoc {
id: last_viable_id,
score,
});
}
self.last_viable_id = last_viable_id;
self.state = current_state;
if self.state == AndState::Done {
break;
}
}
}
None
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn empty_and() {
let inverted_index = &InvertedIndexNaive {
posting_lists: HashMap::new(),
};
let term = &vec![Term {
field: "your",
token: "mom",
weight: 9000.1,
}];
let and = And::new(inverted_index, term);
assert_eq!(and.count(), 0);
}
#[test]
fn nonempty_and() {
let inverted_index: InvertedIndexNaive = [
("description", "very", 1, 1.0),
("description", "very", 5, 1.0),
("description", "very", 6, 1.0),
("description", "human", 2, 1.0),
("description", "human", 5, 2.0),
("description", "human", 6, 1.0),
("description", "like", 3, 1.0),
("description", "like", 5, 3.0),
("description", "eyes", 4, 1.0),
("description", "eyes", 5, 4.0),
("description", "eyes", 6, 1.0),
("title", "manul", 1, 1.0),
("title", "manul", 5, 5.0),
("title", "manul", 6, 1.0),
("title", "cat", 2, 1.0),
("title", "cat", 5, 6.0),
("title", "cat", 6, 1.0),
("title", "facial", 3, 1.0),
("title", "facial", 5, 7.0),
("title", "facial", 6, 1.0),
("title", "expression", 4, 1.0),
("title", "expression", 5, 8.0),
("title", "expression", 6, 1.0),
]
.iter()
.cloned()
.collect();
let terms = [
Term {
field: "description",
token: "very",
weight: 1.0,
},
Term {
field: "description",
token: "human",
weight: 10.0,
},
Term {
field: "description",
token: "like",
weight: 100.0,
},
Term {
field: "description",
token: "eyes",
weight: 1_000.0,
},
Term {
field: "title",
token: "manul",
weight: 10_000.0,
},
Term {
field: "title",
token: "cat",
weight: 100_000.0,
},
Term {
field: "title",
token: "facial",
weight: 1_000_000.0,
},
Term {
field: "title",
token: "expression",
weight: 10_000_000.0,
},
];
let and = And::new(&inverted_index, &terms);
let actual: Vec<ScoredDoc> = and.collect();
assert_eq!(
actual,
vec![ScoredDoc {
id: 5,
score: 87_654_321.0
}]
)
}
}
|
}
|
random_line_split
|
lib.rs
|
#![allow(dead_code)]
use std::collections::{BTreeMap, HashMap};
use std::iter::FromIterator;
type DocId = i32;
const MIN_DOC_ID: DocId = std::i32::MIN;
const MAX_DOC_ID: DocId = std::i32::MAX;
#[derive(Clone, Debug, PartialEq)]
struct ScoredDoc {
id: DocId,
score: f32,
}
struct Term<'a> {
field: &'a str,
token: &'a str,
weight: f32,
}
// used to populate an index in a simple way
type IndexEntry<'a> = (&'a str, &'a str, DocId, f32);
type DensePostingList = Vec<ScoredDoc>;
type WeightedPostingListIterator<'a> = (f32, std::slice::Iter<'a, ScoredDoc>);
#[derive(Debug)]
struct InvertedIndexNaive<'a> {
// TODO: dictionaries and IDF's, etc as well
posting_lists: HashMap<&'a str, HashMap<&'a str, DensePostingList>>,
}
impl<'a> InvertedIndexNaive<'a> {
// TODO: figure out the lifetime stuff to make this return an iterator and can early terminate on
fn get_lists_for_terms(&self, terms: &[Term<'a>]) -> Vec<Option<(f32, &DensePostingList)>> {
terms
.iter()
.map(|term| {
self.posting_lists.get(term.field).and_then(|field_result| {
field_result.get(term.token).map(|list| (term.weight, list))
})
})
.collect()
}
}
impl<'a> FromIterator<IndexEntry<'a>> for InvertedIndexNaive<'a> {
fn from_iter<T: IntoIterator<Item = IndexEntry<'a>>>(iter: T) -> Self {
// insert field -> (token -> (doc id -> score))
let mut nested_maps = HashMap::new();
for (field, token, doc_id, payload) in iter {
let field_entry = nested_maps.entry(field).or_insert_with(HashMap::new);
let token_entry = field_entry.entry(token).or_insert_with(BTreeMap::new);
token_entry.insert(doc_id, payload);
}
let mut posting_lists = HashMap::new();
// turn BTreeMaps into DesnPostingLists
for (field, token_map) in nested_maps {
let field_entry = posting_lists.entry(field).or_insert_with(HashMap::new);
for (token, score_map) in token_map {
let posting_list = score_map
.iter()
.map(|x| ScoredDoc {
id: *x.0,
score: *x.1,
})
.collect();
field_entry.insert(token, posting_list);
}
}
InvertedIndexNaive { posting_lists }
}
}
#[derive(Clone, Copy, Debug, PartialEq)]
enum AndState {
Searching,
IterationsAfterEmptyList(usize),
Done,
}
impl AndState {
fn next_state(&self, frontier_length: usize) -> Self {
match self {
AndState::Searching => AndState::IterationsAfterEmptyList(0),
AndState::IterationsAfterEmptyList(i) => {
if *i == frontier_length - 1 {
AndState::Done
} else {
AndState::IterationsAfterEmptyList(i + 1)
}
}
AndState::Done => AndState::Done,
}
}
}
impl Default for AndState {
fn default() -> Self {
AndState::Done
}
}
#[derive(Default)]
struct
|
<'a> {
state: AndState,
frontier: Vec<WeightedPostingListIterator<'a>>,
last_viable_id: i32,
}
// TODO: ctor that takes in existing iterators
impl<'a> And<'a> {
fn new(index: &'a InvertedIndexNaive, terms: &[Term<'a>]) -> Self {
let mut frontier = Vec::with_capacity(terms.len());
for lookup in index.get_lists_for_terms(terms) {
match lookup {
None => return And::default(),
Some((weight, list)) => frontier.push((weight, list.iter())),
}
}
And {
state: AndState::Searching,
frontier,
last_viable_id: MIN_DOC_ID,
}
}
}
impl Iterator for And<'_> {
type Item = ScoredDoc;
fn next(&mut self) -> Option<ScoredDoc> {
let frontier_length = self.frontier.len();
let mut matched_doc_count = 0;
let mut score = 0.0;
while self.state!= AndState::Done {
for (weight, posting_list) in self.frontier.iter_mut() {
let mut current_state = self.state;
let mut last_viable_id = self.last_viable_id;
let mut list_iter = posting_list.skip_while(|doc| doc.id < last_viable_id);
match list_iter.next() {
None => current_state = current_state.next_state(frontier_length),
Some(doc) => {
let score_contribution = *weight * doc.score;
if doc.id == last_viable_id {
score += score_contribution;
matched_doc_count += 1;
} else {
score = score_contribution;
matched_doc_count = 0;
last_viable_id = doc.id;
}
}
}
if matched_doc_count == frontier_length - 1 {
return Some(ScoredDoc {
id: last_viable_id,
score,
});
}
self.last_viable_id = last_viable_id;
self.state = current_state;
if self.state == AndState::Done {
break;
}
}
}
None
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn empty_and() {
let inverted_index = &InvertedIndexNaive {
posting_lists: HashMap::new(),
};
let term = &vec![Term {
field: "your",
token: "mom",
weight: 9000.1,
}];
let and = And::new(inverted_index, term);
assert_eq!(and.count(), 0);
}
#[test]
fn nonempty_and() {
let inverted_index: InvertedIndexNaive = [
("description", "very", 1, 1.0),
("description", "very", 5, 1.0),
("description", "very", 6, 1.0),
("description", "human", 2, 1.0),
("description", "human", 5, 2.0),
("description", "human", 6, 1.0),
("description", "like", 3, 1.0),
("description", "like", 5, 3.0),
("description", "eyes", 4, 1.0),
("description", "eyes", 5, 4.0),
("description", "eyes", 6, 1.0),
("title", "manul", 1, 1.0),
("title", "manul", 5, 5.0),
("title", "manul", 6, 1.0),
("title", "cat", 2, 1.0),
("title", "cat", 5, 6.0),
("title", "cat", 6, 1.0),
("title", "facial", 3, 1.0),
("title", "facial", 5, 7.0),
("title", "facial", 6, 1.0),
("title", "expression", 4, 1.0),
("title", "expression", 5, 8.0),
("title", "expression", 6, 1.0),
]
.iter()
.cloned()
.collect();
let terms = [
Term {
field: "description",
token: "very",
weight: 1.0,
},
Term {
field: "description",
token: "human",
weight: 10.0,
},
Term {
field: "description",
token: "like",
weight: 100.0,
},
Term {
field: "description",
token: "eyes",
weight: 1_000.0,
},
Term {
field: "title",
token: "manul",
weight: 10_000.0,
},
Term {
field: "title",
token: "cat",
weight: 100_000.0,
},
Term {
field: "title",
token: "facial",
weight: 1_000_000.0,
},
Term {
field: "title",
token: "expression",
weight: 10_000_000.0,
},
];
let and = And::new(&inverted_index, &terms);
let actual: Vec<ScoredDoc> = and.collect();
assert_eq!(
actual,
vec![ScoredDoc {
id: 5,
score: 87_654_321.0
}]
)
}
}
|
And
|
identifier_name
|
stop_signs.rs
|
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use ModifiedStopSign;
use geom::GeomMap;
use map_model::{IntersectionID, Map, TurnID};
use std::collections::HashMap;
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Copy, PartialOrd)]
pub enum TurnPriority {
Stop,
Yield,
Priority,
}
// This represents a single intersection controlled by a stop sign-like policy. The turns are
// partitioned into three groups:
//
// 1) Priority turns - these must be non-conflicting, and cars don't have to stop before doing this
// turn.
// 2) Yields - cars can do this immediately if there are no previously accepted conflicting turns.
// should maybe check that these turns originate from roads with priority turns.
// 3) Stops - cars must stop before doing this turn, and they are accepted with the lowest priority
#[derive(Debug)]
pub struct ControlStopSign {
intersection: IntersectionID,
turns: HashMap<TurnID, TurnPriority>,
changed: bool,
}
impl ControlStopSign {
pub fn new(map: &Map, intersection: IntersectionID) -> ControlStopSign {
assert!(!map.get_i(intersection).has_traffic_signal);
ControlStopSign::all_way_stop(map, intersection)
}
fn all_way_stop(map: &Map, intersection: IntersectionID) -> ControlStopSign {
let mut ss = ControlStopSign {
intersection,
turns: HashMap::new(),
changed: false,
};
for t in &map.get_i(intersection).turns {
ss.turns.insert(*t, TurnPriority::Stop);
}
ss
}
pub fn get_priority(&self, turn: TurnID) -> TurnPriority {
self.turns[&turn]
}
pub fn set_priority(&mut self, turn: TurnID, priority: TurnPriority, geom_map: &GeomMap) {
if priority == TurnPriority::Priority {
assert!(self.could_be_priority_turn(turn, geom_map));
}
self.turns.insert(turn, priority);
self.changed = true;
}
pub fn could_be_priority_turn(&self, id: TurnID, geom_map: &GeomMap) -> bool {
for (t, pri) in &self.turns {
if *pri == TurnPriority::Priority
&& geom_map.get_t(id).conflicts_with(geom_map.get_t(*t))
|
}
true
}
pub fn changed(&self) -> bool {
// TODO detect edits that've been undone, equivalent to original
self.changed
}
pub fn get_savestate(&self) -> Option<ModifiedStopSign> {
if!self.changed() {
return None;
}
Some(ModifiedStopSign {
turns: self.turns.clone(),
})
}
pub fn load_savestate(&mut self, state: &ModifiedStopSign) {
self.changed = true;
self.turns = state.turns.clone();
}
// TODO need to color turn icons
}
#[cfg(test)]
mod tests {
#[test]
fn ordering() {
use stop_signs::TurnPriority;
assert!(TurnPriority::Priority > TurnPriority::Yield);
}
}
|
{
return false;
}
|
conditional_block
|
stop_signs.rs
|
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use ModifiedStopSign;
use geom::GeomMap;
use map_model::{IntersectionID, Map, TurnID};
use std::collections::HashMap;
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Copy, PartialOrd)]
pub enum TurnPriority {
Stop,
Yield,
Priority,
}
// This represents a single intersection controlled by a stop sign-like policy. The turns are
// partitioned into three groups:
//
// 1) Priority turns - these must be non-conflicting, and cars don't have to stop before doing this
// turn.
// 2) Yields - cars can do this immediately if there are no previously accepted conflicting turns.
// should maybe check that these turns originate from roads with priority turns.
// 3) Stops - cars must stop before doing this turn, and they are accepted with the lowest priority
#[derive(Debug)]
pub struct ControlStopSign {
intersection: IntersectionID,
turns: HashMap<TurnID, TurnPriority>,
changed: bool,
}
impl ControlStopSign {
pub fn new(map: &Map, intersection: IntersectionID) -> ControlStopSign {
assert!(!map.get_i(intersection).has_traffic_signal);
ControlStopSign::all_way_stop(map, intersection)
}
fn all_way_stop(map: &Map, intersection: IntersectionID) -> ControlStopSign {
let mut ss = ControlStopSign {
intersection,
turns: HashMap::new(),
changed: false,
};
for t in &map.get_i(intersection).turns {
ss.turns.insert(*t, TurnPriority::Stop);
}
ss
}
pub fn get_priority(&self, turn: TurnID) -> TurnPriority {
self.turns[&turn]
}
pub fn set_priority(&mut self, turn: TurnID, priority: TurnPriority, geom_map: &GeomMap) {
if priority == TurnPriority::Priority {
assert!(self.could_be_priority_turn(turn, geom_map));
}
|
for (t, pri) in &self.turns {
if *pri == TurnPriority::Priority
&& geom_map.get_t(id).conflicts_with(geom_map.get_t(*t))
{
return false;
}
}
true
}
pub fn changed(&self) -> bool {
// TODO detect edits that've been undone, equivalent to original
self.changed
}
pub fn get_savestate(&self) -> Option<ModifiedStopSign> {
if!self.changed() {
return None;
}
Some(ModifiedStopSign {
turns: self.turns.clone(),
})
}
pub fn load_savestate(&mut self, state: &ModifiedStopSign) {
self.changed = true;
self.turns = state.turns.clone();
}
// TODO need to color turn icons
}
#[cfg(test)]
mod tests {
#[test]
fn ordering() {
use stop_signs::TurnPriority;
assert!(TurnPriority::Priority > TurnPriority::Yield);
}
}
|
self.turns.insert(turn, priority);
self.changed = true;
}
pub fn could_be_priority_turn(&self, id: TurnID, geom_map: &GeomMap) -> bool {
|
random_line_split
|
stop_signs.rs
|
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use ModifiedStopSign;
use geom::GeomMap;
use map_model::{IntersectionID, Map, TurnID};
use std::collections::HashMap;
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Copy, PartialOrd)]
pub enum TurnPriority {
Stop,
Yield,
Priority,
}
// This represents a single intersection controlled by a stop sign-like policy. The turns are
// partitioned into three groups:
//
// 1) Priority turns - these must be non-conflicting, and cars don't have to stop before doing this
// turn.
// 2) Yields - cars can do this immediately if there are no previously accepted conflicting turns.
// should maybe check that these turns originate from roads with priority turns.
// 3) Stops - cars must stop before doing this turn, and they are accepted with the lowest priority
#[derive(Debug)]
pub struct ControlStopSign {
intersection: IntersectionID,
turns: HashMap<TurnID, TurnPriority>,
changed: bool,
}
impl ControlStopSign {
pub fn new(map: &Map, intersection: IntersectionID) -> ControlStopSign {
assert!(!map.get_i(intersection).has_traffic_signal);
ControlStopSign::all_way_stop(map, intersection)
}
fn all_way_stop(map: &Map, intersection: IntersectionID) -> ControlStopSign {
let mut ss = ControlStopSign {
intersection,
turns: HashMap::new(),
changed: false,
};
for t in &map.get_i(intersection).turns {
ss.turns.insert(*t, TurnPriority::Stop);
}
ss
}
pub fn get_priority(&self, turn: TurnID) -> TurnPriority {
self.turns[&turn]
}
pub fn set_priority(&mut self, turn: TurnID, priority: TurnPriority, geom_map: &GeomMap) {
if priority == TurnPriority::Priority {
assert!(self.could_be_priority_turn(turn, geom_map));
}
self.turns.insert(turn, priority);
self.changed = true;
}
pub fn could_be_priority_turn(&self, id: TurnID, geom_map: &GeomMap) -> bool {
for (t, pri) in &self.turns {
if *pri == TurnPriority::Priority
&& geom_map.get_t(id).conflicts_with(geom_map.get_t(*t))
{
return false;
}
}
true
}
pub fn changed(&self) -> bool
|
pub fn get_savestate(&self) -> Option<ModifiedStopSign> {
if!self.changed() {
return None;
}
Some(ModifiedStopSign {
turns: self.turns.clone(),
})
}
pub fn load_savestate(&mut self, state: &ModifiedStopSign) {
self.changed = true;
self.turns = state.turns.clone();
}
// TODO need to color turn icons
}
#[cfg(test)]
mod tests {
#[test]
fn ordering() {
use stop_signs::TurnPriority;
assert!(TurnPriority::Priority > TurnPriority::Yield);
}
}
|
{
// TODO detect edits that've been undone, equivalent to original
self.changed
}
|
identifier_body
|
stop_signs.rs
|
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use ModifiedStopSign;
use geom::GeomMap;
use map_model::{IntersectionID, Map, TurnID};
use std::collections::HashMap;
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Copy, PartialOrd)]
pub enum TurnPriority {
Stop,
Yield,
Priority,
}
// This represents a single intersection controlled by a stop sign-like policy. The turns are
// partitioned into three groups:
//
// 1) Priority turns - these must be non-conflicting, and cars don't have to stop before doing this
// turn.
// 2) Yields - cars can do this immediately if there are no previously accepted conflicting turns.
// should maybe check that these turns originate from roads with priority turns.
// 3) Stops - cars must stop before doing this turn, and they are accepted with the lowest priority
#[derive(Debug)]
pub struct ControlStopSign {
intersection: IntersectionID,
turns: HashMap<TurnID, TurnPriority>,
changed: bool,
}
impl ControlStopSign {
pub fn
|
(map: &Map, intersection: IntersectionID) -> ControlStopSign {
assert!(!map.get_i(intersection).has_traffic_signal);
ControlStopSign::all_way_stop(map, intersection)
}
fn all_way_stop(map: &Map, intersection: IntersectionID) -> ControlStopSign {
let mut ss = ControlStopSign {
intersection,
turns: HashMap::new(),
changed: false,
};
for t in &map.get_i(intersection).turns {
ss.turns.insert(*t, TurnPriority::Stop);
}
ss
}
pub fn get_priority(&self, turn: TurnID) -> TurnPriority {
self.turns[&turn]
}
pub fn set_priority(&mut self, turn: TurnID, priority: TurnPriority, geom_map: &GeomMap) {
if priority == TurnPriority::Priority {
assert!(self.could_be_priority_turn(turn, geom_map));
}
self.turns.insert(turn, priority);
self.changed = true;
}
pub fn could_be_priority_turn(&self, id: TurnID, geom_map: &GeomMap) -> bool {
for (t, pri) in &self.turns {
if *pri == TurnPriority::Priority
&& geom_map.get_t(id).conflicts_with(geom_map.get_t(*t))
{
return false;
}
}
true
}
pub fn changed(&self) -> bool {
// TODO detect edits that've been undone, equivalent to original
self.changed
}
pub fn get_savestate(&self) -> Option<ModifiedStopSign> {
if!self.changed() {
return None;
}
Some(ModifiedStopSign {
turns: self.turns.clone(),
})
}
pub fn load_savestate(&mut self, state: &ModifiedStopSign) {
self.changed = true;
self.turns = state.turns.clone();
}
// TODO need to color turn icons
}
#[cfg(test)]
mod tests {
#[test]
fn ordering() {
use stop_signs::TurnPriority;
assert!(TurnPriority::Priority > TurnPriority::Yield);
}
}
|
new
|
identifier_name
|
cast_kernels.rs
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#[macro_use]
extern crate criterion;
use criterion::Criterion;
use rand::distributions::{Distribution, Standard};
use rand::prelude::random;
use std::sync::Arc;
extern crate arrow;
use arrow::array::*;
use arrow::compute::cast;
use arrow::datatypes::*;
fn build_array<FROM>(size: usize) -> ArrayRef
where
FROM: ArrowNumericType,
Standard: Distribution<FROM::Native>,
PrimitiveArray<FROM>: std::convert::From<Vec<Option<FROM::Native>>>,
{
let values = (0..size)
.map(|_| {
// 10% nulls, i.e. dense.
if random::<f64>() < 0.1 {
None
} else {
Some(random::<FROM::Native>())
}
})
.collect();
Arc::new(PrimitiveArray::<FROM>::from(values))
}
fn build_timestamp_array<FROM>(size: usize) -> ArrayRef
where
FROM: ArrowTimestampType,
Standard: Distribution<FROM::Native>,
{
let values = (0..size)
.map(|_| {
if random::<f64>() < 0.5 {
None
} else {
Some(random::<i64>())
}
})
.collect::<Vec<Option<i64>>>();
Arc::new(PrimitiveArray::<FROM>::from_opt_vec(values, None))
}
// cast array from specified primitive array type to desired data type
fn cast_array(array: &ArrayRef, to_type: DataType) {
criterion::black_box(cast(array, &to_type).unwrap());
}
fn add_benchmark(c: &mut Criterion)
|
});
c.bench_function("cast int32 to float64 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Float64))
});
c.bench_function("cast int32 to int64 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Int64))
});
c.bench_function("cast float32 to int32 512", |b| {
b.iter(|| cast_array(&f32_array, DataType::Int32))
});
c.bench_function("cast float64 to float32 512", |b| {
b.iter(|| cast_array(&f64_array, DataType::Float32))
});
c.bench_function("cast float64 to uint64 512", |b| {
b.iter(|| cast_array(&f64_array, DataType::UInt64))
});
c.bench_function("cast int64 to int32 512", |b| {
b.iter(|| cast_array(&i64_array, DataType::Int32))
});
c.bench_function("cast date64 to date32 512", |b| {
b.iter(|| cast_array(&date64_array, DataType::Date32(DateUnit::Day)))
});
c.bench_function("cast date32 to date64 512", |b| {
b.iter(|| cast_array(&date32_array, DataType::Date64(DateUnit::Millisecond)))
});
c.bench_function("cast time32s to time32ms 512", |b| {
b.iter(|| cast_array(&time32s_array, DataType::Time32(TimeUnit::Millisecond)))
});
c.bench_function("cast time32s to time64us 512", |b| {
b.iter(|| cast_array(&time32s_array, DataType::Time64(TimeUnit::Microsecond)))
});
c.bench_function("cast time64ns to time32s 512", |b| {
b.iter(|| cast_array(&time64ns_array, DataType::Time32(TimeUnit::Second)))
});
c.bench_function("cast timestamp_ns to timestamp_s 512", |b| {
b.iter(|| {
cast_array(
&time_ns_array,
DataType::Timestamp(TimeUnit::Nanosecond, None),
)
})
});
c.bench_function("cast timestamp_ms to timestamp_ns 512", |b| {
b.iter(|| {
cast_array(
&time_ms_array,
DataType::Timestamp(TimeUnit::Nanosecond, None),
)
})
});
c.bench_function("cast timestamp_ms to i64 512", |b| {
b.iter(|| cast_array(&time_ms_array, DataType::Int64))
});
}
criterion_group!(benches, add_benchmark);
criterion_main!(benches);
|
{
let i32_array = build_array::<Int32Type>(512);
let i64_array = build_array::<Int64Type>(512);
let f32_array = build_array::<Float32Type>(512);
let f64_array = build_array::<Float64Type>(512);
let date64_array = build_array::<Date64Type>(512);
let date32_array = build_array::<Date32Type>(512);
let time32s_array = build_array::<Time32SecondType>(512);
let time64ns_array = build_array::<Time64NanosecondType>(512);
let time_ns_array = build_timestamp_array::<TimestampNanosecondType>(512);
let time_ms_array = build_timestamp_array::<TimestampMillisecondType>(512);
c.bench_function("cast int32 to int32 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Int32))
});
c.bench_function("cast int32 to uint32 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::UInt32))
});
c.bench_function("cast int32 to float32 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Float32))
|
identifier_body
|
cast_kernels.rs
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#[macro_use]
extern crate criterion;
use criterion::Criterion;
use rand::distributions::{Distribution, Standard};
use rand::prelude::random;
use std::sync::Arc;
extern crate arrow;
use arrow::array::*;
use arrow::compute::cast;
use arrow::datatypes::*;
fn
|
<FROM>(size: usize) -> ArrayRef
where
FROM: ArrowNumericType,
Standard: Distribution<FROM::Native>,
PrimitiveArray<FROM>: std::convert::From<Vec<Option<FROM::Native>>>,
{
let values = (0..size)
.map(|_| {
// 10% nulls, i.e. dense.
if random::<f64>() < 0.1 {
None
} else {
Some(random::<FROM::Native>())
}
})
.collect();
Arc::new(PrimitiveArray::<FROM>::from(values))
}
fn build_timestamp_array<FROM>(size: usize) -> ArrayRef
where
FROM: ArrowTimestampType,
Standard: Distribution<FROM::Native>,
{
let values = (0..size)
.map(|_| {
if random::<f64>() < 0.5 {
None
} else {
Some(random::<i64>())
}
})
.collect::<Vec<Option<i64>>>();
Arc::new(PrimitiveArray::<FROM>::from_opt_vec(values, None))
}
// cast array from specified primitive array type to desired data type
fn cast_array(array: &ArrayRef, to_type: DataType) {
criterion::black_box(cast(array, &to_type).unwrap());
}
fn add_benchmark(c: &mut Criterion) {
let i32_array = build_array::<Int32Type>(512);
let i64_array = build_array::<Int64Type>(512);
let f32_array = build_array::<Float32Type>(512);
let f64_array = build_array::<Float64Type>(512);
let date64_array = build_array::<Date64Type>(512);
let date32_array = build_array::<Date32Type>(512);
let time32s_array = build_array::<Time32SecondType>(512);
let time64ns_array = build_array::<Time64NanosecondType>(512);
let time_ns_array = build_timestamp_array::<TimestampNanosecondType>(512);
let time_ms_array = build_timestamp_array::<TimestampMillisecondType>(512);
c.bench_function("cast int32 to int32 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Int32))
});
c.bench_function("cast int32 to uint32 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::UInt32))
});
c.bench_function("cast int32 to float32 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Float32))
});
c.bench_function("cast int32 to float64 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Float64))
});
c.bench_function("cast int32 to int64 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Int64))
});
c.bench_function("cast float32 to int32 512", |b| {
b.iter(|| cast_array(&f32_array, DataType::Int32))
});
c.bench_function("cast float64 to float32 512", |b| {
b.iter(|| cast_array(&f64_array, DataType::Float32))
});
c.bench_function("cast float64 to uint64 512", |b| {
b.iter(|| cast_array(&f64_array, DataType::UInt64))
});
c.bench_function("cast int64 to int32 512", |b| {
b.iter(|| cast_array(&i64_array, DataType::Int32))
});
c.bench_function("cast date64 to date32 512", |b| {
b.iter(|| cast_array(&date64_array, DataType::Date32(DateUnit::Day)))
});
c.bench_function("cast date32 to date64 512", |b| {
b.iter(|| cast_array(&date32_array, DataType::Date64(DateUnit::Millisecond)))
});
c.bench_function("cast time32s to time32ms 512", |b| {
b.iter(|| cast_array(&time32s_array, DataType::Time32(TimeUnit::Millisecond)))
});
c.bench_function("cast time32s to time64us 512", |b| {
b.iter(|| cast_array(&time32s_array, DataType::Time64(TimeUnit::Microsecond)))
});
c.bench_function("cast time64ns to time32s 512", |b| {
b.iter(|| cast_array(&time64ns_array, DataType::Time32(TimeUnit::Second)))
});
c.bench_function("cast timestamp_ns to timestamp_s 512", |b| {
b.iter(|| {
cast_array(
&time_ns_array,
DataType::Timestamp(TimeUnit::Nanosecond, None),
)
})
});
c.bench_function("cast timestamp_ms to timestamp_ns 512", |b| {
b.iter(|| {
cast_array(
&time_ms_array,
DataType::Timestamp(TimeUnit::Nanosecond, None),
)
})
});
c.bench_function("cast timestamp_ms to i64 512", |b| {
b.iter(|| cast_array(&time_ms_array, DataType::Int64))
});
}
criterion_group!(benches, add_benchmark);
criterion_main!(benches);
|
build_array
|
identifier_name
|
cast_kernels.rs
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#[macro_use]
extern crate criterion;
use criterion::Criterion;
use rand::distributions::{Distribution, Standard};
use rand::prelude::random;
use std::sync::Arc;
extern crate arrow;
use arrow::array::*;
use arrow::compute::cast;
use arrow::datatypes::*;
fn build_array<FROM>(size: usize) -> ArrayRef
where
FROM: ArrowNumericType,
Standard: Distribution<FROM::Native>,
PrimitiveArray<FROM>: std::convert::From<Vec<Option<FROM::Native>>>,
{
let values = (0..size)
.map(|_| {
// 10% nulls, i.e. dense.
if random::<f64>() < 0.1
|
else {
Some(random::<FROM::Native>())
}
})
.collect();
Arc::new(PrimitiveArray::<FROM>::from(values))
}
fn build_timestamp_array<FROM>(size: usize) -> ArrayRef
where
FROM: ArrowTimestampType,
Standard: Distribution<FROM::Native>,
{
let values = (0..size)
.map(|_| {
if random::<f64>() < 0.5 {
None
} else {
Some(random::<i64>())
}
})
.collect::<Vec<Option<i64>>>();
Arc::new(PrimitiveArray::<FROM>::from_opt_vec(values, None))
}
// cast array from specified primitive array type to desired data type
fn cast_array(array: &ArrayRef, to_type: DataType) {
criterion::black_box(cast(array, &to_type).unwrap());
}
fn add_benchmark(c: &mut Criterion) {
let i32_array = build_array::<Int32Type>(512);
let i64_array = build_array::<Int64Type>(512);
let f32_array = build_array::<Float32Type>(512);
let f64_array = build_array::<Float64Type>(512);
let date64_array = build_array::<Date64Type>(512);
let date32_array = build_array::<Date32Type>(512);
let time32s_array = build_array::<Time32SecondType>(512);
let time64ns_array = build_array::<Time64NanosecondType>(512);
let time_ns_array = build_timestamp_array::<TimestampNanosecondType>(512);
let time_ms_array = build_timestamp_array::<TimestampMillisecondType>(512);
c.bench_function("cast int32 to int32 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Int32))
});
c.bench_function("cast int32 to uint32 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::UInt32))
});
c.bench_function("cast int32 to float32 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Float32))
});
c.bench_function("cast int32 to float64 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Float64))
});
c.bench_function("cast int32 to int64 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Int64))
});
c.bench_function("cast float32 to int32 512", |b| {
b.iter(|| cast_array(&f32_array, DataType::Int32))
});
c.bench_function("cast float64 to float32 512", |b| {
b.iter(|| cast_array(&f64_array, DataType::Float32))
});
c.bench_function("cast float64 to uint64 512", |b| {
b.iter(|| cast_array(&f64_array, DataType::UInt64))
});
c.bench_function("cast int64 to int32 512", |b| {
b.iter(|| cast_array(&i64_array, DataType::Int32))
});
c.bench_function("cast date64 to date32 512", |b| {
b.iter(|| cast_array(&date64_array, DataType::Date32(DateUnit::Day)))
});
c.bench_function("cast date32 to date64 512", |b| {
b.iter(|| cast_array(&date32_array, DataType::Date64(DateUnit::Millisecond)))
});
c.bench_function("cast time32s to time32ms 512", |b| {
b.iter(|| cast_array(&time32s_array, DataType::Time32(TimeUnit::Millisecond)))
});
c.bench_function("cast time32s to time64us 512", |b| {
b.iter(|| cast_array(&time32s_array, DataType::Time64(TimeUnit::Microsecond)))
});
c.bench_function("cast time64ns to time32s 512", |b| {
b.iter(|| cast_array(&time64ns_array, DataType::Time32(TimeUnit::Second)))
});
c.bench_function("cast timestamp_ns to timestamp_s 512", |b| {
b.iter(|| {
cast_array(
&time_ns_array,
DataType::Timestamp(TimeUnit::Nanosecond, None),
)
})
});
c.bench_function("cast timestamp_ms to timestamp_ns 512", |b| {
b.iter(|| {
cast_array(
&time_ms_array,
DataType::Timestamp(TimeUnit::Nanosecond, None),
)
})
});
c.bench_function("cast timestamp_ms to i64 512", |b| {
b.iter(|| cast_array(&time_ms_array, DataType::Int64))
});
}
criterion_group!(benches, add_benchmark);
criterion_main!(benches);
|
{
None
}
|
conditional_block
|
cast_kernels.rs
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#[macro_use]
extern crate criterion;
use criterion::Criterion;
use rand::distributions::{Distribution, Standard};
use rand::prelude::random;
use std::sync::Arc;
extern crate arrow;
use arrow::array::*;
use arrow::compute::cast;
use arrow::datatypes::*;
fn build_array<FROM>(size: usize) -> ArrayRef
where
FROM: ArrowNumericType,
Standard: Distribution<FROM::Native>,
PrimitiveArray<FROM>: std::convert::From<Vec<Option<FROM::Native>>>,
{
let values = (0..size)
.map(|_| {
// 10% nulls, i.e. dense.
if random::<f64>() < 0.1 {
None
} else {
Some(random::<FROM::Native>())
}
})
.collect();
Arc::new(PrimitiveArray::<FROM>::from(values))
}
fn build_timestamp_array<FROM>(size: usize) -> ArrayRef
where
FROM: ArrowTimestampType,
Standard: Distribution<FROM::Native>,
{
let values = (0..size)
.map(|_| {
if random::<f64>() < 0.5 {
None
} else {
Some(random::<i64>())
}
})
.collect::<Vec<Option<i64>>>();
Arc::new(PrimitiveArray::<FROM>::from_opt_vec(values, None))
}
// cast array from specified primitive array type to desired data type
fn cast_array(array: &ArrayRef, to_type: DataType) {
criterion::black_box(cast(array, &to_type).unwrap());
}
fn add_benchmark(c: &mut Criterion) {
let i32_array = build_array::<Int32Type>(512);
let i64_array = build_array::<Int64Type>(512);
let f32_array = build_array::<Float32Type>(512);
let f64_array = build_array::<Float64Type>(512);
let date64_array = build_array::<Date64Type>(512);
let date32_array = build_array::<Date32Type>(512);
let time32s_array = build_array::<Time32SecondType>(512);
let time64ns_array = build_array::<Time64NanosecondType>(512);
let time_ns_array = build_timestamp_array::<TimestampNanosecondType>(512);
let time_ms_array = build_timestamp_array::<TimestampMillisecondType>(512);
c.bench_function("cast int32 to int32 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Int32))
});
c.bench_function("cast int32 to uint32 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::UInt32))
});
c.bench_function("cast int32 to float32 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Float32))
});
c.bench_function("cast int32 to float64 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Float64))
});
c.bench_function("cast int32 to int64 512", |b| {
b.iter(|| cast_array(&i32_array, DataType::Int64))
});
c.bench_function("cast float32 to int32 512", |b| {
b.iter(|| cast_array(&f32_array, DataType::Int32))
});
c.bench_function("cast float64 to float32 512", |b| {
|
c.bench_function("cast float64 to uint64 512", |b| {
b.iter(|| cast_array(&f64_array, DataType::UInt64))
});
c.bench_function("cast int64 to int32 512", |b| {
b.iter(|| cast_array(&i64_array, DataType::Int32))
});
c.bench_function("cast date64 to date32 512", |b| {
b.iter(|| cast_array(&date64_array, DataType::Date32(DateUnit::Day)))
});
c.bench_function("cast date32 to date64 512", |b| {
b.iter(|| cast_array(&date32_array, DataType::Date64(DateUnit::Millisecond)))
});
c.bench_function("cast time32s to time32ms 512", |b| {
b.iter(|| cast_array(&time32s_array, DataType::Time32(TimeUnit::Millisecond)))
});
c.bench_function("cast time32s to time64us 512", |b| {
b.iter(|| cast_array(&time32s_array, DataType::Time64(TimeUnit::Microsecond)))
});
c.bench_function("cast time64ns to time32s 512", |b| {
b.iter(|| cast_array(&time64ns_array, DataType::Time32(TimeUnit::Second)))
});
c.bench_function("cast timestamp_ns to timestamp_s 512", |b| {
b.iter(|| {
cast_array(
&time_ns_array,
DataType::Timestamp(TimeUnit::Nanosecond, None),
)
})
});
c.bench_function("cast timestamp_ms to timestamp_ns 512", |b| {
b.iter(|| {
cast_array(
&time_ms_array,
DataType::Timestamp(TimeUnit::Nanosecond, None),
)
})
});
c.bench_function("cast timestamp_ms to i64 512", |b| {
b.iter(|| cast_array(&time_ms_array, DataType::Int64))
});
}
criterion_group!(benches, add_benchmark);
criterion_main!(benches);
|
b.iter(|| cast_array(&f64_array, DataType::Float32))
});
|
random_line_split
|
mod.rs
|
pub mod brainfuck;
|
#[derive(Clone, PartialEq, Debug)]
pub enum BasicCmd {
Skip,
Rewind,
Add,
Sub,
}
impl Display for BasicCmd {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "{:?}", self)
}
}
// workaround because FaustCmd::Repeatable is 'not a valid type'
#[derive(Clone, PartialEq, Debug)]
pub struct Repeatable(BasicCmd, usize);
impl Display for Repeatable {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "{:?}", self)
}
}
#[derive(Clone, PartialEq, Debug)]
pub enum FaustCmd {
Repeatable(BasicCmd, usize),
Addressed(Repeatable, usize),
Clear,
Output,
Input,
JumpEqualZero,
JumpNotZero,
DebugPrint,
Breakpoint,
ToggleBuffer,
Buffer(String),
// Iteration primitives
For(Vec<FaustCmd>, usize), // loop and reduce by usize
ScanFwd(Vec<FaustCmd>, usize), // do something and skip by usize
ScanBk(Vec<FaustCmd>, usize), // do something and rewind by usize
}
impl Display for FaustCmd {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "<{:?}>", self)
}
}
trait Frontend {
fn basic(&self, code: &String) -> Vec<FaustCmd>;
fn optimize(&self, code: &String) -> Vec<FaustCmd> {
optimizer::full_optimize(self.basic(code))
}
}
|
mod optimizer;
use std::fmt::{Display, Formatter, Result};
|
random_line_split
|
mod.rs
|
pub mod brainfuck;
mod optimizer;
use std::fmt::{Display, Formatter, Result};
#[derive(Clone, PartialEq, Debug)]
pub enum BasicCmd {
Skip,
Rewind,
Add,
Sub,
}
impl Display for BasicCmd {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "{:?}", self)
}
}
// workaround because FaustCmd::Repeatable is 'not a valid type'
#[derive(Clone, PartialEq, Debug)]
pub struct Repeatable(BasicCmd, usize);
impl Display for Repeatable {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "{:?}", self)
}
}
#[derive(Clone, PartialEq, Debug)]
pub enum FaustCmd {
Repeatable(BasicCmd, usize),
Addressed(Repeatable, usize),
Clear,
Output,
Input,
JumpEqualZero,
JumpNotZero,
DebugPrint,
Breakpoint,
ToggleBuffer,
Buffer(String),
// Iteration primitives
For(Vec<FaustCmd>, usize), // loop and reduce by usize
ScanFwd(Vec<FaustCmd>, usize), // do something and skip by usize
ScanBk(Vec<FaustCmd>, usize), // do something and rewind by usize
}
impl Display for FaustCmd {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "<{:?}>", self)
}
}
trait Frontend {
fn basic(&self, code: &String) -> Vec<FaustCmd>;
fn
|
(&self, code: &String) -> Vec<FaustCmd> {
optimizer::full_optimize(self.basic(code))
}
}
|
optimize
|
identifier_name
|
mod.rs
|
pub mod brainfuck;
mod optimizer;
use std::fmt::{Display, Formatter, Result};
#[derive(Clone, PartialEq, Debug)]
pub enum BasicCmd {
Skip,
Rewind,
Add,
Sub,
}
impl Display for BasicCmd {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "{:?}", self)
}
}
// workaround because FaustCmd::Repeatable is 'not a valid type'
#[derive(Clone, PartialEq, Debug)]
pub struct Repeatable(BasicCmd, usize);
impl Display for Repeatable {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "{:?}", self)
}
}
#[derive(Clone, PartialEq, Debug)]
pub enum FaustCmd {
Repeatable(BasicCmd, usize),
Addressed(Repeatable, usize),
Clear,
Output,
Input,
JumpEqualZero,
JumpNotZero,
DebugPrint,
Breakpoint,
ToggleBuffer,
Buffer(String),
// Iteration primitives
For(Vec<FaustCmd>, usize), // loop and reduce by usize
ScanFwd(Vec<FaustCmd>, usize), // do something and skip by usize
ScanBk(Vec<FaustCmd>, usize), // do something and rewind by usize
}
impl Display for FaustCmd {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "<{:?}>", self)
}
}
trait Frontend {
fn basic(&self, code: &String) -> Vec<FaustCmd>;
fn optimize(&self, code: &String) -> Vec<FaustCmd>
|
}
|
{
optimizer::full_optimize(self.basic(code))
}
|
identifier_body
|
mod.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::env;
use std::str;
use std::net::SocketAddr;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use env_logger::LogBuilder;
use jsonrpc_core::IoHandler;
use jsonrpc_http_server::{self as http, Host, DomainsValidation};
use devtools::http_client;
use hash_fetch::urlhint::ContractClient;
use fetch::{Fetch, Client as FetchClient};
use parity_reactor::Remote;
use {Middleware, SyncStatus, WebProxyTokens};
mod registrar;
mod fetch;
use self::registrar::FakeRegistrar;
use self::fetch::FakeFetch;
const SIGNER_PORT: u16 = 18180;
fn init_logger() {
// Initialize logger
if let Ok(log) = env::var("RUST_LOG") {
let mut builder = LogBuilder::new();
builder.parse(&log);
let _ = builder.init(); // ignore errors since./test.sh will call this multiple times.
}
}
pub fn init_server<F, B>(process: F, io: IoHandler, remote: Remote) -> (Server, Arc<FakeRegistrar>) where
F: FnOnce(ServerBuilder) -> ServerBuilder<B>,
B: Fetch,
{
init_logger();
let registrar = Arc::new(FakeRegistrar::new());
let mut dapps_path = env::temp_dir();
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
let server = process(ServerBuilder::new(
&dapps_path, registrar.clone(), remote,
))
.signer_address(Some(("127.0.0.1".into(), SIGNER_PORT)))
.start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), io).unwrap();
(
server,
registrar,
)
}
pub fn serve_with_rpc(io: IoHandler) -> Server {
init_server(|builder| builder, io, Remote::new_sync()).0
}
pub fn serve_hosts(hosts: Option<Vec<String>>) -> Server {
let hosts = hosts.map(|hosts| hosts.into_iter().map(Into::into).collect());
init_server(|builder| builder.allowed_hosts(hosts.into()), Default::default(), Remote::new_sync()).0
}
pub fn serve_with_registrar() -> (Server, Arc<FakeRegistrar>) {
init_server(|builder| builder, Default::default(), Remote::new_sync())
}
pub fn serve_with_registrar_and_sync() -> (Server, Arc<FakeRegistrar>) {
init_server(|builder| {
builder.sync_status(Arc::new(|| true))
}, Default::default(), Remote::new_sync())
}
pub fn serve_with_registrar_and_fetch() -> (Server, FakeFetch, Arc<FakeRegistrar>) {
serve_with_registrar_and_fetch_and_threads(false)
}
pub fn serve_with_registrar_and_fetch_and_threads(multi_threaded: bool) -> (Server, FakeFetch, Arc<FakeRegistrar>) {
let fetch = FakeFetch::default();
let f = fetch.clone();
let (server, reg) = init_server(move |builder| {
builder.fetch(f.clone())
}, Default::default(), if multi_threaded { Remote::new_thread_per_future() } else { Remote::new_sync() });
(server, fetch, reg)
}
pub fn serve_with_fetch(web_token: &'static str, domain: &'static str) -> (Server, FakeFetch) {
let fetch = FakeFetch::default();
let f = fetch.clone();
let (server, _) = init_server(move |builder| {
builder
.fetch(f.clone())
.web_proxy_tokens(Arc::new(move |token| {
if &token == web_token { Some(domain.into()) } else { None }
}))
}, Default::default(), Remote::new_sync());
(server, fetch)
}
pub fn serve() -> Server {
init_server(|builder| builder, Default::default(), Remote::new_sync()).0
}
pub fn request(server: Server, request: &str) -> http_client::Response {
http_client::request(server.addr(), request)
}
pub fn assert_security_headers(headers: &[String]) {
http_client::assert_security_headers_present(headers, None)
}
pub fn assert_security_headers_for_embed(headers: &[String])
|
/// Webapps HTTP+RPC server build.
pub struct ServerBuilder<T: Fetch = FetchClient> {
dapps_path: PathBuf,
registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>,
web_proxy_tokens: Arc<WebProxyTokens>,
signer_address: Option<(String, u16)>,
allowed_hosts: DomainsValidation<Host>,
remote: Remote,
fetch: Option<T>,
}
impl ServerBuilder {
/// Construct new dapps server
pub fn new<P: AsRef<Path>>(dapps_path: P, registrar: Arc<ContractClient>, remote: Remote) -> Self {
ServerBuilder {
dapps_path: dapps_path.as_ref().to_owned(),
registrar: registrar,
sync_status: Arc::new(|| false),
web_proxy_tokens: Arc::new(|_| None),
signer_address: None,
allowed_hosts: DomainsValidation::Disabled,
remote: remote,
fetch: None,
}
}
}
impl<T: Fetch> ServerBuilder<T> {
/// Set a fetch client to use.
pub fn fetch<X: Fetch>(self, fetch: X) -> ServerBuilder<X> {
ServerBuilder {
dapps_path: self.dapps_path,
registrar: self.registrar,
sync_status: self.sync_status,
web_proxy_tokens: self.web_proxy_tokens,
signer_address: self.signer_address,
allowed_hosts: self.allowed_hosts,
remote: self.remote,
fetch: Some(fetch),
}
}
/// Change default sync status.
pub fn sync_status(mut self, status: Arc<SyncStatus>) -> Self {
self.sync_status = status;
self
}
/// Change default web proxy tokens validator.
pub fn web_proxy_tokens(mut self, tokens: Arc<WebProxyTokens>) -> Self {
self.web_proxy_tokens = tokens;
self
}
/// Change default signer port.
pub fn signer_address(mut self, signer_address: Option<(String, u16)>) -> Self {
self.signer_address = signer_address;
self
}
/// Change allowed hosts.
/// `None` - All hosts are allowed
/// `Some(whitelist)` - Allow only whitelisted hosts (+ listen address)
pub fn allowed_hosts(mut self, allowed_hosts: DomainsValidation<Host>) -> Self {
self.allowed_hosts = allowed_hosts;
self
}
/// Asynchronously start server with no authentication,
/// returns result with `Server` handle on success or an error.
pub fn start_unsecured_http(self, addr: &SocketAddr, io: IoHandler) -> Result<Server, http::Error> {
let fetch = self.fetch_client();
Server::start_http(
addr,
io,
self.allowed_hosts,
self.signer_address,
self.dapps_path,
vec![],
self.registrar,
self.sync_status,
self.web_proxy_tokens,
self.remote,
fetch,
)
}
fn fetch_client(&self) -> T {
match self.fetch.clone() {
Some(fetch) => fetch,
None => T::new().unwrap(),
}
}
}
const DAPPS_DOMAIN: &'static str = "web3.site";
/// Webapps HTTP server.
pub struct Server {
server: Option<http::Server>,
}
impl Server {
fn start_http<F: Fetch>(
addr: &SocketAddr,
io: IoHandler,
allowed_hosts: DomainsValidation<Host>,
signer_address: Option<(String, u16)>,
dapps_path: PathBuf,
extra_dapps: Vec<PathBuf>,
registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>,
web_proxy_tokens: Arc<WebProxyTokens>,
remote: Remote,
fetch: F,
) -> Result<Server, http::Error> {
let middleware = Middleware::dapps(
remote,
signer_address,
dapps_path,
extra_dapps,
DAPPS_DOMAIN.into(),
registrar,
sync_status,
web_proxy_tokens,
fetch,
);
let mut allowed_hosts: Option<Vec<Host>> = allowed_hosts.into();
allowed_hosts.as_mut().map(|mut hosts| {
hosts.push(format!("http://*.{}:*", DAPPS_DOMAIN).into());
hosts.push(format!("http://*.{}", DAPPS_DOMAIN).into());
});
http::ServerBuilder::new(io)
.request_middleware(middleware)
.allowed_hosts(allowed_hosts.into())
.cors(http::DomainsValidation::Disabled)
.start_http(addr)
.map(|server| Server {
server: Some(server),
})
}
/// Returns address that this server is bound to.
pub fn addr(&self) -> &SocketAddr {
self.server.as_ref()
.expect("server is always Some at the start; it's consumed only when object is dropped; qed")
.addrs()
.first()
.expect("You cannot start the server without binding to at least one address; qed")
}
}
impl Drop for Server {
fn drop(&mut self) {
self.server.take().unwrap().close()
}
}
|
{
http_client::assert_security_headers_present(headers, Some(SIGNER_PORT))
}
|
identifier_body
|
mod.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::env;
use std::str;
use std::net::SocketAddr;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use env_logger::LogBuilder;
use jsonrpc_core::IoHandler;
use jsonrpc_http_server::{self as http, Host, DomainsValidation};
use devtools::http_client;
use hash_fetch::urlhint::ContractClient;
use fetch::{Fetch, Client as FetchClient};
use parity_reactor::Remote;
use {Middleware, SyncStatus, WebProxyTokens};
mod registrar;
mod fetch;
use self::registrar::FakeRegistrar;
use self::fetch::FakeFetch;
const SIGNER_PORT: u16 = 18180;
fn init_logger() {
// Initialize logger
if let Ok(log) = env::var("RUST_LOG")
|
}
pub fn init_server<F, B>(process: F, io: IoHandler, remote: Remote) -> (Server, Arc<FakeRegistrar>) where
F: FnOnce(ServerBuilder) -> ServerBuilder<B>,
B: Fetch,
{
init_logger();
let registrar = Arc::new(FakeRegistrar::new());
let mut dapps_path = env::temp_dir();
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
let server = process(ServerBuilder::new(
&dapps_path, registrar.clone(), remote,
))
.signer_address(Some(("127.0.0.1".into(), SIGNER_PORT)))
.start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), io).unwrap();
(
server,
registrar,
)
}
pub fn serve_with_rpc(io: IoHandler) -> Server {
init_server(|builder| builder, io, Remote::new_sync()).0
}
pub fn serve_hosts(hosts: Option<Vec<String>>) -> Server {
let hosts = hosts.map(|hosts| hosts.into_iter().map(Into::into).collect());
init_server(|builder| builder.allowed_hosts(hosts.into()), Default::default(), Remote::new_sync()).0
}
pub fn serve_with_registrar() -> (Server, Arc<FakeRegistrar>) {
init_server(|builder| builder, Default::default(), Remote::new_sync())
}
pub fn serve_with_registrar_and_sync() -> (Server, Arc<FakeRegistrar>) {
init_server(|builder| {
builder.sync_status(Arc::new(|| true))
}, Default::default(), Remote::new_sync())
}
pub fn serve_with_registrar_and_fetch() -> (Server, FakeFetch, Arc<FakeRegistrar>) {
serve_with_registrar_and_fetch_and_threads(false)
}
pub fn serve_with_registrar_and_fetch_and_threads(multi_threaded: bool) -> (Server, FakeFetch, Arc<FakeRegistrar>) {
let fetch = FakeFetch::default();
let f = fetch.clone();
let (server, reg) = init_server(move |builder| {
builder.fetch(f.clone())
}, Default::default(), if multi_threaded { Remote::new_thread_per_future() } else { Remote::new_sync() });
(server, fetch, reg)
}
pub fn serve_with_fetch(web_token: &'static str, domain: &'static str) -> (Server, FakeFetch) {
let fetch = FakeFetch::default();
let f = fetch.clone();
let (server, _) = init_server(move |builder| {
builder
.fetch(f.clone())
.web_proxy_tokens(Arc::new(move |token| {
if &token == web_token { Some(domain.into()) } else { None }
}))
}, Default::default(), Remote::new_sync());
(server, fetch)
}
pub fn serve() -> Server {
init_server(|builder| builder, Default::default(), Remote::new_sync()).0
}
pub fn request(server: Server, request: &str) -> http_client::Response {
http_client::request(server.addr(), request)
}
pub fn assert_security_headers(headers: &[String]) {
http_client::assert_security_headers_present(headers, None)
}
pub fn assert_security_headers_for_embed(headers: &[String]) {
http_client::assert_security_headers_present(headers, Some(SIGNER_PORT))
}
/// Webapps HTTP+RPC server build.
pub struct ServerBuilder<T: Fetch = FetchClient> {
dapps_path: PathBuf,
registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>,
web_proxy_tokens: Arc<WebProxyTokens>,
signer_address: Option<(String, u16)>,
allowed_hosts: DomainsValidation<Host>,
remote: Remote,
fetch: Option<T>,
}
impl ServerBuilder {
/// Construct new dapps server
pub fn new<P: AsRef<Path>>(dapps_path: P, registrar: Arc<ContractClient>, remote: Remote) -> Self {
ServerBuilder {
dapps_path: dapps_path.as_ref().to_owned(),
registrar: registrar,
sync_status: Arc::new(|| false),
web_proxy_tokens: Arc::new(|_| None),
signer_address: None,
allowed_hosts: DomainsValidation::Disabled,
remote: remote,
fetch: None,
}
}
}
impl<T: Fetch> ServerBuilder<T> {
/// Set a fetch client to use.
pub fn fetch<X: Fetch>(self, fetch: X) -> ServerBuilder<X> {
ServerBuilder {
dapps_path: self.dapps_path,
registrar: self.registrar,
sync_status: self.sync_status,
web_proxy_tokens: self.web_proxy_tokens,
signer_address: self.signer_address,
allowed_hosts: self.allowed_hosts,
remote: self.remote,
fetch: Some(fetch),
}
}
/// Change default sync status.
pub fn sync_status(mut self, status: Arc<SyncStatus>) -> Self {
self.sync_status = status;
self
}
/// Change default web proxy tokens validator.
pub fn web_proxy_tokens(mut self, tokens: Arc<WebProxyTokens>) -> Self {
self.web_proxy_tokens = tokens;
self
}
/// Change default signer port.
pub fn signer_address(mut self, signer_address: Option<(String, u16)>) -> Self {
self.signer_address = signer_address;
self
}
/// Change allowed hosts.
/// `None` - All hosts are allowed
/// `Some(whitelist)` - Allow only whitelisted hosts (+ listen address)
pub fn allowed_hosts(mut self, allowed_hosts: DomainsValidation<Host>) -> Self {
self.allowed_hosts = allowed_hosts;
self
}
/// Asynchronously start server with no authentication,
/// returns result with `Server` handle on success or an error.
pub fn start_unsecured_http(self, addr: &SocketAddr, io: IoHandler) -> Result<Server, http::Error> {
let fetch = self.fetch_client();
Server::start_http(
addr,
io,
self.allowed_hosts,
self.signer_address,
self.dapps_path,
vec![],
self.registrar,
self.sync_status,
self.web_proxy_tokens,
self.remote,
fetch,
)
}
fn fetch_client(&self) -> T {
match self.fetch.clone() {
Some(fetch) => fetch,
None => T::new().unwrap(),
}
}
}
const DAPPS_DOMAIN: &'static str = "web3.site";
/// Webapps HTTP server.
pub struct Server {
server: Option<http::Server>,
}
impl Server {
fn start_http<F: Fetch>(
addr: &SocketAddr,
io: IoHandler,
allowed_hosts: DomainsValidation<Host>,
signer_address: Option<(String, u16)>,
dapps_path: PathBuf,
extra_dapps: Vec<PathBuf>,
registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>,
web_proxy_tokens: Arc<WebProxyTokens>,
remote: Remote,
fetch: F,
) -> Result<Server, http::Error> {
let middleware = Middleware::dapps(
remote,
signer_address,
dapps_path,
extra_dapps,
DAPPS_DOMAIN.into(),
registrar,
sync_status,
web_proxy_tokens,
fetch,
);
let mut allowed_hosts: Option<Vec<Host>> = allowed_hosts.into();
allowed_hosts.as_mut().map(|mut hosts| {
hosts.push(format!("http://*.{}:*", DAPPS_DOMAIN).into());
hosts.push(format!("http://*.{}", DAPPS_DOMAIN).into());
});
http::ServerBuilder::new(io)
.request_middleware(middleware)
.allowed_hosts(allowed_hosts.into())
.cors(http::DomainsValidation::Disabled)
.start_http(addr)
.map(|server| Server {
server: Some(server),
})
}
/// Returns address that this server is bound to.
pub fn addr(&self) -> &SocketAddr {
self.server.as_ref()
.expect("server is always Some at the start; it's consumed only when object is dropped; qed")
.addrs()
.first()
.expect("You cannot start the server without binding to at least one address; qed")
}
}
impl Drop for Server {
fn drop(&mut self) {
self.server.take().unwrap().close()
}
}
|
{
let mut builder = LogBuilder::new();
builder.parse(&log);
let _ = builder.init(); // ignore errors since ./test.sh will call this multiple times.
}
|
conditional_block
|
mod.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::env;
use std::str;
use std::net::SocketAddr;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use env_logger::LogBuilder;
use jsonrpc_core::IoHandler;
use jsonrpc_http_server::{self as http, Host, DomainsValidation};
use devtools::http_client;
use hash_fetch::urlhint::ContractClient;
use fetch::{Fetch, Client as FetchClient};
use parity_reactor::Remote;
use {Middleware, SyncStatus, WebProxyTokens};
mod registrar;
mod fetch;
use self::registrar::FakeRegistrar;
use self::fetch::FakeFetch;
const SIGNER_PORT: u16 = 18180;
fn init_logger() {
// Initialize logger
if let Ok(log) = env::var("RUST_LOG") {
let mut builder = LogBuilder::new();
builder.parse(&log);
let _ = builder.init(); // ignore errors since./test.sh will call this multiple times.
}
}
pub fn init_server<F, B>(process: F, io: IoHandler, remote: Remote) -> (Server, Arc<FakeRegistrar>) where
F: FnOnce(ServerBuilder) -> ServerBuilder<B>,
B: Fetch,
{
init_logger();
let registrar = Arc::new(FakeRegistrar::new());
let mut dapps_path = env::temp_dir();
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
let server = process(ServerBuilder::new(
&dapps_path, registrar.clone(), remote,
))
.signer_address(Some(("127.0.0.1".into(), SIGNER_PORT)))
.start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), io).unwrap();
(
server,
registrar,
)
}
pub fn serve_with_rpc(io: IoHandler) -> Server {
init_server(|builder| builder, io, Remote::new_sync()).0
}
pub fn serve_hosts(hosts: Option<Vec<String>>) -> Server {
let hosts = hosts.map(|hosts| hosts.into_iter().map(Into::into).collect());
init_server(|builder| builder.allowed_hosts(hosts.into()), Default::default(), Remote::new_sync()).0
}
pub fn serve_with_registrar() -> (Server, Arc<FakeRegistrar>) {
init_server(|builder| builder, Default::default(), Remote::new_sync())
}
pub fn serve_with_registrar_and_sync() -> (Server, Arc<FakeRegistrar>) {
init_server(|builder| {
builder.sync_status(Arc::new(|| true))
}, Default::default(), Remote::new_sync())
}
pub fn serve_with_registrar_and_fetch() -> (Server, FakeFetch, Arc<FakeRegistrar>) {
serve_with_registrar_and_fetch_and_threads(false)
}
pub fn serve_with_registrar_and_fetch_and_threads(multi_threaded: bool) -> (Server, FakeFetch, Arc<FakeRegistrar>) {
let fetch = FakeFetch::default();
let f = fetch.clone();
let (server, reg) = init_server(move |builder| {
builder.fetch(f.clone())
}, Default::default(), if multi_threaded { Remote::new_thread_per_future() } else { Remote::new_sync() });
(server, fetch, reg)
}
pub fn serve_with_fetch(web_token: &'static str, domain: &'static str) -> (Server, FakeFetch) {
let fetch = FakeFetch::default();
let f = fetch.clone();
let (server, _) = init_server(move |builder| {
builder
.fetch(f.clone())
.web_proxy_tokens(Arc::new(move |token| {
if &token == web_token { Some(domain.into()) } else { None }
}))
}, Default::default(), Remote::new_sync());
(server, fetch)
}
pub fn serve() -> Server {
init_server(|builder| builder, Default::default(), Remote::new_sync()).0
}
pub fn request(server: Server, request: &str) -> http_client::Response {
http_client::request(server.addr(), request)
}
pub fn assert_security_headers(headers: &[String]) {
http_client::assert_security_headers_present(headers, None)
}
pub fn assert_security_headers_for_embed(headers: &[String]) {
http_client::assert_security_headers_present(headers, Some(SIGNER_PORT))
}
|
dapps_path: PathBuf,
registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>,
web_proxy_tokens: Arc<WebProxyTokens>,
signer_address: Option<(String, u16)>,
allowed_hosts: DomainsValidation<Host>,
remote: Remote,
fetch: Option<T>,
}
impl ServerBuilder {
/// Construct new dapps server
pub fn new<P: AsRef<Path>>(dapps_path: P, registrar: Arc<ContractClient>, remote: Remote) -> Self {
ServerBuilder {
dapps_path: dapps_path.as_ref().to_owned(),
registrar: registrar,
sync_status: Arc::new(|| false),
web_proxy_tokens: Arc::new(|_| None),
signer_address: None,
allowed_hosts: DomainsValidation::Disabled,
remote: remote,
fetch: None,
}
}
}
impl<T: Fetch> ServerBuilder<T> {
/// Set a fetch client to use.
pub fn fetch<X: Fetch>(self, fetch: X) -> ServerBuilder<X> {
ServerBuilder {
dapps_path: self.dapps_path,
registrar: self.registrar,
sync_status: self.sync_status,
web_proxy_tokens: self.web_proxy_tokens,
signer_address: self.signer_address,
allowed_hosts: self.allowed_hosts,
remote: self.remote,
fetch: Some(fetch),
}
}
/// Change default sync status.
pub fn sync_status(mut self, status: Arc<SyncStatus>) -> Self {
self.sync_status = status;
self
}
/// Change default web proxy tokens validator.
pub fn web_proxy_tokens(mut self, tokens: Arc<WebProxyTokens>) -> Self {
self.web_proxy_tokens = tokens;
self
}
/// Change default signer port.
pub fn signer_address(mut self, signer_address: Option<(String, u16)>) -> Self {
self.signer_address = signer_address;
self
}
/// Change allowed hosts.
/// `None` - All hosts are allowed
/// `Some(whitelist)` - Allow only whitelisted hosts (+ listen address)
pub fn allowed_hosts(mut self, allowed_hosts: DomainsValidation<Host>) -> Self {
self.allowed_hosts = allowed_hosts;
self
}
/// Asynchronously start server with no authentication,
/// returns result with `Server` handle on success or an error.
pub fn start_unsecured_http(self, addr: &SocketAddr, io: IoHandler) -> Result<Server, http::Error> {
let fetch = self.fetch_client();
Server::start_http(
addr,
io,
self.allowed_hosts,
self.signer_address,
self.dapps_path,
vec![],
self.registrar,
self.sync_status,
self.web_proxy_tokens,
self.remote,
fetch,
)
}
fn fetch_client(&self) -> T {
match self.fetch.clone() {
Some(fetch) => fetch,
None => T::new().unwrap(),
}
}
}
const DAPPS_DOMAIN: &'static str = "web3.site";
/// Webapps HTTP server.
pub struct Server {
server: Option<http::Server>,
}
impl Server {
fn start_http<F: Fetch>(
addr: &SocketAddr,
io: IoHandler,
allowed_hosts: DomainsValidation<Host>,
signer_address: Option<(String, u16)>,
dapps_path: PathBuf,
extra_dapps: Vec<PathBuf>,
registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>,
web_proxy_tokens: Arc<WebProxyTokens>,
remote: Remote,
fetch: F,
) -> Result<Server, http::Error> {
let middleware = Middleware::dapps(
remote,
signer_address,
dapps_path,
extra_dapps,
DAPPS_DOMAIN.into(),
registrar,
sync_status,
web_proxy_tokens,
fetch,
);
let mut allowed_hosts: Option<Vec<Host>> = allowed_hosts.into();
allowed_hosts.as_mut().map(|mut hosts| {
hosts.push(format!("http://*.{}:*", DAPPS_DOMAIN).into());
hosts.push(format!("http://*.{}", DAPPS_DOMAIN).into());
});
http::ServerBuilder::new(io)
.request_middleware(middleware)
.allowed_hosts(allowed_hosts.into())
.cors(http::DomainsValidation::Disabled)
.start_http(addr)
.map(|server| Server {
server: Some(server),
})
}
/// Returns address that this server is bound to.
pub fn addr(&self) -> &SocketAddr {
self.server.as_ref()
.expect("server is always Some at the start; it's consumed only when object is dropped; qed")
.addrs()
.first()
.expect("You cannot start the server without binding to at least one address; qed")
}
}
impl Drop for Server {
fn drop(&mut self) {
self.server.take().unwrap().close()
}
}
|
/// Webapps HTTP+RPC server build.
pub struct ServerBuilder<T: Fetch = FetchClient> {
|
random_line_split
|
mod.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::env;
use std::str;
use std::net::SocketAddr;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use env_logger::LogBuilder;
use jsonrpc_core::IoHandler;
use jsonrpc_http_server::{self as http, Host, DomainsValidation};
use devtools::http_client;
use hash_fetch::urlhint::ContractClient;
use fetch::{Fetch, Client as FetchClient};
use parity_reactor::Remote;
use {Middleware, SyncStatus, WebProxyTokens};
mod registrar;
mod fetch;
use self::registrar::FakeRegistrar;
use self::fetch::FakeFetch;
const SIGNER_PORT: u16 = 18180;
fn init_logger() {
// Initialize logger
if let Ok(log) = env::var("RUST_LOG") {
let mut builder = LogBuilder::new();
builder.parse(&log);
let _ = builder.init(); // ignore errors since./test.sh will call this multiple times.
}
}
pub fn init_server<F, B>(process: F, io: IoHandler, remote: Remote) -> (Server, Arc<FakeRegistrar>) where
F: FnOnce(ServerBuilder) -> ServerBuilder<B>,
B: Fetch,
{
init_logger();
let registrar = Arc::new(FakeRegistrar::new());
let mut dapps_path = env::temp_dir();
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
let server = process(ServerBuilder::new(
&dapps_path, registrar.clone(), remote,
))
.signer_address(Some(("127.0.0.1".into(), SIGNER_PORT)))
.start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), io).unwrap();
(
server,
registrar,
)
}
pub fn serve_with_rpc(io: IoHandler) -> Server {
init_server(|builder| builder, io, Remote::new_sync()).0
}
pub fn serve_hosts(hosts: Option<Vec<String>>) -> Server {
let hosts = hosts.map(|hosts| hosts.into_iter().map(Into::into).collect());
init_server(|builder| builder.allowed_hosts(hosts.into()), Default::default(), Remote::new_sync()).0
}
pub fn serve_with_registrar() -> (Server, Arc<FakeRegistrar>) {
init_server(|builder| builder, Default::default(), Remote::new_sync())
}
pub fn serve_with_registrar_and_sync() -> (Server, Arc<FakeRegistrar>) {
init_server(|builder| {
builder.sync_status(Arc::new(|| true))
}, Default::default(), Remote::new_sync())
}
pub fn serve_with_registrar_and_fetch() -> (Server, FakeFetch, Arc<FakeRegistrar>) {
serve_with_registrar_and_fetch_and_threads(false)
}
pub fn serve_with_registrar_and_fetch_and_threads(multi_threaded: bool) -> (Server, FakeFetch, Arc<FakeRegistrar>) {
let fetch = FakeFetch::default();
let f = fetch.clone();
let (server, reg) = init_server(move |builder| {
builder.fetch(f.clone())
}, Default::default(), if multi_threaded { Remote::new_thread_per_future() } else { Remote::new_sync() });
(server, fetch, reg)
}
pub fn
|
(web_token: &'static str, domain: &'static str) -> (Server, FakeFetch) {
let fetch = FakeFetch::default();
let f = fetch.clone();
let (server, _) = init_server(move |builder| {
builder
.fetch(f.clone())
.web_proxy_tokens(Arc::new(move |token| {
if &token == web_token { Some(domain.into()) } else { None }
}))
}, Default::default(), Remote::new_sync());
(server, fetch)
}
pub fn serve() -> Server {
init_server(|builder| builder, Default::default(), Remote::new_sync()).0
}
pub fn request(server: Server, request: &str) -> http_client::Response {
http_client::request(server.addr(), request)
}
pub fn assert_security_headers(headers: &[String]) {
http_client::assert_security_headers_present(headers, None)
}
pub fn assert_security_headers_for_embed(headers: &[String]) {
http_client::assert_security_headers_present(headers, Some(SIGNER_PORT))
}
/// Webapps HTTP+RPC server build.
pub struct ServerBuilder<T: Fetch = FetchClient> {
dapps_path: PathBuf,
registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>,
web_proxy_tokens: Arc<WebProxyTokens>,
signer_address: Option<(String, u16)>,
allowed_hosts: DomainsValidation<Host>,
remote: Remote,
fetch: Option<T>,
}
impl ServerBuilder {
/// Construct new dapps server
pub fn new<P: AsRef<Path>>(dapps_path: P, registrar: Arc<ContractClient>, remote: Remote) -> Self {
ServerBuilder {
dapps_path: dapps_path.as_ref().to_owned(),
registrar: registrar,
sync_status: Arc::new(|| false),
web_proxy_tokens: Arc::new(|_| None),
signer_address: None,
allowed_hosts: DomainsValidation::Disabled,
remote: remote,
fetch: None,
}
}
}
impl<T: Fetch> ServerBuilder<T> {
/// Set a fetch client to use.
pub fn fetch<X: Fetch>(self, fetch: X) -> ServerBuilder<X> {
ServerBuilder {
dapps_path: self.dapps_path,
registrar: self.registrar,
sync_status: self.sync_status,
web_proxy_tokens: self.web_proxy_tokens,
signer_address: self.signer_address,
allowed_hosts: self.allowed_hosts,
remote: self.remote,
fetch: Some(fetch),
}
}
/// Change default sync status.
pub fn sync_status(mut self, status: Arc<SyncStatus>) -> Self {
self.sync_status = status;
self
}
/// Change default web proxy tokens validator.
pub fn web_proxy_tokens(mut self, tokens: Arc<WebProxyTokens>) -> Self {
self.web_proxy_tokens = tokens;
self
}
/// Change default signer port.
pub fn signer_address(mut self, signer_address: Option<(String, u16)>) -> Self {
self.signer_address = signer_address;
self
}
/// Change allowed hosts.
/// `None` - All hosts are allowed
/// `Some(whitelist)` - Allow only whitelisted hosts (+ listen address)
pub fn allowed_hosts(mut self, allowed_hosts: DomainsValidation<Host>) -> Self {
self.allowed_hosts = allowed_hosts;
self
}
/// Asynchronously start server with no authentication,
/// returns result with `Server` handle on success or an error.
pub fn start_unsecured_http(self, addr: &SocketAddr, io: IoHandler) -> Result<Server, http::Error> {
let fetch = self.fetch_client();
Server::start_http(
addr,
io,
self.allowed_hosts,
self.signer_address,
self.dapps_path,
vec![],
self.registrar,
self.sync_status,
self.web_proxy_tokens,
self.remote,
fetch,
)
}
fn fetch_client(&self) -> T {
match self.fetch.clone() {
Some(fetch) => fetch,
None => T::new().unwrap(),
}
}
}
const DAPPS_DOMAIN: &'static str = "web3.site";
/// Webapps HTTP server.
pub struct Server {
server: Option<http::Server>,
}
impl Server {
fn start_http<F: Fetch>(
addr: &SocketAddr,
io: IoHandler,
allowed_hosts: DomainsValidation<Host>,
signer_address: Option<(String, u16)>,
dapps_path: PathBuf,
extra_dapps: Vec<PathBuf>,
registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>,
web_proxy_tokens: Arc<WebProxyTokens>,
remote: Remote,
fetch: F,
) -> Result<Server, http::Error> {
let middleware = Middleware::dapps(
remote,
signer_address,
dapps_path,
extra_dapps,
DAPPS_DOMAIN.into(),
registrar,
sync_status,
web_proxy_tokens,
fetch,
);
let mut allowed_hosts: Option<Vec<Host>> = allowed_hosts.into();
allowed_hosts.as_mut().map(|mut hosts| {
hosts.push(format!("http://*.{}:*", DAPPS_DOMAIN).into());
hosts.push(format!("http://*.{}", DAPPS_DOMAIN).into());
});
http::ServerBuilder::new(io)
.request_middleware(middleware)
.allowed_hosts(allowed_hosts.into())
.cors(http::DomainsValidation::Disabled)
.start_http(addr)
.map(|server| Server {
server: Some(server),
})
}
/// Returns address that this server is bound to.
pub fn addr(&self) -> &SocketAddr {
self.server.as_ref()
.expect("server is always Some at the start; it's consumed only when object is dropped; qed")
.addrs()
.first()
.expect("You cannot start the server without binding to at least one address; qed")
}
}
impl Drop for Server {
fn drop(&mut self) {
self.server.take().unwrap().close()
}
}
|
serve_with_fetch
|
identifier_name
|
packet_identifier.rs
|
use std::io::{Read, Write};
use std::convert::From;
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use control::variable_header::VariableHeaderError;
use {Encodable, Decodable};
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub struct PacketIdentifier(pub u16);
impl<'a> Encodable<'a> for PacketIdentifier {
type Err = VariableHeaderError;
fn encode<W: Write>(&self, writer: &mut W) -> Result<(), VariableHeaderError> {
writer.write_u16::<BigEndian>(self.0)
.map_err(From::from)
}
fn
|
(&self) -> u32 {
2
}
}
impl<'a> Decodable<'a> for PacketIdentifier {
type Err = VariableHeaderError;
type Cond = ();
fn decode_with<R: Read>(reader: &mut R, _rest: Option<()>) -> Result<PacketIdentifier, VariableHeaderError> {
reader.read_u16::<BigEndian>()
.map(PacketIdentifier)
.map_err(From::from)
}
}
|
encoded_length
|
identifier_name
|
packet_identifier.rs
|
use std::io::{Read, Write};
use std::convert::From;
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use control::variable_header::VariableHeaderError;
use {Encodable, Decodable};
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub struct PacketIdentifier(pub u16);
impl<'a> Encodable<'a> for PacketIdentifier {
type Err = VariableHeaderError;
fn encode<W: Write>(&self, writer: &mut W) -> Result<(), VariableHeaderError> {
writer.write_u16::<BigEndian>(self.0)
.map_err(From::from)
}
fn encoded_length(&self) -> u32 {
2
}
}
impl<'a> Decodable<'a> for PacketIdentifier {
type Err = VariableHeaderError;
type Cond = ();
fn decode_with<R: Read>(reader: &mut R, _rest: Option<()>) -> Result<PacketIdentifier, VariableHeaderError> {
reader.read_u16::<BigEndian>()
.map(PacketIdentifier)
.map_err(From::from)
}
|
}
|
random_line_split
|
|
packet_identifier.rs
|
use std::io::{Read, Write};
use std::convert::From;
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use control::variable_header::VariableHeaderError;
use {Encodable, Decodable};
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub struct PacketIdentifier(pub u16);
impl<'a> Encodable<'a> for PacketIdentifier {
type Err = VariableHeaderError;
fn encode<W: Write>(&self, writer: &mut W) -> Result<(), VariableHeaderError> {
writer.write_u16::<BigEndian>(self.0)
.map_err(From::from)
}
fn encoded_length(&self) -> u32
|
}
impl<'a> Decodable<'a> for PacketIdentifier {
type Err = VariableHeaderError;
type Cond = ();
fn decode_with<R: Read>(reader: &mut R, _rest: Option<()>) -> Result<PacketIdentifier, VariableHeaderError> {
reader.read_u16::<BigEndian>()
.map(PacketIdentifier)
.map_err(From::from)
}
}
|
{
2
}
|
identifier_body
|
tac.rs
|
use std::process::Command;
use util::*;
static PROGNAME: &'static str = "./tac";
#[path = "common/util.rs"]
#[macro_use]
mod util;
#[test]
fn test_stdin_default() {
let mut cmd = Command::new(PROGNAME);
let result = run_piped_stdin(&mut cmd, "100\n200\n300\n400\n500");
assert_eq!(result.stdout, "500400\n300\n200\n100\n");
}
#[test]
fn test_stdin_non_newline_separator() {
let mut cmd = Command::new(PROGNAME);
let result = run_piped_stdin(&mut cmd.args(&["-s", ":"]), "100:200:300:400:500");
assert_eq!(result.stdout, "500400:300:200:100:");
}
#[test]
fn test_stdin_non_newline_separator_before() {
let mut cmd = Command::new(PROGNAME);
let result = run_piped_stdin(&mut cmd.args(&["-b", "-s", ":"]), "100:200:300:400:500");
assert_eq!(result.stdout, "500:400:300:200:100");
}
#[test]
fn test_single_default() {
let mut cmd = Command::new(PROGNAME);
let result = run(&mut cmd.arg("prime_per_line.txt"));
assert_eq!(result.stdout, get_file_contents("prime_per_line.expected"));
}
#[test]
fn test_single_non_newline_separator() {
let mut cmd = Command::new(PROGNAME);
let result = run(&mut cmd.args(&["-s", ":", "delimited_primes.txt"]));
assert_eq!(result.stdout, get_file_contents("delimited_primes.expected"));
}
#[test]
fn test_single_non_newline_separator_before()
|
{
let mut cmd = Command::new(PROGNAME);
let result = run(&mut cmd.args(&["-b", "-s", ":", "delimited_primes.txt"]));
assert_eq!(result.stdout, get_file_contents("delimited_primes_before.expected"));
}
|
identifier_body
|
|
tac.rs
|
use std::process::Command;
use util::*;
static PROGNAME: &'static str = "./tac";
#[path = "common/util.rs"]
#[macro_use]
mod util;
#[test]
fn test_stdin_default() {
let mut cmd = Command::new(PROGNAME);
let result = run_piped_stdin(&mut cmd, "100\n200\n300\n400\n500");
assert_eq!(result.stdout, "500400\n300\n200\n100\n");
}
#[test]
fn test_stdin_non_newline_separator() {
let mut cmd = Command::new(PROGNAME);
let result = run_piped_stdin(&mut cmd.args(&["-s", ":"]), "100:200:300:400:500");
assert_eq!(result.stdout, "500400:300:200:100:");
}
#[test]
fn test_stdin_non_newline_separator_before() {
let mut cmd = Command::new(PROGNAME);
let result = run_piped_stdin(&mut cmd.args(&["-b", "-s", ":"]), "100:200:300:400:500");
assert_eq!(result.stdout, "500:400:300:200:100");
}
#[test]
fn test_single_default() {
let mut cmd = Command::new(PROGNAME);
let result = run(&mut cmd.arg("prime_per_line.txt"));
assert_eq!(result.stdout, get_file_contents("prime_per_line.expected"));
|
fn test_single_non_newline_separator() {
let mut cmd = Command::new(PROGNAME);
let result = run(&mut cmd.args(&["-s", ":", "delimited_primes.txt"]));
assert_eq!(result.stdout, get_file_contents("delimited_primes.expected"));
}
#[test]
fn test_single_non_newline_separator_before() {
let mut cmd = Command::new(PROGNAME);
let result = run(&mut cmd.args(&["-b", "-s", ":", "delimited_primes.txt"]));
assert_eq!(result.stdout, get_file_contents("delimited_primes_before.expected"));
}
|
}
#[test]
|
random_line_split
|
tac.rs
|
use std::process::Command;
use util::*;
static PROGNAME: &'static str = "./tac";
#[path = "common/util.rs"]
#[macro_use]
mod util;
#[test]
fn test_stdin_default() {
let mut cmd = Command::new(PROGNAME);
let result = run_piped_stdin(&mut cmd, "100\n200\n300\n400\n500");
assert_eq!(result.stdout, "500400\n300\n200\n100\n");
}
#[test]
fn test_stdin_non_newline_separator() {
let mut cmd = Command::new(PROGNAME);
let result = run_piped_stdin(&mut cmd.args(&["-s", ":"]), "100:200:300:400:500");
assert_eq!(result.stdout, "500400:300:200:100:");
}
#[test]
fn
|
() {
let mut cmd = Command::new(PROGNAME);
let result = run_piped_stdin(&mut cmd.args(&["-b", "-s", ":"]), "100:200:300:400:500");
assert_eq!(result.stdout, "500:400:300:200:100");
}
#[test]
fn test_single_default() {
let mut cmd = Command::new(PROGNAME);
let result = run(&mut cmd.arg("prime_per_line.txt"));
assert_eq!(result.stdout, get_file_contents("prime_per_line.expected"));
}
#[test]
fn test_single_non_newline_separator() {
let mut cmd = Command::new(PROGNAME);
let result = run(&mut cmd.args(&["-s", ":", "delimited_primes.txt"]));
assert_eq!(result.stdout, get_file_contents("delimited_primes.expected"));
}
#[test]
fn test_single_non_newline_separator_before() {
let mut cmd = Command::new(PROGNAME);
let result = run(&mut cmd.args(&["-b", "-s", ":", "delimited_primes.txt"]));
assert_eq!(result.stdout, get_file_contents("delimited_primes_before.expected"));
}
|
test_stdin_non_newline_separator_before
|
identifier_name
|
bench.rs
|
#![feature(test)]
extern crate chaskey;
extern crate rand;
extern crate test;
use chaskey::{Digester, Chaskey};
use rand::{Rng, ThreadRng, thread_rng};
use std::hash::{SipHasher, Hasher};
use test::{black_box, Bencher};
const SIZE: usize = 57;
#[bench]
fn sip_hasher(b: &mut Bencher) {
let mut rng: ThreadRng = thread_rng();
let (k0, k1) = rng.gen();
let mut hasher = SipHasher::new_with_keys(k0, k1);
bench_hasher(b, &mut hasher, SIZE);
}
#[bench]
fn chaskey_hasher(b: &mut Bencher) {
let mut rng: ThreadRng = thread_rng();
let key: [u32; 4] = rng.gen();
let mut hasher: Digester<Chaskey> = Digester::new(key);
bench_hasher(b, &mut hasher, SIZE);
}
fn bench_hasher<H: Hasher>(b: &mut Bencher, hasher: &mut H, size: usize)
|
{
let data: Vec<u8> = {
let mut r = vec![0; size];
let mut rng: ThreadRng = thread_rng();
rng.fill_bytes(&mut r);
r
};
b.iter(|| {
hasher.write(&data);
let r = hasher.finish();
});
}
|
identifier_body
|
|
bench.rs
|
#![feature(test)]
|
use chaskey::{Digester, Chaskey};
use rand::{Rng, ThreadRng, thread_rng};
use std::hash::{SipHasher, Hasher};
use test::{black_box, Bencher};
const SIZE: usize = 57;
#[bench]
fn sip_hasher(b: &mut Bencher) {
let mut rng: ThreadRng = thread_rng();
let (k0, k1) = rng.gen();
let mut hasher = SipHasher::new_with_keys(k0, k1);
bench_hasher(b, &mut hasher, SIZE);
}
#[bench]
fn chaskey_hasher(b: &mut Bencher) {
let mut rng: ThreadRng = thread_rng();
let key: [u32; 4] = rng.gen();
let mut hasher: Digester<Chaskey> = Digester::new(key);
bench_hasher(b, &mut hasher, SIZE);
}
fn bench_hasher<H: Hasher>(b: &mut Bencher, hasher: &mut H, size: usize) {
let data: Vec<u8> = {
let mut r = vec![0; size];
let mut rng: ThreadRng = thread_rng();
rng.fill_bytes(&mut r);
r
};
b.iter(|| {
hasher.write(&data);
let r = hasher.finish();
});
}
|
extern crate chaskey;
extern crate rand;
extern crate test;
|
random_line_split
|
bench.rs
|
#![feature(test)]
extern crate chaskey;
extern crate rand;
extern crate test;
use chaskey::{Digester, Chaskey};
use rand::{Rng, ThreadRng, thread_rng};
use std::hash::{SipHasher, Hasher};
use test::{black_box, Bencher};
const SIZE: usize = 57;
#[bench]
fn sip_hasher(b: &mut Bencher) {
let mut rng: ThreadRng = thread_rng();
let (k0, k1) = rng.gen();
let mut hasher = SipHasher::new_with_keys(k0, k1);
bench_hasher(b, &mut hasher, SIZE);
}
#[bench]
fn chaskey_hasher(b: &mut Bencher) {
let mut rng: ThreadRng = thread_rng();
let key: [u32; 4] = rng.gen();
let mut hasher: Digester<Chaskey> = Digester::new(key);
bench_hasher(b, &mut hasher, SIZE);
}
fn
|
<H: Hasher>(b: &mut Bencher, hasher: &mut H, size: usize) {
let data: Vec<u8> = {
let mut r = vec![0; size];
let mut rng: ThreadRng = thread_rng();
rng.fill_bytes(&mut r);
r
};
b.iter(|| {
hasher.write(&data);
let r = hasher.finish();
});
}
|
bench_hasher
|
identifier_name
|
build_gecko.rs
|
path
};
static ref SEARCH_PATHS: Vec<PathBuf> = vec![
DISTDIR_PATH.join("include"),
DISTDIR_PATH.join("include/nspr"),
];
static ref ADDED_PATHS: Mutex<HashSet<PathBuf>> = Mutex::new(HashSet::new());
static ref LAST_MODIFIED: Mutex<SystemTime> =
Mutex::new(get_modified_time(&env::current_exe().unwrap())
.expect("Failed to get modified time of executable"));
}
fn get_modified_time(file: &Path) -> Option<SystemTime> {
file.metadata().and_then(|m| m.modified()).ok()
}
fn update_last_modified(file: &Path) {
let modified = get_modified_time(file).expect("Couldn't get file modification time");
let mut last_modified = LAST_MODIFIED.lock().unwrap();
*last_modified = cmp::max(modified, *last_modified);
}
fn search_include(name: &str) -> Option<PathBuf> {
for path in SEARCH_PATHS.iter() {
let file = path.join(name);
if file.is_file() {
update_last_modified(&file);
return Some(file);
}
}
None
}
fn add_headers_recursively(path: PathBuf, added_paths: &mut HashSet<PathBuf>) {
if added_paths.contains(&path) {
return;
}
let mut file = File::open(&path).unwrap();
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
println!("cargo:rerun-if-changed={}", path.to_str().unwrap());
added_paths.insert(path);
// Find all includes and add them recursively
for cap in INCLUDE_RE.captures_iter(&content) {
if let Some(path) = search_include(cap.get(1).unwrap().as_str()) {
add_headers_recursively(path, added_paths);
}
}
}
fn add_include(name: &str) -> String {
let mut added_paths = ADDED_PATHS.lock().unwrap();
let file = search_include(name).expect("Include not found!");
let result = String::from(file.to_str().unwrap());
add_headers_recursively(file, &mut *added_paths);
result
}
trait BuilderExt {
fn get_initial_builder() -> Builder;
fn include<T: Into<String>>(self, file: T) -> Builder;
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder;
fn borrowed_type(self, ty: &str) -> Builder;
fn mutable_borrowed_type(self, ty: &str) -> Builder;
}
fn add_clang_args(mut builder: Builder, config: &Table, matched_os: &mut bool) -> Builder {
fn add_args(mut builder: Builder, values: &[toml::Value]) -> Builder {
for item in values.iter() {
builder = builder.clang_arg(item.as_str().expect("Expect string in list"));
}
builder
}
for (k, v) in config.iter() {
if k == "args" {
builder = add_args(builder, v.as_array().unwrap().as_slice());
continue;
}
let equal_idx = k.find('=').expect(&format!("Invalid key: {}", k));
let (target_type, target_value) = k.split_at(equal_idx);
if TARGET_INFO[target_type]!= target_value[1..] {
continue;
}
if target_type == "os" {
*matched_os = true;
}
builder = match *v {
toml::Value::Table(ref table) => add_clang_args(builder, table, matched_os),
toml::Value::Array(ref array) => add_args(builder, array),
_ => panic!("Unknown type"),
};
}
builder
}
impl BuilderExt for Builder {
fn get_initial_builder() -> Builder {
use bindgen::RustTarget;
// Disable rust unions, because we replace some types inside of
// them.
let mut builder = Builder::default().rust_target(RustTarget::Stable_1_0);
let rustfmt_path = env::var_os("RUSTFMT")
// This can be replaced with
// >.filter(|p|!p.is_empty()).map(PathBuf::from)
// once we can use 1.27+.
.and_then(|p| {
if p.is_empty() {
None
} else {
Some(PathBuf::from(p))
}
});
if let Some(path) = rustfmt_path {
builder = builder.with_rustfmt(path);
}
for dir in SEARCH_PATHS.iter() {
builder = builder.clang_arg("-I").clang_arg(dir.to_str().unwrap());
}
builder = builder.include(add_include("mozilla-config.h"));
if env::var("CARGO_FEATURE_GECKO_DEBUG").is_ok() {
builder = builder.clang_arg("-DDEBUG=1").clang_arg("-DJS_DEBUG=1");
}
let mut matched_os = false;
let build_config = CONFIG["build"].as_table().expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
let build_config = BUILD_CONFIG["build"]
.as_table()
.expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
if!matched_os {
panic!("Unknown platform");
}
builder
}
fn include<T: Into<String>>(self, file: T) -> Builder {
self.clang_arg("-include").clang_arg(file)
}
// This makes an FFI-safe void type that can't be matched on
// &VoidType is UB to have, because you can match on it
// to produce a reachable unreachable. If it's wrapped in
// a struct as a private field it becomes okay again
//
// Not 100% sure of how safe this is, but it's what we're using
// in the XPCOM ffi too
// https://github.com/nikomatsakis/rust-memory-model/issues/2
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder {
if!structs_list.contains(ty) {
self.blacklist_type(ty)
.raw_line(format!("enum {}Void {{ }}", ty))
.raw_line(format!("pub struct {0}({0}Void);", ty))
} else {
self
}
}
fn borrowed_type(self, ty: &str) -> Builder {
self.blacklist_type(format!("{}Borrowed", ty))
.raw_line(format!("pub type {0}Borrowed<'a> = &'a {0};", ty))
.blacklist_type(format!("{}BorrowedOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedOrNull<'a> = Option<&'a {0}>;",
ty
))
}
fn mutable_borrowed_type(self, ty: &str) -> Builder {
self.borrowed_type(ty)
.blacklist_type(format!("{}BorrowedMut", ty))
.raw_line(format!("pub type {0}BorrowedMut<'a> = &'a mut {0};", ty))
.blacklist_type(format!("{}BorrowedMutOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedMutOrNull<'a> = Option<&'a mut {0}>;",
ty
))
}
}
struct Fixup {
pat: String,
rep: String,
}
fn write_binding_file(builder: Builder, file: &str, fixups: &[Fixup]) {
let out_file = OUTDIR_PATH.join(file);
if let Some(modified) = get_modified_time(&out_file) {
// Don't generate the file if nothing it depends on was modified.
let last_modified = LAST_MODIFIED.lock().unwrap();
if *last_modified <= modified {
return;
}
}
let command_line_opts = builder.command_line_flags();
let result = builder.generate();
let mut result = match result {
Ok(bindings) => bindings.to_string(),
Err(_) => {
panic!(
"Failed to generate bindings, flags: {:?}",
command_line_opts
);
},
};
for fixup in fixups.iter() {
result = Regex::new(&fixup.pat)
.unwrap()
.replace_all(&result, &*fixup.rep)
.into_owned()
.into();
}
let bytes = result.into_bytes();
File::create(&out_file)
.unwrap()
.write_all(&bytes)
.expect("Unable to write output");
}
fn get_arc_types() -> Vec<String> {
// Read the file
let mut list_file = File::open(DISTDIR_PATH.join("include/mozilla/ServoArcTypeList.h"))
.expect("Unable to open ServoArcTypeList.h");
let mut content = String::new();
list_file
.read_to_string(&mut content)
.expect("Fail to read ServoArcTypeList.h");
// Remove comments
let block_comment_re = Regex::new(r#"(?s)/\*.*?\*/"#).unwrap();
let content = block_comment_re.replace_all(&content, "");
// Extract the list
let re = Regex::new(r#"^SERVO_ARC_TYPE\(\w+,\s*(\w+)\)$"#).unwrap();
content
.lines()
.map(|line| line.trim())
.filter(|line|!line.is_empty())
.map(|line| {
re.captures(&line)
.expect(&format!(
"Unrecognized line in ServoArcTypeList.h: '{}'",
line
))
.get(1)
.unwrap()
.as_str()
.to_string()
})
.collect()
}
struct BuilderWithConfig<'a> {
builder: Builder,
config: &'a Table,
used_keys: HashSet<&'static str>,
}
impl<'a> BuilderWithConfig<'a> {
fn new(builder: Builder, config: &'a Table) -> Self {
BuilderWithConfig {
builder,
config,
used_keys: HashSet::new(),
}
}
fn handle_list<F>(self, key: &'static str, func: F) -> BuilderWithConfig<'a>
where
F: FnOnce(Builder, slice::Iter<'a, toml::Value>) -> Builder,
{
let mut builder = self.builder;
let config = self.config;
let mut used_keys = self.used_keys;
if let Some(list) = config.get(key) {
used_keys.insert(key);
builder = func(builder, list.as_array().unwrap().as_slice().iter());
}
BuilderWithConfig {
builder,
config,
used_keys,
}
}
fn handle_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a toml::Value) -> Builder,
{
self.handle_list(key, |b, iter| iter.fold(b, |b, item| func(b, item)))
}
fn handle_str_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a str) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_str().unwrap()))
}
fn handle_table_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a Table) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_table().unwrap()))
}
fn handle_common(self, fixups: &mut Vec<Fixup>) -> BuilderWithConfig<'a> {
self.handle_str_items("headers", |b, item| b.header(add_include(item)))
.handle_str_items("raw-lines", |b, item| b.raw_line(item))
.handle_str_items("hide-types", |b, item| b.blacklist_type(item))
.handle_table_items("fixups", |builder, item| {
fixups.push(Fixup {
pat: item["pat"].as_str().unwrap().into(),
rep: item["rep"].as_str().unwrap().into(),
});
builder
})
}
fn get_builder(self) -> Builder {
for key in self.config.keys() {
if!self.used_keys.contains(key.as_str()) {
panic!(format!("Unknown key: {}", key));
}
}
self.builder
}
}
fn generate_structs() {
let builder = Builder::get_initial_builder()
.enable_cxx_namespaces()
.with_codegen_config(CodegenConfig {
types: true,
vars: true,
..CodegenConfig::nothing()
});
let mut fixups = vec![];
let builder = BuilderWithConfig::new(builder, CONFIG["structs"].as_table().unwrap())
.handle_common(&mut fixups)
.handle_str_items("bitfield-enums", |b, item| b.bitfield_enum(item))
.handle_str_items("rusty-enums", |b, item| b.rustified_enum(item))
.handle_str_items("whitelist-vars", |b, item| b.whitelist_var(item))
.handle_str_items("whitelist-types", |b, item| b.whitelist_type(item))
.handle_str_items("opaque-types", |b, item| b.opaque_type(item))
.handle_table_items("mapped-generic-types", |builder, item| {
let generic = item["generic"].as_bool().unwrap();
let gecko = item["gecko"].as_str().unwrap();
let servo = item["servo"].as_str().unwrap();
let gecko_name = gecko.rsplit("::").next().unwrap();
let gecko = gecko
.split("::")
.map(|s| format!("\\s*{}\\s*", s))
.collect::<Vec<_>>()
.join("::");
fixups.push(Fixup {
pat: format!("\\broot\\s*::\\s*{}\\b", gecko),
rep: format!("::gecko_bindings::structs::{}", gecko_name),
});
builder.blacklist_type(gecko).raw_line(format!(
"pub type {0}{2} = {1}{2};",
gecko_name,
servo,
if generic { "<T>" } else { "" }
))
})
.get_builder();
write_binding_file(builder, STRUCTS_FILE, &fixups);
}
fn setup_logging() -> bool {
use log;
struct BuildLogger {
file: Option<Mutex<fs::File>>,
filter: String,
}
impl log::Log for BuildLogger {
fn enabled(&self, meta: &log::Metadata) -> bool {
self.file.is_some() && meta.target().contains(&self.filter)
}
fn log(&self, record: &log::Record) {
if!self.enabled(record.metadata()) {
return;
}
let mut file = self.file.as_ref().unwrap().lock().unwrap();
let _ = writeln!(
file,
"{} - {} - {} @ {}:{}",
record.level(),
record.target(),
record.args(),
record.file().unwrap_or("<unknown>"),
record.line().unwrap_or(0)
);
}
fn flush(&self) {
if let Some(ref file) = self.file
|
}
}
if let Some(path) = env::var_os("STYLO_BUILD_LOG") {
log::set_max_level(log::LevelFilter::Debug);
log::set_boxed_logger(Box::new(BuildLogger {
file: fs::File::create(path).ok().map(Mutex::new),
filter: env::var("STYLO_BUILD_FILTER")
.ok()
.unwrap_or_else(|| "bindgen".to_owned()),
})).expect("Failed to set logger.");
true
|
{
file.lock().unwrap().flush().unwrap();
}
|
conditional_block
|
build_gecko.rs
|
fn include<T: Into<String>>(self, file: T) -> Builder {
self.clang_arg("-include").clang_arg(file)
}
// This makes an FFI-safe void type that can't be matched on
// &VoidType is UB to have, because you can match on it
// to produce a reachable unreachable. If it's wrapped in
// a struct as a private field it becomes okay again
//
// Not 100% sure of how safe this is, but it's what we're using
// in the XPCOM ffi too
// https://github.com/nikomatsakis/rust-memory-model/issues/2
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder {
if!structs_list.contains(ty) {
self.blacklist_type(ty)
.raw_line(format!("enum {}Void {{ }}", ty))
.raw_line(format!("pub struct {0}({0}Void);", ty))
} else {
self
}
}
fn borrowed_type(self, ty: &str) -> Builder {
self.blacklist_type(format!("{}Borrowed", ty))
.raw_line(format!("pub type {0}Borrowed<'a> = &'a {0};", ty))
.blacklist_type(format!("{}BorrowedOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedOrNull<'a> = Option<&'a {0}>;",
ty
))
}
fn mutable_borrowed_type(self, ty: &str) -> Builder {
self.borrowed_type(ty)
.blacklist_type(format!("{}BorrowedMut", ty))
.raw_line(format!("pub type {0}BorrowedMut<'a> = &'a mut {0};", ty))
.blacklist_type(format!("{}BorrowedMutOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedMutOrNull<'a> = Option<&'a mut {0}>;",
ty
))
}
}
struct Fixup {
pat: String,
rep: String,
}
fn write_binding_file(builder: Builder, file: &str, fixups: &[Fixup]) {
let out_file = OUTDIR_PATH.join(file);
if let Some(modified) = get_modified_time(&out_file) {
// Don't generate the file if nothing it depends on was modified.
let last_modified = LAST_MODIFIED.lock().unwrap();
if *last_modified <= modified {
return;
}
}
let command_line_opts = builder.command_line_flags();
let result = builder.generate();
let mut result = match result {
Ok(bindings) => bindings.to_string(),
Err(_) => {
panic!(
"Failed to generate bindings, flags: {:?}",
command_line_opts
);
},
};
for fixup in fixups.iter() {
result = Regex::new(&fixup.pat)
.unwrap()
.replace_all(&result, &*fixup.rep)
.into_owned()
.into();
}
let bytes = result.into_bytes();
File::create(&out_file)
.unwrap()
.write_all(&bytes)
.expect("Unable to write output");
}
fn get_arc_types() -> Vec<String> {
// Read the file
let mut list_file = File::open(DISTDIR_PATH.join("include/mozilla/ServoArcTypeList.h"))
.expect("Unable to open ServoArcTypeList.h");
let mut content = String::new();
list_file
.read_to_string(&mut content)
.expect("Fail to read ServoArcTypeList.h");
// Remove comments
let block_comment_re = Regex::new(r#"(?s)/\*.*?\*/"#).unwrap();
let content = block_comment_re.replace_all(&content, "");
// Extract the list
let re = Regex::new(r#"^SERVO_ARC_TYPE\(\w+,\s*(\w+)\)$"#).unwrap();
content
.lines()
.map(|line| line.trim())
.filter(|line|!line.is_empty())
.map(|line| {
re.captures(&line)
.expect(&format!(
"Unrecognized line in ServoArcTypeList.h: '{}'",
line
))
.get(1)
.unwrap()
.as_str()
.to_string()
})
.collect()
}
struct BuilderWithConfig<'a> {
builder: Builder,
config: &'a Table,
used_keys: HashSet<&'static str>,
}
impl<'a> BuilderWithConfig<'a> {
fn new(builder: Builder, config: &'a Table) -> Self {
BuilderWithConfig {
builder,
config,
used_keys: HashSet::new(),
}
}
fn handle_list<F>(self, key: &'static str, func: F) -> BuilderWithConfig<'a>
where
F: FnOnce(Builder, slice::Iter<'a, toml::Value>) -> Builder,
{
let mut builder = self.builder;
let config = self.config;
let mut used_keys = self.used_keys;
if let Some(list) = config.get(key) {
used_keys.insert(key);
builder = func(builder, list.as_array().unwrap().as_slice().iter());
}
BuilderWithConfig {
builder,
config,
used_keys,
}
}
fn handle_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a toml::Value) -> Builder,
{
self.handle_list(key, |b, iter| iter.fold(b, |b, item| func(b, item)))
}
fn handle_str_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a str) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_str().unwrap()))
}
fn handle_table_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a Table) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_table().unwrap()))
}
fn handle_common(self, fixups: &mut Vec<Fixup>) -> BuilderWithConfig<'a> {
self.handle_str_items("headers", |b, item| b.header(add_include(item)))
.handle_str_items("raw-lines", |b, item| b.raw_line(item))
.handle_str_items("hide-types", |b, item| b.blacklist_type(item))
.handle_table_items("fixups", |builder, item| {
fixups.push(Fixup {
pat: item["pat"].as_str().unwrap().into(),
rep: item["rep"].as_str().unwrap().into(),
});
builder
})
}
fn get_builder(self) -> Builder {
for key in self.config.keys() {
if!self.used_keys.contains(key.as_str()) {
panic!(format!("Unknown key: {}", key));
}
}
self.builder
}
}
fn generate_structs() {
let builder = Builder::get_initial_builder()
.enable_cxx_namespaces()
.with_codegen_config(CodegenConfig {
types: true,
vars: true,
..CodegenConfig::nothing()
});
let mut fixups = vec![];
let builder = BuilderWithConfig::new(builder, CONFIG["structs"].as_table().unwrap())
.handle_common(&mut fixups)
.handle_str_items("bitfield-enums", |b, item| b.bitfield_enum(item))
.handle_str_items("rusty-enums", |b, item| b.rustified_enum(item))
.handle_str_items("whitelist-vars", |b, item| b.whitelist_var(item))
.handle_str_items("whitelist-types", |b, item| b.whitelist_type(item))
.handle_str_items("opaque-types", |b, item| b.opaque_type(item))
.handle_table_items("mapped-generic-types", |builder, item| {
let generic = item["generic"].as_bool().unwrap();
let gecko = item["gecko"].as_str().unwrap();
let servo = item["servo"].as_str().unwrap();
let gecko_name = gecko.rsplit("::").next().unwrap();
let gecko = gecko
.split("::")
.map(|s| format!("\\s*{}\\s*", s))
.collect::<Vec<_>>()
.join("::");
fixups.push(Fixup {
pat: format!("\\broot\\s*::\\s*{}\\b", gecko),
rep: format!("::gecko_bindings::structs::{}", gecko_name),
});
builder.blacklist_type(gecko).raw_line(format!(
"pub type {0}{2} = {1}{2};",
gecko_name,
servo,
if generic { "<T>" } else { "" }
))
})
.get_builder();
write_binding_file(builder, STRUCTS_FILE, &fixups);
}
fn setup_logging() -> bool {
use log;
struct BuildLogger {
file: Option<Mutex<fs::File>>,
filter: String,
}
impl log::Log for BuildLogger {
fn enabled(&self, meta: &log::Metadata) -> bool {
self.file.is_some() && meta.target().contains(&self.filter)
}
fn log(&self, record: &log::Record) {
if!self.enabled(record.metadata()) {
return;
}
let mut file = self.file.as_ref().unwrap().lock().unwrap();
let _ = writeln!(
file,
"{} - {} - {} @ {}:{}",
record.level(),
record.target(),
record.args(),
record.file().unwrap_or("<unknown>"),
record.line().unwrap_or(0)
);
}
fn flush(&self) {
if let Some(ref file) = self.file {
file.lock().unwrap().flush().unwrap();
}
}
}
if let Some(path) = env::var_os("STYLO_BUILD_LOG") {
log::set_max_level(log::LevelFilter::Debug);
log::set_boxed_logger(Box::new(BuildLogger {
file: fs::File::create(path).ok().map(Mutex::new),
filter: env::var("STYLO_BUILD_FILTER")
.ok()
.unwrap_or_else(|| "bindgen".to_owned()),
})).expect("Failed to set logger.");
true
} else {
false
}
}
fn generate_bindings() {
let builder = Builder::get_initial_builder()
.disable_name_namespacing()
.with_codegen_config(CodegenConfig {
functions: true,
..CodegenConfig::nothing()
});
let config = CONFIG["bindings"].as_table().unwrap();
let mut structs_types = HashSet::new();
let mut fixups = vec![];
let mut builder = BuilderWithConfig::new(builder, config)
.handle_common(&mut fixups)
.handle_str_items("whitelist-functions", |b, item| b.whitelist_function(item))
.handle_str_items("structs-types", |mut builder, ty| {
builder = builder.blacklist_type(ty)
.raw_line(format!("use gecko_bindings::structs::{};", ty));
structs_types.insert(ty);
// TODO this is hacky, figure out a better way to do it without
// hardcoding everything...
if ty.starts_with("nsStyle") {
builder = builder
.raw_line(format!("unsafe impl Send for {} {{}}", ty))
.raw_line(format!("unsafe impl Sync for {} {{}}", ty));
}
builder
})
// TODO This was added due to servo/rust-bindgen#75, but
// that has been fixed in clang 4.0+. When we switch people
// to libclang 4.0, we can remove this.
.handle_table_items("array-types", |builder, item| {
let cpp_type = item["cpp-type"].as_str().unwrap();
let rust_type = item["rust-type"].as_str().unwrap();
builder
.raw_line(format!(concat!("pub type nsTArrayBorrowed_{}<'a> = ",
"&'a mut ::gecko_bindings::structs::nsTArray<{}>;"),
cpp_type, rust_type))
})
.handle_table_items("servo-owned-types", |mut builder, item| {
let name = item["name"].as_str().unwrap();
builder = builder.blacklist_type(format!("{}Owned", name))
.raw_line(format!("pub type {0}Owned = ::gecko_bindings::sugar::ownership::Owned<{0}>;", name))
.blacklist_type(format!("{}OwnedOrNull", name))
.raw_line(format!(concat!("pub type {0}OwnedOrNull = ",
"::gecko_bindings::sugar::ownership::OwnedOrNull<{0}>;"), name))
.mutable_borrowed_type(name);
if item["opaque"].as_bool().unwrap() {
builder = builder.zero_size_type(name, &structs_types);
}
builder
})
.handle_str_items("servo-immutable-borrow-types", |b, ty| b.borrowed_type(ty))
// Right now the only immutable borrow types are ones which we import
// from the |structs| module. As such, we don't need to create an opaque
// type with zero_size_type. If we ever introduce immutable borrow types
// which _do_ need to be opaque, we'll need a separate mode.
.handle_str_items("servo-borrow-types", |b, ty| b.mutable_borrowed_type(ty))
.get_builder();
for ty in get_arc_types().iter() {
builder = builder
.blacklist_type(format!("{}Strong", ty))
.raw_line(format!(
"pub type {0}Strong = ::gecko_bindings::sugar::ownership::Strong<{0}>;",
ty
))
.borrowed_type(ty)
.zero_size_type(ty, &structs_types);
}
write_binding_file(builder, BINDINGS_FILE, &fixups);
}
fn generate_atoms() {
let script = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap())
.join("gecko")
.join("regen_atoms.py");
println!("cargo:rerun-if-changed={}", script.display());
let status = Command::new(&*PYTHON)
.arg(&script)
.arg(DISTDIR_PATH.as_os_str())
.arg(OUTDIR_PATH.as_os_str())
.status()
.unwrap();
if!status.success() {
exit(1);
}
}
pub fn generate() {
use std::thread;
macro_rules! run_tasks {
($($task:expr,)+) => {
if setup_logging() {
$($task;)+
} else {
let threads = vec![$( thread::spawn(|| $task) ),+];
for thread in threads.into_iter() {
thread.join().unwrap();
}
}
}
}
run_tasks! {
generate_structs(),
generate_bindings(),
generate_atoms(),
}
}
}
#[cfg(not(feature = "bindgen"))]
mod bindings {
use std::{env, fs, io};
use std::path::{Path, PathBuf};
use super::common::*;
/// Copy contents of one directory into another.
/// It currently only does a shallow copy.
fn
|
copy_dir
|
identifier_name
|
|
build_gecko.rs
|
lazy_static! {
static ref CONFIG: Table = {
// Load Gecko's binding generator config from the source tree.
let path = PathBuf::from(env::var_os("MOZ_SRC").unwrap())
.join("layout/style/ServoBindings.toml");
read_config(&path)
};
static ref BUILD_CONFIG: Table = {
// Load build-specific config overrides.
// FIXME: We should merge with CONFIG above instead of
// forcing callers to do it.
let path = PathBuf::from(env::var_os("MOZ_TOPOBJDIR").unwrap())
.join("layout/style/bindgen.toml");
read_config(&path)
};
static ref TARGET_INFO: HashMap<String, String> = {
const TARGET_PREFIX: &'static str = "CARGO_CFG_TARGET_";
let mut result = HashMap::new();
for (k, v) in env::vars() {
if k.starts_with(TARGET_PREFIX) {
result.insert(k[TARGET_PREFIX.len()..].to_lowercase(), v);
}
}
result
};
static ref INCLUDE_RE: Regex = Regex::new(r#"#include\s*"(.+?)""#).unwrap();
static ref DISTDIR_PATH: PathBuf = {
let path = PathBuf::from(env::var_os("MOZ_DIST").unwrap());
if!path.is_absolute() ||!path.is_dir() {
panic!("MOZ_DIST must be an absolute directory, was: {}", path.display());
}
path
};
static ref SEARCH_PATHS: Vec<PathBuf> = vec![
DISTDIR_PATH.join("include"),
DISTDIR_PATH.join("include/nspr"),
];
static ref ADDED_PATHS: Mutex<HashSet<PathBuf>> = Mutex::new(HashSet::new());
static ref LAST_MODIFIED: Mutex<SystemTime> =
Mutex::new(get_modified_time(&env::current_exe().unwrap())
.expect("Failed to get modified time of executable"));
}
fn get_modified_time(file: &Path) -> Option<SystemTime> {
file.metadata().and_then(|m| m.modified()).ok()
}
fn update_last_modified(file: &Path) {
let modified = get_modified_time(file).expect("Couldn't get file modification time");
let mut last_modified = LAST_MODIFIED.lock().unwrap();
*last_modified = cmp::max(modified, *last_modified);
}
fn search_include(name: &str) -> Option<PathBuf> {
for path in SEARCH_PATHS.iter() {
let file = path.join(name);
if file.is_file() {
update_last_modified(&file);
return Some(file);
}
}
None
}
fn add_headers_recursively(path: PathBuf, added_paths: &mut HashSet<PathBuf>) {
if added_paths.contains(&path) {
return;
}
let mut file = File::open(&path).unwrap();
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
println!("cargo:rerun-if-changed={}", path.to_str().unwrap());
added_paths.insert(path);
// Find all includes and add them recursively
for cap in INCLUDE_RE.captures_iter(&content) {
if let Some(path) = search_include(cap.get(1).unwrap().as_str()) {
add_headers_recursively(path, added_paths);
}
}
}
fn add_include(name: &str) -> String {
let mut added_paths = ADDED_PATHS.lock().unwrap();
let file = search_include(name).expect("Include not found!");
let result = String::from(file.to_str().unwrap());
add_headers_recursively(file, &mut *added_paths);
result
}
trait BuilderExt {
fn get_initial_builder() -> Builder;
fn include<T: Into<String>>(self, file: T) -> Builder;
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder;
fn borrowed_type(self, ty: &str) -> Builder;
fn mutable_borrowed_type(self, ty: &str) -> Builder;
}
fn add_clang_args(mut builder: Builder, config: &Table, matched_os: &mut bool) -> Builder {
fn add_args(mut builder: Builder, values: &[toml::Value]) -> Builder {
for item in values.iter() {
builder = builder.clang_arg(item.as_str().expect("Expect string in list"));
}
builder
}
for (k, v) in config.iter() {
if k == "args" {
builder = add_args(builder, v.as_array().unwrap().as_slice());
continue;
}
let equal_idx = k.find('=').expect(&format!("Invalid key: {}", k));
let (target_type, target_value) = k.split_at(equal_idx);
if TARGET_INFO[target_type]!= target_value[1..] {
continue;
}
if target_type == "os" {
*matched_os = true;
}
builder = match *v {
toml::Value::Table(ref table) => add_clang_args(builder, table, matched_os),
toml::Value::Array(ref array) => add_args(builder, array),
_ => panic!("Unknown type"),
};
}
builder
}
impl BuilderExt for Builder {
fn get_initial_builder() -> Builder {
use bindgen::RustTarget;
// Disable rust unions, because we replace some types inside of
// them.
let mut builder = Builder::default().rust_target(RustTarget::Stable_1_0);
let rustfmt_path = env::var_os("RUSTFMT")
// This can be replaced with
// >.filter(|p|!p.is_empty()).map(PathBuf::from)
// once we can use 1.27+.
.and_then(|p| {
if p.is_empty() {
None
} else {
Some(PathBuf::from(p))
}
});
if let Some(path) = rustfmt_path {
builder = builder.with_rustfmt(path);
}
for dir in SEARCH_PATHS.iter() {
builder = builder.clang_arg("-I").clang_arg(dir.to_str().unwrap());
}
builder = builder.include(add_include("mozilla-config.h"));
if env::var("CARGO_FEATURE_GECKO_DEBUG").is_ok() {
builder = builder.clang_arg("-DDEBUG=1").clang_arg("-DJS_DEBUG=1");
}
let mut matched_os = false;
let build_config = CONFIG["build"].as_table().expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
let build_config = BUILD_CONFIG["build"]
.as_table()
.expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
if!matched_os {
panic!("Unknown platform");
}
builder
}
fn include<T: Into<String>>(self, file: T) -> Builder {
self.clang_arg("-include").clang_arg(file)
}
// This makes an FFI-safe void type that can't be matched on
// &VoidType is UB to have, because you can match on it
// to produce a reachable unreachable. If it's wrapped in
// a struct as a private field it becomes okay again
//
// Not 100% sure of how safe this is, but it's what we're using
// in the XPCOM ffi too
// https://github.com/nikomatsakis/rust-memory-model/issues/2
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder {
if!structs_list.contains(ty) {
self.blacklist_type(ty)
.raw_line(format!("enum {}Void {{ }}", ty))
.raw_line(format!("pub struct {0}({0}Void);", ty))
} else {
self
}
}
fn borrowed_type(self, ty: &str) -> Builder {
self.blacklist_type(format!("{}Borrowed", ty))
.raw_line(format!("pub type {0}Borrowed<'a> = &'a {0};", ty))
.blacklist_type(format!("{}BorrowedOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedOrNull<'a> = Option<&'a {0}>;",
ty
))
}
fn mutable_borrowed_type(self, ty: &str) -> Builder {
self.borrowed_type(ty)
.blacklist_type(format!("{}BorrowedMut", ty))
.raw_line(format!("pub type {0}BorrowedMut<'a> = &'a mut {0};", ty))
.blacklist_type(format!("{}BorrowedMutOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedMutOrNull<'a> = Option<&'a mut {0}>;",
ty
))
}
}
struct Fixup {
pat: String,
rep: String,
}
fn write_binding_file(builder: Builder, file: &str, fixups: &[Fixup]) {
let out_file = OUTDIR_PATH.join(file);
if let Some(modified) = get_modified_time(&out_file) {
// Don't generate the file if nothing it depends on was modified.
let last_modified = LAST_MODIFIED.lock().unwrap();
if *last_modified <= modified {
return;
}
}
let command_line_opts = builder.command_line_flags();
let result = builder.generate();
let mut result = match result {
Ok(bindings) => bindings.to_string(),
Err(_) => {
panic!(
"Failed to generate bindings, flags: {:?}",
command_line_opts
);
},
};
for fixup in fixups.iter() {
result = Regex::new(&fixup.pat)
.unwrap()
.replace_all(&result, &*fixup.rep)
.into_owned()
.into();
}
let bytes = result.into_bytes();
File::create(&out_file)
.unwrap()
.write_all(&bytes)
.expect("Unable to write output");
}
fn get_arc_types() -> Vec<String> {
// Read the file
let mut list_file = File::open(DISTDIR_PATH.join("include/mozilla/ServoArcTypeList.h"))
.expect("Unable to open ServoArcTypeList.h");
let mut content = String::new();
list_file
.read_to_string(&mut content)
.expect("Fail to read ServoArcTypeList.h");
// Remove comments
let block_comment_re = Regex::new(r#"(?s)/\*.*?\*/"#).unwrap();
let content = block_comment_re.replace_all(&content, "");
// Extract the list
let re = Regex::new(r#"^SERVO_ARC_TYPE\(\w+,\s*(\w+)\)$"#).unwrap();
content
.lines()
.map(|line| line.trim())
.filter(|line|!line.is_empty())
.map(|line| {
re.captures(&line)
.expect(&format!(
"Unrecognized line in ServoArcTypeList.h: '{}'",
line
))
.get(1)
.unwrap()
.as_str()
.to_string()
})
.collect()
}
struct BuilderWithConfig<'a> {
builder: Builder,
config: &'a Table,
used_keys: HashSet<&'static str>,
}
impl<'a> BuilderWithConfig<'a> {
fn new(builder: Builder, config: &'a Table) -> Self {
BuilderWithConfig {
builder,
config,
used_keys: HashSet::new(),
}
}
fn handle_list<F>(self, key: &'static str, func: F) -> BuilderWithConfig<'a>
where
F: FnOnce(Builder, slice::Iter<'a, toml::Value>) -> Builder,
{
let mut builder = self.builder;
let config = self.config;
let mut used_keys = self.used_keys;
if let Some(list) = config.get(key) {
used_keys.insert(key);
builder = func(builder, list.as_array().unwrap().as_slice().iter());
}
BuilderWithConfig {
builder,
config,
used_keys,
}
}
fn handle_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a toml::Value) -> Builder,
{
self.handle_list(key, |b, iter| iter.fold(b, |b, item| func(b, item)))
}
fn handle_str_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a str) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_str().unwrap()))
}
fn handle_table_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a Table) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_table().unwrap()))
}
fn handle_common(self, fixups: &mut Vec<Fixup>) -> BuilderWithConfig<'a> {
self.handle_str_items("headers", |b, item| b.header(add_include(item)))
.handle_str_items("raw-lines", |b, item| b.raw_line(item))
.handle_str_items("hide-types", |b, item| b.blacklist_type(item))
.handle_table_items("fixups", |builder, item| {
fixups.push(Fixup {
pat: item["pat"].as_str().unwrap().into(),
rep: item["rep"].as_str().unwrap().into(),
});
builder
})
}
fn get_builder(self) -> Builder {
for key in self.config.keys() {
if!self.used_keys.contains(key.as_str()) {
panic!(format!("Unknown key: {}", key));
}
}
self.builder
}
}
fn generate_structs() {
let builder = Builder::get_initial_builder()
.enable_cxx_namespaces()
.with_codegen_config(CodegenConfig {
types: true,
vars: true,
..CodegenConfig::nothing()
});
let mut fixups = vec![];
let builder = BuilderWithConfig::new(builder, CONFIG["structs"].as_table().unwrap())
.handle_common(&mut fixups)
.handle_str_items("bitfield-enums", |b, item| b.bitfield_enum(item))
.handle_str_items("rusty-enums", |b, item| b.rustified_enum(item))
.handle_str_items("whitelist-vars", |b, item| b.whitelist_var(item))
.handle_str_items("whitelist-types", |b, item| b.whitelist_type(item))
.handle_str_items("opaque-types", |b, item| b.opaque_type(item))
.handle_table_items("mapped-generic-types", |builder, item| {
let generic = item["generic"].as_bool().unwrap();
let gecko = item["gecko"].as_str().unwrap();
let servo = item["servo"].as_str().unwrap();
let gecko_name = gecko.rsplit("::").next().unwrap();
let gecko = gecko
.split("::")
.map(|s| format!("\\s*{}\\s*", s))
.collect::<Vec<_>>()
.join("::");
fixups.push(Fixup {
pat: format!("\\broot\\s*::\\s*{}\\b", gecko),
rep: format!("::gecko_bindings::structs::{}", gecko_name),
});
builder.blacklist_type(gecko).raw_line(format!(
"pub type {0}{2} = {1}{2};",
gecko_name,
servo,
if generic { "<T>" } else { "" }
))
})
.get_builder();
write_binding_file(builder, STRUCTS_FILE
|
{
println!("cargo:rerun-if-changed={}", path.to_str().unwrap());
update_last_modified(&path);
let mut contents = String::new();
File::open(path)
.expect("Failed to open config file")
.read_to_string(&mut contents)
.expect("Failed to read config file");
match toml::from_str::<toml::value::Table>(&contents) {
Ok(result) => result,
Err(e) => panic!("Failed to parse config file: {}", e),
}
}
|
identifier_body
|
|
build_gecko.rs
|
}
path
};
static ref SEARCH_PATHS: Vec<PathBuf> = vec![
DISTDIR_PATH.join("include"),
DISTDIR_PATH.join("include/nspr"),
];
static ref ADDED_PATHS: Mutex<HashSet<PathBuf>> = Mutex::new(HashSet::new());
static ref LAST_MODIFIED: Mutex<SystemTime> =
Mutex::new(get_modified_time(&env::current_exe().unwrap())
.expect("Failed to get modified time of executable"));
}
fn get_modified_time(file: &Path) -> Option<SystemTime> {
file.metadata().and_then(|m| m.modified()).ok()
}
fn update_last_modified(file: &Path) {
let modified = get_modified_time(file).expect("Couldn't get file modification time");
let mut last_modified = LAST_MODIFIED.lock().unwrap();
*last_modified = cmp::max(modified, *last_modified);
}
fn search_include(name: &str) -> Option<PathBuf> {
for path in SEARCH_PATHS.iter() {
let file = path.join(name);
if file.is_file() {
update_last_modified(&file);
return Some(file);
}
}
None
}
fn add_headers_recursively(path: PathBuf, added_paths: &mut HashSet<PathBuf>) {
if added_paths.contains(&path) {
return;
}
let mut file = File::open(&path).unwrap();
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
println!("cargo:rerun-if-changed={}", path.to_str().unwrap());
added_paths.insert(path);
// Find all includes and add them recursively
for cap in INCLUDE_RE.captures_iter(&content) {
if let Some(path) = search_include(cap.get(1).unwrap().as_str()) {
add_headers_recursively(path, added_paths);
}
}
}
fn add_include(name: &str) -> String {
let mut added_paths = ADDED_PATHS.lock().unwrap();
let file = search_include(name).expect("Include not found!");
let result = String::from(file.to_str().unwrap());
add_headers_recursively(file, &mut *added_paths);
result
}
trait BuilderExt {
fn get_initial_builder() -> Builder;
fn include<T: Into<String>>(self, file: T) -> Builder;
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder;
fn borrowed_type(self, ty: &str) -> Builder;
fn mutable_borrowed_type(self, ty: &str) -> Builder;
}
fn add_clang_args(mut builder: Builder, config: &Table, matched_os: &mut bool) -> Builder {
fn add_args(mut builder: Builder, values: &[toml::Value]) -> Builder {
for item in values.iter() {
builder = builder.clang_arg(item.as_str().expect("Expect string in list"));
}
builder
}
for (k, v) in config.iter() {
if k == "args" {
builder = add_args(builder, v.as_array().unwrap().as_slice());
continue;
}
let equal_idx = k.find('=').expect(&format!("Invalid key: {}", k));
let (target_type, target_value) = k.split_at(equal_idx);
if TARGET_INFO[target_type]!= target_value[1..] {
continue;
}
if target_type == "os" {
*matched_os = true;
}
builder = match *v {
toml::Value::Table(ref table) => add_clang_args(builder, table, matched_os),
toml::Value::Array(ref array) => add_args(builder, array),
_ => panic!("Unknown type"),
};
}
builder
}
impl BuilderExt for Builder {
fn get_initial_builder() -> Builder {
use bindgen::RustTarget;
// Disable rust unions, because we replace some types inside of
// them.
let mut builder = Builder::default().rust_target(RustTarget::Stable_1_0);
let rustfmt_path = env::var_os("RUSTFMT")
// This can be replaced with
// >.filter(|p|!p.is_empty()).map(PathBuf::from)
// once we can use 1.27+.
.and_then(|p| {
if p.is_empty() {
None
} else {
Some(PathBuf::from(p))
}
});
if let Some(path) = rustfmt_path {
builder = builder.with_rustfmt(path);
}
for dir in SEARCH_PATHS.iter() {
builder = builder.clang_arg("-I").clang_arg(dir.to_str().unwrap());
}
builder = builder.include(add_include("mozilla-config.h"));
if env::var("CARGO_FEATURE_GECKO_DEBUG").is_ok() {
builder = builder.clang_arg("-DDEBUG=1").clang_arg("-DJS_DEBUG=1");
}
let mut matched_os = false;
let build_config = CONFIG["build"].as_table().expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
let build_config = BUILD_CONFIG["build"]
.as_table()
.expect("Malformed config file");
builder = add_clang_args(builder, build_config, &mut matched_os);
if!matched_os {
panic!("Unknown platform");
}
builder
}
fn include<T: Into<String>>(self, file: T) -> Builder {
self.clang_arg("-include").clang_arg(file)
}
// This makes an FFI-safe void type that can't be matched on
// &VoidType is UB to have, because you can match on it
// to produce a reachable unreachable. If it's wrapped in
// a struct as a private field it becomes okay again
//
// Not 100% sure of how safe this is, but it's what we're using
// in the XPCOM ffi too
// https://github.com/nikomatsakis/rust-memory-model/issues/2
fn zero_size_type(self, ty: &str, structs_list: &HashSet<&str>) -> Builder {
if!structs_list.contains(ty) {
self.blacklist_type(ty)
.raw_line(format!("enum {}Void {{ }}", ty))
.raw_line(format!("pub struct {0}({0}Void);", ty))
} else {
self
}
}
fn borrowed_type(self, ty: &str) -> Builder {
self.blacklist_type(format!("{}Borrowed", ty))
.raw_line(format!("pub type {0}Borrowed<'a> = &'a {0};", ty))
.blacklist_type(format!("{}BorrowedOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedOrNull<'a> = Option<&'a {0}>;",
ty
))
}
fn mutable_borrowed_type(self, ty: &str) -> Builder {
self.borrowed_type(ty)
.blacklist_type(format!("{}BorrowedMut", ty))
.raw_line(format!("pub type {0}BorrowedMut<'a> = &'a mut {0};", ty))
.blacklist_type(format!("{}BorrowedMutOrNull", ty))
.raw_line(format!(
"pub type {0}BorrowedMutOrNull<'a> = Option<&'a mut {0}>;",
ty
))
}
}
struct Fixup {
pat: String,
rep: String,
}
fn write_binding_file(builder: Builder, file: &str, fixups: &[Fixup]) {
let out_file = OUTDIR_PATH.join(file);
if let Some(modified) = get_modified_time(&out_file) {
// Don't generate the file if nothing it depends on was modified.
let last_modified = LAST_MODIFIED.lock().unwrap();
if *last_modified <= modified {
return;
}
}
let command_line_opts = builder.command_line_flags();
let result = builder.generate();
let mut result = match result {
Ok(bindings) => bindings.to_string(),
Err(_) => {
panic!(
"Failed to generate bindings, flags: {:?}",
command_line_opts
|
for fixup in fixups.iter() {
result = Regex::new(&fixup.pat)
.unwrap()
.replace_all(&result, &*fixup.rep)
.into_owned()
.into();
}
let bytes = result.into_bytes();
File::create(&out_file)
.unwrap()
.write_all(&bytes)
.expect("Unable to write output");
}
fn get_arc_types() -> Vec<String> {
// Read the file
let mut list_file = File::open(DISTDIR_PATH.join("include/mozilla/ServoArcTypeList.h"))
.expect("Unable to open ServoArcTypeList.h");
let mut content = String::new();
list_file
.read_to_string(&mut content)
.expect("Fail to read ServoArcTypeList.h");
// Remove comments
let block_comment_re = Regex::new(r#"(?s)/\*.*?\*/"#).unwrap();
let content = block_comment_re.replace_all(&content, "");
// Extract the list
let re = Regex::new(r#"^SERVO_ARC_TYPE\(\w+,\s*(\w+)\)$"#).unwrap();
content
.lines()
.map(|line| line.trim())
.filter(|line|!line.is_empty())
.map(|line| {
re.captures(&line)
.expect(&format!(
"Unrecognized line in ServoArcTypeList.h: '{}'",
line
))
.get(1)
.unwrap()
.as_str()
.to_string()
})
.collect()
}
struct BuilderWithConfig<'a> {
builder: Builder,
config: &'a Table,
used_keys: HashSet<&'static str>,
}
impl<'a> BuilderWithConfig<'a> {
fn new(builder: Builder, config: &'a Table) -> Self {
BuilderWithConfig {
builder,
config,
used_keys: HashSet::new(),
}
}
fn handle_list<F>(self, key: &'static str, func: F) -> BuilderWithConfig<'a>
where
F: FnOnce(Builder, slice::Iter<'a, toml::Value>) -> Builder,
{
let mut builder = self.builder;
let config = self.config;
let mut used_keys = self.used_keys;
if let Some(list) = config.get(key) {
used_keys.insert(key);
builder = func(builder, list.as_array().unwrap().as_slice().iter());
}
BuilderWithConfig {
builder,
config,
used_keys,
}
}
fn handle_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a toml::Value) -> Builder,
{
self.handle_list(key, |b, iter| iter.fold(b, |b, item| func(b, item)))
}
fn handle_str_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a str) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_str().unwrap()))
}
fn handle_table_items<F>(self, key: &'static str, mut func: F) -> BuilderWithConfig<'a>
where
F: FnMut(Builder, &'a Table) -> Builder,
{
self.handle_items(key, |b, item| func(b, item.as_table().unwrap()))
}
fn handle_common(self, fixups: &mut Vec<Fixup>) -> BuilderWithConfig<'a> {
self.handle_str_items("headers", |b, item| b.header(add_include(item)))
.handle_str_items("raw-lines", |b, item| b.raw_line(item))
.handle_str_items("hide-types", |b, item| b.blacklist_type(item))
.handle_table_items("fixups", |builder, item| {
fixups.push(Fixup {
pat: item["pat"].as_str().unwrap().into(),
rep: item["rep"].as_str().unwrap().into(),
});
builder
})
}
fn get_builder(self) -> Builder {
for key in self.config.keys() {
if!self.used_keys.contains(key.as_str()) {
panic!(format!("Unknown key: {}", key));
}
}
self.builder
}
}
fn generate_structs() {
let builder = Builder::get_initial_builder()
.enable_cxx_namespaces()
.with_codegen_config(CodegenConfig {
types: true,
vars: true,
..CodegenConfig::nothing()
});
let mut fixups = vec![];
let builder = BuilderWithConfig::new(builder, CONFIG["structs"].as_table().unwrap())
.handle_common(&mut fixups)
.handle_str_items("bitfield-enums", |b, item| b.bitfield_enum(item))
.handle_str_items("rusty-enums", |b, item| b.rustified_enum(item))
.handle_str_items("whitelist-vars", |b, item| b.whitelist_var(item))
.handle_str_items("whitelist-types", |b, item| b.whitelist_type(item))
.handle_str_items("opaque-types", |b, item| b.opaque_type(item))
.handle_table_items("mapped-generic-types", |builder, item| {
let generic = item["generic"].as_bool().unwrap();
let gecko = item["gecko"].as_str().unwrap();
let servo = item["servo"].as_str().unwrap();
let gecko_name = gecko.rsplit("::").next().unwrap();
let gecko = gecko
.split("::")
.map(|s| format!("\\s*{}\\s*", s))
.collect::<Vec<_>>()
.join("::");
fixups.push(Fixup {
pat: format!("\\broot\\s*::\\s*{}\\b", gecko),
rep: format!("::gecko_bindings::structs::{}", gecko_name),
});
builder.blacklist_type(gecko).raw_line(format!(
"pub type {0}{2} = {1}{2};",
gecko_name,
servo,
if generic { "<T>" } else { "" }
))
})
.get_builder();
write_binding_file(builder, STRUCTS_FILE, &fixups);
}
fn setup_logging() -> bool {
use log;
struct BuildLogger {
file: Option<Mutex<fs::File>>,
filter: String,
}
impl log::Log for BuildLogger {
fn enabled(&self, meta: &log::Metadata) -> bool {
self.file.is_some() && meta.target().contains(&self.filter)
}
fn log(&self, record: &log::Record) {
if!self.enabled(record.metadata()) {
return;
}
let mut file = self.file.as_ref().unwrap().lock().unwrap();
let _ = writeln!(
file,
"{} - {} - {} @ {}:{}",
record.level(),
record.target(),
record.args(),
record.file().unwrap_or("<unknown>"),
record.line().unwrap_or(0)
);
}
fn flush(&self) {
if let Some(ref file) = self.file {
file.lock().unwrap().flush().unwrap();
}
}
}
if let Some(path) = env::var_os("STYLO_BUILD_LOG") {
log::set_max_level(log::LevelFilter::Debug);
log::set_boxed_logger(Box::new(BuildLogger {
file: fs::File::create(path).ok().map(Mutex::new),
filter: env::var("STYLO_BUILD_FILTER")
.ok()
.unwrap_or_else(|| "bindgen".to_owned()),
})).expect("Failed to set logger.");
true
}
|
);
},
};
|
random_line_split
|
actor.rs
|
}
actors! {
breakable {
SIGN: Kanban
BREAKABLE_CUP: MKoppu
BREAKABLE_PLATE: MOsara
BREAKABLE_JUG: MPot
SKULL: Odokuro
NUT: VigaH
PILE_OF_LEAVES: Vochi
SMALL_POT: kotubo
LARGE_POT: ootubo1
}
door {
KNOB00D: KNOB00D
KNOB01D: KNOB01D
}
dungeon_boss {
KALLE_DEMOS: Bkm
GOHDAN: Bst
GOHMA: Btd
MOLGERA: Bwd
GANONDORF: Gnd
JALHALLA: big_pow
}
enemy_npc {
KARGAROC: Bb
BOKOBLIN: Bk
QUILL: Bm1
CANON: Canon
BIG_OCTO: Daiocta
PHANTOM_GANON: Fganon
FIRE_KEESE: Fkeeth
FLOOR_MASTER_2: Fmastr2
GYORG: GyCtrl
REDEAD: Rdead1
DEXIVINE: Sss
STALFOS: Stal
DARKNUT: Tn
BLADE_TRAP: Trap
ARMOS: amos
ARMOS_2: amos2
BUBBLE: bable
BOKO_BABA: bbaba
BLACK_CHUCHU: c_black
BLUE_CHUCHU: c_blue
GREEN_CHUCHU: c_green
YELLOW_CHUCHU: c_kiiro
RED_CHUCHU: c_red
KEESE: keeth
MAGTAIL: magtail
MOBLIN: mo2
MOBLIN_STATUE: moZOU
MOUSE: nezumi
PEAHAT: p_hat
POE: pow
REDEAD_1: rdead1
REGULAR_WIZZROBE: wiz_r
}
exit {
DOOR_0: KNOB00
DOOR_1: KNOB01
GROTTO_ENTRANCE: Pitfall
}
foliage {
PALM_TREE: Oyashi
FLOWER: flower
FLWR17: flwr17
FLWR7: flwr7
SMALL_ROCK_1: koisi1
KUSAX1: kusax1
KUSAX21: kusax21
KUSAX7: kusax7
LARGE_TREE: lwood
PFLWRX7: pflwrx7
SMALL_TREE_3: swood3
SMALL_TREE_5: swood5
}
friendly_npc {
STURGEON: Aj1
GRANDMA: Ba1
GREAT_FAIRY: BigElf
RITO_POSTMAN_2: Bm2
RITO_POSTMAN_4: Bm4
RITO_POSTMAN_5: Bm5
MAKAR: Cb1
SEAGULL: Kamome
ARYLL: Ls1
MEDLI: Md1
PIG: Pig
TETRA: Zl1
CRAB: kani
}
gameplay {
ATTENTION_GRABBER: AttTag
BOMB_FLOWER: BFlower
HEART_CONTAINER_DUNGEON_BOSS_ITEM_DROP: Bitem
VALOOS_TAIL: Dr2
HOOKSHOT_TARGET: Hfuck1
BREAKABLE_FLOOR_TILE: Hhyu1
SPRING_ON_A_BLOCK_2: Hjump2
WIND_COLUMN_GENERATOR: Hsen1
GRAPPLE_POINT: Kui
SOLIDIFIED_MAGMA_PLATFORM: Magrock
WOODEN_BOX_WITH_BLACK_FRAME: Ospbox
DANGLING_ROPE_WITH_LANTERN: RopeR
POSTBOX: Tpost
WARP_JAR_2: Warpts2
JET_OF_MAGMA: Yfire00
RING_OF_FIRE: Zenfire
BRIDGE: bridge
COLLECTIBLE_ITEM: item
BABA_BUD: jbaba
PUSHABLE_BLOCK_0: osiBLK0
PUSHABLE_BLOCK_1: osiBLK1
}
lod_model {
FORSAKEN_FORTRESS: LOD01
STAR_ISLAND: LOD02
NORTHERN_FAIRY_ISLE: LOD03
GALE_ISLAND: LOD04
CRESCENT_MOON_ISLE: LOD05
SEVEN_STAR_ISLES: LOD06
OVERLOOK_ISLAND: LOD07
FOUR_EYE_REEF: LOD08
MOTHER_AND_CHILD_ISLES: LOD09
SPECTACLE_ISLAND: LOD10
WINDFALL_ISLAND: LOD11
PAWPRINT_ISLE: LOD12
DRAGON_ROOST_ISLAND: LOD13
FLIGHT_CONTROL_PLATFORM: LOD14
WESTERN_FAIRY_ISLE: LOD15
ROCK_SPIRE_ISLE: LOD16
TINGLE_ISLAND: LOD17
NORTHERN_TRIANGLE_ISLAND: LOD18
EASTERN_FAIRY_ISLE: LOD19
FIRE_MOUNTAIN: LOD20
STAR_BELT_ARCHIPELAGO: LOD21
THREE_EYE_REEF: LOD22
GREATFISH_ISLE: LOD23
CYCLOPS_REEF: LOD24
SIX_EYE_REEF: LOD25
TOWER_OF_THE_GODS: LOD26
EASTERN_TRIANGLE_ISLAND: LOD27
THORNED_FAIRY_ISLE: LOD28
NEEDLEPOINT_ISLAND: LOD29
ISLET_OF_STEEL: LOD30
STONE_WATCHER_ISLAND: LOD31
SOUTHERN_TRIANGLE_ISLAND: LOD32
PRIVATE_OASIS: LOD33
BOMB_ISLAND: LOD34
BIRDS_PEAK_ISLAND: LOD35
DIAMOND_STEPPE_ISLAND: LOD36
FIVE_EYE_REEF: LOD37
SHARK_ISLAND: LOD38
SOUTHERN_FAIRY_ISLE: LOD39
ICE_RING_ISLE: LOD40
FOREST_HAVEN: LOD41
CLIFF_PLATEAU_ISLES: LOD42
HORSESHOE_ISLAND: LOD43
OUTSET_ISLAND: LOD44
HEADSTONE_ISLAND: LOD45
TWO_EYE_REEF: LOD46
ANGULAR_ISLES: LOD47
BOAT_RACE_ISLAND: LOD48
FIVE_STAR_ISLES: LOD49
}
large_object {
STALL_A: RotenA
STALL_B: RotenB
STALL_C: RotenC
TOWER_OF_THE_GODS_EXTERIOR: X_tower
LINK_STATUE_INSIDE_HYRULE_CASTLE: YLzou
}
mechanics {
SEED_PLANTING_SPOT_FOR_MAKAR: VmcBS
}
obstacle {
IRON_BARS: Ashut
LARGE_ROCK: Ebrock
SPIKE: Htoge1
EYE_VINE_BLOCKER: Ss
TINGLE: Tc
}
storyline {
TRIANGLE_ISLAND_STATUE: Doguu
ZEPHOS_AND_CYCLOS: Hr
DIN_STATUE: MegamiD
FARORE_STATUE: MegamiF
NAYRU_STATUE: MegamiN
GANONS_TOWER_4_BOSS_DOOR: VgnFD
}
switch {
ALL_ENEMIES_KILLED_SWITCH: ALLdie
SWITCH_BUFFER_0: AND_SW0
SWITCH_BUFFER_2: AND_SW2
WIND_SWITCH: Hpbot1
FLOOR_SWITCH_A: Kbota_A
PROXIMITY_SWITCH: SW_C00
CRYSTAL_SWITCH: SW_HIT0
WIND_WAKER_SONG_SWITCH_B: SWtactB
TINGLE_C_SWITCH: agbCSW
}
tg_door {
KNOB00D: KNOB00D
KNOB01D: KNOB01D
KNOB03D: KNOB03D
ZENS12: ZenS12
DUNGEON_BARRED_DOOR: Zenshut
NORMAL_DUNGEON_DOOR: door10
NORMAL_EARTH_AND_WIND_TEMPLE_DOOR: door12
BOSS_DUNGEON_DOOR: door20
FORBIDDEN_WOODS_BOSS_DOOR: doorKD
BARRED_EARTH_AND_WIND_TEMPLE_DOOR: doorSH
LOCKED_EARTH_AND_WIND_TEMPLE_DOOR: keyS12
DUNGEON_LOCKED_DOOR: keyshut
}
treasure_chest {
TREASURE_CHEST: takara
TREASURE_CHEST_2: takara2
TAKARA3: takara3
TREASURE_CHEST_3: takara3
TREASURE_CHEST_4: takara4
TREASURE_CHEST_5: takara5
TREASURE_CHEST_6: takara6
TREASURE_CHEST_7: takara7
TREASURE_CHEST_8: takara8
TREASURE_I: takaraI
TREASURE_K: takaraK
TREASURE_M: takaraM
TREASURE_AGC: tkrAGc
TREASURE_AIK: tkrAIk
TREASURE_AKD: tkrAKd
TREASURE_AOC: tkrAOc
TREASURE_AOS: tkrAOs
TREASURE_A_SWITCH: tkrASw
TREASURE_CHEST_UNLOCKED_BY_LIGHT_BEAM: tkrBMs
TREASURE_CTF: tkrCTf
}
trigger {
EVENT_TRIGGER: TagEv
HINT_TRIGGER: TagHt
HINT_TRIGGER_2: TagHt2
TEXT_EVENT_TRIGGER: TagMsg
WEATHER_TRIGGER_0: ky_tag0
WEATHER_TRIGGER_1: ky_tag1
WEATHER_TRIGGER_2: ky_tag2
WEATHER_TRIGGER_3: ky_tag3
WEATHER_TRIGGER_4: kytag4
WEATHER_TRIGGER_6: kytag6
}
uncategorized {
ATDOOR: ATdoor
AC1: Ac1
AH: Ah
INVISIBLE_WALL: Akabe
AKABE10: Akabe10
APZL: Apzl
ASTOP: Astop
ATTENTION_GRABBER_B: AttTagB
AYGR: Aygr
AYUSH: Ayush
BLK_CR: BLK_CR
HELMAROC_KING_OBJECT_GIBS: Bdkobj
BITA: Bita
BJ1: Bj1
BJ2: Bj2
BJ3: Bj3
BJ4: Bj4
BJ5: Bj5
BJ6: Bj6
BJ7: Bj7
BJ8: Bj8
BJ9: Bj9
BLIFT: Blift
BM3: Bm3
BMCON1: Bmcon1
BMCON2: Bmcon2
BMSW: Bmsw
BS1: Bs1
BS2: Bs2
BTSW2: Btsw2
CAFE_LAMP: Cafelmp
CMTRAP: CmTrap
CO1: Co1
COM_A: Com_A
COM_C: Com_C
CRTRM1: CrTrM1
CRTRM2: CrTrM2
CRTRS3: CrTrS3
CRTRS4: CrTrS4
CRTRS5: CrTrS5
DBLK0: DBLK0
DKKIBA: DKkiba
DEMO_DK: Demo_Dk
DK: Dk
DS1: Ds1
DSAKU: Dsaku
EAYOGN: Eayogn
EBOMZO: Ebomzo
EBROCK2: Ebrock2
ECUBE: Ecube
EKAO: Ekao
EKSKZ: Ekskz
ESEKH: Esekh
ESEKH2: Esekh2
ESKBAN: Eskban
EVSW: Evsw
FTREE: FTree
F_PLATFORM_FLIGHT_PLATFORM: Fdai
FIGURE: Figure
FIRE: Fire
FLOOR_MASTER: Fmaster
FLOOR_MASTER_1: Fmastr1
GBOARD: GBoard
GASHIP1: Gaship1
GASHIP2: Gaship2
GBRG00: Gbrg00
GDEMO20: Gdemo20
GFLAG: Gflag
YELLOW_OCEAN_WARP: Ghrwp
GICEL: GiceL
GK1: Gk1
GKAI00: Gkai00
GNBTAKI: Gnbtaki
GNTAKIE: Gntakie
GNTAKIS: Gntakis
GP1: Gp1
GRYW00: Gryw00
GTAKI: Gtaki
GYCTRLB: GyCtrlB
HAMI1: Hami1
HAMI2: Hami2
HAMI3: Hami3
HAMI4: Hami4
HAMIY: HamiY
HBOX1: Hbox1
HBOX2: Hbox2
HBOX2S: Hbox2S
HBRF1: Hbrf1
HCBH: Hcbh
HDAI1: Hdai1
HDAI2: Hdai2
HDAI3: Hdai3
HFBOT1A: Hfbot1A
HFBOT1B: Hfbot1B
HFBOT1C: Hfbot1C
HHA: Hha
HHBOT1: Hhbot1
HHBOT1N: Hhbot1N
SPRING_ON_A_BLOCK_1: Hjump1
HKIKAI1: Hkikai1
HMLIF: Hmlif
HMON1: Hmon1
HMON1D: Hmon1d
HMON2: Hmon2
HMON2D: Hmon2d
HMOS1: Hmos1
HMOS2: Hmos2
HMOS3: Hmos3
HO: Ho
HOMEN1: Homen1
HOMEN2: Homen2
HPU1: Hpu1
HPU2: Hpu2
HR2: Hr2
HSEKI1: Hseki1
HSEKI2: Hseki2
HSEKI3: Hseki3
HSEKI4: Hseki4
HSEKI5: Hseki5
HSEKI6: Hseki6
HSEKI7: Hseki7
HSEN2: Hsen2
HSEN3: Hsen3
HSH: Hsh
HSH2: Hsh2
HTETU1: Htetu1
HTOBI1: Htobi1
HTOBI2: Htobi2
HTOBI3: Htobi3
HUMI0Z: Humi0z
HUMI2Z: Humi2z
HUMI3Z: Humi3z
HUMI4Z: Humi4z
HUMI5Z: Humi5z
HYOIKAM: HyoiKam
HYS: Hys
HYS2: Hys2
HYUF1: Hyuf1
HYUF2: Hyuf2
ITAT00: ITat00
IKADA: Ikada
IKARI: Ikari
IKORI: Ikori
JI1: Ji1
KGBDOR: KGBdor
DOOR_2: KNOB02
DOOR_3: KNOB03
KANAT: Kanat
KBOTAC: KbotaC
KBOTA_B: Kbota_B
KF1: Kf1
KG1: Kg1
KG2: Kg2
KITA: Kita
KK1: Kk1
KKIBA: Kkiba
KKIBAB: KkibaB
FORBBIDEN_WOODS_LIFT: Klft
KM1: Km1
KMI00: Kmi00
KMI02: Kmi02
KMTUB: Kmtub
KO1: Ko1
KO2: Ko2
KOKIIE: Kokiie
KP1: Kp1
KROCK00: Krock00
KRYU00: Kryu00
KSAKU: Ksaku
KTARU: Ktaru
KTARUO: Ktaruo
KTARUR: Ktarur
KTARUX: Ktarux
REFLECTABLE_LIGHT_BEAM_0: LTag0
REFLECTABLE_LIGHT_BEAM_1: LTag1
LTAGR0: LTagR0
LAMP: Lamp
MKANOK2: MKanok2
MKANOKE: MKanoke
MCRTN: Mcrtn
MCUBE: Mcube
MCUBE10: Mcube10
MCYLN: Mcyln
MFLFT: Mflft
MHMRSW0: MhmrSW0
MHSG12: Mhsg12
MHSG15: Mhsg15
MHSG4H: Mhsg4h
MHSG6: Mhsg6
MHSG9: Mhsg9
MJDOOR: MjDoor
MK: Mk
MKDAN1: Mkdan1
MKIEBA: MkieBA
MKIEBAB: MkieBAB
MKIEBB: MkieBB
MKIEK: MkieK
MKNJD: MknjD
MMRR: Mmrr
MMUSIC: Mmusic
MN: Mn
MORI1: Mori1
MPWRB: MpwrB
MSDAN: Msdan
MSDAN2: Msdan2
MSUSW: MsuSW
MSUSWB: MsuSWB
MSWING: Mswing
MT: Mt
MTFLAG: MtFlag
MTORISU: MtoriSU
TRIANGULAR_PRISM_BLOCK: MtryB
TRIANGULAR_PRISM_BLOCK_TARGET_LOCATION: MtryBCr
MWTRSB: MwtrSB
MYGNSB: MygnSB
NBOX: NBOX
NBOX10: NBOX10
NH: Nh
NPCSO: NpcSo
NZFALL: Nzfall
OB1: Ob1
TIMER: ObjTime
OCANON: Ocanon
OCLOUD: Ocloud
OHATCH: Ohatch
OJTREE: Ojtree
OKIOKE: Okioke
OLIFT: Olift
OQ: Oq
OQW: Oqw
OS: Os
OS1: Os1
OS2: Os2
OSHIP: Oship
OSTOOL: Ostool
OTANA: Otana
OTBLE: Otble
OTBLEL: OtbleL
OWATER: Owater
P1A: P1a
P1B: P1b
P2A: P2a
P2B: P2b
P2C: P2c
PSCNCHG: PScnChg
PAPER: Paper
PBCO: Pbco
PBKA: Pbka
PF1: Pf1
PIRATES: Pirates
PIWA: Piwa
PLANT: Plant
PM1: Pm1
PO: Po
PPOS: Ppos
PTCO: Ptco
PTCU: Ptcu
PTUBO: Ptubo
PUTI: Puti
QDGHD: Qdghd
QTKHD: Qtkhd
QUAKE: Quake
RCLOUD: Rcloud
RDEAD2: Rdead2
RETAG0: ReTag0
RFLW: Rflw
RFORCE: Rforce
ROTEN2: Roten2
ROTEN3: Roten3
ROTEN4: Roten4
SMBDOR: SMBdor
SMTOGE: SMtoge
|
}
|
random_line_split
|
|
actor.rs
|
BARRED_EARTH_AND_WIND_TEMPLE_DOOR: doorSH
LOCKED_EARTH_AND_WIND_TEMPLE_DOOR: keyS12
DUNGEON_LOCKED_DOOR: keyshut
}
treasure_chest {
TREASURE_CHEST: takara
TREASURE_CHEST_2: takara2
TAKARA3: takara3
TREASURE_CHEST_3: takara3
TREASURE_CHEST_4: takara4
TREASURE_CHEST_5: takara5
TREASURE_CHEST_6: takara6
TREASURE_CHEST_7: takara7
TREASURE_CHEST_8: takara8
TREASURE_I: takaraI
TREASURE_K: takaraK
TREASURE_M: takaraM
TREASURE_AGC: tkrAGc
TREASURE_AIK: tkrAIk
TREASURE_AKD: tkrAKd
TREASURE_AOC: tkrAOc
TREASURE_AOS: tkrAOs
TREASURE_A_SWITCH: tkrASw
TREASURE_CHEST_UNLOCKED_BY_LIGHT_BEAM: tkrBMs
TREASURE_CTF: tkrCTf
}
trigger {
EVENT_TRIGGER: TagEv
HINT_TRIGGER: TagHt
HINT_TRIGGER_2: TagHt2
TEXT_EVENT_TRIGGER: TagMsg
WEATHER_TRIGGER_0: ky_tag0
WEATHER_TRIGGER_1: ky_tag1
WEATHER_TRIGGER_2: ky_tag2
WEATHER_TRIGGER_3: ky_tag3
WEATHER_TRIGGER_4: kytag4
WEATHER_TRIGGER_6: kytag6
}
uncategorized {
ATDOOR: ATdoor
AC1: Ac1
AH: Ah
INVISIBLE_WALL: Akabe
AKABE10: Akabe10
APZL: Apzl
ASTOP: Astop
ATTENTION_GRABBER_B: AttTagB
AYGR: Aygr
AYUSH: Ayush
BLK_CR: BLK_CR
HELMAROC_KING_OBJECT_GIBS: Bdkobj
BITA: Bita
BJ1: Bj1
BJ2: Bj2
BJ3: Bj3
BJ4: Bj4
BJ5: Bj5
BJ6: Bj6
BJ7: Bj7
BJ8: Bj8
BJ9: Bj9
BLIFT: Blift
BM3: Bm3
BMCON1: Bmcon1
BMCON2: Bmcon2
BMSW: Bmsw
BS1: Bs1
BS2: Bs2
BTSW2: Btsw2
CAFE_LAMP: Cafelmp
CMTRAP: CmTrap
CO1: Co1
COM_A: Com_A
COM_C: Com_C
CRTRM1: CrTrM1
CRTRM2: CrTrM2
CRTRS3: CrTrS3
CRTRS4: CrTrS4
CRTRS5: CrTrS5
DBLK0: DBLK0
DKKIBA: DKkiba
DEMO_DK: Demo_Dk
DK: Dk
DS1: Ds1
DSAKU: Dsaku
EAYOGN: Eayogn
EBOMZO: Ebomzo
EBROCK2: Ebrock2
ECUBE: Ecube
EKAO: Ekao
EKSKZ: Ekskz
ESEKH: Esekh
ESEKH2: Esekh2
ESKBAN: Eskban
EVSW: Evsw
FTREE: FTree
F_PLATFORM_FLIGHT_PLATFORM: Fdai
FIGURE: Figure
FIRE: Fire
FLOOR_MASTER: Fmaster
FLOOR_MASTER_1: Fmastr1
GBOARD: GBoard
GASHIP1: Gaship1
GASHIP2: Gaship2
GBRG00: Gbrg00
GDEMO20: Gdemo20
GFLAG: Gflag
YELLOW_OCEAN_WARP: Ghrwp
GICEL: GiceL
GK1: Gk1
GKAI00: Gkai00
GNBTAKI: Gnbtaki
GNTAKIE: Gntakie
GNTAKIS: Gntakis
GP1: Gp1
GRYW00: Gryw00
GTAKI: Gtaki
GYCTRLB: GyCtrlB
HAMI1: Hami1
HAMI2: Hami2
HAMI3: Hami3
HAMI4: Hami4
HAMIY: HamiY
HBOX1: Hbox1
HBOX2: Hbox2
HBOX2S: Hbox2S
HBRF1: Hbrf1
HCBH: Hcbh
HDAI1: Hdai1
HDAI2: Hdai2
HDAI3: Hdai3
HFBOT1A: Hfbot1A
HFBOT1B: Hfbot1B
HFBOT1C: Hfbot1C
HHA: Hha
HHBOT1: Hhbot1
HHBOT1N: Hhbot1N
SPRING_ON_A_BLOCK_1: Hjump1
HKIKAI1: Hkikai1
HMLIF: Hmlif
HMON1: Hmon1
HMON1D: Hmon1d
HMON2: Hmon2
HMON2D: Hmon2d
HMOS1: Hmos1
HMOS2: Hmos2
HMOS3: Hmos3
HO: Ho
HOMEN1: Homen1
HOMEN2: Homen2
HPU1: Hpu1
HPU2: Hpu2
HR2: Hr2
HSEKI1: Hseki1
HSEKI2: Hseki2
HSEKI3: Hseki3
HSEKI4: Hseki4
HSEKI5: Hseki5
HSEKI6: Hseki6
HSEKI7: Hseki7
HSEN2: Hsen2
HSEN3: Hsen3
HSH: Hsh
HSH2: Hsh2
HTETU1: Htetu1
HTOBI1: Htobi1
HTOBI2: Htobi2
HTOBI3: Htobi3
HUMI0Z: Humi0z
HUMI2Z: Humi2z
HUMI3Z: Humi3z
HUMI4Z: Humi4z
HUMI5Z: Humi5z
HYOIKAM: HyoiKam
HYS: Hys
HYS2: Hys2
HYUF1: Hyuf1
HYUF2: Hyuf2
ITAT00: ITat00
IKADA: Ikada
IKARI: Ikari
IKORI: Ikori
JI1: Ji1
KGBDOR: KGBdor
DOOR_2: KNOB02
DOOR_3: KNOB03
KANAT: Kanat
KBOTAC: KbotaC
KBOTA_B: Kbota_B
KF1: Kf1
KG1: Kg1
KG2: Kg2
KITA: Kita
KK1: Kk1
KKIBA: Kkiba
KKIBAB: KkibaB
FORBBIDEN_WOODS_LIFT: Klft
KM1: Km1
KMI00: Kmi00
KMI02: Kmi02
KMTUB: Kmtub
KO1: Ko1
KO2: Ko2
KOKIIE: Kokiie
KP1: Kp1
KROCK00: Krock00
KRYU00: Kryu00
KSAKU: Ksaku
KTARU: Ktaru
KTARUO: Ktaruo
KTARUR: Ktarur
KTARUX: Ktarux
REFLECTABLE_LIGHT_BEAM_0: LTag0
REFLECTABLE_LIGHT_BEAM_1: LTag1
LTAGR0: LTagR0
LAMP: Lamp
MKANOK2: MKanok2
MKANOKE: MKanoke
MCRTN: Mcrtn
MCUBE: Mcube
MCUBE10: Mcube10
MCYLN: Mcyln
MFLFT: Mflft
MHMRSW0: MhmrSW0
MHSG12: Mhsg12
MHSG15: Mhsg15
MHSG4H: Mhsg4h
MHSG6: Mhsg6
MHSG9: Mhsg9
MJDOOR: MjDoor
MK: Mk
MKDAN1: Mkdan1
MKIEBA: MkieBA
MKIEBAB: MkieBAB
MKIEBB: MkieBB
MKIEK: MkieK
MKNJD: MknjD
MMRR: Mmrr
MMUSIC: Mmusic
MN: Mn
MORI1: Mori1
MPWRB: MpwrB
MSDAN: Msdan
MSDAN2: Msdan2
MSUSW: MsuSW
MSUSWB: MsuSWB
MSWING: Mswing
MT: Mt
MTFLAG: MtFlag
MTORISU: MtoriSU
TRIANGULAR_PRISM_BLOCK: MtryB
TRIANGULAR_PRISM_BLOCK_TARGET_LOCATION: MtryBCr
MWTRSB: MwtrSB
MYGNSB: MygnSB
NBOX: NBOX
NBOX10: NBOX10
NH: Nh
NPCSO: NpcSo
NZFALL: Nzfall
OB1: Ob1
TIMER: ObjTime
OCANON: Ocanon
OCLOUD: Ocloud
OHATCH: Ohatch
OJTREE: Ojtree
OKIOKE: Okioke
OLIFT: Olift
OQ: Oq
OQW: Oqw
OS: Os
OS1: Os1
OS2: Os2
OSHIP: Oship
OSTOOL: Ostool
OTANA: Otana
OTBLE: Otble
OTBLEL: OtbleL
OWATER: Owater
P1A: P1a
P1B: P1b
P2A: P2a
P2B: P2b
P2C: P2c
PSCNCHG: PScnChg
PAPER: Paper
PBCO: Pbco
PBKA: Pbka
PF1: Pf1
PIRATES: Pirates
PIWA: Piwa
PLANT: Plant
PM1: Pm1
PO: Po
PPOS: Ppos
PTCO: Ptco
PTCU: Ptcu
PTUBO: Ptubo
PUTI: Puti
QDGHD: Qdghd
QTKHD: Qtkhd
QUAKE: Quake
RCLOUD: Rcloud
RDEAD2: Rdead2
RETAG0: ReTag0
RFLW: Rflw
RFORCE: Rforce
ROTEN2: Roten2
ROTEN3: Roten3
ROTEN4: Roten4
SMBDOR: SMBdor
SMTOGE: SMtoge
SPITEM: SPitem
SWTDOOR: SWTdoor
SWAT00: SWat00
WIND_WAKER_SONG_SWITCH: SWtact
CHANDELIER: SYAN
SA1: Sa1
SA2: Sa2
SA3: Sa3
SA4: Sa4
SA5: Sa5
SALVFM: SalvFM
SALVAG2: Salvag2
SALVAGE_E: SalvagE
SALVAGN: SalvagN
SALVAGE: Salvage
SARACE: Sarace
SEARCH: Search
SFAIRY: Sfairy
KING_OF_RED_LIONS_SHIP_FORM_PROP: Ship
SHMRGRD: Shmrgrd
SIEFLAG: SieFlag
SITEM: Sitem
SKANRAN: Skanran
STDOORL: Stdoorl
STDOORR: Stdoorr
STGATE: Stgate
STOUDAI: Stoudai
STTOGE: Sttoge
SV0: Sv0
SV1: Sv1
SV2: Sv2
SV3: Sv3
SVSP: Svsp
SALVAGE_SWITCH: SwSlvg
TABLE: Table
TAGCB1: TagCb1
TAGCB11: TagCb11
TAGCB12: TagCb12
TAGCB13: TagCb13
TAGCB14: TagCb14
TAGD1: TagD1
TAGD2: TagD2
TAGD3: TagD3
TAGD4: TagD4
TAGDM: TagDM
TAGISL: TagIsl
TAGKB: TagKb
TAGMSO: TagMSo
TAGMD: TagMd
TAGMD1: TagMd1
TAGMD11: TagMd11
TAGMD12: TagMd12
TAGMD13: TagMd13
TAGMD14: TagMd14
TAGMD15: TagMd15
TAGMD16: TagMd16
TAGMK: TagMk
TAGPO: TagPo
TAGSO: TagSo
TAGWP: TagWp
TENMADO: Tenmado
TESTPO: TestPo
LASER_BARRIER_INISIDE_HYRULE_CASTLE: TnTrap
TPOTA: Tpota
TRFLAG: TrFlag
TURU: Turu
TURU2: Turu2
TURU3: Turu3
UB1: Ub1
UB2: Ub2
UB3: Ub3
UB4: Ub4
UG1: Ug1
UG2: Ug2
UM1: Um1
UM2: Um2
UM3: Um3
UO1: Uo1
UO2: Uo2
UO3: Uo3
USOVMC: Usovmc
UW1: Uw1
UW2: Uw2
VBAKH: VbakH
VDORA: Vdora
VDS: Vds
VFAN: Vfan
PEDASTAL_OF_TIME: VmsDZ
MASTER_SWORD_MODEL_FROM_HYRULE_CASTLE_BASEMENT: VmsMS
VOLTAG: VolTag
VPBOT: Vpbot
VTENG: Vteng
VTIL1: Vtil1
VTIL2: Vtil2
VTIL3: Vtil3
VTIL4: Vtil4
VTIL5: Vtil5
VYASI: Vyasi
WLVTAG: WLvTag
WALL: Wall
DUNGEON_WARP_EXIT: Warpf
WARPFO: Warpfo
WARPGN: Warpgn
WARPNT: Warpnt
WARPT: Warpt
WARP_JAR_1: Warpts1
WARP_JAR_3: Warpts3
WIND_COLUMN: WindTag
YBGAF00: Ybgaf00
YBOIL00: Yboil00
MAGICAL_BARRIER: Ycage00
YFRLT00: Yfrlt00
YGCWP: Ygcwp
YGSTP00: Ygstp00
YGUSH00: Ygush00
YGUSH01: Ygush01
YGUSH02: Ygush02
YKGROFF: YkgrOFF
YKGRON: YkgrON
YKZYG: Ykzyg
YLKIC: Ylkic
YLLIC: Yllic
YLSIC: Ylsic
YM1: Ym1
YM2: Ym2
SHAFT_OF_LIGHT_WARP: Ysdls00
YTRND00: Ytrnd00
YW1: Yw1
YWARP00: Ywarp00
ZK1: Zk1
AGBA: agbA
AGBA2: agbA2
AGBAT: agbAT
AGBB: agbB
AGBD: agbD
AGBF: agbF
AGBF2: agbF2
AGBFA: agbFA
AGBMARK: agbMARK
AGBMW: agbMW
AGBR: agbR
AGBTBOX: agbTBOX
TORCH: bonbori
DMGROOM: dmgroom
DRAGON: dragon
FLOWER: flower
FLWR7: flwr7
FROCK: frock
GMOS: gmos
LOWERCASE_HO: ho
IKADAS: ikadaS
BEEDLES_SHOPSHIP: ikada_h
IKADA_U: ikada_u
KT: kt
KURO_S: kuro_s
KURO_T: kuro_t
KUSAX1: kusax1
KUSAX21: kusax21
KUSAX7: kusax7
KY00YOU: ky00you
KYTAG00: kytag00
KYTAG5: kytag5
MOUSE_HOLE: nezuana
PFLOWER: pflower
S_TURU: s_turu
SEA: sea
SPEAKUN: speakun
SPOTBX1: spotbx1
SWOOD: swood
SWOOD3: swood3
WOODB: woodb
WOODBX: woodbx
KNIGHT_STATUE: zouK
KNIGHT_STATUE_1: zouK1
KNIGHT_STATUE_2: zouK2
KNIGHT_STATUE_3: zouK3
KNIGHT_STATUE_4: zouK4
TRIFORCE_FLAG: HcFlag
FORSAKEN_FORTRESS_FLAG: MjFlag
JET_OF_STEAM_0: Ystm0
JET_OF_STEAM_1: Ystm1
MAGMA: magma
}
}
#[repr(C, packed)]
pub struct ActorTemplate {
pub name: [u8; 8],
pub params: u32,
pub coord: Coord,
pub rotation: [u16; 2],
pub flag: u16,
pub enemy_id: i16,
}
#[repr(C, packed)]
pub struct ActorMemory {
pub params: u32,
pub coord: Coord,
pub rotation: [u16; 2],
pub flag: u16,
pub enemy_id: i16,
pub flags: [u8; 9],
pub room_id: u8,
pub padding: [u8; 2],
}
impl ActorMemory {
fn new() -> &'static mut ActorMemory {
system::fopacm_create_append()
}
fn write_actor(&mut self, actor: &ActorTemplate) {
self.params = actor.params;
self.coord = actor.coord.clone();
self.rotation[0] = actor.rotation[0];
self.rotation[1] = actor.rotation[1];
self.flag = actor.flag;
self.enemy_id = actor.enemy_id;
}
}
impl ActorTemplate {
pub fn new(name: &str, coord: Coord, rotation: [u16; 2]) -> Self {
let mut actor = ActorTemplate {
name: [0; 8],
params: DEFAULT_PARAMS,
coord: coord,
rotation: rotation,
flag: DEFAULT_FLAG,
enemy_id: DEFAULT_ENEMY_ID,
};
memory::write_str(actor.name.as_mut_ptr(), name);
actor
}
pub fn
|
with_params
|
identifier_name
|
|
actor.rs
|
AGc
TREASURE_AIK: tkrAIk
TREASURE_AKD: tkrAKd
TREASURE_AOC: tkrAOc
TREASURE_AOS: tkrAOs
TREASURE_A_SWITCH: tkrASw
TREASURE_CHEST_UNLOCKED_BY_LIGHT_BEAM: tkrBMs
TREASURE_CTF: tkrCTf
}
trigger {
EVENT_TRIGGER: TagEv
HINT_TRIGGER: TagHt
HINT_TRIGGER_2: TagHt2
TEXT_EVENT_TRIGGER: TagMsg
WEATHER_TRIGGER_0: ky_tag0
WEATHER_TRIGGER_1: ky_tag1
WEATHER_TRIGGER_2: ky_tag2
WEATHER_TRIGGER_3: ky_tag3
WEATHER_TRIGGER_4: kytag4
WEATHER_TRIGGER_6: kytag6
}
uncategorized {
ATDOOR: ATdoor
AC1: Ac1
AH: Ah
INVISIBLE_WALL: Akabe
AKABE10: Akabe10
APZL: Apzl
ASTOP: Astop
ATTENTION_GRABBER_B: AttTagB
AYGR: Aygr
AYUSH: Ayush
BLK_CR: BLK_CR
HELMAROC_KING_OBJECT_GIBS: Bdkobj
BITA: Bita
BJ1: Bj1
BJ2: Bj2
BJ3: Bj3
BJ4: Bj4
BJ5: Bj5
BJ6: Bj6
BJ7: Bj7
BJ8: Bj8
BJ9: Bj9
BLIFT: Blift
BM3: Bm3
BMCON1: Bmcon1
BMCON2: Bmcon2
BMSW: Bmsw
BS1: Bs1
BS2: Bs2
BTSW2: Btsw2
CAFE_LAMP: Cafelmp
CMTRAP: CmTrap
CO1: Co1
COM_A: Com_A
COM_C: Com_C
CRTRM1: CrTrM1
CRTRM2: CrTrM2
CRTRS3: CrTrS3
CRTRS4: CrTrS4
CRTRS5: CrTrS5
DBLK0: DBLK0
DKKIBA: DKkiba
DEMO_DK: Demo_Dk
DK: Dk
DS1: Ds1
DSAKU: Dsaku
EAYOGN: Eayogn
EBOMZO: Ebomzo
EBROCK2: Ebrock2
ECUBE: Ecube
EKAO: Ekao
EKSKZ: Ekskz
ESEKH: Esekh
ESEKH2: Esekh2
ESKBAN: Eskban
EVSW: Evsw
FTREE: FTree
F_PLATFORM_FLIGHT_PLATFORM: Fdai
FIGURE: Figure
FIRE: Fire
FLOOR_MASTER: Fmaster
FLOOR_MASTER_1: Fmastr1
GBOARD: GBoard
GASHIP1: Gaship1
GASHIP2: Gaship2
GBRG00: Gbrg00
GDEMO20: Gdemo20
GFLAG: Gflag
YELLOW_OCEAN_WARP: Ghrwp
GICEL: GiceL
GK1: Gk1
GKAI00: Gkai00
GNBTAKI: Gnbtaki
GNTAKIE: Gntakie
GNTAKIS: Gntakis
GP1: Gp1
GRYW00: Gryw00
GTAKI: Gtaki
GYCTRLB: GyCtrlB
HAMI1: Hami1
HAMI2: Hami2
HAMI3: Hami3
HAMI4: Hami4
HAMIY: HamiY
HBOX1: Hbox1
HBOX2: Hbox2
HBOX2S: Hbox2S
HBRF1: Hbrf1
HCBH: Hcbh
HDAI1: Hdai1
HDAI2: Hdai2
HDAI3: Hdai3
HFBOT1A: Hfbot1A
HFBOT1B: Hfbot1B
HFBOT1C: Hfbot1C
HHA: Hha
HHBOT1: Hhbot1
HHBOT1N: Hhbot1N
SPRING_ON_A_BLOCK_1: Hjump1
HKIKAI1: Hkikai1
HMLIF: Hmlif
HMON1: Hmon1
HMON1D: Hmon1d
HMON2: Hmon2
HMON2D: Hmon2d
HMOS1: Hmos1
HMOS2: Hmos2
HMOS3: Hmos3
HO: Ho
HOMEN1: Homen1
HOMEN2: Homen2
HPU1: Hpu1
HPU2: Hpu2
HR2: Hr2
HSEKI1: Hseki1
HSEKI2: Hseki2
HSEKI3: Hseki3
HSEKI4: Hseki4
HSEKI5: Hseki5
HSEKI6: Hseki6
HSEKI7: Hseki7
HSEN2: Hsen2
HSEN3: Hsen3
HSH: Hsh
HSH2: Hsh2
HTETU1: Htetu1
HTOBI1: Htobi1
HTOBI2: Htobi2
HTOBI3: Htobi3
HUMI0Z: Humi0z
HUMI2Z: Humi2z
HUMI3Z: Humi3z
HUMI4Z: Humi4z
HUMI5Z: Humi5z
HYOIKAM: HyoiKam
HYS: Hys
HYS2: Hys2
HYUF1: Hyuf1
HYUF2: Hyuf2
ITAT00: ITat00
IKADA: Ikada
IKARI: Ikari
IKORI: Ikori
JI1: Ji1
KGBDOR: KGBdor
DOOR_2: KNOB02
DOOR_3: KNOB03
KANAT: Kanat
KBOTAC: KbotaC
KBOTA_B: Kbota_B
KF1: Kf1
KG1: Kg1
KG2: Kg2
KITA: Kita
KK1: Kk1
KKIBA: Kkiba
KKIBAB: KkibaB
FORBBIDEN_WOODS_LIFT: Klft
KM1: Km1
KMI00: Kmi00
KMI02: Kmi02
KMTUB: Kmtub
KO1: Ko1
KO2: Ko2
KOKIIE: Kokiie
KP1: Kp1
KROCK00: Krock00
KRYU00: Kryu00
KSAKU: Ksaku
KTARU: Ktaru
KTARUO: Ktaruo
KTARUR: Ktarur
KTARUX: Ktarux
REFLECTABLE_LIGHT_BEAM_0: LTag0
REFLECTABLE_LIGHT_BEAM_1: LTag1
LTAGR0: LTagR0
LAMP: Lamp
MKANOK2: MKanok2
MKANOKE: MKanoke
MCRTN: Mcrtn
MCUBE: Mcube
MCUBE10: Mcube10
MCYLN: Mcyln
MFLFT: Mflft
MHMRSW0: MhmrSW0
MHSG12: Mhsg12
MHSG15: Mhsg15
MHSG4H: Mhsg4h
MHSG6: Mhsg6
MHSG9: Mhsg9
MJDOOR: MjDoor
MK: Mk
MKDAN1: Mkdan1
MKIEBA: MkieBA
MKIEBAB: MkieBAB
MKIEBB: MkieBB
MKIEK: MkieK
MKNJD: MknjD
MMRR: Mmrr
MMUSIC: Mmusic
MN: Mn
MORI1: Mori1
MPWRB: MpwrB
MSDAN: Msdan
MSDAN2: Msdan2
MSUSW: MsuSW
MSUSWB: MsuSWB
MSWING: Mswing
MT: Mt
MTFLAG: MtFlag
MTORISU: MtoriSU
TRIANGULAR_PRISM_BLOCK: MtryB
TRIANGULAR_PRISM_BLOCK_TARGET_LOCATION: MtryBCr
MWTRSB: MwtrSB
MYGNSB: MygnSB
NBOX: NBOX
NBOX10: NBOX10
NH: Nh
NPCSO: NpcSo
NZFALL: Nzfall
OB1: Ob1
TIMER: ObjTime
OCANON: Ocanon
OCLOUD: Ocloud
OHATCH: Ohatch
OJTREE: Ojtree
OKIOKE: Okioke
OLIFT: Olift
OQ: Oq
OQW: Oqw
OS: Os
OS1: Os1
OS2: Os2
OSHIP: Oship
OSTOOL: Ostool
OTANA: Otana
OTBLE: Otble
OTBLEL: OtbleL
OWATER: Owater
P1A: P1a
P1B: P1b
P2A: P2a
P2B: P2b
P2C: P2c
PSCNCHG: PScnChg
PAPER: Paper
PBCO: Pbco
PBKA: Pbka
PF1: Pf1
PIRATES: Pirates
PIWA: Piwa
PLANT: Plant
PM1: Pm1
PO: Po
PPOS: Ppos
PTCO: Ptco
PTCU: Ptcu
PTUBO: Ptubo
PUTI: Puti
QDGHD: Qdghd
QTKHD: Qtkhd
QUAKE: Quake
RCLOUD: Rcloud
RDEAD2: Rdead2
RETAG0: ReTag0
RFLW: Rflw
RFORCE: Rforce
ROTEN2: Roten2
ROTEN3: Roten3
ROTEN4: Roten4
SMBDOR: SMBdor
SMTOGE: SMtoge
SPITEM: SPitem
SWTDOOR: SWTdoor
SWAT00: SWat00
WIND_WAKER_SONG_SWITCH: SWtact
CHANDELIER: SYAN
SA1: Sa1
SA2: Sa2
SA3: Sa3
SA4: Sa4
SA5: Sa5
SALVFM: SalvFM
SALVAG2: Salvag2
SALVAGE_E: SalvagE
SALVAGN: SalvagN
SALVAGE: Salvage
SARACE: Sarace
SEARCH: Search
SFAIRY: Sfairy
KING_OF_RED_LIONS_SHIP_FORM_PROP: Ship
SHMRGRD: Shmrgrd
SIEFLAG: SieFlag
SITEM: Sitem
SKANRAN: Skanran
STDOORL: Stdoorl
STDOORR: Stdoorr
STGATE: Stgate
STOUDAI: Stoudai
STTOGE: Sttoge
SV0: Sv0
SV1: Sv1
SV2: Sv2
SV3: Sv3
SVSP: Svsp
SALVAGE_SWITCH: SwSlvg
TABLE: Table
TAGCB1: TagCb1
TAGCB11: TagCb11
TAGCB12: TagCb12
TAGCB13: TagCb13
TAGCB14: TagCb14
TAGD1: TagD1
TAGD2: TagD2
TAGD3: TagD3
TAGD4: TagD4
TAGDM: TagDM
TAGISL: TagIsl
TAGKB: TagKb
TAGMSO: TagMSo
TAGMD: TagMd
TAGMD1: TagMd1
TAGMD11: TagMd11
TAGMD12: TagMd12
TAGMD13: TagMd13
TAGMD14: TagMd14
TAGMD15: TagMd15
TAGMD16: TagMd16
TAGMK: TagMk
TAGPO: TagPo
TAGSO: TagSo
TAGWP: TagWp
TENMADO: Tenmado
TESTPO: TestPo
LASER_BARRIER_INISIDE_HYRULE_CASTLE: TnTrap
TPOTA: Tpota
TRFLAG: TrFlag
TURU: Turu
TURU2: Turu2
TURU3: Turu3
UB1: Ub1
UB2: Ub2
UB3: Ub3
UB4: Ub4
UG1: Ug1
UG2: Ug2
UM1: Um1
UM2: Um2
UM3: Um3
UO1: Uo1
UO2: Uo2
UO3: Uo3
USOVMC: Usovmc
UW1: Uw1
UW2: Uw2
VBAKH: VbakH
VDORA: Vdora
VDS: Vds
VFAN: Vfan
PEDASTAL_OF_TIME: VmsDZ
MASTER_SWORD_MODEL_FROM_HYRULE_CASTLE_BASEMENT: VmsMS
VOLTAG: VolTag
VPBOT: Vpbot
VTENG: Vteng
VTIL1: Vtil1
VTIL2: Vtil2
VTIL3: Vtil3
VTIL4: Vtil4
VTIL5: Vtil5
VYASI: Vyasi
WLVTAG: WLvTag
WALL: Wall
DUNGEON_WARP_EXIT: Warpf
WARPFO: Warpfo
WARPGN: Warpgn
WARPNT: Warpnt
WARPT: Warpt
WARP_JAR_1: Warpts1
WARP_JAR_3: Warpts3
WIND_COLUMN: WindTag
YBGAF00: Ybgaf00
YBOIL00: Yboil00
MAGICAL_BARRIER: Ycage00
YFRLT00: Yfrlt00
YGCWP: Ygcwp
YGSTP00: Ygstp00
YGUSH00: Ygush00
YGUSH01: Ygush01
YGUSH02: Ygush02
YKGROFF: YkgrOFF
YKGRON: YkgrON
YKZYG: Ykzyg
YLKIC: Ylkic
YLLIC: Yllic
YLSIC: Ylsic
YM1: Ym1
YM2: Ym2
SHAFT_OF_LIGHT_WARP: Ysdls00
YTRND00: Ytrnd00
YW1: Yw1
YWARP00: Ywarp00
ZK1: Zk1
AGBA: agbA
AGBA2: agbA2
AGBAT: agbAT
AGBB: agbB
AGBD: agbD
AGBF: agbF
AGBF2: agbF2
AGBFA: agbFA
AGBMARK: agbMARK
AGBMW: agbMW
AGBR: agbR
AGBTBOX: agbTBOX
TORCH: bonbori
DMGROOM: dmgroom
DRAGON: dragon
FLOWER: flower
FLWR7: flwr7
FROCK: frock
GMOS: gmos
LOWERCASE_HO: ho
IKADAS: ikadaS
BEEDLES_SHOPSHIP: ikada_h
IKADA_U: ikada_u
KT: kt
KURO_S: kuro_s
KURO_T: kuro_t
KUSAX1: kusax1
KUSAX21: kusax21
KUSAX7: kusax7
KY00YOU: ky00you
KYTAG00: kytag00
KYTAG5: kytag5
MOUSE_HOLE: nezuana
PFLOWER: pflower
S_TURU: s_turu
SEA: sea
SPEAKUN: speakun
SPOTBX1: spotbx1
SWOOD: swood
SWOOD3: swood3
WOODB: woodb
WOODBX: woodbx
KNIGHT_STATUE: zouK
KNIGHT_STATUE_1: zouK1
KNIGHT_STATUE_2: zouK2
KNIGHT_STATUE_3: zouK3
KNIGHT_STATUE_4: zouK4
TRIFORCE_FLAG: HcFlag
FORSAKEN_FORTRESS_FLAG: MjFlag
JET_OF_STEAM_0: Ystm0
JET_OF_STEAM_1: Ystm1
MAGMA: magma
}
}
#[repr(C, packed)]
pub struct ActorTemplate {
pub name: [u8; 8],
pub params: u32,
pub coord: Coord,
pub rotation: [u16; 2],
pub flag: u16,
pub enemy_id: i16,
}
#[repr(C, packed)]
pub struct ActorMemory {
pub params: u32,
pub coord: Coord,
pub rotation: [u16; 2],
pub flag: u16,
pub enemy_id: i16,
pub flags: [u8; 9],
pub room_id: u8,
pub padding: [u8; 2],
}
impl ActorMemory {
fn new() -> &'static mut ActorMemory {
system::fopacm_create_append()
}
fn write_actor(&mut self, actor: &ActorTemplate) {
self.params = actor.params;
self.coord = actor.coord.clone();
self.rotation[0] = actor.rotation[0];
self.rotation[1] = actor.rotation[1];
self.flag = actor.flag;
self.enemy_id = actor.enemy_id;
}
}
impl ActorTemplate {
pub fn new(name: &str, coord: Coord, rotation: [u16; 2]) -> Self {
let mut actor = ActorTemplate {
name: [0; 8],
params: DEFAULT_PARAMS,
coord: coord,
rotation: rotation,
flag: DEFAULT_FLAG,
enemy_id: DEFAULT_ENEMY_ID,
};
memory::write_str(actor.name.as_mut_ptr(), name);
actor
}
pub fn with_params(mut self, params: u32) -> Self {
self.params = params;
self
}
pub fn with_flag(mut self, flag: u16) -> Self {
self.flag = flag;
self
}
pub fn with_enemy_id(mut self, enemy_id: i16) -> Self {
self.enemy_id = enemy_id;
self
}
pub fn actor_name(&self) -> &str {
memory::read_str(self.name.as_ptr())
}
pub fn spawn(&self) -> &'static mut ActorMemory
|
{
let memory = ActorMemory::new();
memory.write_actor(self);
memory.room_id = Link::room();
layer::switch_to_safe_layer();
system::dstage_actor_create(self, memory);
memory
}
|
identifier_body
|
|
smb3.rs
|
/* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
|
use nom7::number::streaming::{le_u16, le_u32, le_u64};
use nom7::IResult;
#[derive(Debug,PartialEq)]
pub struct Smb3TransformRecord<'a> {
pub session_id: u64,
pub enc_algo: u16,
pub enc_data: &'a[u8],
}
pub fn parse_smb3_transform_record(i: &[u8]) -> IResult<&[u8], Smb3TransformRecord> {
let (i, _) = tag(b"\xfdSMB")(i)?;
let (i, _signature) = take(16_usize)(i)?;
let (i, _nonce) = take(16_usize)(i)?;
let (i, msg_size) = le_u32(i)?;
let (i, _reserved) = le_u16(i)?;
let (i, enc_algo) = le_u16(i)?;
let (i, session_id) = le_u64(i)?;
let (i, enc_data) = take(msg_size)(i)?;
let record = Smb3TransformRecord {
session_id,
enc_algo,
enc_data,
};
Ok((i, record))
}
|
use nom7::bytes::streaming::{tag, take};
|
random_line_split
|
smb3.rs
|
/* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use nom7::bytes::streaming::{tag, take};
use nom7::number::streaming::{le_u16, le_u32, le_u64};
use nom7::IResult;
#[derive(Debug,PartialEq)]
pub struct Smb3TransformRecord<'a> {
pub session_id: u64,
pub enc_algo: u16,
pub enc_data: &'a[u8],
}
pub fn parse_smb3_transform_record(i: &[u8]) -> IResult<&[u8], Smb3TransformRecord>
|
{
let (i, _) = tag(b"\xfdSMB")(i)?;
let (i, _signature) = take(16_usize)(i)?;
let (i, _nonce) = take(16_usize)(i)?;
let (i, msg_size) = le_u32(i)?;
let (i, _reserved) = le_u16(i)?;
let (i, enc_algo) = le_u16(i)?;
let (i, session_id) = le_u64(i)?;
let (i, enc_data) = take(msg_size)(i)?;
let record = Smb3TransformRecord {
session_id,
enc_algo,
enc_data,
};
Ok((i, record))
}
|
identifier_body
|
|
smb3.rs
|
/* Copyright (C) 2018 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
use nom7::bytes::streaming::{tag, take};
use nom7::number::streaming::{le_u16, le_u32, le_u64};
use nom7::IResult;
#[derive(Debug,PartialEq)]
pub struct Smb3TransformRecord<'a> {
pub session_id: u64,
pub enc_algo: u16,
pub enc_data: &'a[u8],
}
pub fn
|
(i: &[u8]) -> IResult<&[u8], Smb3TransformRecord> {
let (i, _) = tag(b"\xfdSMB")(i)?;
let (i, _signature) = take(16_usize)(i)?;
let (i, _nonce) = take(16_usize)(i)?;
let (i, msg_size) = le_u32(i)?;
let (i, _reserved) = le_u16(i)?;
let (i, enc_algo) = le_u16(i)?;
let (i, session_id) = le_u64(i)?;
let (i, enc_data) = take(msg_size)(i)?;
let record = Smb3TransformRecord {
session_id,
enc_algo,
enc_data,
};
Ok((i, record))
}
|
parse_smb3_transform_record
|
identifier_name
|
inline.rs
|
// compile-flags: -Zinline-mir
use std::fmt::Display;
fn main() {
permutations(&['a', 'b', 'c']);
}
#[inline(always)]
fn
|
<T: Copy + Display>(xs: &[T]) {
let mut ys = xs.to_owned();
permutate(&mut ys, 0);
}
fn permutate<T: Copy + Display>(xs: &mut [T], k: usize) {
let n = length(xs);
if k == n {
display(xs);
} else if k < n {
for i in k..n {
swap(xs, i, k);
permutate(xs, k + 1);
swap(xs, i, k);
}
} else {
error();
}
}
fn length<T>(xs: &[T]) -> usize {
xs.len()
}
#[inline]
fn swap<T: Copy>(xs: &mut [T], i: usize, j: usize) {
let t = xs[i];
xs[i] = xs[j];
xs[j] = t;
}
fn display<T: Display>(xs: &[T]) {
for x in xs {
print!("{}", x);
}
println!();
}
#[inline(always)]
fn error() {
panic!("error");
}
|
permutations
|
identifier_name
|
inline.rs
|
// compile-flags: -Zinline-mir
use std::fmt::Display;
fn main() {
permutations(&['a', 'b', 'c']);
}
#[inline(always)]
fn permutations<T: Copy + Display>(xs: &[T]) {
let mut ys = xs.to_owned();
permutate(&mut ys, 0);
}
fn permutate<T: Copy + Display>(xs: &mut [T], k: usize) {
let n = length(xs);
if k == n {
display(xs);
} else if k < n {
for i in k..n {
swap(xs, i, k);
permutate(xs, k + 1);
swap(xs, i, k);
}
} else
|
}
fn length<T>(xs: &[T]) -> usize {
xs.len()
}
#[inline]
fn swap<T: Copy>(xs: &mut [T], i: usize, j: usize) {
let t = xs[i];
xs[i] = xs[j];
xs[j] = t;
}
fn display<T: Display>(xs: &[T]) {
for x in xs {
print!("{}", x);
}
println!();
}
#[inline(always)]
fn error() {
panic!("error");
}
|
{
error();
}
|
conditional_block
|
inline.rs
|
// compile-flags: -Zinline-mir
use std::fmt::Display;
fn main() {
permutations(&['a', 'b', 'c']);
}
|
#[inline(always)]
fn permutations<T: Copy + Display>(xs: &[T]) {
let mut ys = xs.to_owned();
permutate(&mut ys, 0);
}
fn permutate<T: Copy + Display>(xs: &mut [T], k: usize) {
let n = length(xs);
if k == n {
display(xs);
} else if k < n {
for i in k..n {
swap(xs, i, k);
permutate(xs, k + 1);
swap(xs, i, k);
}
} else {
error();
}
}
fn length<T>(xs: &[T]) -> usize {
xs.len()
}
#[inline]
fn swap<T: Copy>(xs: &mut [T], i: usize, j: usize) {
let t = xs[i];
xs[i] = xs[j];
xs[j] = t;
}
fn display<T: Display>(xs: &[T]) {
for x in xs {
print!("{}", x);
}
println!();
}
#[inline(always)]
fn error() {
panic!("error");
}
|
random_line_split
|
|
inline.rs
|
// compile-flags: -Zinline-mir
use std::fmt::Display;
fn main() {
permutations(&['a', 'b', 'c']);
}
#[inline(always)]
fn permutations<T: Copy + Display>(xs: &[T]) {
let mut ys = xs.to_owned();
permutate(&mut ys, 0);
}
fn permutate<T: Copy + Display>(xs: &mut [T], k: usize)
|
fn length<T>(xs: &[T]) -> usize {
xs.len()
}
#[inline]
fn swap<T: Copy>(xs: &mut [T], i: usize, j: usize) {
let t = xs[i];
xs[i] = xs[j];
xs[j] = t;
}
fn display<T: Display>(xs: &[T]) {
for x in xs {
print!("{}", x);
}
println!();
}
#[inline(always)]
fn error() {
panic!("error");
}
|
{
let n = length(xs);
if k == n {
display(xs);
} else if k < n {
for i in k..n {
swap(xs, i, k);
permutate(xs, k + 1);
swap(xs, i, k);
}
} else {
error();
}
}
|
identifier_body
|
commands.rs
|
use std::process::exit;
use std::fs::File;
use std::io::Write;
use libllama::dbgcore::{self, ActiveCpu};
use libllama::utils::from_hex;
/// Prints disassembly for the next instruction
/// Command format: "asm [address hex]"
///
/// `args`: Iterator over &str items
fn cmd_asm<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
use capstone::Capstone;
use capstone::arch::BuildsCapstone;
use capstone::arch::arm::ArchMode;
let _ = args;
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
let pause_addr = match args.next().map(from_hex) {
Some(Ok(x)) => x,
Some(Err(_)) => { error!("Could not parse hex value!"); return }
None => hw.pause_addr(),
};
let cpu_mode = if hw.is_thumb() {
ArchMode::Thumb
} else {
ArchMode::Arm
};
let cs = Capstone::new()
.arm()
.mode(cpu_mode)
.build();
|
if let Ok(mut cs) = cs {
let mut inst_bytes = [0u8; 4];
if let Err(e) = hw.read_mem(pause_addr, &mut inst_bytes) {
error!("{}", e);
return;
}
match cs.disasm_count(&inst_bytes, pause_addr as u64, 1) {
Ok(insts) => {
let inst = insts.iter().next().unwrap();
info!("{:X}: {} {}", pause_addr,
inst.mnemonic().unwrap(),
inst.op_str().unwrap())
}
Err(_) => error!("Failed to disassemble instruction at 0x{:X}", pause_addr),
}
} else {
error!("Could not initialize capstone!");
}
}
/// Adds CPU breakpoint at instruction address
/// Command format: "brk <address hex>"
///
/// `args`: Iterator over &str items
fn cmd_brk<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
let addr_str = match args.next() {
Some(arg) => from_hex(arg),
None => { info!("Usage: `brk <addr>"); return }
};
// Check for from_hex errors
let addr = match addr_str {
Ok(x) => x,
_ => { error!("Could not parse hex value!"); return }
};
info!("Toggling breakpoint at 0x{:X}", addr);
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
if!hw.has_breakpoint(addr) {
hw.set_breakpoint(addr);
} else {
hw.del_breakpoint(addr);
}
}
/// Toggles or displays button state
/// Command format: "btn [button name] [up/down]"
///
/// `args`: Iterator over &str items
fn cmd_btn<'a, It>(_active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
use libllama::io::hid;
let mut ctx = debugger.ctx(ActiveCpu::Arm11);
let hw = ctx.hw11();
let io_shared = &hw.io_shared_devices().hid;
let btn_map = [
("a", hid::Button::A),
("b", hid::Button::B),
("x", hid::Button::X),
("y", hid::Button::Y),
("l", hid::Button::L),
("r", hid::Button::R),
("up", hid::Button::Up),
("down", hid::Button::Down),
("left", hid::Button::Left),
("right", hid::Button::Right),
("start", hid::Button::Start),
("select", hid::Button::Select)
];
let mut btn_map = btn_map.iter();
if let Some(button) = args.next() {
let press = match args.next() {
Some("up") => hid::ButtonState::Released,
Some("down") => hid::ButtonState::Pressed,
_ => {
error!("Specify whether button `{}` should be `up`/`down`", button);
return
}
};
if let Some((_, btn)) = btn_map.find(|tup| button.eq_ignore_ascii_case(tup.0)) {
hid::update_pad(&mut io_shared.lock(), press(*btn));
} else {
error!("Button `{}` does not exist!", button);
}
} else {
let pad = hid::pad(&mut io_shared.lock());
let mut pressed = Vec::new();
for (label, btn) in btn_map {
if pad & (1 << (*btn as usize))!= 0 {
pressed.push(label);
}
}
info!("Pressed buttons: {:?}", pressed);
}
}
/// Dumps framebuffer to file
/// Command format: "fbdmp"
///
/// `args`: Unused
fn cmd_fbdmp<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, _: It)
where It: Iterator<Item=&'a str> {
use libllama::io::gpu;
let mut ctx = debugger.ctx(active_cpu);
let fb_state = {
let hw = ctx.hw11();
let gpu = &hw.io11_devices().gpu;
let fb_state = gpu::fb_state(&*gpu.borrow());
fb_state
};
let mut fbs = libllama::hwcore::Framebuffers::default();
ctx.hwcore().copy_framebuffers(&mut fbs, &fb_state);
info!("Dumping framebuffers to disk in CWD...");
let mut top = File::create("./fb-top.bin")
.expect("Could not create fb-top.bin file!");
top.write_all(fbs.top_screen.as_slice())
.expect("Could not write top framebuffer!");
let mut bot = File::create("./fb-bot.bin")
.expect("Could not create fb-bot.bin file!");
bot.write_all(fbs.bot_screen.as_slice())
.expect("Could not write bottom framebuffer!");
}
/// Sets AES key-dumping state
/// Command format: "keydmp"
///
/// `args`: Unused
fn cmd_keydmp<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, _: It)
where It: Iterator<Item=&'a str> {
use libllama::io::aes;
let mut ctx = debugger.ctx(active_cpu);
let hw = ctx.hw9();
let key_slots = {
let aes = &hw.io9_devices().aes;
aes::dump_keys(&*aes.borrow())
};
info!("Dumping AES keys to disk...");
use libllama::fs;
fs::create_file(fs::LlamaFile::AesKeyDb, |file| {
for k in key_slots.iter() {
if let Err(x) = file.write_all(&k.data) {
error!("Failed to write to aeskeydb file; {:?}", x);
return
}
}
}).unwrap();
}
/// Triggers the specified IRQ
/// Command format: "irq <type>"
///
/// `args`: Iterator over &str items
fn cmd_irq<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
let irq_ty = match args.next() {
Some(arg) => arg.to_lowercase(),
None => { info!("Usage: `irq <type>"); return }
};
let irq = match irq_ty.as_str() {
"timer0" => dbgcore::IrqType9::Timer0,
"timer1" => dbgcore::IrqType9::Timer1,
"timer2" => dbgcore::IrqType9::Timer2,
"timer3" => dbgcore::IrqType9::Timer3,
_ => { error!("Unimplemented/unknown IRQ type `{}`", irq_ty); return }
};
info!("Triggering IRQ {}", irq_ty);
let mut ctx = debugger.ctx(active_cpu);
ctx.trigger_irq(irq);
}
/// Prints memory to the screen based on provided address, number of bytes
/// Command format: "mem <start address hex> [# bytes hex]"
///
/// `args`: Iterator over &str items
fn cmd_mem<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
// Tuple: (u32: start, u32: num)
let arg_res = match (args.next(), args.next()) {
(Some(ss), Some(ns)) => from_hex(ss).and_then(|s| Ok((s, from_hex(ns)?))),
(Some(ss), None) => from_hex(ss).and_then(|s| Ok((s, 1))),
(None, _) => { info!("Usage: `mem <start> [num] [outfile.bin]"); return }
};
// Check for from_hex errors, validate `num` input
let (start, num) = match arg_res {
Ok((s, n)) if n > 0 => (s, n),
Ok((s, _)) => (s, 1),
_ => { error!("Could not parse hex value!"); return }
};
trace!("Printing {} bytes of RAM starting at 0x{:08X}", num, start);
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
let mut mem_bytes = vec![0u8; num as usize];
if let Err(e) = hw.read_mem(start, &mut mem_bytes) {
error!("{}", e);
return;
} else {
let mut strbuf = String::new();
strbuf.push_str(&format!("{:02X}", mem_bytes[0]));
for i in 1.. num as usize {
strbuf.push_str(&format!(" {:02X}", mem_bytes[i]));
}
info!("{}", &strbuf);
}
if let Some(filename) = args.next() {
let file = File::create(filename);
let mut file = match file {
Ok(file) => file,
Err(e) => {
error!("Unable to open file `{}` for dumping memory: {:?}!", filename, e);
return;
}
};
if let Err(e) = file.write_all(mem_bytes.as_slice()) {
error!("Unable to write into file `{}`: {:?}", filename, e);
return;
}
info!("Wrote 0x{:X} bytes to `{}`", num, filename);
}
}
/// Prints registers to the screen based on provided register name
/// Command format: "reg [register name]"
///
/// `args`: Iterator over &str items
fn cmd_reg<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
let mut ctx = debugger.ctx(active_cpu);
let hw = ctx.hw();
let print_reg = |reg_num| info!("R{} = 0x{:08X}", reg_num, hw.read_reg(reg_num));
let print_cpsr = || info!("CPSR = 0x{:08X}", hw.read_cpsr());
let reg_str = match args.next() {
Some(arg) => arg.to_owned().to_lowercase(),
None => {
for i in 0..16 {
print_reg(i);
}
print_cpsr();
return;
}
};
match reg_str.as_str() {
"r0" => print_reg(0),
"r1" => print_reg(1),
"r2" => print_reg(2),
"r3" => print_reg(3),
"r4" => print_reg(4),
"r5" => print_reg(5),
"r6" => print_reg(6),
"r7" => print_reg(7),
"r8" => print_reg(8),
"r9" => print_reg(9),
"r10" => print_reg(10),
"r11" => print_reg(11),
"r12" => print_reg(12),
"sp" | "r13" => print_reg(13),
"lr" | "r14" => print_reg(14),
"pc" | "r15" => print_reg(15),
"cpsr" => print_cpsr(),
_ => error!("Unrecognized register!"),
}
}
/// Runs one instruction on the CPU
/// Command format: "step"
///
/// `args`: Unused
fn cmd_step<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, args: It)
where It: Iterator<Item=&'a str> {
let _ = args;
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
hw.step();
}
/// Controls debugger behavior based on user-provided commands
///
/// `command`: Iterator over &str items
pub fn handle<'a, It>(active_cpu: &mut ActiveCpu, debugger: &mut dbgcore::DbgCore, mut command: It)
where It: Iterator<Item=&'a str> {
match command.next() {
Some("asm") => cmd_asm(*active_cpu, debugger, command),
Some("brk") => cmd_brk(*active_cpu, debugger, command),
Some("btn") => cmd_btn(*active_cpu, debugger, command),
Some("fbdmp") => cmd_fbdmp(*active_cpu, debugger, command),
Some("irq") => cmd_irq(*active_cpu, debugger, command),
Some("keydmp") => cmd_keydmp(*active_cpu, debugger, command),
Some("mem") => cmd_mem(*active_cpu, debugger, command),
Some("reg") => cmd_reg(*active_cpu, debugger, command),
Some("run") => { debugger.ctx(*active_cpu).resume() },
Some("step") => cmd_step(*active_cpu, debugger, command),
Some("cpu") => {
match command.next() {
Some("arm9") => *active_cpu = ActiveCpu::Arm9,
Some("arm11") => *active_cpu = ActiveCpu::Arm11,
_ => error!("Expected `cpu <arm9|arm11>")
}
}
Some("quit") | Some("exit") => {
debugger.ctx(*active_cpu).hwcore_mut().stop();
// TODO: Cleaner exit?
exit(0);
}
None => {},
Some(unk_cmd @ _) => error!("Unrecognized command `{}`", unk_cmd),
}
}
|
random_line_split
|
|
commands.rs
|
use std::process::exit;
use std::fs::File;
use std::io::Write;
use libllama::dbgcore::{self, ActiveCpu};
use libllama::utils::from_hex;
/// Prints disassembly for the next instruction
/// Command format: "asm [address hex]"
///
/// `args`: Iterator over &str items
fn cmd_asm<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
use capstone::Capstone;
use capstone::arch::BuildsCapstone;
use capstone::arch::arm::ArchMode;
let _ = args;
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
let pause_addr = match args.next().map(from_hex) {
Some(Ok(x)) => x,
Some(Err(_)) => { error!("Could not parse hex value!"); return }
None => hw.pause_addr(),
};
let cpu_mode = if hw.is_thumb() {
ArchMode::Thumb
} else {
ArchMode::Arm
};
let cs = Capstone::new()
.arm()
.mode(cpu_mode)
.build();
if let Ok(mut cs) = cs {
let mut inst_bytes = [0u8; 4];
if let Err(e) = hw.read_mem(pause_addr, &mut inst_bytes) {
error!("{}", e);
return;
}
match cs.disasm_count(&inst_bytes, pause_addr as u64, 1) {
Ok(insts) => {
let inst = insts.iter().next().unwrap();
info!("{:X}: {} {}", pause_addr,
inst.mnemonic().unwrap(),
inst.op_str().unwrap())
}
Err(_) => error!("Failed to disassemble instruction at 0x{:X}", pause_addr),
}
} else {
error!("Could not initialize capstone!");
}
}
/// Adds CPU breakpoint at instruction address
/// Command format: "brk <address hex>"
///
/// `args`: Iterator over &str items
fn cmd_brk<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
let addr_str = match args.next() {
Some(arg) => from_hex(arg),
None => { info!("Usage: `brk <addr>"); return }
};
// Check for from_hex errors
let addr = match addr_str {
Ok(x) => x,
_ => { error!("Could not parse hex value!"); return }
};
info!("Toggling breakpoint at 0x{:X}", addr);
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
if!hw.has_breakpoint(addr) {
hw.set_breakpoint(addr);
} else {
hw.del_breakpoint(addr);
}
}
/// Toggles or displays button state
/// Command format: "btn [button name] [up/down]"
///
/// `args`: Iterator over &str items
fn cmd_btn<'a, It>(_active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str>
|
];
let mut btn_map = btn_map.iter();
if let Some(button) = args.next() {
let press = match args.next() {
Some("up") => hid::ButtonState::Released,
Some("down") => hid::ButtonState::Pressed,
_ => {
error!("Specify whether button `{}` should be `up`/`down`", button);
return
}
};
if let Some((_, btn)) = btn_map.find(|tup| button.eq_ignore_ascii_case(tup.0)) {
hid::update_pad(&mut io_shared.lock(), press(*btn));
} else {
error!("Button `{}` does not exist!", button);
}
} else {
let pad = hid::pad(&mut io_shared.lock());
let mut pressed = Vec::new();
for (label, btn) in btn_map {
if pad & (1 << (*btn as usize))!= 0 {
pressed.push(label);
}
}
info!("Pressed buttons: {:?}", pressed);
}
}
/// Dumps framebuffer to file
/// Command format: "fbdmp"
///
/// `args`: Unused
fn cmd_fbdmp<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, _: It)
where It: Iterator<Item=&'a str> {
use libllama::io::gpu;
let mut ctx = debugger.ctx(active_cpu);
let fb_state = {
let hw = ctx.hw11();
let gpu = &hw.io11_devices().gpu;
let fb_state = gpu::fb_state(&*gpu.borrow());
fb_state
};
let mut fbs = libllama::hwcore::Framebuffers::default();
ctx.hwcore().copy_framebuffers(&mut fbs, &fb_state);
info!("Dumping framebuffers to disk in CWD...");
let mut top = File::create("./fb-top.bin")
.expect("Could not create fb-top.bin file!");
top.write_all(fbs.top_screen.as_slice())
.expect("Could not write top framebuffer!");
let mut bot = File::create("./fb-bot.bin")
.expect("Could not create fb-bot.bin file!");
bot.write_all(fbs.bot_screen.as_slice())
.expect("Could not write bottom framebuffer!");
}
/// Sets AES key-dumping state
/// Command format: "keydmp"
///
/// `args`: Unused
fn cmd_keydmp<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, _: It)
where It: Iterator<Item=&'a str> {
use libllama::io::aes;
let mut ctx = debugger.ctx(active_cpu);
let hw = ctx.hw9();
let key_slots = {
let aes = &hw.io9_devices().aes;
aes::dump_keys(&*aes.borrow())
};
info!("Dumping AES keys to disk...");
use libllama::fs;
fs::create_file(fs::LlamaFile::AesKeyDb, |file| {
for k in key_slots.iter() {
if let Err(x) = file.write_all(&k.data) {
error!("Failed to write to aeskeydb file; {:?}", x);
return
}
}
}).unwrap();
}
/// Triggers the specified IRQ
/// Command format: "irq <type>"
///
/// `args`: Iterator over &str items
fn cmd_irq<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
let irq_ty = match args.next() {
Some(arg) => arg.to_lowercase(),
None => { info!("Usage: `irq <type>"); return }
};
let irq = match irq_ty.as_str() {
"timer0" => dbgcore::IrqType9::Timer0,
"timer1" => dbgcore::IrqType9::Timer1,
"timer2" => dbgcore::IrqType9::Timer2,
"timer3" => dbgcore::IrqType9::Timer3,
_ => { error!("Unimplemented/unknown IRQ type `{}`", irq_ty); return }
};
info!("Triggering IRQ {}", irq_ty);
let mut ctx = debugger.ctx(active_cpu);
ctx.trigger_irq(irq);
}
/// Prints memory to the screen based on provided address, number of bytes
/// Command format: "mem <start address hex> [# bytes hex]"
///
/// `args`: Iterator over &str items
fn cmd_mem<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
// Tuple: (u32: start, u32: num)
let arg_res = match (args.next(), args.next()) {
(Some(ss), Some(ns)) => from_hex(ss).and_then(|s| Ok((s, from_hex(ns)?))),
(Some(ss), None) => from_hex(ss).and_then(|s| Ok((s, 1))),
(None, _) => { info!("Usage: `mem <start> [num] [outfile.bin]"); return }
};
// Check for from_hex errors, validate `num` input
let (start, num) = match arg_res {
Ok((s, n)) if n > 0 => (s, n),
Ok((s, _)) => (s, 1),
_ => { error!("Could not parse hex value!"); return }
};
trace!("Printing {} bytes of RAM starting at 0x{:08X}", num, start);
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
let mut mem_bytes = vec![0u8; num as usize];
if let Err(e) = hw.read_mem(start, &mut mem_bytes) {
error!("{}", e);
return;
} else {
let mut strbuf = String::new();
strbuf.push_str(&format!("{:02X}", mem_bytes[0]));
for i in 1.. num as usize {
strbuf.push_str(&format!(" {:02X}", mem_bytes[i]));
}
info!("{}", &strbuf);
}
if let Some(filename) = args.next() {
let file = File::create(filename);
let mut file = match file {
Ok(file) => file,
Err(e) => {
error!("Unable to open file `{}` for dumping memory: {:?}!", filename, e);
return;
}
};
if let Err(e) = file.write_all(mem_bytes.as_slice()) {
error!("Unable to write into file `{}`: {:?}", filename, e);
return;
}
info!("Wrote 0x{:X} bytes to `{}`", num, filename);
}
}
/// Prints registers to the screen based on provided register name
/// Command format: "reg [register name]"
///
/// `args`: Iterator over &str items
fn cmd_reg<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
let mut ctx = debugger.ctx(active_cpu);
let hw = ctx.hw();
let print_reg = |reg_num| info!("R{} = 0x{:08X}", reg_num, hw.read_reg(reg_num));
let print_cpsr = || info!("CPSR = 0x{:08X}", hw.read_cpsr());
let reg_str = match args.next() {
Some(arg) => arg.to_owned().to_lowercase(),
None => {
for i in 0..16 {
print_reg(i);
}
print_cpsr();
return;
}
};
match reg_str.as_str() {
"r0" => print_reg(0),
"r1" => print_reg(1),
"r2" => print_reg(2),
"r3" => print_reg(3),
"r4" => print_reg(4),
"r5" => print_reg(5),
"r6" => print_reg(6),
"r7" => print_reg(7),
"r8" => print_reg(8),
"r9" => print_reg(9),
"r10" => print_reg(10),
"r11" => print_reg(11),
"r12" => print_reg(12),
"sp" | "r13" => print_reg(13),
"lr" | "r14" => print_reg(14),
"pc" | "r15" => print_reg(15),
"cpsr" => print_cpsr(),
_ => error!("Unrecognized register!"),
}
}
/// Runs one instruction on the CPU
/// Command format: "step"
///
/// `args`: Unused
fn cmd_step<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, args: It)
where It: Iterator<Item=&'a str> {
let _ = args;
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
hw.step();
}
/// Controls debugger behavior based on user-provided commands
///
/// `command`: Iterator over &str items
pub fn handle<'a, It>(active_cpu: &mut ActiveCpu, debugger: &mut dbgcore::DbgCore, mut command: It)
where It: Iterator<Item=&'a str> {
match command.next() {
Some("asm") => cmd_asm(*active_cpu, debugger, command),
Some("brk") => cmd_brk(*active_cpu, debugger, command),
Some("btn") => cmd_btn(*active_cpu, debugger, command),
Some("fbdmp") => cmd_fbdmp(*active_cpu, debugger, command),
Some("irq") => cmd_irq(*active_cpu, debugger, command),
Some("keydmp") => cmd_keydmp(*active_cpu, debugger, command),
Some("mem") => cmd_mem(*active_cpu, debugger, command),
Some("reg") => cmd_reg(*active_cpu, debugger, command),
Some("run") => { debugger.ctx(*active_cpu).resume() },
Some("step") => cmd_step(*active_cpu, debugger, command),
Some("cpu") => {
match command.next() {
Some("arm9") => *active_cpu = ActiveCpu::Arm9,
Some("arm11") => *active_cpu = ActiveCpu::Arm11,
_ => error!("Expected `cpu <arm9|arm11>")
}
}
Some("quit") | Some("exit") => {
debugger.ctx(*active_cpu).hwcore_mut().stop();
// TODO: Cleaner exit?
exit(0);
}
None => {},
Some(unk_cmd @ _) => error!("Unrecognized command `{}`", unk_cmd),
}
}
|
{
use libllama::io::hid;
let mut ctx = debugger.ctx(ActiveCpu::Arm11);
let hw = ctx.hw11();
let io_shared = &hw.io_shared_devices().hid;
let btn_map = [
("a", hid::Button::A),
("b", hid::Button::B),
("x", hid::Button::X),
("y", hid::Button::Y),
("l", hid::Button::L),
("r", hid::Button::R),
("up", hid::Button::Up),
("down", hid::Button::Down),
("left", hid::Button::Left),
("right", hid::Button::Right),
("start", hid::Button::Start),
("select", hid::Button::Select)
|
identifier_body
|
commands.rs
|
use std::process::exit;
use std::fs::File;
use std::io::Write;
use libllama::dbgcore::{self, ActiveCpu};
use libllama::utils::from_hex;
/// Prints disassembly for the next instruction
/// Command format: "asm [address hex]"
///
/// `args`: Iterator over &str items
fn cmd_asm<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
use capstone::Capstone;
use capstone::arch::BuildsCapstone;
use capstone::arch::arm::ArchMode;
let _ = args;
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
let pause_addr = match args.next().map(from_hex) {
Some(Ok(x)) => x,
Some(Err(_)) => { error!("Could not parse hex value!"); return }
None => hw.pause_addr(),
};
let cpu_mode = if hw.is_thumb() {
ArchMode::Thumb
} else
|
;
let cs = Capstone::new()
.arm()
.mode(cpu_mode)
.build();
if let Ok(mut cs) = cs {
let mut inst_bytes = [0u8; 4];
if let Err(e) = hw.read_mem(pause_addr, &mut inst_bytes) {
error!("{}", e);
return;
}
match cs.disasm_count(&inst_bytes, pause_addr as u64, 1) {
Ok(insts) => {
let inst = insts.iter().next().unwrap();
info!("{:X}: {} {}", pause_addr,
inst.mnemonic().unwrap(),
inst.op_str().unwrap())
}
Err(_) => error!("Failed to disassemble instruction at 0x{:X}", pause_addr),
}
} else {
error!("Could not initialize capstone!");
}
}
/// Adds CPU breakpoint at instruction address
/// Command format: "brk <address hex>"
///
/// `args`: Iterator over &str items
fn cmd_brk<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
let addr_str = match args.next() {
Some(arg) => from_hex(arg),
None => { info!("Usage: `brk <addr>"); return }
};
// Check for from_hex errors
let addr = match addr_str {
Ok(x) => x,
_ => { error!("Could not parse hex value!"); return }
};
info!("Toggling breakpoint at 0x{:X}", addr);
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
if!hw.has_breakpoint(addr) {
hw.set_breakpoint(addr);
} else {
hw.del_breakpoint(addr);
}
}
/// Toggles or displays button state
/// Command format: "btn [button name] [up/down]"
///
/// `args`: Iterator over &str items
fn cmd_btn<'a, It>(_active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
use libllama::io::hid;
let mut ctx = debugger.ctx(ActiveCpu::Arm11);
let hw = ctx.hw11();
let io_shared = &hw.io_shared_devices().hid;
let btn_map = [
("a", hid::Button::A),
("b", hid::Button::B),
("x", hid::Button::X),
("y", hid::Button::Y),
("l", hid::Button::L),
("r", hid::Button::R),
("up", hid::Button::Up),
("down", hid::Button::Down),
("left", hid::Button::Left),
("right", hid::Button::Right),
("start", hid::Button::Start),
("select", hid::Button::Select)
];
let mut btn_map = btn_map.iter();
if let Some(button) = args.next() {
let press = match args.next() {
Some("up") => hid::ButtonState::Released,
Some("down") => hid::ButtonState::Pressed,
_ => {
error!("Specify whether button `{}` should be `up`/`down`", button);
return
}
};
if let Some((_, btn)) = btn_map.find(|tup| button.eq_ignore_ascii_case(tup.0)) {
hid::update_pad(&mut io_shared.lock(), press(*btn));
} else {
error!("Button `{}` does not exist!", button);
}
} else {
let pad = hid::pad(&mut io_shared.lock());
let mut pressed = Vec::new();
for (label, btn) in btn_map {
if pad & (1 << (*btn as usize))!= 0 {
pressed.push(label);
}
}
info!("Pressed buttons: {:?}", pressed);
}
}
/// Dumps framebuffer to file
/// Command format: "fbdmp"
///
/// `args`: Unused
fn cmd_fbdmp<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, _: It)
where It: Iterator<Item=&'a str> {
use libllama::io::gpu;
let mut ctx = debugger.ctx(active_cpu);
let fb_state = {
let hw = ctx.hw11();
let gpu = &hw.io11_devices().gpu;
let fb_state = gpu::fb_state(&*gpu.borrow());
fb_state
};
let mut fbs = libllama::hwcore::Framebuffers::default();
ctx.hwcore().copy_framebuffers(&mut fbs, &fb_state);
info!("Dumping framebuffers to disk in CWD...");
let mut top = File::create("./fb-top.bin")
.expect("Could not create fb-top.bin file!");
top.write_all(fbs.top_screen.as_slice())
.expect("Could not write top framebuffer!");
let mut bot = File::create("./fb-bot.bin")
.expect("Could not create fb-bot.bin file!");
bot.write_all(fbs.bot_screen.as_slice())
.expect("Could not write bottom framebuffer!");
}
/// Sets AES key-dumping state
/// Command format: "keydmp"
///
/// `args`: Unused
fn cmd_keydmp<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, _: It)
where It: Iterator<Item=&'a str> {
use libllama::io::aes;
let mut ctx = debugger.ctx(active_cpu);
let hw = ctx.hw9();
let key_slots = {
let aes = &hw.io9_devices().aes;
aes::dump_keys(&*aes.borrow())
};
info!("Dumping AES keys to disk...");
use libllama::fs;
fs::create_file(fs::LlamaFile::AesKeyDb, |file| {
for k in key_slots.iter() {
if let Err(x) = file.write_all(&k.data) {
error!("Failed to write to aeskeydb file; {:?}", x);
return
}
}
}).unwrap();
}
/// Triggers the specified IRQ
/// Command format: "irq <type>"
///
/// `args`: Iterator over &str items
fn cmd_irq<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
let irq_ty = match args.next() {
Some(arg) => arg.to_lowercase(),
None => { info!("Usage: `irq <type>"); return }
};
let irq = match irq_ty.as_str() {
"timer0" => dbgcore::IrqType9::Timer0,
"timer1" => dbgcore::IrqType9::Timer1,
"timer2" => dbgcore::IrqType9::Timer2,
"timer3" => dbgcore::IrqType9::Timer3,
_ => { error!("Unimplemented/unknown IRQ type `{}`", irq_ty); return }
};
info!("Triggering IRQ {}", irq_ty);
let mut ctx = debugger.ctx(active_cpu);
ctx.trigger_irq(irq);
}
/// Prints memory to the screen based on provided address, number of bytes
/// Command format: "mem <start address hex> [# bytes hex]"
///
/// `args`: Iterator over &str items
fn cmd_mem<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
// Tuple: (u32: start, u32: num)
let arg_res = match (args.next(), args.next()) {
(Some(ss), Some(ns)) => from_hex(ss).and_then(|s| Ok((s, from_hex(ns)?))),
(Some(ss), None) => from_hex(ss).and_then(|s| Ok((s, 1))),
(None, _) => { info!("Usage: `mem <start> [num] [outfile.bin]"); return }
};
// Check for from_hex errors, validate `num` input
let (start, num) = match arg_res {
Ok((s, n)) if n > 0 => (s, n),
Ok((s, _)) => (s, 1),
_ => { error!("Could not parse hex value!"); return }
};
trace!("Printing {} bytes of RAM starting at 0x{:08X}", num, start);
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
let mut mem_bytes = vec![0u8; num as usize];
if let Err(e) = hw.read_mem(start, &mut mem_bytes) {
error!("{}", e);
return;
} else {
let mut strbuf = String::new();
strbuf.push_str(&format!("{:02X}", mem_bytes[0]));
for i in 1.. num as usize {
strbuf.push_str(&format!(" {:02X}", mem_bytes[i]));
}
info!("{}", &strbuf);
}
if let Some(filename) = args.next() {
let file = File::create(filename);
let mut file = match file {
Ok(file) => file,
Err(e) => {
error!("Unable to open file `{}` for dumping memory: {:?}!", filename, e);
return;
}
};
if let Err(e) = file.write_all(mem_bytes.as_slice()) {
error!("Unable to write into file `{}`: {:?}", filename, e);
return;
}
info!("Wrote 0x{:X} bytes to `{}`", num, filename);
}
}
/// Prints registers to the screen based on provided register name
/// Command format: "reg [register name]"
///
/// `args`: Iterator over &str items
fn cmd_reg<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
let mut ctx = debugger.ctx(active_cpu);
let hw = ctx.hw();
let print_reg = |reg_num| info!("R{} = 0x{:08X}", reg_num, hw.read_reg(reg_num));
let print_cpsr = || info!("CPSR = 0x{:08X}", hw.read_cpsr());
let reg_str = match args.next() {
Some(arg) => arg.to_owned().to_lowercase(),
None => {
for i in 0..16 {
print_reg(i);
}
print_cpsr();
return;
}
};
match reg_str.as_str() {
"r0" => print_reg(0),
"r1" => print_reg(1),
"r2" => print_reg(2),
"r3" => print_reg(3),
"r4" => print_reg(4),
"r5" => print_reg(5),
"r6" => print_reg(6),
"r7" => print_reg(7),
"r8" => print_reg(8),
"r9" => print_reg(9),
"r10" => print_reg(10),
"r11" => print_reg(11),
"r12" => print_reg(12),
"sp" | "r13" => print_reg(13),
"lr" | "r14" => print_reg(14),
"pc" | "r15" => print_reg(15),
"cpsr" => print_cpsr(),
_ => error!("Unrecognized register!"),
}
}
/// Runs one instruction on the CPU
/// Command format: "step"
///
/// `args`: Unused
fn cmd_step<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, args: It)
where It: Iterator<Item=&'a str> {
let _ = args;
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
hw.step();
}
/// Controls debugger behavior based on user-provided commands
///
/// `command`: Iterator over &str items
pub fn handle<'a, It>(active_cpu: &mut ActiveCpu, debugger: &mut dbgcore::DbgCore, mut command: It)
where It: Iterator<Item=&'a str> {
match command.next() {
Some("asm") => cmd_asm(*active_cpu, debugger, command),
Some("brk") => cmd_brk(*active_cpu, debugger, command),
Some("btn") => cmd_btn(*active_cpu, debugger, command),
Some("fbdmp") => cmd_fbdmp(*active_cpu, debugger, command),
Some("irq") => cmd_irq(*active_cpu, debugger, command),
Some("keydmp") => cmd_keydmp(*active_cpu, debugger, command),
Some("mem") => cmd_mem(*active_cpu, debugger, command),
Some("reg") => cmd_reg(*active_cpu, debugger, command),
Some("run") => { debugger.ctx(*active_cpu).resume() },
Some("step") => cmd_step(*active_cpu, debugger, command),
Some("cpu") => {
match command.next() {
Some("arm9") => *active_cpu = ActiveCpu::Arm9,
Some("arm11") => *active_cpu = ActiveCpu::Arm11,
_ => error!("Expected `cpu <arm9|arm11>")
}
}
Some("quit") | Some("exit") => {
debugger.ctx(*active_cpu).hwcore_mut().stop();
// TODO: Cleaner exit?
exit(0);
}
None => {},
Some(unk_cmd @ _) => error!("Unrecognized command `{}`", unk_cmd),
}
}
|
{
ArchMode::Arm
}
|
conditional_block
|
commands.rs
|
use std::process::exit;
use std::fs::File;
use std::io::Write;
use libllama::dbgcore::{self, ActiveCpu};
use libllama::utils::from_hex;
/// Prints disassembly for the next instruction
/// Command format: "asm [address hex]"
///
/// `args`: Iterator over &str items
fn cmd_asm<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
use capstone::Capstone;
use capstone::arch::BuildsCapstone;
use capstone::arch::arm::ArchMode;
let _ = args;
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
let pause_addr = match args.next().map(from_hex) {
Some(Ok(x)) => x,
Some(Err(_)) => { error!("Could not parse hex value!"); return }
None => hw.pause_addr(),
};
let cpu_mode = if hw.is_thumb() {
ArchMode::Thumb
} else {
ArchMode::Arm
};
let cs = Capstone::new()
.arm()
.mode(cpu_mode)
.build();
if let Ok(mut cs) = cs {
let mut inst_bytes = [0u8; 4];
if let Err(e) = hw.read_mem(pause_addr, &mut inst_bytes) {
error!("{}", e);
return;
}
match cs.disasm_count(&inst_bytes, pause_addr as u64, 1) {
Ok(insts) => {
let inst = insts.iter().next().unwrap();
info!("{:X}: {} {}", pause_addr,
inst.mnemonic().unwrap(),
inst.op_str().unwrap())
}
Err(_) => error!("Failed to disassemble instruction at 0x{:X}", pause_addr),
}
} else {
error!("Could not initialize capstone!");
}
}
/// Adds CPU breakpoint at instruction address
/// Command format: "brk <address hex>"
///
/// `args`: Iterator over &str items
fn cmd_brk<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
let addr_str = match args.next() {
Some(arg) => from_hex(arg),
None => { info!("Usage: `brk <addr>"); return }
};
// Check for from_hex errors
let addr = match addr_str {
Ok(x) => x,
_ => { error!("Could not parse hex value!"); return }
};
info!("Toggling breakpoint at 0x{:X}", addr);
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
if!hw.has_breakpoint(addr) {
hw.set_breakpoint(addr);
} else {
hw.del_breakpoint(addr);
}
}
/// Toggles or displays button state
/// Command format: "btn [button name] [up/down]"
///
/// `args`: Iterator over &str items
fn cmd_btn<'a, It>(_active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
use libllama::io::hid;
let mut ctx = debugger.ctx(ActiveCpu::Arm11);
let hw = ctx.hw11();
let io_shared = &hw.io_shared_devices().hid;
let btn_map = [
("a", hid::Button::A),
("b", hid::Button::B),
("x", hid::Button::X),
("y", hid::Button::Y),
("l", hid::Button::L),
("r", hid::Button::R),
("up", hid::Button::Up),
("down", hid::Button::Down),
("left", hid::Button::Left),
("right", hid::Button::Right),
("start", hid::Button::Start),
("select", hid::Button::Select)
];
let mut btn_map = btn_map.iter();
if let Some(button) = args.next() {
let press = match args.next() {
Some("up") => hid::ButtonState::Released,
Some("down") => hid::ButtonState::Pressed,
_ => {
error!("Specify whether button `{}` should be `up`/`down`", button);
return
}
};
if let Some((_, btn)) = btn_map.find(|tup| button.eq_ignore_ascii_case(tup.0)) {
hid::update_pad(&mut io_shared.lock(), press(*btn));
} else {
error!("Button `{}` does not exist!", button);
}
} else {
let pad = hid::pad(&mut io_shared.lock());
let mut pressed = Vec::new();
for (label, btn) in btn_map {
if pad & (1 << (*btn as usize))!= 0 {
pressed.push(label);
}
}
info!("Pressed buttons: {:?}", pressed);
}
}
/// Dumps framebuffer to file
/// Command format: "fbdmp"
///
/// `args`: Unused
fn cmd_fbdmp<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, _: It)
where It: Iterator<Item=&'a str> {
use libllama::io::gpu;
let mut ctx = debugger.ctx(active_cpu);
let fb_state = {
let hw = ctx.hw11();
let gpu = &hw.io11_devices().gpu;
let fb_state = gpu::fb_state(&*gpu.borrow());
fb_state
};
let mut fbs = libllama::hwcore::Framebuffers::default();
ctx.hwcore().copy_framebuffers(&mut fbs, &fb_state);
info!("Dumping framebuffers to disk in CWD...");
let mut top = File::create("./fb-top.bin")
.expect("Could not create fb-top.bin file!");
top.write_all(fbs.top_screen.as_slice())
.expect("Could not write top framebuffer!");
let mut bot = File::create("./fb-bot.bin")
.expect("Could not create fb-bot.bin file!");
bot.write_all(fbs.bot_screen.as_slice())
.expect("Could not write bottom framebuffer!");
}
/// Sets AES key-dumping state
/// Command format: "keydmp"
///
/// `args`: Unused
fn cmd_keydmp<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, _: It)
where It: Iterator<Item=&'a str> {
use libllama::io::aes;
let mut ctx = debugger.ctx(active_cpu);
let hw = ctx.hw9();
let key_slots = {
let aes = &hw.io9_devices().aes;
aes::dump_keys(&*aes.borrow())
};
info!("Dumping AES keys to disk...");
use libllama::fs;
fs::create_file(fs::LlamaFile::AesKeyDb, |file| {
for k in key_slots.iter() {
if let Err(x) = file.write_all(&k.data) {
error!("Failed to write to aeskeydb file; {:?}", x);
return
}
}
}).unwrap();
}
/// Triggers the specified IRQ
/// Command format: "irq <type>"
///
/// `args`: Iterator over &str items
fn cmd_irq<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
let irq_ty = match args.next() {
Some(arg) => arg.to_lowercase(),
None => { info!("Usage: `irq <type>"); return }
};
let irq = match irq_ty.as_str() {
"timer0" => dbgcore::IrqType9::Timer0,
"timer1" => dbgcore::IrqType9::Timer1,
"timer2" => dbgcore::IrqType9::Timer2,
"timer3" => dbgcore::IrqType9::Timer3,
_ => { error!("Unimplemented/unknown IRQ type `{}`", irq_ty); return }
};
info!("Triggering IRQ {}", irq_ty);
let mut ctx = debugger.ctx(active_cpu);
ctx.trigger_irq(irq);
}
/// Prints memory to the screen based on provided address, number of bytes
/// Command format: "mem <start address hex> [# bytes hex]"
///
/// `args`: Iterator over &str items
fn cmd_mem<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
// Tuple: (u32: start, u32: num)
let arg_res = match (args.next(), args.next()) {
(Some(ss), Some(ns)) => from_hex(ss).and_then(|s| Ok((s, from_hex(ns)?))),
(Some(ss), None) => from_hex(ss).and_then(|s| Ok((s, 1))),
(None, _) => { info!("Usage: `mem <start> [num] [outfile.bin]"); return }
};
// Check for from_hex errors, validate `num` input
let (start, num) = match arg_res {
Ok((s, n)) if n > 0 => (s, n),
Ok((s, _)) => (s, 1),
_ => { error!("Could not parse hex value!"); return }
};
trace!("Printing {} bytes of RAM starting at 0x{:08X}", num, start);
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
let mut mem_bytes = vec![0u8; num as usize];
if let Err(e) = hw.read_mem(start, &mut mem_bytes) {
error!("{}", e);
return;
} else {
let mut strbuf = String::new();
strbuf.push_str(&format!("{:02X}", mem_bytes[0]));
for i in 1.. num as usize {
strbuf.push_str(&format!(" {:02X}", mem_bytes[i]));
}
info!("{}", &strbuf);
}
if let Some(filename) = args.next() {
let file = File::create(filename);
let mut file = match file {
Ok(file) => file,
Err(e) => {
error!("Unable to open file `{}` for dumping memory: {:?}!", filename, e);
return;
}
};
if let Err(e) = file.write_all(mem_bytes.as_slice()) {
error!("Unable to write into file `{}`: {:?}", filename, e);
return;
}
info!("Wrote 0x{:X} bytes to `{}`", num, filename);
}
}
/// Prints registers to the screen based on provided register name
/// Command format: "reg [register name]"
///
/// `args`: Iterator over &str items
fn
|
<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, mut args: It)
where It: Iterator<Item=&'a str> {
let mut ctx = debugger.ctx(active_cpu);
let hw = ctx.hw();
let print_reg = |reg_num| info!("R{} = 0x{:08X}", reg_num, hw.read_reg(reg_num));
let print_cpsr = || info!("CPSR = 0x{:08X}", hw.read_cpsr());
let reg_str = match args.next() {
Some(arg) => arg.to_owned().to_lowercase(),
None => {
for i in 0..16 {
print_reg(i);
}
print_cpsr();
return;
}
};
match reg_str.as_str() {
"r0" => print_reg(0),
"r1" => print_reg(1),
"r2" => print_reg(2),
"r3" => print_reg(3),
"r4" => print_reg(4),
"r5" => print_reg(5),
"r6" => print_reg(6),
"r7" => print_reg(7),
"r8" => print_reg(8),
"r9" => print_reg(9),
"r10" => print_reg(10),
"r11" => print_reg(11),
"r12" => print_reg(12),
"sp" | "r13" => print_reg(13),
"lr" | "r14" => print_reg(14),
"pc" | "r15" => print_reg(15),
"cpsr" => print_cpsr(),
_ => error!("Unrecognized register!"),
}
}
/// Runs one instruction on the CPU
/// Command format: "step"
///
/// `args`: Unused
fn cmd_step<'a, It>(active_cpu: ActiveCpu, debugger: &mut dbgcore::DbgCore, args: It)
where It: Iterator<Item=&'a str> {
let _ = args;
let mut ctx = debugger.ctx(active_cpu);
let mut hw = ctx.hw();
hw.step();
}
/// Controls debugger behavior based on user-provided commands
///
/// `command`: Iterator over &str items
pub fn handle<'a, It>(active_cpu: &mut ActiveCpu, debugger: &mut dbgcore::DbgCore, mut command: It)
where It: Iterator<Item=&'a str> {
match command.next() {
Some("asm") => cmd_asm(*active_cpu, debugger, command),
Some("brk") => cmd_brk(*active_cpu, debugger, command),
Some("btn") => cmd_btn(*active_cpu, debugger, command),
Some("fbdmp") => cmd_fbdmp(*active_cpu, debugger, command),
Some("irq") => cmd_irq(*active_cpu, debugger, command),
Some("keydmp") => cmd_keydmp(*active_cpu, debugger, command),
Some("mem") => cmd_mem(*active_cpu, debugger, command),
Some("reg") => cmd_reg(*active_cpu, debugger, command),
Some("run") => { debugger.ctx(*active_cpu).resume() },
Some("step") => cmd_step(*active_cpu, debugger, command),
Some("cpu") => {
match command.next() {
Some("arm9") => *active_cpu = ActiveCpu::Arm9,
Some("arm11") => *active_cpu = ActiveCpu::Arm11,
_ => error!("Expected `cpu <arm9|arm11>")
}
}
Some("quit") | Some("exit") => {
debugger.ctx(*active_cpu).hwcore_mut().stop();
// TODO: Cleaner exit?
exit(0);
}
None => {},
Some(unk_cmd @ _) => error!("Unrecognized command `{}`", unk_cmd),
}
}
|
cmd_reg
|
identifier_name
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::AttrBinding;
use dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;
use dom::bindings::codegen::InheritTypes::NodeCast;
use dom::bindings::global;
use dom::bindings::js::{JS, JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::element::{Element, AttributeHandlers};
use dom::node::Node;
use dom::window::Window;
use dom::virtualmethods::vtable_for;
use devtools_traits::AttrInfo;
use servo_util::str::{DOMString, split_html_space_chars};
use string_cache::{Atom, Namespace};
use std::cell::Ref;
use std::mem;
pub enum AttrSettingType {
FirstSetAttr,
ReplacedAttr,
}
#[deriving(PartialEq, Clone)]
#[jstraceable]
pub enum AttrValue {
StringAttrValue(DOMString),
TokenListAttrValue(DOMString, Vec<Atom>),
UIntAttrValue(DOMString, u32),
AtomAttrValue(Atom),
}
impl AttrValue {
pub fn from_tokenlist(tokens: DOMString) -> AttrValue {
let atoms = split_html_space_chars(tokens.as_slice())
.map(|token| Atom::from_slice(token)).collect();
TokenListAttrValue(tokens, atoms)
}
pub fn from_u32(string: DOMString, default: u32) -> AttrValue {
let result: u32 = from_str(string.as_slice()).unwrap_or(default);
UIntAttrValue(string, result)
}
pub fn from_atomic(string: DOMString) -> AttrValue {
let value = Atom::from_slice(string.as_slice());
AtomAttrValue(value)
}
pub fn tokens<'a>(&'a self) -> Option<&'a [Atom]> {
match *self {
TokenListAttrValue(_, ref tokens) => Some(tokens.as_slice()),
_ => None
}
}
}
impl Str for AttrValue {
fn as_slice<'a>(&'a self) -> &'a str {
match *self {
StringAttrValue(ref value) |
TokenListAttrValue(ref value, _) |
UIntAttrValue(ref value, _) => value.as_slice(),
AtomAttrValue(ref value) => value.as_slice(),
}
}
}
#[dom_struct]
pub struct Attr {
reflector_: Reflector,
local_name: Atom,
value: DOMRefCell<AttrValue>,
name: Atom,
namespace: Namespace,
prefix: Option<DOMString>,
/// the element that owns this attribute.
owner: Option<JS<Element>>,
}
impl Reflectable for Attr {
fn reflector<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
}
impl Attr {
fn new_inherited(local_name: Atom, value: AttrValue,
name: Atom, namespace: Namespace,
prefix: Option<DOMString>, owner: Option<JSRef<Element>>) -> Attr {
Attr {
reflector_: Reflector::new(),
local_name: local_name,
value: DOMRefCell::new(value),
name: name,
namespace: namespace,
prefix: prefix,
owner: owner.map(|o| JS::from_rooted(o)),
}
}
pub fn new(window: JSRef<Window>, local_name: Atom, value: AttrValue,
name: Atom, namespace: Namespace,
prefix: Option<DOMString>, owner: Option<JSRef<Element>>) -> Temporary<Attr> {
reflect_dom_object(box Attr::new_inherited(local_name, value, name, namespace, prefix, owner),
&global::Window(window), AttrBinding::Wrap)
}
#[inline]
pub fn name<'a>(&'a self) -> &'a Atom {
&self.name
}
#[inline]
pub fn namespace<'a>(&'a self) -> &'a Namespace {
&self.namespace
}
#[inline]
pub fn prefix<'a>(&'a self) -> &'a Option<DOMString> {
&self.prefix
}
}
impl<'a> AttrMethods for JSRef<'a, Attr> {
fn LocalName(self) -> DOMString {
self.local_name().as_slice().to_string()
}
fn Value(self) -> DOMString {
self.value().as_slice().to_string()
}
fn SetValue(self, value: DOMString) {
match self.owner {
None => {
*self.value.borrow_mut() = StringAttrValue(value)
}
Some(o) => {
let owner = o.root();
let value = owner.parse_attribute(&self.namespace, self.local_name(), value);
self.set_value(ReplacedAttr, value, *owner);
}
}
}
fn TextContent(self) -> DOMString {
self.Value()
}
fn SetTextContent(self, value: DOMString) {
self.SetValue(value)
}
fn NodeValue(self) -> DOMString {
self.Value()
}
fn SetNodeValue(self, value: DOMString) {
self.SetValue(value)
}
fn Name(self) -> DOMString {
self.name.as_slice().to_string()
}
fn GetNamespaceURI(self) -> Option<DOMString>
|
fn GetPrefix(self) -> Option<DOMString> {
self.prefix.clone()
}
fn GetOwnerElement(self) -> Option<Temporary<Element>> {
self.owner.map(|o| Temporary::new(o))
}
fn Specified(self) -> bool {
true // Always returns true
}
}
pub trait AttrHelpers<'a> {
fn set_value(self, set_type: AttrSettingType, value: AttrValue, owner: JSRef<Element>);
fn value(self) -> Ref<'a, AttrValue>;
fn local_name(self) -> &'a Atom;
fn summarize(self) -> AttrInfo;
}
impl<'a> AttrHelpers<'a> for JSRef<'a, Attr> {
fn set_value(self, set_type: AttrSettingType, value: AttrValue, owner: JSRef<Element>) {
assert!(Some(owner) == self.owner.map(|o| *o.root()));
let node: JSRef<Node> = NodeCast::from_ref(owner);
let namespace_is_null = self.namespace == ns!("");
match set_type {
ReplacedAttr if namespace_is_null => vtable_for(&node).before_remove_attr(self),
_ => ()
}
*self.value.borrow_mut() = value;
if namespace_is_null {
vtable_for(&node).after_set_attr(self)
}
}
fn value(self) -> Ref<'a, AttrValue> {
self.extended_deref().value.borrow()
}
fn local_name(self) -> &'a Atom {
&self.extended_deref().local_name
}
fn summarize(self) -> AttrInfo {
let Namespace(ref ns) = self.namespace;
AttrInfo {
namespace: ns.as_slice().to_string(),
name: self.Name(),
value: self.Value(),
}
}
}
pub trait AttrHelpersForLayout {
unsafe fn value_ref_forever(&self) -> &'static str;
unsafe fn value_atom_forever(&self) -> Option<Atom>;
unsafe fn value_tokens_forever(&self) -> Option<&'static [Atom]>;
unsafe fn local_name_atom_forever(&self) -> Atom;
}
impl AttrHelpersForLayout for Attr {
#[inline]
unsafe fn value_ref_forever(&self) -> &'static str {
// This transmute is used to cheat the lifetime restriction.
let value = mem::transmute::<&AttrValue, &AttrValue>(self.value.borrow_for_layout());
value.as_slice()
}
#[inline]
unsafe fn value_atom_forever(&self) -> Option<Atom> {
let value = self.value.borrow_for_layout();
match *value {
AtomAttrValue(ref val) => Some(val.clone()),
_ => None,
}
}
#[inline]
unsafe fn value_tokens_forever(&self) -> Option<&'static [Atom]> {
// This transmute is used to cheat the lifetime restriction.
let value = mem::transmute::<&AttrValue, &AttrValue>(self.value.borrow_for_layout());
match *value {
TokenListAttrValue(_, ref tokens) => Some(tokens.as_slice()),
_ => None,
}
}
#[inline]
unsafe fn local_name_atom_forever(&self) -> Atom {
self.local_name.clone()
}
}
|
{
let Namespace(ref atom) = self.namespace;
match atom.as_slice() {
"" => None,
url => Some(url.to_string()),
}
}
|
identifier_body
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::AttrBinding;
use dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;
use dom::bindings::codegen::InheritTypes::NodeCast;
use dom::bindings::global;
use dom::bindings::js::{JS, JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::element::{Element, AttributeHandlers};
use dom::node::Node;
use dom::window::Window;
use dom::virtualmethods::vtable_for;
use devtools_traits::AttrInfo;
use servo_util::str::{DOMString, split_html_space_chars};
use string_cache::{Atom, Namespace};
use std::cell::Ref;
use std::mem;
pub enum AttrSettingType {
FirstSetAttr,
ReplacedAttr,
}
#[deriving(PartialEq, Clone)]
#[jstraceable]
pub enum AttrValue {
StringAttrValue(DOMString),
TokenListAttrValue(DOMString, Vec<Atom>),
UIntAttrValue(DOMString, u32),
AtomAttrValue(Atom),
}
impl AttrValue {
pub fn from_tokenlist(tokens: DOMString) -> AttrValue {
let atoms = split_html_space_chars(tokens.as_slice())
.map(|token| Atom::from_slice(token)).collect();
TokenListAttrValue(tokens, atoms)
}
pub fn from_u32(string: DOMString, default: u32) -> AttrValue {
let result: u32 = from_str(string.as_slice()).unwrap_or(default);
UIntAttrValue(string, result)
}
pub fn from_atomic(string: DOMString) -> AttrValue {
let value = Atom::from_slice(string.as_slice());
AtomAttrValue(value)
}
pub fn tokens<'a>(&'a self) -> Option<&'a [Atom]> {
match *self {
TokenListAttrValue(_, ref tokens) => Some(tokens.as_slice()),
_ => None
}
}
}
impl Str for AttrValue {
fn as_slice<'a>(&'a self) -> &'a str {
match *self {
StringAttrValue(ref value) |
TokenListAttrValue(ref value, _) |
UIntAttrValue(ref value, _) => value.as_slice(),
AtomAttrValue(ref value) => value.as_slice(),
}
}
}
#[dom_struct]
pub struct Attr {
reflector_: Reflector,
local_name: Atom,
value: DOMRefCell<AttrValue>,
name: Atom,
namespace: Namespace,
prefix: Option<DOMString>,
/// the element that owns this attribute.
owner: Option<JS<Element>>,
}
impl Reflectable for Attr {
fn reflector<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
}
impl Attr {
fn new_inherited(local_name: Atom, value: AttrValue,
name: Atom, namespace: Namespace,
prefix: Option<DOMString>, owner: Option<JSRef<Element>>) -> Attr {
Attr {
reflector_: Reflector::new(),
local_name: local_name,
value: DOMRefCell::new(value),
name: name,
namespace: namespace,
prefix: prefix,
owner: owner.map(|o| JS::from_rooted(o)),
}
}
pub fn new(window: JSRef<Window>, local_name: Atom, value: AttrValue,
name: Atom, namespace: Namespace,
prefix: Option<DOMString>, owner: Option<JSRef<Element>>) -> Temporary<Attr> {
reflect_dom_object(box Attr::new_inherited(local_name, value, name, namespace, prefix, owner),
&global::Window(window), AttrBinding::Wrap)
}
#[inline]
pub fn
|
<'a>(&'a self) -> &'a Atom {
&self.name
}
#[inline]
pub fn namespace<'a>(&'a self) -> &'a Namespace {
&self.namespace
}
#[inline]
pub fn prefix<'a>(&'a self) -> &'a Option<DOMString> {
&self.prefix
}
}
impl<'a> AttrMethods for JSRef<'a, Attr> {
fn LocalName(self) -> DOMString {
self.local_name().as_slice().to_string()
}
fn Value(self) -> DOMString {
self.value().as_slice().to_string()
}
fn SetValue(self, value: DOMString) {
match self.owner {
None => {
*self.value.borrow_mut() = StringAttrValue(value)
}
Some(o) => {
let owner = o.root();
let value = owner.parse_attribute(&self.namespace, self.local_name(), value);
self.set_value(ReplacedAttr, value, *owner);
}
}
}
fn TextContent(self) -> DOMString {
self.Value()
}
fn SetTextContent(self, value: DOMString) {
self.SetValue(value)
}
fn NodeValue(self) -> DOMString {
self.Value()
}
fn SetNodeValue(self, value: DOMString) {
self.SetValue(value)
}
fn Name(self) -> DOMString {
self.name.as_slice().to_string()
}
fn GetNamespaceURI(self) -> Option<DOMString> {
let Namespace(ref atom) = self.namespace;
match atom.as_slice() {
"" => None,
url => Some(url.to_string()),
}
}
fn GetPrefix(self) -> Option<DOMString> {
self.prefix.clone()
}
fn GetOwnerElement(self) -> Option<Temporary<Element>> {
self.owner.map(|o| Temporary::new(o))
}
fn Specified(self) -> bool {
true // Always returns true
}
}
pub trait AttrHelpers<'a> {
fn set_value(self, set_type: AttrSettingType, value: AttrValue, owner: JSRef<Element>);
fn value(self) -> Ref<'a, AttrValue>;
fn local_name(self) -> &'a Atom;
fn summarize(self) -> AttrInfo;
}
impl<'a> AttrHelpers<'a> for JSRef<'a, Attr> {
fn set_value(self, set_type: AttrSettingType, value: AttrValue, owner: JSRef<Element>) {
assert!(Some(owner) == self.owner.map(|o| *o.root()));
let node: JSRef<Node> = NodeCast::from_ref(owner);
let namespace_is_null = self.namespace == ns!("");
match set_type {
ReplacedAttr if namespace_is_null => vtable_for(&node).before_remove_attr(self),
_ => ()
}
*self.value.borrow_mut() = value;
if namespace_is_null {
vtable_for(&node).after_set_attr(self)
}
}
fn value(self) -> Ref<'a, AttrValue> {
self.extended_deref().value.borrow()
}
fn local_name(self) -> &'a Atom {
&self.extended_deref().local_name
}
fn summarize(self) -> AttrInfo {
let Namespace(ref ns) = self.namespace;
AttrInfo {
namespace: ns.as_slice().to_string(),
name: self.Name(),
value: self.Value(),
}
}
}
pub trait AttrHelpersForLayout {
unsafe fn value_ref_forever(&self) -> &'static str;
unsafe fn value_atom_forever(&self) -> Option<Atom>;
unsafe fn value_tokens_forever(&self) -> Option<&'static [Atom]>;
unsafe fn local_name_atom_forever(&self) -> Atom;
}
impl AttrHelpersForLayout for Attr {
#[inline]
unsafe fn value_ref_forever(&self) -> &'static str {
// This transmute is used to cheat the lifetime restriction.
let value = mem::transmute::<&AttrValue, &AttrValue>(self.value.borrow_for_layout());
value.as_slice()
}
#[inline]
unsafe fn value_atom_forever(&self) -> Option<Atom> {
let value = self.value.borrow_for_layout();
match *value {
AtomAttrValue(ref val) => Some(val.clone()),
_ => None,
}
}
#[inline]
unsafe fn value_tokens_forever(&self) -> Option<&'static [Atom]> {
// This transmute is used to cheat the lifetime restriction.
let value = mem::transmute::<&AttrValue, &AttrValue>(self.value.borrow_for_layout());
match *value {
TokenListAttrValue(_, ref tokens) => Some(tokens.as_slice()),
_ => None,
}
}
#[inline]
unsafe fn local_name_atom_forever(&self) -> Atom {
self.local_name.clone()
}
}
|
name
|
identifier_name
|
attr.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::AttrBinding;
use dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;
use dom::bindings::codegen::InheritTypes::NodeCast;
use dom::bindings::global;
use dom::bindings::js::{JS, JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::element::{Element, AttributeHandlers};
use dom::node::Node;
use dom::window::Window;
use dom::virtualmethods::vtable_for;
use devtools_traits::AttrInfo;
use servo_util::str::{DOMString, split_html_space_chars};
use string_cache::{Atom, Namespace};
use std::cell::Ref;
use std::mem;
pub enum AttrSettingType {
FirstSetAttr,
ReplacedAttr,
}
#[deriving(PartialEq, Clone)]
#[jstraceable]
pub enum AttrValue {
StringAttrValue(DOMString),
TokenListAttrValue(DOMString, Vec<Atom>),
UIntAttrValue(DOMString, u32),
AtomAttrValue(Atom),
}
impl AttrValue {
pub fn from_tokenlist(tokens: DOMString) -> AttrValue {
let atoms = split_html_space_chars(tokens.as_slice())
.map(|token| Atom::from_slice(token)).collect();
TokenListAttrValue(tokens, atoms)
}
pub fn from_u32(string: DOMString, default: u32) -> AttrValue {
let result: u32 = from_str(string.as_slice()).unwrap_or(default);
UIntAttrValue(string, result)
}
pub fn from_atomic(string: DOMString) -> AttrValue {
let value = Atom::from_slice(string.as_slice());
|
TokenListAttrValue(_, ref tokens) => Some(tokens.as_slice()),
_ => None
}
}
}
impl Str for AttrValue {
fn as_slice<'a>(&'a self) -> &'a str {
match *self {
StringAttrValue(ref value) |
TokenListAttrValue(ref value, _) |
UIntAttrValue(ref value, _) => value.as_slice(),
AtomAttrValue(ref value) => value.as_slice(),
}
}
}
#[dom_struct]
pub struct Attr {
reflector_: Reflector,
local_name: Atom,
value: DOMRefCell<AttrValue>,
name: Atom,
namespace: Namespace,
prefix: Option<DOMString>,
/// the element that owns this attribute.
owner: Option<JS<Element>>,
}
impl Reflectable for Attr {
fn reflector<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
}
impl Attr {
fn new_inherited(local_name: Atom, value: AttrValue,
name: Atom, namespace: Namespace,
prefix: Option<DOMString>, owner: Option<JSRef<Element>>) -> Attr {
Attr {
reflector_: Reflector::new(),
local_name: local_name,
value: DOMRefCell::new(value),
name: name,
namespace: namespace,
prefix: prefix,
owner: owner.map(|o| JS::from_rooted(o)),
}
}
pub fn new(window: JSRef<Window>, local_name: Atom, value: AttrValue,
name: Atom, namespace: Namespace,
prefix: Option<DOMString>, owner: Option<JSRef<Element>>) -> Temporary<Attr> {
reflect_dom_object(box Attr::new_inherited(local_name, value, name, namespace, prefix, owner),
&global::Window(window), AttrBinding::Wrap)
}
#[inline]
pub fn name<'a>(&'a self) -> &'a Atom {
&self.name
}
#[inline]
pub fn namespace<'a>(&'a self) -> &'a Namespace {
&self.namespace
}
#[inline]
pub fn prefix<'a>(&'a self) -> &'a Option<DOMString> {
&self.prefix
}
}
impl<'a> AttrMethods for JSRef<'a, Attr> {
fn LocalName(self) -> DOMString {
self.local_name().as_slice().to_string()
}
fn Value(self) -> DOMString {
self.value().as_slice().to_string()
}
fn SetValue(self, value: DOMString) {
match self.owner {
None => {
*self.value.borrow_mut() = StringAttrValue(value)
}
Some(o) => {
let owner = o.root();
let value = owner.parse_attribute(&self.namespace, self.local_name(), value);
self.set_value(ReplacedAttr, value, *owner);
}
}
}
fn TextContent(self) -> DOMString {
self.Value()
}
fn SetTextContent(self, value: DOMString) {
self.SetValue(value)
}
fn NodeValue(self) -> DOMString {
self.Value()
}
fn SetNodeValue(self, value: DOMString) {
self.SetValue(value)
}
fn Name(self) -> DOMString {
self.name.as_slice().to_string()
}
fn GetNamespaceURI(self) -> Option<DOMString> {
let Namespace(ref atom) = self.namespace;
match atom.as_slice() {
"" => None,
url => Some(url.to_string()),
}
}
fn GetPrefix(self) -> Option<DOMString> {
self.prefix.clone()
}
fn GetOwnerElement(self) -> Option<Temporary<Element>> {
self.owner.map(|o| Temporary::new(o))
}
fn Specified(self) -> bool {
true // Always returns true
}
}
pub trait AttrHelpers<'a> {
fn set_value(self, set_type: AttrSettingType, value: AttrValue, owner: JSRef<Element>);
fn value(self) -> Ref<'a, AttrValue>;
fn local_name(self) -> &'a Atom;
fn summarize(self) -> AttrInfo;
}
impl<'a> AttrHelpers<'a> for JSRef<'a, Attr> {
fn set_value(self, set_type: AttrSettingType, value: AttrValue, owner: JSRef<Element>) {
assert!(Some(owner) == self.owner.map(|o| *o.root()));
let node: JSRef<Node> = NodeCast::from_ref(owner);
let namespace_is_null = self.namespace == ns!("");
match set_type {
ReplacedAttr if namespace_is_null => vtable_for(&node).before_remove_attr(self),
_ => ()
}
*self.value.borrow_mut() = value;
if namespace_is_null {
vtable_for(&node).after_set_attr(self)
}
}
fn value(self) -> Ref<'a, AttrValue> {
self.extended_deref().value.borrow()
}
fn local_name(self) -> &'a Atom {
&self.extended_deref().local_name
}
fn summarize(self) -> AttrInfo {
let Namespace(ref ns) = self.namespace;
AttrInfo {
namespace: ns.as_slice().to_string(),
name: self.Name(),
value: self.Value(),
}
}
}
pub trait AttrHelpersForLayout {
unsafe fn value_ref_forever(&self) -> &'static str;
unsafe fn value_atom_forever(&self) -> Option<Atom>;
unsafe fn value_tokens_forever(&self) -> Option<&'static [Atom]>;
unsafe fn local_name_atom_forever(&self) -> Atom;
}
impl AttrHelpersForLayout for Attr {
#[inline]
unsafe fn value_ref_forever(&self) -> &'static str {
// This transmute is used to cheat the lifetime restriction.
let value = mem::transmute::<&AttrValue, &AttrValue>(self.value.borrow_for_layout());
value.as_slice()
}
#[inline]
unsafe fn value_atom_forever(&self) -> Option<Atom> {
let value = self.value.borrow_for_layout();
match *value {
AtomAttrValue(ref val) => Some(val.clone()),
_ => None,
}
}
#[inline]
unsafe fn value_tokens_forever(&self) -> Option<&'static [Atom]> {
// This transmute is used to cheat the lifetime restriction.
let value = mem::transmute::<&AttrValue, &AttrValue>(self.value.borrow_for_layout());
match *value {
TokenListAttrValue(_, ref tokens) => Some(tokens.as_slice()),
_ => None,
}
}
#[inline]
unsafe fn local_name_atom_forever(&self) -> Atom {
self.local_name.clone()
}
}
|
AtomAttrValue(value)
}
pub fn tokens<'a>(&'a self) -> Option<&'a [Atom]> {
match *self {
|
random_line_split
|
namespaced-enum-glob-import.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
mod m2 {
pub enum Foo {
A,
B(isize),
C { a: isize },
}
impl Foo {
pub fn foo() {}
}
}
mod m {
pub use m2::Foo::*;
}
fn _f(f: m2::Foo) {
use m2::Foo::*;
match f {
A | B(_) | C {.. } => {}
|
fn _f2(f: m2::Foo) {
match f {
m::A | m::B(_) | m::C {.. } => {}
}
}
pub fn main() {}
|
}
}
|
random_line_split
|
namespaced-enum-glob-import.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
mod m2 {
pub enum Foo {
A,
B(isize),
C { a: isize },
}
impl Foo {
pub fn foo() {}
}
}
mod m {
pub use m2::Foo::*;
}
fn _f(f: m2::Foo) {
use m2::Foo::*;
match f {
A | B(_) | C {.. } =>
|
}
}
fn _f2(f: m2::Foo) {
match f {
m::A | m::B(_) | m::C {.. } => {}
}
}
pub fn main() {}
|
{}
|
conditional_block
|
namespaced-enum-glob-import.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
mod m2 {
pub enum Foo {
A,
B(isize),
C { a: isize },
}
impl Foo {
pub fn foo() {}
}
}
mod m {
pub use m2::Foo::*;
}
fn
|
(f: m2::Foo) {
use m2::Foo::*;
match f {
A | B(_) | C {.. } => {}
}
}
fn _f2(f: m2::Foo) {
match f {
m::A | m::B(_) | m::C {.. } => {}
}
}
pub fn main() {}
|
_f
|
identifier_name
|
clone.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*! The `Clone` trait for types that cannot be 'implicitly copied'
In Rust, some simple types are "implicitly copyable" and when you
assign them or pass them as arguments, the receiver will get a copy,
leaving the original value in place. These types do not require
allocation to copy and do not have finalizers (i.e. they do not
contain owned boxes or implement `Drop`), so the compiler considers
them cheap and safe to copy. For other types copies must be made
explicitly, by convention implementing the `Clone` trait and calling
the `clone` method.
*/
#![unstable]
/// A common trait for cloning an object.
pub trait Clone {
/// Returns a copy of the value. The contents of owned pointers
/// are copied to maintain uniqueness, while the contents of
/// managed pointers are not copied.
fn clone(&self) -> Self;
/// Perform copy-assignment from `source`.
///
/// `a.clone_from(&b)` is equivalent to `a = b.clone()` in functionality,
/// but can be overridden to reuse the resources of `a` to avoid unnecessary
/// allocations.
#[inline(always)]
#[experimental = "this function is mostly unused"]
fn clone_from(&mut self, source: &Self) {
*self = source.clone()
}
}
impl<'a, T> Clone for &'a T {
/// Return a shallow copy of the reference.
#[inline]
fn clone(&self) -> &'a T { *self }
}
|
/// Return a shallow copy of the slice.
#[inline]
fn clone(&self) -> &'a [T] { *self }
}
impl<'a> Clone for &'a str {
/// Return a shallow copy of the slice.
#[inline]
fn clone(&self) -> &'a str { *self }
}
macro_rules! clone_impl(
($t:ty) => {
impl Clone for $t {
/// Return a deep copy of the value.
#[inline]
fn clone(&self) -> $t { *self }
}
}
)
clone_impl!(int)
clone_impl!(i8)
clone_impl!(i16)
clone_impl!(i32)
clone_impl!(i64)
clone_impl!(uint)
clone_impl!(u8)
clone_impl!(u16)
clone_impl!(u32)
clone_impl!(u64)
clone_impl!(f32)
clone_impl!(f64)
clone_impl!(())
clone_impl!(bool)
clone_impl!(char)
macro_rules! extern_fn_clone(
($($A:ident),*) => (
#[experimental = "this may not be sufficient for fns with region parameters"]
impl<$($A,)* ReturnType> Clone for extern "Rust" fn($($A),*) -> ReturnType {
/// Return a copy of a function pointer
#[inline]
fn clone(&self) -> extern "Rust" fn($($A),*) -> ReturnType { *self }
}
)
)
extern_fn_clone!()
extern_fn_clone!(A)
extern_fn_clone!(A, B)
extern_fn_clone!(A, B, C)
extern_fn_clone!(A, B, C, D)
extern_fn_clone!(A, B, C, D, E)
extern_fn_clone!(A, B, C, D, E, F)
extern_fn_clone!(A, B, C, D, E, F, G)
extern_fn_clone!(A, B, C, D, E, F, G, H)
|
impl<'a, T> Clone for &'a [T] {
|
random_line_split
|
clone.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*! The `Clone` trait for types that cannot be 'implicitly copied'
In Rust, some simple types are "implicitly copyable" and when you
assign them or pass them as arguments, the receiver will get a copy,
leaving the original value in place. These types do not require
allocation to copy and do not have finalizers (i.e. they do not
contain owned boxes or implement `Drop`), so the compiler considers
them cheap and safe to copy. For other types copies must be made
explicitly, by convention implementing the `Clone` trait and calling
the `clone` method.
*/
#![unstable]
/// A common trait for cloning an object.
pub trait Clone {
/// Returns a copy of the value. The contents of owned pointers
/// are copied to maintain uniqueness, while the contents of
/// managed pointers are not copied.
fn clone(&self) -> Self;
/// Perform copy-assignment from `source`.
///
/// `a.clone_from(&b)` is equivalent to `a = b.clone()` in functionality,
/// but can be overridden to reuse the resources of `a` to avoid unnecessary
/// allocations.
#[inline(always)]
#[experimental = "this function is mostly unused"]
fn clone_from(&mut self, source: &Self) {
*self = source.clone()
}
}
impl<'a, T> Clone for &'a T {
/// Return a shallow copy of the reference.
#[inline]
fn clone(&self) -> &'a T { *self }
}
impl<'a, T> Clone for &'a [T] {
/// Return a shallow copy of the slice.
#[inline]
fn clone(&self) -> &'a [T]
|
}
impl<'a> Clone for &'a str {
/// Return a shallow copy of the slice.
#[inline]
fn clone(&self) -> &'a str { *self }
}
macro_rules! clone_impl(
($t:ty) => {
impl Clone for $t {
/// Return a deep copy of the value.
#[inline]
fn clone(&self) -> $t { *self }
}
}
)
clone_impl!(int)
clone_impl!(i8)
clone_impl!(i16)
clone_impl!(i32)
clone_impl!(i64)
clone_impl!(uint)
clone_impl!(u8)
clone_impl!(u16)
clone_impl!(u32)
clone_impl!(u64)
clone_impl!(f32)
clone_impl!(f64)
clone_impl!(())
clone_impl!(bool)
clone_impl!(char)
macro_rules! extern_fn_clone(
($($A:ident),*) => (
#[experimental = "this may not be sufficient for fns with region parameters"]
impl<$($A,)* ReturnType> Clone for extern "Rust" fn($($A),*) -> ReturnType {
/// Return a copy of a function pointer
#[inline]
fn clone(&self) -> extern "Rust" fn($($A),*) -> ReturnType { *self }
}
)
)
extern_fn_clone!()
extern_fn_clone!(A)
extern_fn_clone!(A, B)
extern_fn_clone!(A, B, C)
extern_fn_clone!(A, B, C, D)
extern_fn_clone!(A, B, C, D, E)
extern_fn_clone!(A, B, C, D, E, F)
extern_fn_clone!(A, B, C, D, E, F, G)
extern_fn_clone!(A, B, C, D, E, F, G, H)
|
{ *self }
|
identifier_body
|
clone.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*! The `Clone` trait for types that cannot be 'implicitly copied'
In Rust, some simple types are "implicitly copyable" and when you
assign them or pass them as arguments, the receiver will get a copy,
leaving the original value in place. These types do not require
allocation to copy and do not have finalizers (i.e. they do not
contain owned boxes or implement `Drop`), so the compiler considers
them cheap and safe to copy. For other types copies must be made
explicitly, by convention implementing the `Clone` trait and calling
the `clone` method.
*/
#![unstable]
/// A common trait for cloning an object.
pub trait Clone {
/// Returns a copy of the value. The contents of owned pointers
/// are copied to maintain uniqueness, while the contents of
/// managed pointers are not copied.
fn clone(&self) -> Self;
/// Perform copy-assignment from `source`.
///
/// `a.clone_from(&b)` is equivalent to `a = b.clone()` in functionality,
/// but can be overridden to reuse the resources of `a` to avoid unnecessary
/// allocations.
#[inline(always)]
#[experimental = "this function is mostly unused"]
fn
|
(&mut self, source: &Self) {
*self = source.clone()
}
}
impl<'a, T> Clone for &'a T {
/// Return a shallow copy of the reference.
#[inline]
fn clone(&self) -> &'a T { *self }
}
impl<'a, T> Clone for &'a [T] {
/// Return a shallow copy of the slice.
#[inline]
fn clone(&self) -> &'a [T] { *self }
}
impl<'a> Clone for &'a str {
/// Return a shallow copy of the slice.
#[inline]
fn clone(&self) -> &'a str { *self }
}
macro_rules! clone_impl(
($t:ty) => {
impl Clone for $t {
/// Return a deep copy of the value.
#[inline]
fn clone(&self) -> $t { *self }
}
}
)
clone_impl!(int)
clone_impl!(i8)
clone_impl!(i16)
clone_impl!(i32)
clone_impl!(i64)
clone_impl!(uint)
clone_impl!(u8)
clone_impl!(u16)
clone_impl!(u32)
clone_impl!(u64)
clone_impl!(f32)
clone_impl!(f64)
clone_impl!(())
clone_impl!(bool)
clone_impl!(char)
macro_rules! extern_fn_clone(
($($A:ident),*) => (
#[experimental = "this may not be sufficient for fns with region parameters"]
impl<$($A,)* ReturnType> Clone for extern "Rust" fn($($A),*) -> ReturnType {
/// Return a copy of a function pointer
#[inline]
fn clone(&self) -> extern "Rust" fn($($A),*) -> ReturnType { *self }
}
)
)
extern_fn_clone!()
extern_fn_clone!(A)
extern_fn_clone!(A, B)
extern_fn_clone!(A, B, C)
extern_fn_clone!(A, B, C, D)
extern_fn_clone!(A, B, C, D, E)
extern_fn_clone!(A, B, C, D, E, F)
extern_fn_clone!(A, B, C, D, E, F, G)
extern_fn_clone!(A, B, C, D, E, F, G, H)
|
clone_from
|
identifier_name
|
flexible_array.rs
|
// Copyright 2019 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! A wrapper for structures that contain flexible arrays.
use std::marker::PhantomData;
use std::mem::size_of;
// Returns a `Vec<T>` with a size in bytes at least as large as `size_in_bytes`.
fn vec_with_size_in_bytes<T: Default>(size_in_bytes: usize) -> Vec<T> {
let rounded_size = (size_in_bytes + size_of::<T>() - 1) / size_of::<T>();
let mut v = Vec::with_capacity(rounded_size);
for _ in 0..rounded_size {
v.push(T::default())
}
v
}
/// The kernel API has many structs that resemble the following `Foo` structure:
///
/// ```ignore
/// #[repr(C)]
/// struct Foo {
/// some_data: u32,
/// entries: __IncompleteArrayField<__u32>,
/// }
/// ```
///
/// In order to allocate such a structure, `size_of::<Foo>()` would be too small because it would
/// not include any space for `entries`. To make the allocation large enough while still being
/// aligned for `Foo`, a `Vec<Foo>` is created. Only the first element of `Vec<Foo>` would actually
/// be used as a `Foo`. The remaining memory in the `Vec<Foo>` is for `entries`, which must be
/// contiguous with `Foo`. This function is used to make the `Vec<Foo>` with enough space for
/// `count` entries.
pub fn vec_with_array_field<T: Default, F>(count: usize) -> Vec<T> {
let element_space = count * size_of::<F>();
let vec_size_bytes = size_of::<T>() + element_space;
vec_with_size_in_bytes(vec_size_bytes)
}
/// The following code provides generic helpers for creating and accessing flexible array structs.
/// A complete definition of flexible array structs is found in the ISO 9899 specification
/// <http://www.iso-9899.info/n1570.html>. A flexible array struct is of the form:
///
/// ```ignore
/// #[repr(C)]
/// struct T {
/// some_data: u32,
/// nents: u32,
/// entries: __IncompleteArrayField<S>,
/// }
/// ```
/// where:
///
/// - `T` is the flexible array struct type
/// - `S` is the flexible array type
/// - `nents` is the flexible array length
/// - `entries` is the flexible array member
///
/// These structures are used by the kernel API.
/// A collection of methods that are required by the FlexibleArrayWrapper type.
///
/// When implemented for `T`, this trait allows the caller to set number of `S` entries and
/// retrieve a slice of `S` entries. Trait methods must only be called by the FlexibleArrayWrapper
/// type. Don't implement this trait directly, use the flexible_array! macro to avoid duplication.
pub trait FlexibleArray<S> {
/// Implementations must set flexible array length in the flexible array struct to the value
/// specified by `len`. Appropriate conversions (i.e, usize to u32) are allowed so long as
/// they don't overflow or underflow.
fn set_len(&mut self, len: usize);
/// Implementations must return the length of the flexible array member. Appropriate
/// conversions (i.e, usize to u32) are allowed so long as they don't overflow or underflow.
fn get_len(&self) -> usize;
/// Implementations must return a slice of flexible array member of length `len`.
/// # Safety
/// Do not use this function directly, as the FlexibleArrayWrapper will guarantee safety.
unsafe fn get_slice(&self, len: usize) -> &[S];
/// Implementations must return a mutable slice of flexible array member of length `len`.
/// # Safety
/// Do not use this function directly, as the FlexibleArrayWrapper will guarantee safety.
unsafe fn get_mut_slice(&mut self, len: usize) -> &mut [S];
}
/// Always use this macro for implementing the FlexibleArray<`S`> trait for a given `T`. There
/// exists an 1:1 mapping of macro identifiers to the definitions in the FlexibleArray<`S`>
/// documentation, so refer to that for more information.
#[macro_export]
macro_rules! flexible_array_impl {
($T:ident, $S:ident, $nents:ident, $entries:ident) => {
impl FlexibleArray<$S> for $T {
fn set_len(&mut self, len: usize) {
self.$nents = ::std::convert::TryInto::try_into(len).unwrap();
}
fn get_len(&self) -> usize {
self.$nents as usize
}
unsafe fn get_slice(&self, len: usize) -> &[$S] {
self.$entries.as_slice(len)
}
unsafe fn get_mut_slice(&mut self, len: usize) -> &mut [$S] {
self.$entries.as_mut_slice(len)
}
}
};
}
pub struct FlexibleArrayWrapper<T, S> {
entries: Vec<T>,
phantom: PhantomData<S>,
allocated_len: usize,
}
/// Convenience wrapper for flexible array structs.
///
/// The FlexibleArray trait must be implemented for the flexible array struct before using this
/// wrapper.
impl<T, S> FlexibleArrayWrapper<T, S>
where
T: FlexibleArray<S> + Default,
{
/// Creates a new FlexibleArrayWrapper for the given flexible array struct type and flexible
/// array type. The flexible array length is set to `array_len`. vec_with_array_field is used
/// to make sure the resultant wrapper is appropriately sized.
pub fn new(array_len: usize) -> FlexibleArrayWrapper<T, S> {
let mut entries = vec_with_array_field::<T, S>(array_len);
entries[0].set_len(array_len);
FlexibleArrayWrapper {
entries,
phantom: PhantomData,
allocated_len: array_len,
}
}
/// Mapping the unsized array to a slice is unsafe because the length isn't known. Using
/// the length we originally allocated with eliminates the possibility of overflow.
fn get_valid_len(&self) -> usize {
if self.entries[0].get_len() > self.allocated_len {
self.allocated_len
} else
|
}
/// Returns a slice of the flexible array member, for inspecting. To modify, use
/// mut_entries_slice instead.
pub fn entries_slice(&self) -> &[S] {
let valid_length = self.get_valid_len();
// Safe because the length has been validated.
unsafe { self.entries[0].get_slice(valid_length) }
}
/// Returns a mutable slice of the flexible array member, for modifying.
pub fn mut_entries_slice(&mut self) -> &mut [S] {
let valid_length = self.get_valid_len();
self.entries[0].set_len(valid_length);
// Safe because the length has been validated.
unsafe { self.entries[0].get_mut_slice(valid_length) }
}
/// Get a pointer so it can be passed to the kernel. Callers must not access the flexible
/// array member. Using this pointer is unsafe.
pub fn as_ptr(&self) -> *const T {
&self.entries[0]
}
/// Get a mutable pointer so it can be passed to the kernel. Callers must not access the
/// flexible array member. Using this pointer is unsafe.
pub fn as_mut_ptr(&mut self) -> *mut T {
&mut self.entries[0]
}
}
|
{
self.entries[0].get_len()
}
|
conditional_block
|
flexible_array.rs
|
// Copyright 2019 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! A wrapper for structures that contain flexible arrays.
use std::marker::PhantomData;
use std::mem::size_of;
// Returns a `Vec<T>` with a size in bytes at least as large as `size_in_bytes`.
fn vec_with_size_in_bytes<T: Default>(size_in_bytes: usize) -> Vec<T> {
let rounded_size = (size_in_bytes + size_of::<T>() - 1) / size_of::<T>();
let mut v = Vec::with_capacity(rounded_size);
for _ in 0..rounded_size {
v.push(T::default())
}
v
}
/// The kernel API has many structs that resemble the following `Foo` structure:
///
/// ```ignore
/// #[repr(C)]
/// struct Foo {
/// some_data: u32,
/// entries: __IncompleteArrayField<__u32>,
/// }
/// ```
///
/// In order to allocate such a structure, `size_of::<Foo>()` would be too small because it would
/// not include any space for `entries`. To make the allocation large enough while still being
/// aligned for `Foo`, a `Vec<Foo>` is created. Only the first element of `Vec<Foo>` would actually
/// be used as a `Foo`. The remaining memory in the `Vec<Foo>` is for `entries`, which must be
/// contiguous with `Foo`. This function is used to make the `Vec<Foo>` with enough space for
/// `count` entries.
pub fn vec_with_array_field<T: Default, F>(count: usize) -> Vec<T> {
let element_space = count * size_of::<F>();
let vec_size_bytes = size_of::<T>() + element_space;
vec_with_size_in_bytes(vec_size_bytes)
}
/// The following code provides generic helpers for creating and accessing flexible array structs.
/// A complete definition of flexible array structs is found in the ISO 9899 specification
/// <http://www.iso-9899.info/n1570.html>. A flexible array struct is of the form:
///
/// ```ignore
/// #[repr(C)]
/// struct T {
/// some_data: u32,
/// nents: u32,
/// entries: __IncompleteArrayField<S>,
/// }
/// ```
/// where:
///
/// - `T` is the flexible array struct type
/// - `S` is the flexible array type
/// - `nents` is the flexible array length
/// - `entries` is the flexible array member
///
/// These structures are used by the kernel API.
/// A collection of methods that are required by the FlexibleArrayWrapper type.
///
/// When implemented for `T`, this trait allows the caller to set number of `S` entries and
/// retrieve a slice of `S` entries. Trait methods must only be called by the FlexibleArrayWrapper
/// type. Don't implement this trait directly, use the flexible_array! macro to avoid duplication.
pub trait FlexibleArray<S> {
/// Implementations must set flexible array length in the flexible array struct to the value
/// specified by `len`. Appropriate conversions (i.e, usize to u32) are allowed so long as
/// they don't overflow or underflow.
fn set_len(&mut self, len: usize);
/// Implementations must return the length of the flexible array member. Appropriate
/// conversions (i.e, usize to u32) are allowed so long as they don't overflow or underflow.
fn get_len(&self) -> usize;
/// Implementations must return a slice of flexible array member of length `len`.
/// # Safety
/// Do not use this function directly, as the FlexibleArrayWrapper will guarantee safety.
unsafe fn get_slice(&self, len: usize) -> &[S];
/// Implementations must return a mutable slice of flexible array member of length `len`.
/// # Safety
/// Do not use this function directly, as the FlexibleArrayWrapper will guarantee safety.
unsafe fn get_mut_slice(&mut self, len: usize) -> &mut [S];
}
/// Always use this macro for implementing the FlexibleArray<`S`> trait for a given `T`. There
/// exists an 1:1 mapping of macro identifiers to the definitions in the FlexibleArray<`S`>
/// documentation, so refer to that for more information.
#[macro_export]
macro_rules! flexible_array_impl {
($T:ident, $S:ident, $nents:ident, $entries:ident) => {
impl FlexibleArray<$S> for $T {
fn set_len(&mut self, len: usize) {
self.$nents = ::std::convert::TryInto::try_into(len).unwrap();
}
fn get_len(&self) -> usize {
self.$nents as usize
}
unsafe fn get_slice(&self, len: usize) -> &[$S] {
self.$entries.as_slice(len)
}
unsafe fn get_mut_slice(&mut self, len: usize) -> &mut [$S] {
self.$entries.as_mut_slice(len)
}
}
};
}
pub struct FlexibleArrayWrapper<T, S> {
entries: Vec<T>,
phantom: PhantomData<S>,
allocated_len: usize,
}
/// Convenience wrapper for flexible array structs.
///
/// The FlexibleArray trait must be implemented for the flexible array struct before using this
/// wrapper.
impl<T, S> FlexibleArrayWrapper<T, S>
where
T: FlexibleArray<S> + Default,
{
/// Creates a new FlexibleArrayWrapper for the given flexible array struct type and flexible
/// array type. The flexible array length is set to `array_len`. vec_with_array_field is used
/// to make sure the resultant wrapper is appropriately sized.
pub fn new(array_len: usize) -> FlexibleArrayWrapper<T, S> {
let mut entries = vec_with_array_field::<T, S>(array_len);
entries[0].set_len(array_len);
FlexibleArrayWrapper {
entries,
phantom: PhantomData,
allocated_len: array_len,
}
}
/// Mapping the unsized array to a slice is unsafe because the length isn't known. Using
/// the length we originally allocated with eliminates the possibility of overflow.
fn get_valid_len(&self) -> usize {
if self.entries[0].get_len() > self.allocated_len {
self.allocated_len
} else {
self.entries[0].get_len()
}
}
/// Returns a slice of the flexible array member, for inspecting. To modify, use
/// mut_entries_slice instead.
pub fn entries_slice(&self) -> &[S] {
let valid_length = self.get_valid_len();
// Safe because the length has been validated.
unsafe { self.entries[0].get_slice(valid_length) }
}
/// Returns a mutable slice of the flexible array member, for modifying.
pub fn mut_entries_slice(&mut self) -> &mut [S]
|
/// Get a pointer so it can be passed to the kernel. Callers must not access the flexible
/// array member. Using this pointer is unsafe.
pub fn as_ptr(&self) -> *const T {
&self.entries[0]
}
/// Get a mutable pointer so it can be passed to the kernel. Callers must not access the
/// flexible array member. Using this pointer is unsafe.
pub fn as_mut_ptr(&mut self) -> *mut T {
&mut self.entries[0]
}
}
|
{
let valid_length = self.get_valid_len();
self.entries[0].set_len(valid_length);
// Safe because the length has been validated.
unsafe { self.entries[0].get_mut_slice(valid_length) }
}
|
identifier_body
|
flexible_array.rs
|
// Copyright 2019 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! A wrapper for structures that contain flexible arrays.
use std::marker::PhantomData;
use std::mem::size_of;
// Returns a `Vec<T>` with a size in bytes at least as large as `size_in_bytes`.
fn vec_with_size_in_bytes<T: Default>(size_in_bytes: usize) -> Vec<T> {
let rounded_size = (size_in_bytes + size_of::<T>() - 1) / size_of::<T>();
let mut v = Vec::with_capacity(rounded_size);
for _ in 0..rounded_size {
v.push(T::default())
}
v
}
/// The kernel API has many structs that resemble the following `Foo` structure:
///
/// ```ignore
/// #[repr(C)]
/// struct Foo {
/// some_data: u32,
/// entries: __IncompleteArrayField<__u32>,
/// }
/// ```
///
/// In order to allocate such a structure, `size_of::<Foo>()` would be too small because it would
/// not include any space for `entries`. To make the allocation large enough while still being
/// aligned for `Foo`, a `Vec<Foo>` is created. Only the first element of `Vec<Foo>` would actually
/// be used as a `Foo`. The remaining memory in the `Vec<Foo>` is for `entries`, which must be
/// contiguous with `Foo`. This function is used to make the `Vec<Foo>` with enough space for
/// `count` entries.
pub fn vec_with_array_field<T: Default, F>(count: usize) -> Vec<T> {
let element_space = count * size_of::<F>();
let vec_size_bytes = size_of::<T>() + element_space;
vec_with_size_in_bytes(vec_size_bytes)
}
/// The following code provides generic helpers for creating and accessing flexible array structs.
/// A complete definition of flexible array structs is found in the ISO 9899 specification
/// <http://www.iso-9899.info/n1570.html>. A flexible array struct is of the form:
///
/// ```ignore
/// #[repr(C)]
/// struct T {
/// some_data: u32,
/// nents: u32,
/// entries: __IncompleteArrayField<S>,
/// }
/// ```
/// where:
///
/// - `T` is the flexible array struct type
/// - `S` is the flexible array type
/// - `nents` is the flexible array length
/// - `entries` is the flexible array member
///
/// These structures are used by the kernel API.
/// A collection of methods that are required by the FlexibleArrayWrapper type.
///
/// When implemented for `T`, this trait allows the caller to set number of `S` entries and
/// retrieve a slice of `S` entries. Trait methods must only be called by the FlexibleArrayWrapper
/// type. Don't implement this trait directly, use the flexible_array! macro to avoid duplication.
pub trait FlexibleArray<S> {
/// Implementations must set flexible array length in the flexible array struct to the value
/// specified by `len`. Appropriate conversions (i.e, usize to u32) are allowed so long as
/// they don't overflow or underflow.
fn set_len(&mut self, len: usize);
/// Implementations must return the length of the flexible array member. Appropriate
/// conversions (i.e, usize to u32) are allowed so long as they don't overflow or underflow.
fn get_len(&self) -> usize;
/// Implementations must return a slice of flexible array member of length `len`.
/// # Safety
/// Do not use this function directly, as the FlexibleArrayWrapper will guarantee safety.
unsafe fn get_slice(&self, len: usize) -> &[S];
/// Implementations must return a mutable slice of flexible array member of length `len`.
/// # Safety
/// Do not use this function directly, as the FlexibleArrayWrapper will guarantee safety.
unsafe fn get_mut_slice(&mut self, len: usize) -> &mut [S];
}
/// Always use this macro for implementing the FlexibleArray<`S`> trait for a given `T`. There
/// exists an 1:1 mapping of macro identifiers to the definitions in the FlexibleArray<`S`>
/// documentation, so refer to that for more information.
#[macro_export]
macro_rules! flexible_array_impl {
($T:ident, $S:ident, $nents:ident, $entries:ident) => {
impl FlexibleArray<$S> for $T {
fn set_len(&mut self, len: usize) {
self.$nents = ::std::convert::TryInto::try_into(len).unwrap();
}
fn get_len(&self) -> usize {
self.$nents as usize
}
unsafe fn get_slice(&self, len: usize) -> &[$S] {
self.$entries.as_slice(len)
}
unsafe fn get_mut_slice(&mut self, len: usize) -> &mut [$S] {
self.$entries.as_mut_slice(len)
}
}
};
}
pub struct FlexibleArrayWrapper<T, S> {
entries: Vec<T>,
phantom: PhantomData<S>,
allocated_len: usize,
}
/// Convenience wrapper for flexible array structs.
///
/// The FlexibleArray trait must be implemented for the flexible array struct before using this
/// wrapper.
impl<T, S> FlexibleArrayWrapper<T, S>
where
T: FlexibleArray<S> + Default,
{
/// Creates a new FlexibleArrayWrapper for the given flexible array struct type and flexible
/// array type. The flexible array length is set to `array_len`. vec_with_array_field is used
/// to make sure the resultant wrapper is appropriately sized.
pub fn new(array_len: usize) -> FlexibleArrayWrapper<T, S> {
let mut entries = vec_with_array_field::<T, S>(array_len);
entries[0].set_len(array_len);
FlexibleArrayWrapper {
entries,
phantom: PhantomData,
allocated_len: array_len,
}
}
/// Mapping the unsized array to a slice is unsafe because the length isn't known. Using
/// the length we originally allocated with eliminates the possibility of overflow.
fn get_valid_len(&self) -> usize {
if self.entries[0].get_len() > self.allocated_len {
self.allocated_len
} else {
self.entries[0].get_len()
}
}
/// Returns a slice of the flexible array member, for inspecting. To modify, use
/// mut_entries_slice instead.
pub fn entries_slice(&self) -> &[S] {
let valid_length = self.get_valid_len();
// Safe because the length has been validated.
unsafe { self.entries[0].get_slice(valid_length) }
}
/// Returns a mutable slice of the flexible array member, for modifying.
pub fn mut_entries_slice(&mut self) -> &mut [S] {
let valid_length = self.get_valid_len();
self.entries[0].set_len(valid_length);
// Safe because the length has been validated.
unsafe { self.entries[0].get_mut_slice(valid_length) }
}
/// Get a pointer so it can be passed to the kernel. Callers must not access the flexible
/// array member. Using this pointer is unsafe.
pub fn
|
(&self) -> *const T {
&self.entries[0]
}
/// Get a mutable pointer so it can be passed to the kernel. Callers must not access the
/// flexible array member. Using this pointer is unsafe.
pub fn as_mut_ptr(&mut self) -> *mut T {
&mut self.entries[0]
}
}
|
as_ptr
|
identifier_name
|
flexible_array.rs
|
// Copyright 2019 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! A wrapper for structures that contain flexible arrays.
use std::marker::PhantomData;
use std::mem::size_of;
// Returns a `Vec<T>` with a size in bytes at least as large as `size_in_bytes`.
fn vec_with_size_in_bytes<T: Default>(size_in_bytes: usize) -> Vec<T> {
let rounded_size = (size_in_bytes + size_of::<T>() - 1) / size_of::<T>();
let mut v = Vec::with_capacity(rounded_size);
for _ in 0..rounded_size {
v.push(T::default())
}
v
}
/// The kernel API has many structs that resemble the following `Foo` structure:
///
/// ```ignore
/// #[repr(C)]
/// struct Foo {
/// some_data: u32,
/// entries: __IncompleteArrayField<__u32>,
/// }
/// ```
///
/// In order to allocate such a structure, `size_of::<Foo>()` would be too small because it would
/// not include any space for `entries`. To make the allocation large enough while still being
/// aligned for `Foo`, a `Vec<Foo>` is created. Only the first element of `Vec<Foo>` would actually
/// be used as a `Foo`. The remaining memory in the `Vec<Foo>` is for `entries`, which must be
/// contiguous with `Foo`. This function is used to make the `Vec<Foo>` with enough space for
/// `count` entries.
pub fn vec_with_array_field<T: Default, F>(count: usize) -> Vec<T> {
let element_space = count * size_of::<F>();
let vec_size_bytes = size_of::<T>() + element_space;
vec_with_size_in_bytes(vec_size_bytes)
}
/// The following code provides generic helpers for creating and accessing flexible array structs.
/// A complete definition of flexible array structs is found in the ISO 9899 specification
/// <http://www.iso-9899.info/n1570.html>. A flexible array struct is of the form:
///
/// ```ignore
/// #[repr(C)]
/// struct T {
/// some_data: u32,
/// nents: u32,
/// entries: __IncompleteArrayField<S>,
/// }
/// ```
/// where:
///
/// - `T` is the flexible array struct type
/// - `S` is the flexible array type
/// - `nents` is the flexible array length
/// - `entries` is the flexible array member
///
/// These structures are used by the kernel API.
/// A collection of methods that are required by the FlexibleArrayWrapper type.
///
/// When implemented for `T`, this trait allows the caller to set number of `S` entries and
/// retrieve a slice of `S` entries. Trait methods must only be called by the FlexibleArrayWrapper
/// type. Don't implement this trait directly, use the flexible_array! macro to avoid duplication.
pub trait FlexibleArray<S> {
/// Implementations must set flexible array length in the flexible array struct to the value
/// specified by `len`. Appropriate conversions (i.e, usize to u32) are allowed so long as
/// they don't overflow or underflow.
fn set_len(&mut self, len: usize);
/// Implementations must return the length of the flexible array member. Appropriate
/// conversions (i.e, usize to u32) are allowed so long as they don't overflow or underflow.
fn get_len(&self) -> usize;
/// Implementations must return a slice of flexible array member of length `len`.
/// # Safety
/// Do not use this function directly, as the FlexibleArrayWrapper will guarantee safety.
unsafe fn get_slice(&self, len: usize) -> &[S];
/// Implementations must return a mutable slice of flexible array member of length `len`.
/// # Safety
/// Do not use this function directly, as the FlexibleArrayWrapper will guarantee safety.
unsafe fn get_mut_slice(&mut self, len: usize) -> &mut [S];
}
/// Always use this macro for implementing the FlexibleArray<`S`> trait for a given `T`. There
/// exists an 1:1 mapping of macro identifiers to the definitions in the FlexibleArray<`S`>
/// documentation, so refer to that for more information.
#[macro_export]
macro_rules! flexible_array_impl {
($T:ident, $S:ident, $nents:ident, $entries:ident) => {
impl FlexibleArray<$S> for $T {
fn set_len(&mut self, len: usize) {
self.$nents = ::std::convert::TryInto::try_into(len).unwrap();
}
fn get_len(&self) -> usize {
self.$nents as usize
}
unsafe fn get_slice(&self, len: usize) -> &[$S] {
self.$entries.as_slice(len)
}
unsafe fn get_mut_slice(&mut self, len: usize) -> &mut [$S] {
self.$entries.as_mut_slice(len)
}
}
};
}
pub struct FlexibleArrayWrapper<T, S> {
entries: Vec<T>,
phantom: PhantomData<S>,
allocated_len: usize,
}
/// Convenience wrapper for flexible array structs.
///
/// The FlexibleArray trait must be implemented for the flexible array struct before using this
/// wrapper.
impl<T, S> FlexibleArrayWrapper<T, S>
where
T: FlexibleArray<S> + Default,
{
/// Creates a new FlexibleArrayWrapper for the given flexible array struct type and flexible
/// array type. The flexible array length is set to `array_len`. vec_with_array_field is used
/// to make sure the resultant wrapper is appropriately sized.
pub fn new(array_len: usize) -> FlexibleArrayWrapper<T, S> {
let mut entries = vec_with_array_field::<T, S>(array_len);
|
FlexibleArrayWrapper {
entries,
phantom: PhantomData,
allocated_len: array_len,
}
}
/// Mapping the unsized array to a slice is unsafe because the length isn't known. Using
/// the length we originally allocated with eliminates the possibility of overflow.
fn get_valid_len(&self) -> usize {
if self.entries[0].get_len() > self.allocated_len {
self.allocated_len
} else {
self.entries[0].get_len()
}
}
/// Returns a slice of the flexible array member, for inspecting. To modify, use
/// mut_entries_slice instead.
pub fn entries_slice(&self) -> &[S] {
let valid_length = self.get_valid_len();
// Safe because the length has been validated.
unsafe { self.entries[0].get_slice(valid_length) }
}
/// Returns a mutable slice of the flexible array member, for modifying.
pub fn mut_entries_slice(&mut self) -> &mut [S] {
let valid_length = self.get_valid_len();
self.entries[0].set_len(valid_length);
// Safe because the length has been validated.
unsafe { self.entries[0].get_mut_slice(valid_length) }
}
/// Get a pointer so it can be passed to the kernel. Callers must not access the flexible
/// array member. Using this pointer is unsafe.
pub fn as_ptr(&self) -> *const T {
&self.entries[0]
}
/// Get a mutable pointer so it can be passed to the kernel. Callers must not access the
/// flexible array member. Using this pointer is unsafe.
pub fn as_mut_ptr(&mut self) -> *mut T {
&mut self.entries[0]
}
}
|
entries[0].set_len(array_len);
|
random_line_split
|
borrowck-object-mutability.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Foo {
fn borrowed(&self);
fn borrowed_mut(&mut self);
}
fn borrowed_receiver(x: &Foo) {
|
fn borrowed_mut_receiver(x: &mut Foo) {
x.borrowed();
x.borrowed_mut();
}
fn owned_receiver(x: Box<Foo>) {
x.borrowed();
x.borrowed_mut(); //~ ERROR cannot borrow
}
fn mut_owned_receiver(mut x: Box<Foo>) {
x.borrowed();
x.borrowed_mut();
}
fn main() {}
|
x.borrowed();
x.borrowed_mut(); //~ ERROR cannot borrow
}
|
random_line_split
|
borrowck-object-mutability.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Foo {
fn borrowed(&self);
fn borrowed_mut(&mut self);
}
fn borrowed_receiver(x: &Foo)
|
fn borrowed_mut_receiver(x: &mut Foo) {
x.borrowed();
x.borrowed_mut();
}
fn owned_receiver(x: Box<Foo>) {
x.borrowed();
x.borrowed_mut(); //~ ERROR cannot borrow
}
fn mut_owned_receiver(mut x: Box<Foo>) {
x.borrowed();
x.borrowed_mut();
}
fn main() {}
|
{
x.borrowed();
x.borrowed_mut(); //~ ERROR cannot borrow
}
|
identifier_body
|
borrowck-object-mutability.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Foo {
fn borrowed(&self);
fn borrowed_mut(&mut self);
}
fn borrowed_receiver(x: &Foo) {
x.borrowed();
x.borrowed_mut(); //~ ERROR cannot borrow
}
fn
|
(x: &mut Foo) {
x.borrowed();
x.borrowed_mut();
}
fn owned_receiver(x: Box<Foo>) {
x.borrowed();
x.borrowed_mut(); //~ ERROR cannot borrow
}
fn mut_owned_receiver(mut x: Box<Foo>) {
x.borrowed();
x.borrowed_mut();
}
fn main() {}
|
borrowed_mut_receiver
|
identifier_name
|
kitchen_sink.rs
|
#![feature(plugin)]
#![plugin(json_macros)]
#[cfg(feature="with-rustc-serialize")]
extern crate rustc_serialize;
#[cfg(feature="with-serde")]
extern crate serde_json;
#[cfg(feature="with-rustc-serialize")]
fn make_pretty_json(x: i32) -> String {
json!({ // object literal
"foo": "foooooo", // string literal keys and values
"bar": [true, null, 123, 123.4], // array, boolean, null, numeric literals
"quux": { // nest as deeply as you like
"a": [1, 2, 3, 4],
"b": { "a": null },
"c": false
},
"waldo": (192 - x) // wrap in parens to splice ToJson expressions directly
}).pretty().to_string()
}
|
"bar": [true, null, 123, 123.4], // array, boolean, null, numeric literals
"quux": { // nest as deeply as you like
"a": [1, 2, 3, 4],
"b": { "a": null },
"c": false
},
"waldo": (192 - x) // wrap in parens to splice ToJson expressions directly
})).unwrap()
}
pub fn main() {
// See implementation for serde/rustc-serialize features above.
println!("{}", make_pretty_json(1));
}
|
#[cfg(feature="with-serde")]
fn make_pretty_json(x: i32) -> String {
serde_json::to_string_pretty(&json!({ // object literal
"foo": "foooooo", // string literal keys and values
|
random_line_split
|
kitchen_sink.rs
|
#![feature(plugin)]
#![plugin(json_macros)]
#[cfg(feature="with-rustc-serialize")]
extern crate rustc_serialize;
#[cfg(feature="with-serde")]
extern crate serde_json;
#[cfg(feature="with-rustc-serialize")]
fn
|
(x: i32) -> String {
json!({ // object literal
"foo": "foooooo", // string literal keys and values
"bar": [true, null, 123, 123.4], // array, boolean, null, numeric literals
"quux": { // nest as deeply as you like
"a": [1, 2, 3, 4],
"b": { "a": null },
"c": false
},
"waldo": (192 - x) // wrap in parens to splice ToJson expressions directly
}).pretty().to_string()
}
#[cfg(feature="with-serde")]
fn make_pretty_json(x: i32) -> String {
serde_json::to_string_pretty(&json!({ // object literal
"foo": "foooooo", // string literal keys and values
"bar": [true, null, 123, 123.4], // array, boolean, null, numeric literals
"quux": { // nest as deeply as you like
"a": [1, 2, 3, 4],
"b": { "a": null },
"c": false
},
"waldo": (192 - x) // wrap in parens to splice ToJson expressions directly
})).unwrap()
}
pub fn main() {
// See implementation for serde/rustc-serialize features above.
println!("{}", make_pretty_json(1));
}
|
make_pretty_json
|
identifier_name
|
kitchen_sink.rs
|
#![feature(plugin)]
#![plugin(json_macros)]
#[cfg(feature="with-rustc-serialize")]
extern crate rustc_serialize;
#[cfg(feature="with-serde")]
extern crate serde_json;
#[cfg(feature="with-rustc-serialize")]
fn make_pretty_json(x: i32) -> String
|
#[cfg(feature="with-serde")]
fn make_pretty_json(x: i32) -> String {
serde_json::to_string_pretty(&json!({ // object literal
"foo": "foooooo", // string literal keys and values
"bar": [true, null, 123, 123.4], // array, boolean, null, numeric literals
"quux": { // nest as deeply as you like
"a": [1, 2, 3, 4],
"b": { "a": null },
"c": false
},
"waldo": (192 - x) // wrap in parens to splice ToJson expressions directly
})).unwrap()
}
pub fn main() {
// See implementation for serde/rustc-serialize features above.
println!("{}", make_pretty_json(1));
}
|
{
json!({ // object literal
"foo": "foooooo", // string literal keys and values
"bar": [true, null, 123, 123.4], // array, boolean, null, numeric literals
"quux": { // nest as deeply as you like
"a": [1, 2, 3, 4],
"b": { "a": null },
"c": false
},
"waldo": (192 - x) // wrap in parens to splice ToJson expressions directly
}).pretty().to_string()
}
|
identifier_body
|
search.rs
|
use bytes::Bytes;
use std::io::Cursor;
use rocket::response::{self, Response, Responder};
use rocket::request::Request;
use rocket::http::ContentType;
use crate::web::config::{Config, ServerConfig};
use crate::data_types::{SolrTermSummary, SolrReferenceSummary};
use crate::api::term_search::{search_terms, term_complete, term_summary_by_id};
use crate::api::ref_search::{search_refs};
use crate::api::doc_search::search_docs;
pub use crate::api::doc_search::DocSearchMatch;
pub struct Search {
config: ServerConfig,
}
pub struct PNGPlot {
pub bytes: Bytes
}
impl<'a> Responder<'a, 'a> for PNGPlot {
fn respond_to(self, _: &Request) -> response::Result<'a> {
Response::build()
.sized_body(self.bytes.len(), Cursor::new(self.bytes))
.header(ContentType::new("image", "png"))
.ok()
}
}
#[derive(Deserialize, Debug)]
pub enum SolrSearchScope {
#[serde(rename = "term")]
Term,
#[serde(rename = "reference")]
Reference,
#[serde(rename = "doc")]
Documentation,
}
impl SolrSearchScope {
pub fn new_from_str(scope_str: &str) -> Option<SolrSearchScope> {
match scope_str {
"term" => Some(SolrSearchScope::Term),
"ref" => Some(SolrSearchScope::Reference),
"doc" => Some(SolrSearchScope::Documentation),
_ => None,
}
}
pub fn is_term(&self) -> bool {
matches!(self, SolrSearchScope::Term)
}
pub fn is_reference(&self) -> bool {
matches!(self, SolrSearchScope::Reference)
}
pub fn is_documentation(&self) -> bool {
matches!(self, SolrSearchScope::Documentation)
}
}
#[derive(Serialize, Debug)]
pub struct SolrSearchResult {
pub term_matches: Vec<SolrTermSummary>,
pub ref_matches: Vec<SolrReferenceSummary>,
pub doc_matches: Vec<DocSearchMatch>,
}
impl Search {
pub fn new(config: &Config) -> Search {
Search {
config: config.server.clone(),
}
}
pub fn motif_search(&self, scope: &str, pattern: &str) -> Result<String, String> {
let search_url = self.config.django_url.to_owned() + "/motifsearch/query/";
let params = [("scope", scope), ("pattern", pattern)];
let client = reqwest::blocking::Client::new();
let result = client.get(&search_url).query(¶ms).send();
match result {
Ok(res) => {
match res.text() {
Ok(text) => Ok(text),
Err(err) => Err(format!("Error getting text from motif search: {:?}", err)),
}
},
Err(err) => {
Err(format!("Motif search error: {:?}", err))
}
}
}
pub fn gene_ex_violin_plot(&self, plot_size: &str, genes: &str)
-> Result<PNGPlot, String>
{
let plot_url = self.config.django_url.to_owned() + "/gene_ex/gene_ex_violin/";
let params = [("plot_size", plot_size), ("genes", genes)];
let client = reqwest::blocking::Client::new();
let result = client.get(&plot_url).query(¶ms).send();
match result {
Ok(res) => {
match res.bytes() {
Ok(bytes) => Ok(PNGPlot { bytes }),
Err(err) => Err(format!("Error getting violin plot image: {:?}", err)),
}
},
Err(err) => {
Err(format!("Gene expression violin plot error: {:?}", err))
}
}
}
pub fn term_complete(&self, cv_name: &str, q: &str)
-> Result<Vec<SolrTermSummary>, String>
{
term_complete(&self.config, cv_name, q)
}
pub fn term_summary_by_id(&self, termid: &str)
-> Result<Option<SolrTermSummary>, String>
{
term_summary_by_id(&self.config, termid)
}
pub fn ref_complete(&self, q: &str)
-> Result<Vec<SolrReferenceSummary>, String>
{
search_refs(&self.config, q)
}
pub fn solr_search(&self, scope: &SolrSearchScope, q: &str)
-> Result<SolrSearchResult, String>
{
let trimmed_query = q.trim();
let term_matches =
if scope.is_term() {
search_terms(&self.config, trimmed_query)?
} else {
vec![]
};
let ref_matches =
if scope.is_reference() {
search_refs(&self.config, trimmed_query)?
} else
|
;
let doc_matches =
if scope.is_documentation() {
search_docs(&self.config, trimmed_query)?
} else {
vec![]
};
Ok(SolrSearchResult {
term_matches,
ref_matches,
doc_matches,
})
}
}
|
{
vec![]
}
|
conditional_block
|
search.rs
|
use bytes::Bytes;
use std::io::Cursor;
use rocket::response::{self, Response, Responder};
use rocket::request::Request;
use rocket::http::ContentType;
use crate::web::config::{Config, ServerConfig};
use crate::data_types::{SolrTermSummary, SolrReferenceSummary};
use crate::api::term_search::{search_terms, term_complete, term_summary_by_id};
use crate::api::ref_search::{search_refs};
use crate::api::doc_search::search_docs;
pub use crate::api::doc_search::DocSearchMatch;
pub struct Search {
config: ServerConfig,
}
pub struct
|
{
pub bytes: Bytes
}
impl<'a> Responder<'a, 'a> for PNGPlot {
fn respond_to(self, _: &Request) -> response::Result<'a> {
Response::build()
.sized_body(self.bytes.len(), Cursor::new(self.bytes))
.header(ContentType::new("image", "png"))
.ok()
}
}
#[derive(Deserialize, Debug)]
pub enum SolrSearchScope {
#[serde(rename = "term")]
Term,
#[serde(rename = "reference")]
Reference,
#[serde(rename = "doc")]
Documentation,
}
impl SolrSearchScope {
pub fn new_from_str(scope_str: &str) -> Option<SolrSearchScope> {
match scope_str {
"term" => Some(SolrSearchScope::Term),
"ref" => Some(SolrSearchScope::Reference),
"doc" => Some(SolrSearchScope::Documentation),
_ => None,
}
}
pub fn is_term(&self) -> bool {
matches!(self, SolrSearchScope::Term)
}
pub fn is_reference(&self) -> bool {
matches!(self, SolrSearchScope::Reference)
}
pub fn is_documentation(&self) -> bool {
matches!(self, SolrSearchScope::Documentation)
}
}
#[derive(Serialize, Debug)]
pub struct SolrSearchResult {
pub term_matches: Vec<SolrTermSummary>,
pub ref_matches: Vec<SolrReferenceSummary>,
pub doc_matches: Vec<DocSearchMatch>,
}
impl Search {
pub fn new(config: &Config) -> Search {
Search {
config: config.server.clone(),
}
}
pub fn motif_search(&self, scope: &str, pattern: &str) -> Result<String, String> {
let search_url = self.config.django_url.to_owned() + "/motifsearch/query/";
let params = [("scope", scope), ("pattern", pattern)];
let client = reqwest::blocking::Client::new();
let result = client.get(&search_url).query(¶ms).send();
match result {
Ok(res) => {
match res.text() {
Ok(text) => Ok(text),
Err(err) => Err(format!("Error getting text from motif search: {:?}", err)),
}
},
Err(err) => {
Err(format!("Motif search error: {:?}", err))
}
}
}
pub fn gene_ex_violin_plot(&self, plot_size: &str, genes: &str)
-> Result<PNGPlot, String>
{
let plot_url = self.config.django_url.to_owned() + "/gene_ex/gene_ex_violin/";
let params = [("plot_size", plot_size), ("genes", genes)];
let client = reqwest::blocking::Client::new();
let result = client.get(&plot_url).query(¶ms).send();
match result {
Ok(res) => {
match res.bytes() {
Ok(bytes) => Ok(PNGPlot { bytes }),
Err(err) => Err(format!("Error getting violin plot image: {:?}", err)),
}
},
Err(err) => {
Err(format!("Gene expression violin plot error: {:?}", err))
}
}
}
pub fn term_complete(&self, cv_name: &str, q: &str)
-> Result<Vec<SolrTermSummary>, String>
{
term_complete(&self.config, cv_name, q)
}
pub fn term_summary_by_id(&self, termid: &str)
-> Result<Option<SolrTermSummary>, String>
{
term_summary_by_id(&self.config, termid)
}
pub fn ref_complete(&self, q: &str)
-> Result<Vec<SolrReferenceSummary>, String>
{
search_refs(&self.config, q)
}
pub fn solr_search(&self, scope: &SolrSearchScope, q: &str)
-> Result<SolrSearchResult, String>
{
let trimmed_query = q.trim();
let term_matches =
if scope.is_term() {
search_terms(&self.config, trimmed_query)?
} else {
vec![]
};
let ref_matches =
if scope.is_reference() {
search_refs(&self.config, trimmed_query)?
} else {
vec![]
};
let doc_matches =
if scope.is_documentation() {
search_docs(&self.config, trimmed_query)?
} else {
vec![]
};
Ok(SolrSearchResult {
term_matches,
ref_matches,
doc_matches,
})
}
}
|
PNGPlot
|
identifier_name
|
search.rs
|
use bytes::Bytes;
use std::io::Cursor;
use rocket::response::{self, Response, Responder};
use rocket::request::Request;
use rocket::http::ContentType;
use crate::web::config::{Config, ServerConfig};
use crate::data_types::{SolrTermSummary, SolrReferenceSummary};
use crate::api::term_search::{search_terms, term_complete, term_summary_by_id};
use crate::api::ref_search::{search_refs};
use crate::api::doc_search::search_docs;
pub use crate::api::doc_search::DocSearchMatch;
pub struct Search {
config: ServerConfig,
}
pub struct PNGPlot {
pub bytes: Bytes
}
impl<'a> Responder<'a, 'a> for PNGPlot {
fn respond_to(self, _: &Request) -> response::Result<'a> {
Response::build()
.sized_body(self.bytes.len(), Cursor::new(self.bytes))
.header(ContentType::new("image", "png"))
.ok()
}
}
#[derive(Deserialize, Debug)]
pub enum SolrSearchScope {
#[serde(rename = "term")]
Term,
#[serde(rename = "reference")]
Reference,
#[serde(rename = "doc")]
Documentation,
}
impl SolrSearchScope {
pub fn new_from_str(scope_str: &str) -> Option<SolrSearchScope> {
match scope_str {
"term" => Some(SolrSearchScope::Term),
"ref" => Some(SolrSearchScope::Reference),
"doc" => Some(SolrSearchScope::Documentation),
_ => None,
}
|
pub fn is_term(&self) -> bool {
matches!(self, SolrSearchScope::Term)
}
pub fn is_reference(&self) -> bool {
matches!(self, SolrSearchScope::Reference)
}
pub fn is_documentation(&self) -> bool {
matches!(self, SolrSearchScope::Documentation)
}
}
#[derive(Serialize, Debug)]
pub struct SolrSearchResult {
pub term_matches: Vec<SolrTermSummary>,
pub ref_matches: Vec<SolrReferenceSummary>,
pub doc_matches: Vec<DocSearchMatch>,
}
impl Search {
pub fn new(config: &Config) -> Search {
Search {
config: config.server.clone(),
}
}
pub fn motif_search(&self, scope: &str, pattern: &str) -> Result<String, String> {
let search_url = self.config.django_url.to_owned() + "/motifsearch/query/";
let params = [("scope", scope), ("pattern", pattern)];
let client = reqwest::blocking::Client::new();
let result = client.get(&search_url).query(¶ms).send();
match result {
Ok(res) => {
match res.text() {
Ok(text) => Ok(text),
Err(err) => Err(format!("Error getting text from motif search: {:?}", err)),
}
},
Err(err) => {
Err(format!("Motif search error: {:?}", err))
}
}
}
pub fn gene_ex_violin_plot(&self, plot_size: &str, genes: &str)
-> Result<PNGPlot, String>
{
let plot_url = self.config.django_url.to_owned() + "/gene_ex/gene_ex_violin/";
let params = [("plot_size", plot_size), ("genes", genes)];
let client = reqwest::blocking::Client::new();
let result = client.get(&plot_url).query(¶ms).send();
match result {
Ok(res) => {
match res.bytes() {
Ok(bytes) => Ok(PNGPlot { bytes }),
Err(err) => Err(format!("Error getting violin plot image: {:?}", err)),
}
},
Err(err) => {
Err(format!("Gene expression violin plot error: {:?}", err))
}
}
}
pub fn term_complete(&self, cv_name: &str, q: &str)
-> Result<Vec<SolrTermSummary>, String>
{
term_complete(&self.config, cv_name, q)
}
pub fn term_summary_by_id(&self, termid: &str)
-> Result<Option<SolrTermSummary>, String>
{
term_summary_by_id(&self.config, termid)
}
pub fn ref_complete(&self, q: &str)
-> Result<Vec<SolrReferenceSummary>, String>
{
search_refs(&self.config, q)
}
pub fn solr_search(&self, scope: &SolrSearchScope, q: &str)
-> Result<SolrSearchResult, String>
{
let trimmed_query = q.trim();
let term_matches =
if scope.is_term() {
search_terms(&self.config, trimmed_query)?
} else {
vec![]
};
let ref_matches =
if scope.is_reference() {
search_refs(&self.config, trimmed_query)?
} else {
vec![]
};
let doc_matches =
if scope.is_documentation() {
search_docs(&self.config, trimmed_query)?
} else {
vec![]
};
Ok(SolrSearchResult {
term_matches,
ref_matches,
doc_matches,
})
}
}
|
}
|
random_line_split
|
asm_ext.rs
|
// #![crate_type="dylib"]
// #![feature(plugin_registrar, rustc_private)]
//
// extern crate syntax;
// extern crate rustc;
// extern crate rustc_plugin;
//
// use syntax::parse::token;
// use syntax::tokenstream::TokenTree;
// use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
// use syntax::ext::build::AstBuilder; // A trait for expr_usize.
// use syntax::ext::quote::rt::Span;
// use rustc_plugin::Registry;
|
//
// asm::parseInput();
//
// }
//
//
// #[plugin_registrar]
// pub fn plugin_registrar(reg: &mut Registry) {
// reg.register_macro("z80asm", expand_asm);
// }
//
|
//
// fn expand_asm(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) -> Box<MacResult + 'static> {
// use rusty_boy::assembler::asm;
// use rusty_boy::assembler::language;
|
random_line_split
|
13b_enum_values_manual.rs
|
// If you require more complex configuration than simple_enum! provides, you can implement the
// trait manually, as in the following example.
//
// In the following example we will create an enum with 4 values, assign a positional argument
// that accepts only one of those values, and use clap to parse the argument.
//
// Start with bringing the trait into scope.
use std::str::FromStr;
// Add clap like normal
#[macro_use]
extern crate clap;
use clap::{App, Arg};
// Define your enum
enum Vals {
Foo,
Bar,
Baz,
Qux
}
// Implement the trait
impl FromStr for Vals {
type Err = &'static str;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"Foo" => Ok(Vals::Foo),
"Bar" => Ok(Vals::Bar),
"Baz" => Ok(Vals::Baz),
"Qux" => Ok(Vals::Qux),
_ => Err("no match")
}
}
}
fn main()
|
{
// Create the application like normal
let enum_vals = ["Foo", "Bar", "Baz", "Qux"];
let m = App::new("myapp")
// Use a single positional argument that is required
.arg(Arg::from_usage("<type> 'The type to use'")
// Define the list of possible values
.possible_values(&enum_vals))
.get_matches();
let t = value_t_or_exit!(m.value_of("type"), Vals);
// Now we can use our enum like normal.
match t {
Vals::Foo => println!("Found a Foo"),
Vals::Bar => println!("Found a Bar"),
Vals::Baz => println!("Found a Baz"),
Vals::Qux => println!("Found a Qux")
}
}
|
identifier_body
|
|
13b_enum_values_manual.rs
|
// If you require more complex configuration than simple_enum! provides, you can implement the
// trait manually, as in the following example.
//
// In the following example we will create an enum with 4 values, assign a positional argument
// that accepts only one of those values, and use clap to parse the argument.
//
// Start with bringing the trait into scope.
use std::str::FromStr;
// Add clap like normal
#[macro_use]
extern crate clap;
use clap::{App, Arg};
// Define your enum
enum Vals {
Foo,
Bar,
Baz,
Qux
}
// Implement the trait
impl FromStr for Vals {
type Err = &'static str;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"Foo" => Ok(Vals::Foo),
"Bar" => Ok(Vals::Bar),
"Baz" => Ok(Vals::Baz),
"Qux" => Ok(Vals::Qux),
_ => Err("no match")
}
}
}
fn
|
() {
// Create the application like normal
let enum_vals = ["Foo", "Bar", "Baz", "Qux"];
let m = App::new("myapp")
// Use a single positional argument that is required
.arg(Arg::from_usage("<type> 'The type to use'")
// Define the list of possible values
.possible_values(&enum_vals))
.get_matches();
let t = value_t_or_exit!(m.value_of("type"), Vals);
// Now we can use our enum like normal.
match t {
Vals::Foo => println!("Found a Foo"),
Vals::Bar => println!("Found a Bar"),
Vals::Baz => println!("Found a Baz"),
Vals::Qux => println!("Found a Qux")
}
}
|
main
|
identifier_name
|
13b_enum_values_manual.rs
|
// If you require more complex configuration than simple_enum! provides, you can implement the
// trait manually, as in the following example.
//
// In the following example we will create an enum with 4 values, assign a positional argument
// that accepts only one of those values, and use clap to parse the argument.
//
// Start with bringing the trait into scope.
use std::str::FromStr;
// Add clap like normal
#[macro_use]
extern crate clap;
use clap::{App, Arg};
// Define your enum
enum Vals {
Foo,
Bar,
Baz,
|
}
// Implement the trait
impl FromStr for Vals {
type Err = &'static str;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"Foo" => Ok(Vals::Foo),
"Bar" => Ok(Vals::Bar),
"Baz" => Ok(Vals::Baz),
"Qux" => Ok(Vals::Qux),
_ => Err("no match")
}
}
}
fn main() {
// Create the application like normal
let enum_vals = ["Foo", "Bar", "Baz", "Qux"];
let m = App::new("myapp")
// Use a single positional argument that is required
.arg(Arg::from_usage("<type> 'The type to use'")
// Define the list of possible values
.possible_values(&enum_vals))
.get_matches();
let t = value_t_or_exit!(m.value_of("type"), Vals);
// Now we can use our enum like normal.
match t {
Vals::Foo => println!("Found a Foo"),
Vals::Bar => println!("Found a Bar"),
Vals::Baz => println!("Found a Baz"),
Vals::Qux => println!("Found a Qux")
}
}
|
Qux
|
random_line_split
|
alert.rs
|
use util::{ReadExt, WriteExt};
use tls_result::{TlsResult, TlsError, TlsErrorKind};
use tls_item::TlsItem;
// we treat every alert as fatal.
tls_enum!(u8, enum AlertLevel {
warning(1),
fatal(2)
});
// A.3. Alert Messages
// http://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml
tls_enum!(u8, #[derive(Debug)] enum AlertDescription {
close_notify(0),
unexpected_message(10),
bad_record_mac(20),
decryption_failed_RESERVED(21),
record_overflow(22),
decompression_failure(30),
handshake_failure(40),
no_certificate_RESERVED(41),
bad_certificate(42),
unsupported_certificate(43),
certificate_revoked(44),
certificate_expired(45),
certificate_unknown(46),
illegal_parameter(47),
unknown_ca(48),
access_denied(49),
decode_error(50),
decrypt_error(51),
export_restriction_RESERVED(60),
|
insufficient_security(71),
internal_error(80),
user_canceled(90),
no_renegotiation(100),
unsupported_extension(110)
// RFC 6066
// certificate_unobtainable(111),
// unrecognized_name(112),
// bad_certificate_status_response(113),
// bad_certificate_hash_value(114),
});
impl AlertDescription {
fn from_err(kind: TlsErrorKind) -> AlertDescription {
match kind {
TlsErrorKind::UnexpectedMessage => AlertDescription::unexpected_message,
TlsErrorKind::BadRecordMac => AlertDescription::bad_record_mac,
TlsErrorKind::RecordOverflow => AlertDescription::record_overflow,
TlsErrorKind::IllegalParameter => AlertDescription::illegal_parameter,
TlsErrorKind::DecodeError => AlertDescription::decode_error,
TlsErrorKind::DecryptError => AlertDescription::decrypt_error,
TlsErrorKind::InternalError => AlertDescription::internal_error,
// FIXME: we probably can't even send alert?
TlsErrorKind::IoFailure => AlertDescription::internal_error,
TlsErrorKind::AlertReceived => AlertDescription::close_notify,
}
}
}
tls_struct!(struct Alert {
level: AlertLevel,
description: AlertDescription
});
impl Alert {
pub fn new(level: AlertLevel, desc: AlertDescription) -> TlsResult<Alert> {
// TODO filter out some rfc-invalid alerts
Ok(Alert {
level: level,
description: desc,
})
}
pub fn from_tls_err(err: &TlsError) -> Alert {
Alert {
level: AlertLevel::fatal,
description: AlertDescription::from_err(err.kind),
}
}
}
|
protocol_version(70),
|
random_line_split
|
alert.rs
|
use util::{ReadExt, WriteExt};
use tls_result::{TlsResult, TlsError, TlsErrorKind};
use tls_item::TlsItem;
// we treat every alert as fatal.
tls_enum!(u8, enum AlertLevel {
warning(1),
fatal(2)
});
// A.3. Alert Messages
// http://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml
tls_enum!(u8, #[derive(Debug)] enum AlertDescription {
close_notify(0),
unexpected_message(10),
bad_record_mac(20),
decryption_failed_RESERVED(21),
record_overflow(22),
decompression_failure(30),
handshake_failure(40),
no_certificate_RESERVED(41),
bad_certificate(42),
unsupported_certificate(43),
certificate_revoked(44),
certificate_expired(45),
certificate_unknown(46),
illegal_parameter(47),
unknown_ca(48),
access_denied(49),
decode_error(50),
decrypt_error(51),
export_restriction_RESERVED(60),
protocol_version(70),
insufficient_security(71),
internal_error(80),
user_canceled(90),
no_renegotiation(100),
unsupported_extension(110)
// RFC 6066
// certificate_unobtainable(111),
// unrecognized_name(112),
// bad_certificate_status_response(113),
// bad_certificate_hash_value(114),
});
impl AlertDescription {
fn from_err(kind: TlsErrorKind) -> AlertDescription {
match kind {
TlsErrorKind::UnexpectedMessage => AlertDescription::unexpected_message,
TlsErrorKind::BadRecordMac => AlertDescription::bad_record_mac,
TlsErrorKind::RecordOverflow => AlertDescription::record_overflow,
TlsErrorKind::IllegalParameter => AlertDescription::illegal_parameter,
TlsErrorKind::DecodeError => AlertDescription::decode_error,
TlsErrorKind::DecryptError => AlertDescription::decrypt_error,
TlsErrorKind::InternalError => AlertDescription::internal_error,
// FIXME: we probably can't even send alert?
TlsErrorKind::IoFailure => AlertDescription::internal_error,
TlsErrorKind::AlertReceived => AlertDescription::close_notify,
}
}
}
tls_struct!(struct Alert {
level: AlertLevel,
description: AlertDescription
});
impl Alert {
pub fn new(level: AlertLevel, desc: AlertDescription) -> TlsResult<Alert> {
// TODO filter out some rfc-invalid alerts
Ok(Alert {
level: level,
description: desc,
})
}
pub fn from_tls_err(err: &TlsError) -> Alert
|
}
|
{
Alert {
level: AlertLevel::fatal,
description: AlertDescription::from_err(err.kind),
}
}
|
identifier_body
|
alert.rs
|
use util::{ReadExt, WriteExt};
use tls_result::{TlsResult, TlsError, TlsErrorKind};
use tls_item::TlsItem;
// we treat every alert as fatal.
tls_enum!(u8, enum AlertLevel {
warning(1),
fatal(2)
});
// A.3. Alert Messages
// http://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml
tls_enum!(u8, #[derive(Debug)] enum AlertDescription {
close_notify(0),
unexpected_message(10),
bad_record_mac(20),
decryption_failed_RESERVED(21),
record_overflow(22),
decompression_failure(30),
handshake_failure(40),
no_certificate_RESERVED(41),
bad_certificate(42),
unsupported_certificate(43),
certificate_revoked(44),
certificate_expired(45),
certificate_unknown(46),
illegal_parameter(47),
unknown_ca(48),
access_denied(49),
decode_error(50),
decrypt_error(51),
export_restriction_RESERVED(60),
protocol_version(70),
insufficient_security(71),
internal_error(80),
user_canceled(90),
no_renegotiation(100),
unsupported_extension(110)
// RFC 6066
// certificate_unobtainable(111),
// unrecognized_name(112),
// bad_certificate_status_response(113),
// bad_certificate_hash_value(114),
});
impl AlertDescription {
fn from_err(kind: TlsErrorKind) -> AlertDescription {
match kind {
TlsErrorKind::UnexpectedMessage => AlertDescription::unexpected_message,
TlsErrorKind::BadRecordMac => AlertDescription::bad_record_mac,
TlsErrorKind::RecordOverflow => AlertDescription::record_overflow,
TlsErrorKind::IllegalParameter => AlertDescription::illegal_parameter,
TlsErrorKind::DecodeError => AlertDescription::decode_error,
TlsErrorKind::DecryptError => AlertDescription::decrypt_error,
TlsErrorKind::InternalError => AlertDescription::internal_error,
// FIXME: we probably can't even send alert?
TlsErrorKind::IoFailure => AlertDescription::internal_error,
TlsErrorKind::AlertReceived => AlertDescription::close_notify,
}
}
}
tls_struct!(struct Alert {
level: AlertLevel,
description: AlertDescription
});
impl Alert {
pub fn
|
(level: AlertLevel, desc: AlertDescription) -> TlsResult<Alert> {
// TODO filter out some rfc-invalid alerts
Ok(Alert {
level: level,
description: desc,
})
}
pub fn from_tls_err(err: &TlsError) -> Alert {
Alert {
level: AlertLevel::fatal,
description: AlertDescription::from_err(err.kind),
}
}
}
|
new
|
identifier_name
|
c-style-enum.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-android: FIXME(#10381)
// compile-flags:-Z extra-debug-info
// debugger:rbreak zzz
// debugger:run
// debugger:finish
// debugger:print auto_one
// check:$1 = One
// debugger:print auto_two
// check:$2 = Two
// debugger:print auto_three
// check:$3 = Three
// debugger:print manual_one_hundred
// check:$4 = OneHundred
// debugger:print manual_one_thousand
// check:$5 = OneThousand
// debugger:print manual_one_million
// check:$6 = OneMillion
// debugger:print single_variant
// check:$7 = TheOnlyVariant
#[allow(unused_variable)];
enum AutoDiscriminant {
One,
Two,
Three
}
enum ManualDiscriminant {
OneHundred = 100,
OneThousand = 1000,
OneMillion = 1000000
}
enum SingleVariant {
TheOnlyVariant
}
fn main() {
let auto_one = One;
let auto_two = Two;
let auto_three = Three;
let manual_one_hundred = OneHundred;
let manual_one_thousand = OneThousand;
let manual_one_million = OneMillion;
let single_variant = TheOnlyVariant;
zzz();
}
fn
|
() {()}
|
zzz
|
identifier_name
|
c-style-enum.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-android: FIXME(#10381)
// compile-flags:-Z extra-debug-info
// debugger:rbreak zzz
// debugger:run
// debugger:finish
// debugger:print auto_one
// check:$1 = One
// debugger:print auto_two
// check:$2 = Two
// debugger:print auto_three
// check:$3 = Three
// debugger:print manual_one_hundred
// check:$4 = OneHundred
// debugger:print manual_one_thousand
// check:$5 = OneThousand
// debugger:print manual_one_million
// check:$6 = OneMillion
// debugger:print single_variant
// check:$7 = TheOnlyVariant
|
#[allow(unused_variable)];
enum AutoDiscriminant {
One,
Two,
Three
}
enum ManualDiscriminant {
OneHundred = 100,
OneThousand = 1000,
OneMillion = 1000000
}
enum SingleVariant {
TheOnlyVariant
}
fn main() {
let auto_one = One;
let auto_two = Two;
let auto_three = Three;
let manual_one_hundred = OneHundred;
let manual_one_thousand = OneThousand;
let manual_one_million = OneMillion;
let single_variant = TheOnlyVariant;
zzz();
}
fn zzz() {()}
|
random_line_split
|
|
main.rs
|
mod game;
use game::*;
use std::time::Instant;
use std::env;
#[allow(unreachable_code)]
fn main()
{
match 55
{
0 => general_play(PlayerType::IO, PlayerType::AIValue, 2, 1, true), //play against IO + learn
1 => general_play(PlayerType::IO, PlayerType::AIValueFixed, 2, 1, true), //test with IO
2 => general_play(PlayerType::Minimax, PlayerType::AIValueFixed, 100, 1, true), //test with minimax
3 => general_play(PlayerType::Random, PlayerType::AIValueFixed, 1000, 1, true), //test with random
4 => general_play(PlayerType::AIValueFixed, PlayerType::AIValue, 1_000, 10, true), //training
5 => { //continuous training and testing
println!("Training:");
for i in 0..100
{
println!("Training {}:", i+1);
general_play(PlayerType::AIValueFixed, PlayerType::AIValue, 100, 10, true); //train, learn
println!("Test {}:", i+1);
general_play(PlayerType::Minimax, PlayerType::AIValueFixed, 2, 1, true); //test with minimax
}
println!("Testing:");
general_play(PlayerType::Random, PlayerType::AIValueFixed, 1000, 1, true); //test with random
general_play(PlayerType::IO, PlayerType::AIValueFixed, 2, 1, true); //test with IO
},
_ => {
//general playing with command line arguments
play_from_args();
}
}
}
#[allow(dead_code)]
fn play_from_args()
{
let args = env::args();
//general playing with command line arguments
let mut p1 = PlayerType::IO;
let mut p2 = PlayerType::AIValue;
let mut num = 2;
let mut player1starts = true;
for (i, arg) in args.enumerate()
{
let param = arg.trim().to_lowercase();
match i
{
1 => {
let player = string_to_player(¶m);
if player.is_some() { p1 = player.unwrap(); }
},
2 => {
let player = string_to_player(¶m);
if player.is_some() { p2 = player.unwrap(); }
},
3 => {
let parsed = param.parse::<u32>();
if parsed.is_ok() { num = parsed.unwrap(); }
},
4 => {
if param == "false" { player1starts = false; }
},
_ => {}, //ignore first and all other args
}
}
println!("Running:");
general_play(p1, p2, num, 1, player1starts);
}
#[allow(dead_code)]
fn
|
(p1:PlayerType, p2:PlayerType, num:u32, gps:u32, player1starts:bool)
{
println!("Player X: {:?}", p1);
println!("Player O: {:?}", p2);
println!("Playing {} games..", num);
//prepare
let mut game = Game::new();
game.set_start_player(if player1starts {1} else {2});
game.set_player1(p1);
game.set_player2(p2);
//measure time
let now = Instant::now();
let (p1w, p2w) = game.play_many(num, gps); //play
let elapsed = now.elapsed();
let sec = (elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1000_000_000.0);
println!("Time: {} min {:.3} s", (sec / 60.0).floor(), sec % 60.0);
println!("");
//drop worse player first in case 2 learning agents play against each other and use the same file
if p1w > p2w
{
game.set_player2(PlayerType::None);
game.set_player1(PlayerType::None);
}
else
{
game.set_player1(PlayerType::None);
game.set_player2(PlayerType::None);
}
}
fn string_to_player(str:&str) -> Option<PlayerType>
{
match str
{
"io" => Some(PlayerType::IO),
"random" => Some(PlayerType::Random),
"minimax" => Some(PlayerType::Minimax),
"aiq" => Some(PlayerType::AIQ),
"aiqfixed" => Some(PlayerType::AIQFixed),
"aiqplay" => Some(PlayerType::AIQPlay),
"aivalue" => Some(PlayerType::AIValue),
"aivaluefixed" => Some(PlayerType::AIValueFixed),
//"aiqoff" => Some(PlayerType::AIQOff),
//"aiqofffixed" => Some(PlayerType::AIQOffFixed),
_ => None,
}
}
|
general_play
|
identifier_name
|
main.rs
|
mod game;
use game::*;
use std::time::Instant;
use std::env;
#[allow(unreachable_code)]
fn main()
|
},
_ => {
//general playing with command line arguments
play_from_args();
}
}
}
#[allow(dead_code)]
fn play_from_args()
{
let args = env::args();
//general playing with command line arguments
let mut p1 = PlayerType::IO;
let mut p2 = PlayerType::AIValue;
let mut num = 2;
let mut player1starts = true;
for (i, arg) in args.enumerate()
{
let param = arg.trim().to_lowercase();
match i
{
1 => {
let player = string_to_player(¶m);
if player.is_some() { p1 = player.unwrap(); }
},
2 => {
let player = string_to_player(¶m);
if player.is_some() { p2 = player.unwrap(); }
},
3 => {
let parsed = param.parse::<u32>();
if parsed.is_ok() { num = parsed.unwrap(); }
},
4 => {
if param == "false" { player1starts = false; }
},
_ => {}, //ignore first and all other args
}
}
println!("Running:");
general_play(p1, p2, num, 1, player1starts);
}
#[allow(dead_code)]
fn general_play(p1:PlayerType, p2:PlayerType, num:u32, gps:u32, player1starts:bool)
{
println!("Player X: {:?}", p1);
println!("Player O: {:?}", p2);
println!("Playing {} games..", num);
//prepare
let mut game = Game::new();
game.set_start_player(if player1starts {1} else {2});
game.set_player1(p1);
game.set_player2(p2);
//measure time
let now = Instant::now();
let (p1w, p2w) = game.play_many(num, gps); //play
let elapsed = now.elapsed();
let sec = (elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1000_000_000.0);
println!("Time: {} min {:.3} s", (sec / 60.0).floor(), sec % 60.0);
println!("");
//drop worse player first in case 2 learning agents play against each other and use the same file
if p1w > p2w
{
game.set_player2(PlayerType::None);
game.set_player1(PlayerType::None);
}
else
{
game.set_player1(PlayerType::None);
game.set_player2(PlayerType::None);
}
}
fn string_to_player(str:&str) -> Option<PlayerType>
{
match str
{
"io" => Some(PlayerType::IO),
"random" => Some(PlayerType::Random),
"minimax" => Some(PlayerType::Minimax),
"aiq" => Some(PlayerType::AIQ),
"aiqfixed" => Some(PlayerType::AIQFixed),
"aiqplay" => Some(PlayerType::AIQPlay),
"aivalue" => Some(PlayerType::AIValue),
"aivaluefixed" => Some(PlayerType::AIValueFixed),
//"aiqoff" => Some(PlayerType::AIQOff),
//"aiqofffixed" => Some(PlayerType::AIQOffFixed),
_ => None,
}
}
|
{
match 55
{
0 => general_play(PlayerType::IO, PlayerType::AIValue, 2, 1, true), //play against IO + learn
1 => general_play(PlayerType::IO, PlayerType::AIValueFixed, 2, 1, true), //test with IO
2 => general_play(PlayerType::Minimax, PlayerType::AIValueFixed, 100, 1, true), //test with minimax
3 => general_play(PlayerType::Random, PlayerType::AIValueFixed, 1000, 1, true), //test with random
4 => general_play(PlayerType::AIValueFixed, PlayerType::AIValue, 1_000, 10, true), //training
5 => { //continuous training and testing
println!("Training:");
for i in 0..100
{
println!("Training {}:", i+1);
general_play(PlayerType::AIValueFixed, PlayerType::AIValue, 100, 10, true); //train, learn
println!("Test {}:", i+1);
general_play(PlayerType::Minimax, PlayerType::AIValueFixed, 2, 1, true); //test with minimax
}
println!("Testing:");
general_play(PlayerType::Random, PlayerType::AIValueFixed, 1000, 1, true); //test with random
general_play(PlayerType::IO, PlayerType::AIValueFixed, 2, 1, true); //test with IO
|
identifier_body
|
main.rs
|
mod game;
use game::*;
use std::time::Instant;
use std::env;
#[allow(unreachable_code)]
fn main()
{
match 55
|
4 => general_play(PlayerType::AIValueFixed, PlayerType::AIValue, 1_000, 10, true), //training
5 => { //continuous training and testing
println!("Training:");
for i in 0..100
{
println!("Training {}:", i+1);
general_play(PlayerType::AIValueFixed, PlayerType::AIValue, 100, 10, true); //train, learn
println!("Test {}:", i+1);
general_play(PlayerType::Minimax, PlayerType::AIValueFixed, 2, 1, true); //test with minimax
}
println!("Testing:");
general_play(PlayerType::Random, PlayerType::AIValueFixed, 1000, 1, true); //test with random
general_play(PlayerType::IO, PlayerType::AIValueFixed, 2, 1, true); //test with IO
},
_ => {
//general playing with command line arguments
play_from_args();
}
}
}
#[allow(dead_code)]
fn play_from_args()
{
let args = env::args();
//general playing with command line arguments
let mut p1 = PlayerType::IO;
let mut p2 = PlayerType::AIValue;
let mut num = 2;
let mut player1starts = true;
for (i, arg) in args.enumerate()
{
let param = arg.trim().to_lowercase();
match i
{
1 => {
let player = string_to_player(¶m);
if player.is_some() { p1 = player.unwrap(); }
},
2 => {
let player = string_to_player(¶m);
if player.is_some() { p2 = player.unwrap(); }
},
3 => {
let parsed = param.parse::<u32>();
if parsed.is_ok() { num = parsed.unwrap(); }
},
4 => {
if param == "false" { player1starts = false; }
},
_ => {}, //ignore first and all other args
}
}
println!("Running:");
general_play(p1, p2, num, 1, player1starts);
}
#[allow(dead_code)]
fn general_play(p1:PlayerType, p2:PlayerType, num:u32, gps:u32, player1starts:bool)
{
println!("Player X: {:?}", p1);
println!("Player O: {:?}", p2);
println!("Playing {} games..", num);
//prepare
let mut game = Game::new();
game.set_start_player(if player1starts {1} else {2});
game.set_player1(p1);
game.set_player2(p2);
//measure time
let now = Instant::now();
let (p1w, p2w) = game.play_many(num, gps); //play
let elapsed = now.elapsed();
let sec = (elapsed.as_secs() as f64) + (elapsed.subsec_nanos() as f64 / 1000_000_000.0);
println!("Time: {} min {:.3} s", (sec / 60.0).floor(), sec % 60.0);
println!("");
//drop worse player first in case 2 learning agents play against each other and use the same file
if p1w > p2w
{
game.set_player2(PlayerType::None);
game.set_player1(PlayerType::None);
}
else
{
game.set_player1(PlayerType::None);
game.set_player2(PlayerType::None);
}
}
fn string_to_player(str:&str) -> Option<PlayerType>
{
match str
{
"io" => Some(PlayerType::IO),
"random" => Some(PlayerType::Random),
"minimax" => Some(PlayerType::Minimax),
"aiq" => Some(PlayerType::AIQ),
"aiqfixed" => Some(PlayerType::AIQFixed),
"aiqplay" => Some(PlayerType::AIQPlay),
"aivalue" => Some(PlayerType::AIValue),
"aivaluefixed" => Some(PlayerType::AIValueFixed),
//"aiqoff" => Some(PlayerType::AIQOff),
//"aiqofffixed" => Some(PlayerType::AIQOffFixed),
_ => None,
}
}
|
{
0 => general_play(PlayerType::IO, PlayerType::AIValue, 2, 1, true), //play against IO + learn
1 => general_play(PlayerType::IO, PlayerType::AIValueFixed, 2, 1, true), //test with IO
2 => general_play(PlayerType::Minimax, PlayerType::AIValueFixed, 100, 1, true), //test with minimax
3 => general_play(PlayerType::Random, PlayerType::AIValueFixed, 1000, 1, true), //test with random
|
random_line_split
|
filter-on-struct-member.rs
|
// edition:2021
#![feature(rustc_attrs)]
struct Filter {
div: i32,
}
impl Filter {
fn
|
(&self, x: i32) -> bool {
x % self.div == 1
}
}
struct Data {
filter: Filter,
list: Vec<i32>,
}
impl Data {
fn update(&mut self) {
// The closure passed to filter only captures self.filter,
// therefore mutating self.list is allowed.
self.list.retain(
#[rustc_capture_analysis]
|v| self.filter.allowed(*v),
//~^ ERROR: First Pass analysis includes:
//~| ERROR: Min Capture analysis includes:
//~| NOTE: Capturing self[Deref,(0, 0)] -> ImmBorrow
//~| NOTE: Min Capture self[Deref,(0, 0)] -> ImmBorrow
);
}
}
fn main() {
let mut d = Data { filter: Filter { div: 3 }, list: Vec::new() };
for i in 1..10 {
d.list.push(i);
}
d.update();
}
|
allowed
|
identifier_name
|
filter-on-struct-member.rs
|
// edition:2021
#![feature(rustc_attrs)]
struct Filter {
div: i32,
}
impl Filter {
fn allowed(&self, x: i32) -> bool {
x % self.div == 1
}
}
struct Data {
filter: Filter,
list: Vec<i32>,
}
impl Data {
fn update(&mut self) {
// The closure passed to filter only captures self.filter,
// therefore mutating self.list is allowed.
self.list.retain(
|
//~| NOTE: Min Capture self[Deref,(0, 0)] -> ImmBorrow
);
}
}
fn main() {
let mut d = Data { filter: Filter { div: 3 }, list: Vec::new() };
for i in 1..10 {
d.list.push(i);
}
d.update();
}
|
#[rustc_capture_analysis]
|v| self.filter.allowed(*v),
//~^ ERROR: First Pass analysis includes:
//~| ERROR: Min Capture analysis includes:
//~| NOTE: Capturing self[Deref,(0, 0)] -> ImmBorrow
|
random_line_split
|
window.rs
|
use std::fs::File;
use std::io::*;
use std::mem;
use std::slice;
use std::syscall::sys_yield;
use std::to_num::ToNum;
use super::Event;
use super::Color;
/// A window
pub struct Window {
/// The x coordinate of the window
x: i32,
/// The y coordinate of the window
y: i32,
/// The width of the window
w: u32,
/// The height of the window
h: u32,
/// The title of the window
t: String,
/// The input scheme
file: File,
/// Font file
font: Vec<u8>,
/// Window data
data: Vec<u32>,
}
impl Window {
/// Create a new window
pub fn new(x: i32, y: i32, w: u32, h: u32, title: &str) -> Option<Box<Self>> {
let mut font = Vec::new();
if let Ok(mut font_file) = File::open("file:/ui/unifont.font") {
font_file.read_to_end(&mut font);
}
match File::open(&format!("orbital:///{}/{}/{}/{}/{}", x, y, w, h, title)) {
Ok(file) => Some(box Window {
x: x,
y: y,
w: w,
h: h,
t: title.to_string(),
file: file,
font: font,
data: vec![0; (w * h * 4) as usize],
}),
Err(_) => None
}
}
//TODO: Replace with smarter mechanism, maybe a move event?
pub fn sync_path(&mut self) {
if let Ok(path) = self.file.path() {
//orbital://x/y/w/h/t
if let Some(path_str) = path.to_str() {
let parts: Vec<&str> = path_str.split('/').collect();
if let Some(x) = parts.get(3) {
self.x = x.to_num_signed();
}
if let Some(y) = parts.get(4) {
self.y = y.to_num_signed();
}
if let Some(w) = parts.get(5) {
self.w = w.to_num();
}
if let Some(h) = parts.get(6) {
self.h = h.to_num();
}
}
}
}
/// Get x
//TODO: Sync with window movements
pub fn x(&self) -> i32 {
self.x
}
/// Get y
//TODO: Sync with window movements
pub fn y(&self) -> i32 {
self.y
}
/// Get width
pub fn width(&self) -> u32 {
self.w
}
/// Get height
pub fn height(&self) -> u32 {
self.h
}
/// Get title
pub fn title(&self) -> String {
self.t.clone()
}
/// Set title
pub fn set_title(&mut self, _: &str) {
//TODO
}
/// Draw a pixel
pub fn pixel(&mut self, x: i32, y: i32, color: Color) {
if x >= 0 && y >= 0 && x < self.w as i32 && y < self.h as i32 {
let offset = y as u32 * self.w + x as u32;
self.data[offset as usize] = color.data;
}
}
/// Draw a character, using the loaded font
pub fn char(&mut self, x: i32, y: i32, c: char, color: Color) {
let mut offset = (c as usize) * 16;
for row in 0..16 {
let row_data;
if offset < self.font.len() {
row_data = self.font[offset];
} else {
row_data = 0;
}
for col in 0..8 {
let pixel = (row_data >> (7 - col)) & 1;
if pixel > 0 {
self.pixel(x + col as i32, y + row as i32, color);
}
}
offset += 1;
}
}
//TODO move, resize, set_title
/// Set entire window to a color
// TODO: Improve speed
#[allow(unused_variables)]
pub fn set(&mut self, color: Color) {
let w = self.w;
let h = self.h;
self.rect(0, 0, w, h, color);
}
/// Draw rectangle
// TODO: Improve speed
#[allow(unused_variables)]
pub fn rect(&mut self, start_x: i32, start_y: i32, w: u32, h: u32, color: Color) {
for y in start_y..start_y + h as i32 {
for x in start_x..start_x + w as i32 {
self.pixel(x, y, color);
}
}
}
/// Display an image
//TODO: Improve speed
pub fn image(&mut self, start_x: i32, start_y: i32, w: u32, h: u32, data: &[Color]) {
let mut i = 0;
for y in start_y..start_y + h as i32 {
for x in start_x..start_x + w as i32 {
if i < data.len() {
self.pixel(x, y, data[i])
}
i += 1;
}
}
}
/// Poll for an event
//TODO: clean this up
pub fn poll(&mut self) -> Option<Event> {
let mut event = Event::new();
let event_ptr: *mut Event = &mut event;
loop {
match self.file.read(&mut unsafe {
slice::from_raw_parts_mut(event_ptr as *mut u8, mem::size_of::<Event>())
}) {
Ok(0) => unsafe { sys_yield() },
Ok(_) => return Some(event),
Err(_) => return None,
}
}
}
/// Flip the window buffer
pub fn sync(&mut self) -> bool {
|
}
/// Return a iterator over events
pub fn event_iter<'a>(&'a mut self) -> EventIter<'a> {
EventIter {
window: self,
}
}
}
/// Event iterator
pub struct EventIter<'a> {
window: &'a mut Window,
}
impl<'a> Iterator for EventIter<'a> {
type Item = Event;
fn next(&mut self) -> Option<Event> {
self.window.poll()
}
}
|
self.file.seek(SeekFrom::Start(0));
self.file.write(& unsafe {
slice::from_raw_parts(self.data.as_ptr() as *const u8, self.data.len() * mem::size_of::<u32>())
});
return self.file.sync_all().is_ok();
|
random_line_split
|
window.rs
|
use std::fs::File;
use std::io::*;
use std::mem;
use std::slice;
use std::syscall::sys_yield;
use std::to_num::ToNum;
use super::Event;
use super::Color;
/// A window
pub struct Window {
/// The x coordinate of the window
x: i32,
/// The y coordinate of the window
y: i32,
/// The width of the window
w: u32,
/// The height of the window
h: u32,
/// The title of the window
t: String,
/// The input scheme
file: File,
/// Font file
font: Vec<u8>,
/// Window data
data: Vec<u32>,
}
impl Window {
/// Create a new window
pub fn new(x: i32, y: i32, w: u32, h: u32, title: &str) -> Option<Box<Self>> {
let mut font = Vec::new();
if let Ok(mut font_file) = File::open("file:/ui/unifont.font") {
font_file.read_to_end(&mut font);
}
match File::open(&format!("orbital:///{}/{}/{}/{}/{}", x, y, w, h, title)) {
Ok(file) => Some(box Window {
x: x,
y: y,
w: w,
h: h,
t: title.to_string(),
file: file,
font: font,
data: vec![0; (w * h * 4) as usize],
}),
Err(_) => None
}
}
//TODO: Replace with smarter mechanism, maybe a move event?
pub fn sync_path(&mut self) {
if let Ok(path) = self.file.path() {
//orbital://x/y/w/h/t
if let Some(path_str) = path.to_str()
|
}
}
/// Get x
//TODO: Sync with window movements
pub fn x(&self) -> i32 {
self.x
}
/// Get y
//TODO: Sync with window movements
pub fn y(&self) -> i32 {
self.y
}
/// Get width
pub fn width(&self) -> u32 {
self.w
}
/// Get height
pub fn height(&self) -> u32 {
self.h
}
/// Get title
pub fn title(&self) -> String {
self.t.clone()
}
/// Set title
pub fn set_title(&mut self, _: &str) {
//TODO
}
/// Draw a pixel
pub fn pixel(&mut self, x: i32, y: i32, color: Color) {
if x >= 0 && y >= 0 && x < self.w as i32 && y < self.h as i32 {
let offset = y as u32 * self.w + x as u32;
self.data[offset as usize] = color.data;
}
}
/// Draw a character, using the loaded font
pub fn char(&mut self, x: i32, y: i32, c: char, color: Color) {
let mut offset = (c as usize) * 16;
for row in 0..16 {
let row_data;
if offset < self.font.len() {
row_data = self.font[offset];
} else {
row_data = 0;
}
for col in 0..8 {
let pixel = (row_data >> (7 - col)) & 1;
if pixel > 0 {
self.pixel(x + col as i32, y + row as i32, color);
}
}
offset += 1;
}
}
//TODO move, resize, set_title
/// Set entire window to a color
// TODO: Improve speed
#[allow(unused_variables)]
pub fn set(&mut self, color: Color) {
let w = self.w;
let h = self.h;
self.rect(0, 0, w, h, color);
}
/// Draw rectangle
// TODO: Improve speed
#[allow(unused_variables)]
pub fn rect(&mut self, start_x: i32, start_y: i32, w: u32, h: u32, color: Color) {
for y in start_y..start_y + h as i32 {
for x in start_x..start_x + w as i32 {
self.pixel(x, y, color);
}
}
}
/// Display an image
//TODO: Improve speed
pub fn image(&mut self, start_x: i32, start_y: i32, w: u32, h: u32, data: &[Color]) {
let mut i = 0;
for y in start_y..start_y + h as i32 {
for x in start_x..start_x + w as i32 {
if i < data.len() {
self.pixel(x, y, data[i])
}
i += 1;
}
}
}
/// Poll for an event
//TODO: clean this up
pub fn poll(&mut self) -> Option<Event> {
let mut event = Event::new();
let event_ptr: *mut Event = &mut event;
loop {
match self.file.read(&mut unsafe {
slice::from_raw_parts_mut(event_ptr as *mut u8, mem::size_of::<Event>())
}) {
Ok(0) => unsafe { sys_yield() },
Ok(_) => return Some(event),
Err(_) => return None,
}
}
}
/// Flip the window buffer
pub fn sync(&mut self) -> bool {
self.file.seek(SeekFrom::Start(0));
self.file.write(& unsafe {
slice::from_raw_parts(self.data.as_ptr() as *const u8, self.data.len() * mem::size_of::<u32>())
});
return self.file.sync_all().is_ok();
}
/// Return a iterator over events
pub fn event_iter<'a>(&'a mut self) -> EventIter<'a> {
EventIter {
window: self,
}
}
}
/// Event iterator
pub struct EventIter<'a> {
window: &'a mut Window,
}
impl<'a> Iterator for EventIter<'a> {
type Item = Event;
fn next(&mut self) -> Option<Event> {
self.window.poll()
}
}
|
{
let parts: Vec<&str> = path_str.split('/').collect();
if let Some(x) = parts.get(3) {
self.x = x.to_num_signed();
}
if let Some(y) = parts.get(4) {
self.y = y.to_num_signed();
}
if let Some(w) = parts.get(5) {
self.w = w.to_num();
}
if let Some(h) = parts.get(6) {
self.h = h.to_num();
}
}
|
conditional_block
|
window.rs
|
use std::fs::File;
use std::io::*;
use std::mem;
use std::slice;
use std::syscall::sys_yield;
use std::to_num::ToNum;
use super::Event;
use super::Color;
/// A window
pub struct Window {
/// The x coordinate of the window
x: i32,
/// The y coordinate of the window
y: i32,
/// The width of the window
w: u32,
/// The height of the window
h: u32,
/// The title of the window
t: String,
/// The input scheme
file: File,
/// Font file
font: Vec<u8>,
/// Window data
data: Vec<u32>,
}
impl Window {
/// Create a new window
pub fn new(x: i32, y: i32, w: u32, h: u32, title: &str) -> Option<Box<Self>> {
let mut font = Vec::new();
if let Ok(mut font_file) = File::open("file:/ui/unifont.font") {
font_file.read_to_end(&mut font);
}
match File::open(&format!("orbital:///{}/{}/{}/{}/{}", x, y, w, h, title)) {
Ok(file) => Some(box Window {
x: x,
y: y,
w: w,
h: h,
t: title.to_string(),
file: file,
font: font,
data: vec![0; (w * h * 4) as usize],
}),
Err(_) => None
}
}
//TODO: Replace with smarter mechanism, maybe a move event?
pub fn sync_path(&mut self) {
if let Ok(path) = self.file.path() {
//orbital://x/y/w/h/t
if let Some(path_str) = path.to_str() {
let parts: Vec<&str> = path_str.split('/').collect();
if let Some(x) = parts.get(3) {
self.x = x.to_num_signed();
}
if let Some(y) = parts.get(4) {
self.y = y.to_num_signed();
}
if let Some(w) = parts.get(5) {
self.w = w.to_num();
}
if let Some(h) = parts.get(6) {
self.h = h.to_num();
}
}
}
}
/// Get x
//TODO: Sync with window movements
pub fn x(&self) -> i32
|
/// Get y
//TODO: Sync with window movements
pub fn y(&self) -> i32 {
self.y
}
/// Get width
pub fn width(&self) -> u32 {
self.w
}
/// Get height
pub fn height(&self) -> u32 {
self.h
}
/// Get title
pub fn title(&self) -> String {
self.t.clone()
}
/// Set title
pub fn set_title(&mut self, _: &str) {
//TODO
}
/// Draw a pixel
pub fn pixel(&mut self, x: i32, y: i32, color: Color) {
if x >= 0 && y >= 0 && x < self.w as i32 && y < self.h as i32 {
let offset = y as u32 * self.w + x as u32;
self.data[offset as usize] = color.data;
}
}
/// Draw a character, using the loaded font
pub fn char(&mut self, x: i32, y: i32, c: char, color: Color) {
let mut offset = (c as usize) * 16;
for row in 0..16 {
let row_data;
if offset < self.font.len() {
row_data = self.font[offset];
} else {
row_data = 0;
}
for col in 0..8 {
let pixel = (row_data >> (7 - col)) & 1;
if pixel > 0 {
self.pixel(x + col as i32, y + row as i32, color);
}
}
offset += 1;
}
}
//TODO move, resize, set_title
/// Set entire window to a color
// TODO: Improve speed
#[allow(unused_variables)]
pub fn set(&mut self, color: Color) {
let w = self.w;
let h = self.h;
self.rect(0, 0, w, h, color);
}
/// Draw rectangle
// TODO: Improve speed
#[allow(unused_variables)]
pub fn rect(&mut self, start_x: i32, start_y: i32, w: u32, h: u32, color: Color) {
for y in start_y..start_y + h as i32 {
for x in start_x..start_x + w as i32 {
self.pixel(x, y, color);
}
}
}
/// Display an image
//TODO: Improve speed
pub fn image(&mut self, start_x: i32, start_y: i32, w: u32, h: u32, data: &[Color]) {
let mut i = 0;
for y in start_y..start_y + h as i32 {
for x in start_x..start_x + w as i32 {
if i < data.len() {
self.pixel(x, y, data[i])
}
i += 1;
}
}
}
/// Poll for an event
//TODO: clean this up
pub fn poll(&mut self) -> Option<Event> {
let mut event = Event::new();
let event_ptr: *mut Event = &mut event;
loop {
match self.file.read(&mut unsafe {
slice::from_raw_parts_mut(event_ptr as *mut u8, mem::size_of::<Event>())
}) {
Ok(0) => unsafe { sys_yield() },
Ok(_) => return Some(event),
Err(_) => return None,
}
}
}
/// Flip the window buffer
pub fn sync(&mut self) -> bool {
self.file.seek(SeekFrom::Start(0));
self.file.write(& unsafe {
slice::from_raw_parts(self.data.as_ptr() as *const u8, self.data.len() * mem::size_of::<u32>())
});
return self.file.sync_all().is_ok();
}
/// Return a iterator over events
pub fn event_iter<'a>(&'a mut self) -> EventIter<'a> {
EventIter {
window: self,
}
}
}
/// Event iterator
pub struct EventIter<'a> {
window: &'a mut Window,
}
impl<'a> Iterator for EventIter<'a> {
type Item = Event;
fn next(&mut self) -> Option<Event> {
self.window.poll()
}
}
|
{
self.x
}
|
identifier_body
|
window.rs
|
use std::fs::File;
use std::io::*;
use std::mem;
use std::slice;
use std::syscall::sys_yield;
use std::to_num::ToNum;
use super::Event;
use super::Color;
/// A window
pub struct Window {
/// The x coordinate of the window
x: i32,
/// The y coordinate of the window
y: i32,
/// The width of the window
w: u32,
/// The height of the window
h: u32,
/// The title of the window
t: String,
/// The input scheme
file: File,
/// Font file
font: Vec<u8>,
/// Window data
data: Vec<u32>,
}
impl Window {
/// Create a new window
pub fn new(x: i32, y: i32, w: u32, h: u32, title: &str) -> Option<Box<Self>> {
let mut font = Vec::new();
if let Ok(mut font_file) = File::open("file:/ui/unifont.font") {
font_file.read_to_end(&mut font);
}
match File::open(&format!("orbital:///{}/{}/{}/{}/{}", x, y, w, h, title)) {
Ok(file) => Some(box Window {
x: x,
y: y,
w: w,
h: h,
t: title.to_string(),
file: file,
font: font,
data: vec![0; (w * h * 4) as usize],
}),
Err(_) => None
}
}
//TODO: Replace with smarter mechanism, maybe a move event?
pub fn sync_path(&mut self) {
if let Ok(path) = self.file.path() {
//orbital://x/y/w/h/t
if let Some(path_str) = path.to_str() {
let parts: Vec<&str> = path_str.split('/').collect();
if let Some(x) = parts.get(3) {
self.x = x.to_num_signed();
}
if let Some(y) = parts.get(4) {
self.y = y.to_num_signed();
}
if let Some(w) = parts.get(5) {
self.w = w.to_num();
}
if let Some(h) = parts.get(6) {
self.h = h.to_num();
}
}
}
}
/// Get x
//TODO: Sync with window movements
pub fn x(&self) -> i32 {
self.x
}
/// Get y
//TODO: Sync with window movements
pub fn y(&self) -> i32 {
self.y
}
/// Get width
pub fn width(&self) -> u32 {
self.w
}
/// Get height
pub fn height(&self) -> u32 {
self.h
}
/// Get title
pub fn title(&self) -> String {
self.t.clone()
}
/// Set title
pub fn set_title(&mut self, _: &str) {
//TODO
}
/// Draw a pixel
pub fn pixel(&mut self, x: i32, y: i32, color: Color) {
if x >= 0 && y >= 0 && x < self.w as i32 && y < self.h as i32 {
let offset = y as u32 * self.w + x as u32;
self.data[offset as usize] = color.data;
}
}
/// Draw a character, using the loaded font
pub fn char(&mut self, x: i32, y: i32, c: char, color: Color) {
let mut offset = (c as usize) * 16;
for row in 0..16 {
let row_data;
if offset < self.font.len() {
row_data = self.font[offset];
} else {
row_data = 0;
}
for col in 0..8 {
let pixel = (row_data >> (7 - col)) & 1;
if pixel > 0 {
self.pixel(x + col as i32, y + row as i32, color);
}
}
offset += 1;
}
}
//TODO move, resize, set_title
/// Set entire window to a color
// TODO: Improve speed
#[allow(unused_variables)]
pub fn
|
(&mut self, color: Color) {
let w = self.w;
let h = self.h;
self.rect(0, 0, w, h, color);
}
/// Draw rectangle
// TODO: Improve speed
#[allow(unused_variables)]
pub fn rect(&mut self, start_x: i32, start_y: i32, w: u32, h: u32, color: Color) {
for y in start_y..start_y + h as i32 {
for x in start_x..start_x + w as i32 {
self.pixel(x, y, color);
}
}
}
/// Display an image
//TODO: Improve speed
pub fn image(&mut self, start_x: i32, start_y: i32, w: u32, h: u32, data: &[Color]) {
let mut i = 0;
for y in start_y..start_y + h as i32 {
for x in start_x..start_x + w as i32 {
if i < data.len() {
self.pixel(x, y, data[i])
}
i += 1;
}
}
}
/// Poll for an event
//TODO: clean this up
pub fn poll(&mut self) -> Option<Event> {
let mut event = Event::new();
let event_ptr: *mut Event = &mut event;
loop {
match self.file.read(&mut unsafe {
slice::from_raw_parts_mut(event_ptr as *mut u8, mem::size_of::<Event>())
}) {
Ok(0) => unsafe { sys_yield() },
Ok(_) => return Some(event),
Err(_) => return None,
}
}
}
/// Flip the window buffer
pub fn sync(&mut self) -> bool {
self.file.seek(SeekFrom::Start(0));
self.file.write(& unsafe {
slice::from_raw_parts(self.data.as_ptr() as *const u8, self.data.len() * mem::size_of::<u32>())
});
return self.file.sync_all().is_ok();
}
/// Return a iterator over events
pub fn event_iter<'a>(&'a mut self) -> EventIter<'a> {
EventIter {
window: self,
}
}
}
/// Event iterator
pub struct EventIter<'a> {
window: &'a mut Window,
}
impl<'a> Iterator for EventIter<'a> {
type Item = Event;
fn next(&mut self) -> Option<Event> {
self.window.poll()
}
}
|
set
|
identifier_name
|
lint-dead-code-5.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(unused_variables)]
#![deny(dead_code)]
enum Enum1 {
Variant1(isize),
Variant2 //~ ERROR: variant is never used
}
enum Enum2 {
Variant3(bool),
#[allow(dead_code)]
Variant4(isize),
Variant5 { _x: isize }, //~ ERROR: variant is never used: `Variant5`
Variant6(isize), //~ ERROR: variant is never used: `Variant6`
_Variant7,
}
enum Enum3 { //~ ERROR: enum is never used
Variant8,
Variant9
}
fn main() {
|
Enum1::Variant1(_) => (),
Enum1::Variant2 => ()
}
let x = Enum2::Variant3(true);
}
|
let v = Enum1::Variant1(1);
match v {
|
random_line_split
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.