file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes)
---|---|---|---|---
font_chooser_dialog.rs
|
// Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use ffi;
use FFIWidget;
use cast::{GTK_WINDOW};
use glib::translate::ToGlibPtr;
struct_Widget!(FontChooserDialog);
impl FontChooserDialog {
pub fn new(title: &str, parent: Option<&::Window>) -> Option<FontChooserDialog> {
let tmp = unsafe {
ffi::gtk_font_chooser_dialog_new(title.to_glib_none().0,
match parent {
Some(ref p) => GTK_WINDOW(p.unwrap_widget()),
None => GTK_WINDOW(::std::ptr::null_mut())
}
)
};
if tmp.is_null() {
None
} else {
Some(::FFIWidget::wrap_widget(tmp))
}
|
}
}
impl_drop!(FontChooserDialog);
impl_TraitWidget!(FontChooserDialog);
impl ::ContainerTrait for FontChooserDialog {}
impl ::BinTrait for FontChooserDialog {}
impl ::WindowTrait for FontChooserDialog {}
impl ::DialogTrait for FontChooserDialog {}
impl ::FontChooserTrait for FontChooserDialog {}
|
random_line_split
|
|
sound-api.rs
|
#[macro_use]
extern crate hlua;
fn main() {
let mut lua = hlua::Lua::new();
lua.openlibs();
|
// we create and fill an array named `Sound` which will be used as a class-like interface
{
let mut sound_namespace = lua.empty_array("Sound");
// creating the `Sound.new` function
sound_namespace.set("new", hlua::function0(|| Sound::new()));
}
lua.execute::<()>(r#"
s = Sound.new();
s:play();
print("hello world from within lua!");
print("is the sound playing:", s:is_playing());
s:stop();
print("is the sound playing:", s:is_playing());
"#).unwrap();
}
// this `Sound` struct is the object that we will use to demonstrate hlua
struct Sound {
playing: bool,
}
// this macro implements the required trait so that we can *push* the object to lua
// (i.e. move it inside lua)
implement_lua_push!(Sound, |mut metatable| {
// we create a `__index` entry in the metatable
// when the lua code calls `sound:play()`, it will look for `play` in there
let mut index = metatable.empty_array("__index");
index.set("play", hlua::function1(|snd: &mut Sound| {
snd.play()
}));
index.set("stop", hlua::function1(|snd: &mut Sound| {
snd.stop()
}));
index.set("is_playing", hlua::function1(|snd: &Sound| {
snd.is_playing()
}));
});
// this macro implements the required traits so that we can *read* the object back
implement_lua_read!(Sound);
impl Sound {
pub fn new() -> Sound {
Sound {
playing: false
}
}
pub fn play(&mut self) {
println!("playing");
self.playing = true;
}
pub fn stop(&mut self) {
println!("stopping");
self.playing = false;
}
pub fn is_playing(&self) -> bool {
self.playing
}
}
// this destructor is here to show you that objects are properly getting destroyed
impl Drop for Sound {
fn drop(&mut self) {
println!("`Sound` object destroyed");
}
}
|
random_line_split
|
|
sound-api.rs
|
#[macro_use]
extern crate hlua;
fn main() {
let mut lua = hlua::Lua::new();
lua.openlibs();
// we create and fill an array named `Sound` which will be used as a class-like interface
{
let mut sound_namespace = lua.empty_array("Sound");
// creating the `Sound.new` function
sound_namespace.set("new", hlua::function0(|| Sound::new()));
}
lua.execute::<()>(r#"
s = Sound.new();
s:play();
print("hello world from within lua!");
print("is the sound playing:", s:is_playing());
s:stop();
print("is the sound playing:", s:is_playing());
"#).unwrap();
}
// this `Sound` struct is the object that we will use to demonstrate hlua
struct Sound {
playing: bool,
}
// this macro implements the required trait so that we can *push* the object to lua
// (i.e. move it inside lua)
implement_lua_push!(Sound, |mut metatable| {
// we create a `__index` entry in the metatable
// when the lua code calls `sound:play()`, it will look for `play` in there
let mut index = metatable.empty_array("__index");
index.set("play", hlua::function1(|snd: &mut Sound| {
snd.play()
}));
index.set("stop", hlua::function1(|snd: &mut Sound| {
snd.stop()
}));
index.set("is_playing", hlua::function1(|snd: &Sound| {
snd.is_playing()
}));
});
// this macro implements the required traits so that we can *read* the object back
implement_lua_read!(Sound);
impl Sound {
pub fn
|
() -> Sound {
Sound {
playing: false
}
}
pub fn play(&mut self) {
println!("playing");
self.playing = true;
}
pub fn stop(&mut self) {
println!("stopping");
self.playing = false;
}
pub fn is_playing(&self) -> bool {
self.playing
}
}
// this destructor is here to show you that objects are properly getting destroyed
impl Drop for Sound {
fn drop(&mut self) {
println!("`Sound` object destroyed");
}
}
|
new
|
identifier_name
|
sound-api.rs
|
#[macro_use]
extern crate hlua;
fn main()
|
print("is the sound playing:", s:is_playing());
"#).unwrap();
}
// this `Sound` struct is the object that we will use to demonstrate hlua
struct Sound {
playing: bool,
}
// this macro implements the required trait so that we can *push* the object to lua
// (i.e. move it inside lua)
implement_lua_push!(Sound, |mut metatable| {
// we create a `__index` entry in the metatable
// when the lua code calls `sound:play()`, it will look for `play` in there
let mut index = metatable.empty_array("__index");
index.set("play", hlua::function1(|snd: &mut Sound| {
snd.play()
}));
index.set("stop", hlua::function1(|snd: &mut Sound| {
snd.stop()
}));
index.set("is_playing", hlua::function1(|snd: &Sound| {
snd.is_playing()
}));
});
// this macro implements the required traits so that we can *read* the object back
implement_lua_read!(Sound);
impl Sound {
pub fn new() -> Sound {
Sound {
playing: false
}
}
pub fn play(&mut self) {
println!("playing");
self.playing = true;
}
pub fn stop(&mut self) {
println!("stopping");
self.playing = false;
}
pub fn is_playing(&self) -> bool {
self.playing
}
}
// this destructor is here to show you that objects are properly getting destroyed
impl Drop for Sound {
fn drop(&mut self) {
println!("`Sound` object destroyed");
}
}
|
{
let mut lua = hlua::Lua::new();
lua.openlibs();
// we create and fill an array named `Sound` which will be used as a class-like interface
{
let mut sound_namespace = lua.empty_array("Sound");
// creating the `Sound.new` function
sound_namespace.set("new", hlua::function0(|| Sound::new()));
}
lua.execute::<()>(r#"
s = Sound.new();
s:play();
print("hello world from within lua!");
print("is the sound playing:", s:is_playing());
s:stop();
|
identifier_body
|
xrtest.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::callback::ExceptionHandling;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::FunctionBinding::Function;
use crate::dom::bindings::codegen::Bindings::XRTestBinding::{
self, FakeXRDeviceInit, XRTestMethods,
};
use crate::dom::bindings::refcounted::{Trusted, TrustedPromise};
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::fakexrdevice::{get_origin, get_views, FakeXRDevice};
use crate::dom::globalscope::GlobalScope;
use crate::dom::promise::Promise;
use crate::script_thread::ScriptThread;
use crate::task_source::TaskSource;
use dom_struct::dom_struct;
use ipc_channel::ipc::IpcSender;
use ipc_channel::router::ROUTER;
use profile_traits::ipc;
use std::rc::Rc;
use webxr_api::{self, Error as XRError, MockDeviceInit, MockDeviceMsg};
#[dom_struct]
pub struct XRTest {
reflector: Reflector,
devices_connected: DomRefCell<Vec<Dom<FakeXRDevice>>>,
}
impl XRTest {
pub fn new_inherited() -> XRTest
|
pub fn new(global: &GlobalScope) -> DomRoot<XRTest> {
reflect_dom_object(
Box::new(XRTest::new_inherited()),
global,
XRTestBinding::Wrap,
)
}
fn device_obtained(
&self,
response: Result<IpcSender<MockDeviceMsg>, XRError>,
trusted: TrustedPromise,
) {
let promise = trusted.root();
if let Ok(sender) = response {
let device = FakeXRDevice::new(&self.global(), sender);
self.devices_connected
.borrow_mut()
.push(Dom::from_ref(&device));
promise.resolve_native(&device);
} else {
promise.reject_native(&());
}
}
}
impl XRTestMethods for XRTest {
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
#[allow(unsafe_code)]
fn SimulateDeviceConnection(&self, init: &FakeXRDeviceInit) -> Rc<Promise> {
let global = self.global();
let p = Promise::new(&global);
let origin = if let Some(ref o) = init.viewerOrigin {
match get_origin(&o) {
Ok(origin) => Some(origin),
Err(e) => {
p.reject_error(e);
return p;
},
}
} else {
None
};
let floor_origin = if let Some(ref o) = init.floorOrigin {
match get_origin(&o) {
Ok(origin) => Some(origin),
Err(e) => {
p.reject_error(e);
return p;
},
}
} else {
None
};
let views = match get_views(&init.views) {
Ok(views) => views,
Err(e) => {
p.reject_error(e);
return p;
},
};
let supported_features = if let Some(ref s) = init.supportedFeatures {
s.iter().cloned().map(String::from).collect()
} else {
vec![]
};
let init = MockDeviceInit {
viewer_origin: origin,
views,
supports_immersive: init.supportsImmersive,
supports_unbounded: init.supportsUnbounded,
floor_origin,
supported_features,
};
let global = self.global();
let window = global.as_window();
let this = Trusted::new(self);
let mut trusted = Some(TrustedPromise::new(p.clone()));
let (task_source, canceller) = window
.task_manager()
.dom_manipulation_task_source_with_canceller();
let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap();
ROUTER.add_route(
receiver.to_opaque(),
Box::new(move |message| {
let trusted = trusted
.take()
.expect("SimulateDeviceConnection callback called twice");
let this = this.clone();
let message = message
.to()
.expect("SimulateDeviceConnection callback given incorrect payload");
let _ = task_source.queue_with_canceller(
task!(request_session: move || {
this.root().device_obtained(message, trusted);
}),
&canceller,
);
}),
);
window
.webxr_registry()
.simulate_device_connection(init, sender);
p
}
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
fn SimulateUserActivation(&self, f: Rc<Function>) {
ScriptThread::set_user_interacting(true);
let _ = f.Call__(vec![], ExceptionHandling::Rethrow);
ScriptThread::set_user_interacting(false);
}
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
fn DisconnectAllDevices(&self) -> Rc<Promise> {
// XXXManishearth implement device disconnection and session ending
let global = self.global();
let p = Promise::new(&global);
let mut devices = self.devices_connected.borrow_mut();
if devices.is_empty() {
p.resolve_native(&());
} else {
let mut len = devices.len();
let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap();
let mut rooted_devices: Vec<_> =
devices.iter().map(|x| DomRoot::from_ref(&**x)).collect();
devices.clear();
let mut trusted = Some(TrustedPromise::new(p.clone()));
let (task_source, canceller) = global
.as_window()
.task_manager()
.dom_manipulation_task_source_with_canceller();
ROUTER.add_route(
receiver.to_opaque(),
Box::new(move |_| {
len -= 1;
if len == 0 {
let trusted = trusted
.take()
.expect("DisconnectAllDevices disconnected more devices than expected");
let _ =
task_source.queue_with_canceller(trusted.resolve_task(()), &canceller);
}
}),
);
for device in rooted_devices.drain(..) {
device.disconnect(sender.clone());
}
};
p
}
}
|
{
XRTest {
reflector: Reflector::new(),
devices_connected: DomRefCell::new(vec![]),
}
}
|
identifier_body
|
xrtest.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::callback::ExceptionHandling;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::FunctionBinding::Function;
use crate::dom::bindings::codegen::Bindings::XRTestBinding::{
self, FakeXRDeviceInit, XRTestMethods,
};
use crate::dom::bindings::refcounted::{Trusted, TrustedPromise};
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::fakexrdevice::{get_origin, get_views, FakeXRDevice};
use crate::dom::globalscope::GlobalScope;
use crate::dom::promise::Promise;
use crate::script_thread::ScriptThread;
use crate::task_source::TaskSource;
use dom_struct::dom_struct;
use ipc_channel::ipc::IpcSender;
use ipc_channel::router::ROUTER;
use profile_traits::ipc;
use std::rc::Rc;
use webxr_api::{self, Error as XRError, MockDeviceInit, MockDeviceMsg};
#[dom_struct]
pub struct
|
{
reflector: Reflector,
devices_connected: DomRefCell<Vec<Dom<FakeXRDevice>>>,
}
impl XRTest {
pub fn new_inherited() -> XRTest {
XRTest {
reflector: Reflector::new(),
devices_connected: DomRefCell::new(vec![]),
}
}
pub fn new(global: &GlobalScope) -> DomRoot<XRTest> {
reflect_dom_object(
Box::new(XRTest::new_inherited()),
global,
XRTestBinding::Wrap,
)
}
fn device_obtained(
&self,
response: Result<IpcSender<MockDeviceMsg>, XRError>,
trusted: TrustedPromise,
) {
let promise = trusted.root();
if let Ok(sender) = response {
let device = FakeXRDevice::new(&self.global(), sender);
self.devices_connected
.borrow_mut()
.push(Dom::from_ref(&device));
promise.resolve_native(&device);
} else {
promise.reject_native(&());
}
}
}
impl XRTestMethods for XRTest {
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
#[allow(unsafe_code)]
fn SimulateDeviceConnection(&self, init: &FakeXRDeviceInit) -> Rc<Promise> {
let global = self.global();
let p = Promise::new(&global);
let origin = if let Some(ref o) = init.viewerOrigin {
match get_origin(&o) {
Ok(origin) => Some(origin),
Err(e) => {
p.reject_error(e);
return p;
},
}
} else {
None
};
let floor_origin = if let Some(ref o) = init.floorOrigin {
match get_origin(&o) {
Ok(origin) => Some(origin),
Err(e) => {
p.reject_error(e);
return p;
},
}
} else {
None
};
let views = match get_views(&init.views) {
Ok(views) => views,
Err(e) => {
p.reject_error(e);
return p;
},
};
let supported_features = if let Some(ref s) = init.supportedFeatures {
s.iter().cloned().map(String::from).collect()
} else {
vec![]
};
let init = MockDeviceInit {
viewer_origin: origin,
views,
supports_immersive: init.supportsImmersive,
supports_unbounded: init.supportsUnbounded,
floor_origin,
supported_features,
};
let global = self.global();
let window = global.as_window();
let this = Trusted::new(self);
let mut trusted = Some(TrustedPromise::new(p.clone()));
let (task_source, canceller) = window
.task_manager()
.dom_manipulation_task_source_with_canceller();
let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap();
ROUTER.add_route(
receiver.to_opaque(),
Box::new(move |message| {
let trusted = trusted
.take()
.expect("SimulateDeviceConnection callback called twice");
let this = this.clone();
let message = message
.to()
.expect("SimulateDeviceConnection callback given incorrect payload");
let _ = task_source.queue_with_canceller(
task!(request_session: move || {
this.root().device_obtained(message, trusted);
}),
&canceller,
);
}),
);
window
.webxr_registry()
.simulate_device_connection(init, sender);
p
}
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
fn SimulateUserActivation(&self, f: Rc<Function>) {
ScriptThread::set_user_interacting(true);
let _ = f.Call__(vec![], ExceptionHandling::Rethrow);
ScriptThread::set_user_interacting(false);
}
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
fn DisconnectAllDevices(&self) -> Rc<Promise> {
// XXXManishearth implement device disconnection and session ending
let global = self.global();
let p = Promise::new(&global);
let mut devices = self.devices_connected.borrow_mut();
if devices.is_empty() {
p.resolve_native(&());
} else {
let mut len = devices.len();
let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap();
let mut rooted_devices: Vec<_> =
devices.iter().map(|x| DomRoot::from_ref(&**x)).collect();
devices.clear();
let mut trusted = Some(TrustedPromise::new(p.clone()));
let (task_source, canceller) = global
.as_window()
.task_manager()
.dom_manipulation_task_source_with_canceller();
ROUTER.add_route(
receiver.to_opaque(),
Box::new(move |_| {
len -= 1;
if len == 0 {
let trusted = trusted
.take()
.expect("DisconnectAllDevices disconnected more devices than expected");
let _ =
task_source.queue_with_canceller(trusted.resolve_task(()), &canceller);
}
}),
);
for device in rooted_devices.drain(..) {
device.disconnect(sender.clone());
}
};
p
}
}
|
XRTest
|
identifier_name
|
xrtest.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::callback::ExceptionHandling;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::FunctionBinding::Function;
use crate::dom::bindings::codegen::Bindings::XRTestBinding::{
self, FakeXRDeviceInit, XRTestMethods,
};
use crate::dom::bindings::refcounted::{Trusted, TrustedPromise};
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::fakexrdevice::{get_origin, get_views, FakeXRDevice};
use crate::dom::globalscope::GlobalScope;
use crate::dom::promise::Promise;
use crate::script_thread::ScriptThread;
use crate::task_source::TaskSource;
use dom_struct::dom_struct;
use ipc_channel::ipc::IpcSender;
use ipc_channel::router::ROUTER;
use profile_traits::ipc;
use std::rc::Rc;
use webxr_api::{self, Error as XRError, MockDeviceInit, MockDeviceMsg};
#[dom_struct]
pub struct XRTest {
reflector: Reflector,
devices_connected: DomRefCell<Vec<Dom<FakeXRDevice>>>,
}
impl XRTest {
pub fn new_inherited() -> XRTest {
XRTest {
reflector: Reflector::new(),
devices_connected: DomRefCell::new(vec![]),
}
}
pub fn new(global: &GlobalScope) -> DomRoot<XRTest> {
reflect_dom_object(
Box::new(XRTest::new_inherited()),
|
fn device_obtained(
&self,
response: Result<IpcSender<MockDeviceMsg>, XRError>,
trusted: TrustedPromise,
) {
let promise = trusted.root();
if let Ok(sender) = response {
let device = FakeXRDevice::new(&self.global(), sender);
self.devices_connected
.borrow_mut()
.push(Dom::from_ref(&device));
promise.resolve_native(&device);
} else {
promise.reject_native(&());
}
}
}
impl XRTestMethods for XRTest {
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
#[allow(unsafe_code)]
fn SimulateDeviceConnection(&self, init: &FakeXRDeviceInit) -> Rc<Promise> {
let global = self.global();
let p = Promise::new(&global);
let origin = if let Some(ref o) = init.viewerOrigin {
match get_origin(&o) {
Ok(origin) => Some(origin),
Err(e) => {
p.reject_error(e);
return p;
},
}
} else {
None
};
let floor_origin = if let Some(ref o) = init.floorOrigin {
match get_origin(&o) {
Ok(origin) => Some(origin),
Err(e) => {
p.reject_error(e);
return p;
},
}
} else {
None
};
let views = match get_views(&init.views) {
Ok(views) => views,
Err(e) => {
p.reject_error(e);
return p;
},
};
let supported_features = if let Some(ref s) = init.supportedFeatures {
s.iter().cloned().map(String::from).collect()
} else {
vec![]
};
let init = MockDeviceInit {
viewer_origin: origin,
views,
supports_immersive: init.supportsImmersive,
supports_unbounded: init.supportsUnbounded,
floor_origin,
supported_features,
};
let global = self.global();
let window = global.as_window();
let this = Trusted::new(self);
let mut trusted = Some(TrustedPromise::new(p.clone()));
let (task_source, canceller) = window
.task_manager()
.dom_manipulation_task_source_with_canceller();
let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap();
ROUTER.add_route(
receiver.to_opaque(),
Box::new(move |message| {
let trusted = trusted
.take()
.expect("SimulateDeviceConnection callback called twice");
let this = this.clone();
let message = message
.to()
.expect("SimulateDeviceConnection callback given incorrect payload");
let _ = task_source.queue_with_canceller(
task!(request_session: move || {
this.root().device_obtained(message, trusted);
}),
&canceller,
);
}),
);
window
.webxr_registry()
.simulate_device_connection(init, sender);
p
}
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
fn SimulateUserActivation(&self, f: Rc<Function>) {
ScriptThread::set_user_interacting(true);
let _ = f.Call__(vec![], ExceptionHandling::Rethrow);
ScriptThread::set_user_interacting(false);
}
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
fn DisconnectAllDevices(&self) -> Rc<Promise> {
// XXXManishearth implement device disconnection and session ending
let global = self.global();
let p = Promise::new(&global);
let mut devices = self.devices_connected.borrow_mut();
if devices.is_empty() {
p.resolve_native(&());
} else {
let mut len = devices.len();
let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap();
let mut rooted_devices: Vec<_> =
devices.iter().map(|x| DomRoot::from_ref(&**x)).collect();
devices.clear();
let mut trusted = Some(TrustedPromise::new(p.clone()));
let (task_source, canceller) = global
.as_window()
.task_manager()
.dom_manipulation_task_source_with_canceller();
ROUTER.add_route(
receiver.to_opaque(),
Box::new(move |_| {
len -= 1;
if len == 0 {
let trusted = trusted
.take()
.expect("DisconnectAllDevices disconnected more devices than expected");
let _ =
task_source.queue_with_canceller(trusted.resolve_task(()), &canceller);
}
}),
);
for device in rooted_devices.drain(..) {
device.disconnect(sender.clone());
}
};
p
}
}
|
global,
XRTestBinding::Wrap,
)
}
|
random_line_split
|
xrtest.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::callback::ExceptionHandling;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::FunctionBinding::Function;
use crate::dom::bindings::codegen::Bindings::XRTestBinding::{
self, FakeXRDeviceInit, XRTestMethods,
};
use crate::dom::bindings::refcounted::{Trusted, TrustedPromise};
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector};
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::fakexrdevice::{get_origin, get_views, FakeXRDevice};
use crate::dom::globalscope::GlobalScope;
use crate::dom::promise::Promise;
use crate::script_thread::ScriptThread;
use crate::task_source::TaskSource;
use dom_struct::dom_struct;
use ipc_channel::ipc::IpcSender;
use ipc_channel::router::ROUTER;
use profile_traits::ipc;
use std::rc::Rc;
use webxr_api::{self, Error as XRError, MockDeviceInit, MockDeviceMsg};
#[dom_struct]
pub struct XRTest {
reflector: Reflector,
devices_connected: DomRefCell<Vec<Dom<FakeXRDevice>>>,
}
impl XRTest {
pub fn new_inherited() -> XRTest {
XRTest {
reflector: Reflector::new(),
devices_connected: DomRefCell::new(vec![]),
}
}
pub fn new(global: &GlobalScope) -> DomRoot<XRTest> {
reflect_dom_object(
Box::new(XRTest::new_inherited()),
global,
XRTestBinding::Wrap,
)
}
fn device_obtained(
&self,
response: Result<IpcSender<MockDeviceMsg>, XRError>,
trusted: TrustedPromise,
) {
let promise = trusted.root();
if let Ok(sender) = response {
let device = FakeXRDevice::new(&self.global(), sender);
self.devices_connected
.borrow_mut()
.push(Dom::from_ref(&device));
promise.resolve_native(&device);
} else {
promise.reject_native(&());
}
}
}
impl XRTestMethods for XRTest {
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
#[allow(unsafe_code)]
fn SimulateDeviceConnection(&self, init: &FakeXRDeviceInit) -> Rc<Promise> {
let global = self.global();
let p = Promise::new(&global);
let origin = if let Some(ref o) = init.viewerOrigin {
match get_origin(&o) {
Ok(origin) => Some(origin),
Err(e) => {
p.reject_error(e);
return p;
},
}
} else {
None
};
let floor_origin = if let Some(ref o) = init.floorOrigin {
match get_origin(&o) {
Ok(origin) => Some(origin),
Err(e) => {
p.reject_error(e);
return p;
},
}
} else {
None
};
let views = match get_views(&init.views) {
Ok(views) => views,
Err(e) => {
p.reject_error(e);
return p;
},
};
let supported_features = if let Some(ref s) = init.supportedFeatures {
s.iter().cloned().map(String::from).collect()
} else {
vec![]
};
let init = MockDeviceInit {
viewer_origin: origin,
views,
supports_immersive: init.supportsImmersive,
supports_unbounded: init.supportsUnbounded,
floor_origin,
supported_features,
};
let global = self.global();
let window = global.as_window();
let this = Trusted::new(self);
let mut trusted = Some(TrustedPromise::new(p.clone()));
let (task_source, canceller) = window
.task_manager()
.dom_manipulation_task_source_with_canceller();
let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap();
ROUTER.add_route(
receiver.to_opaque(),
Box::new(move |message| {
let trusted = trusted
.take()
.expect("SimulateDeviceConnection callback called twice");
let this = this.clone();
let message = message
.to()
.expect("SimulateDeviceConnection callback given incorrect payload");
let _ = task_source.queue_with_canceller(
task!(request_session: move || {
this.root().device_obtained(message, trusted);
}),
&canceller,
);
}),
);
window
.webxr_registry()
.simulate_device_connection(init, sender);
p
}
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
fn SimulateUserActivation(&self, f: Rc<Function>) {
ScriptThread::set_user_interacting(true);
let _ = f.Call__(vec![], ExceptionHandling::Rethrow);
ScriptThread::set_user_interacting(false);
}
/// https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md
fn DisconnectAllDevices(&self) -> Rc<Promise> {
// XXXManishearth implement device disconnection and session ending
let global = self.global();
let p = Promise::new(&global);
let mut devices = self.devices_connected.borrow_mut();
if devices.is_empty() {
p.resolve_native(&());
} else {
let mut len = devices.len();
let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap();
let mut rooted_devices: Vec<_> =
devices.iter().map(|x| DomRoot::from_ref(&**x)).collect();
devices.clear();
let mut trusted = Some(TrustedPromise::new(p.clone()));
let (task_source, canceller) = global
.as_window()
.task_manager()
.dom_manipulation_task_source_with_canceller();
ROUTER.add_route(
receiver.to_opaque(),
Box::new(move |_| {
len -= 1;
if len == 0
|
}),
);
for device in rooted_devices.drain(..) {
device.disconnect(sender.clone());
}
};
p
}
}
|
{
let trusted = trusted
.take()
.expect("DisconnectAllDevices disconnected more devices than expected");
let _ =
task_source.queue_with_canceller(trusted.resolve_task(()), &canceller);
}
|
conditional_block
|
readme.rs
|
#[macro_use]
extern crate linxal;
extern crate ndarray;
use linxal::types::{c32, LinxalMatrix};
use linxal::eigenvalues::Eigen;
use linxal::solve_linear::SolveLinear;
use ndarray::{arr1, arr2};
fn f1() {
let m = arr2(&[[1.0f32, 2.0],
[-2.0, 1.0]]);
let r = m.eigenvalues();
assert!(r.is_ok());
let r = r.unwrap();
let true_evs = arr1(&[c32::new(1.0, 2.0), c32::new(1.0, -2.0)]);
assert_eq_within_tol!(true_evs, r, 0.01);
let b = arr1(&[-1.0, 1.0]);
let x = m.solve_linear(&b).unwrap();
let true_x = arr1(&[-0.6, -0.2]);
assert_eq_within_tol!(x, true_x, 0.0001);
}
fn f2()
|
fn main() {
f1();
f2();
}
|
{
let m = arr2(&[[1.0f32, 2.0],
[-2.0, 1.0]]);
let r = Eigen::compute(&m, false, false);
assert!(r.is_ok());
let r = r.unwrap();
let true_evs = arr1(&[c32::new(1.0, 2.0), c32::new(1.0, -2.0)]);
assert_eq_within_tol!(true_evs, r.values, 0.01);
let b = arr1(&[-1.0, 1.0]);
let x = SolveLinear::compute(&m, &b).unwrap();
let true_x = arr1(&[-0.6, -0.2]);
assert_eq_within_tol!(x, true_x, 0.0001);
}
|
identifier_body
|
readme.rs
|
#[macro_use]
extern crate linxal;
extern crate ndarray;
use linxal::types::{c32, LinxalMatrix};
use linxal::eigenvalues::Eigen;
use linxal::solve_linear::SolveLinear;
use ndarray::{arr1, arr2};
fn f1() {
let m = arr2(&[[1.0f32, 2.0],
[-2.0, 1.0]]);
let r = m.eigenvalues();
assert!(r.is_ok());
let r = r.unwrap();
let true_evs = arr1(&[c32::new(1.0, 2.0), c32::new(1.0, -2.0)]);
|
let b = arr1(&[-1.0, 1.0]);
let x = m.solve_linear(&b).unwrap();
let true_x = arr1(&[-0.6, -0.2]);
assert_eq_within_tol!(x, true_x, 0.0001);
}
fn f2() {
let m = arr2(&[[1.0f32, 2.0],
[-2.0, 1.0]]);
let r = Eigen::compute(&m, false, false);
assert!(r.is_ok());
let r = r.unwrap();
let true_evs = arr1(&[c32::new(1.0, 2.0), c32::new(1.0, -2.0)]);
assert_eq_within_tol!(true_evs, r.values, 0.01);
let b = arr1(&[-1.0, 1.0]);
let x = SolveLinear::compute(&m, &b).unwrap();
let true_x = arr1(&[-0.6, -0.2]);
assert_eq_within_tol!(x, true_x, 0.0001);
}
fn main() {
f1();
f2();
}
|
assert_eq_within_tol!(true_evs, r, 0.01);
|
random_line_split
|
readme.rs
|
#[macro_use]
extern crate linxal;
extern crate ndarray;
use linxal::types::{c32, LinxalMatrix};
use linxal::eigenvalues::Eigen;
use linxal::solve_linear::SolveLinear;
use ndarray::{arr1, arr2};
fn f1() {
let m = arr2(&[[1.0f32, 2.0],
[-2.0, 1.0]]);
let r = m.eigenvalues();
assert!(r.is_ok());
let r = r.unwrap();
let true_evs = arr1(&[c32::new(1.0, 2.0), c32::new(1.0, -2.0)]);
assert_eq_within_tol!(true_evs, r, 0.01);
let b = arr1(&[-1.0, 1.0]);
let x = m.solve_linear(&b).unwrap();
let true_x = arr1(&[-0.6, -0.2]);
assert_eq_within_tol!(x, true_x, 0.0001);
}
fn
|
() {
let m = arr2(&[[1.0f32, 2.0],
[-2.0, 1.0]]);
let r = Eigen::compute(&m, false, false);
assert!(r.is_ok());
let r = r.unwrap();
let true_evs = arr1(&[c32::new(1.0, 2.0), c32::new(1.0, -2.0)]);
assert_eq_within_tol!(true_evs, r.values, 0.01);
let b = arr1(&[-1.0, 1.0]);
let x = SolveLinear::compute(&m, &b).unwrap();
let true_x = arr1(&[-0.6, -0.2]);
assert_eq_within_tol!(x, true_x, 0.0001);
}
fn main() {
f1();
f2();
}
|
f2
|
identifier_name
|
task-perf-one-million.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test for concurrent tasks
use core::comm::*;
fn calc(children: uint, parent_wait_chan: &Chan<Chan<Chan<int>>>) {
let wait_ports: ~[Port<Chan<Chan<int>>>] = do vec::from_fn(children) |_| {
let (wait_port, wait_chan) = stream::<Chan<Chan<int>>>();
do task::spawn {
calc(children / 2, &wait_chan);
}
wait_port
};
let child_start_chans: ~[Chan<Chan<int>>] = vec::map_consume(wait_ports, |port| port.recv());
let (start_port, start_chan) = stream::<Chan<int>>();
parent_wait_chan.send(start_chan);
let parent_result_chan: Chan<int> = start_port.recv();
let child_sum_ports: ~[Port<int>] = do vec::map_consume(child_start_chans) |child_start_chan| {
let (child_sum_port, child_sum_chan) = stream::<int>();
child_start_chan.send(child_sum_chan);
child_sum_port
};
let mut sum = 0;
vec::consume(child_sum_ports, |_, sum_port| sum += sum_port.recv() );
parent_result_chan.send(sum + 1);
}
fn main()
|
;
let start_chan = wait_port.recv();
let (sum_port, sum_chan) = stream::<int>();
start_chan.send(sum_chan);
let sum = sum_port.recv();
error!("How many tasks? %d tasks.", sum);
}
|
{
let args = os::args();
let args = if os::getenv(~"RUST_BENCH").is_some() {
~[~"", ~"30"]
} else if args.len() <= 1u {
~[~"", ~"10"]
} else {
args
};
let children = uint::from_str(args[1]).get();
let (wait_port, wait_chan) = stream();
do task::spawn {
calc(children, &wait_chan);
}
|
identifier_body
|
task-perf-one-million.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test for concurrent tasks
use core::comm::*;
fn
|
(children: uint, parent_wait_chan: &Chan<Chan<Chan<int>>>) {
let wait_ports: ~[Port<Chan<Chan<int>>>] = do vec::from_fn(children) |_| {
let (wait_port, wait_chan) = stream::<Chan<Chan<int>>>();
do task::spawn {
calc(children / 2, &wait_chan);
}
wait_port
};
let child_start_chans: ~[Chan<Chan<int>>] = vec::map_consume(wait_ports, |port| port.recv());
let (start_port, start_chan) = stream::<Chan<int>>();
parent_wait_chan.send(start_chan);
let parent_result_chan: Chan<int> = start_port.recv();
let child_sum_ports: ~[Port<int>] = do vec::map_consume(child_start_chans) |child_start_chan| {
let (child_sum_port, child_sum_chan) = stream::<int>();
child_start_chan.send(child_sum_chan);
child_sum_port
};
let mut sum = 0;
vec::consume(child_sum_ports, |_, sum_port| sum += sum_port.recv() );
parent_result_chan.send(sum + 1);
}
fn main() {
let args = os::args();
let args = if os::getenv(~"RUST_BENCH").is_some() {
~[~"", ~"30"]
} else if args.len() <= 1u {
~[~"", ~"10"]
} else {
args
};
let children = uint::from_str(args[1]).get();
let (wait_port, wait_chan) = stream();
do task::spawn {
calc(children, &wait_chan);
};
let start_chan = wait_port.recv();
let (sum_port, sum_chan) = stream::<int>();
start_chan.send(sum_chan);
let sum = sum_port.recv();
error!("How many tasks? %d tasks.", sum);
}
|
calc
|
identifier_name
|
task-perf-one-million.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test for concurrent tasks
use core::comm::*;
fn calc(children: uint, parent_wait_chan: &Chan<Chan<Chan<int>>>) {
let wait_ports: ~[Port<Chan<Chan<int>>>] = do vec::from_fn(children) |_| {
let (wait_port, wait_chan) = stream::<Chan<Chan<int>>>();
do task::spawn {
calc(children / 2, &wait_chan);
}
wait_port
};
let child_start_chans: ~[Chan<Chan<int>>] = vec::map_consume(wait_ports, |port| port.recv());
let (start_port, start_chan) = stream::<Chan<int>>();
parent_wait_chan.send(start_chan);
let parent_result_chan: Chan<int> = start_port.recv();
let child_sum_ports: ~[Port<int>] = do vec::map_consume(child_start_chans) |child_start_chan| {
let (child_sum_port, child_sum_chan) = stream::<int>();
child_start_chan.send(child_sum_chan);
child_sum_port
};
let mut sum = 0;
vec::consume(child_sum_ports, |_, sum_port| sum += sum_port.recv() );
parent_result_chan.send(sum + 1);
}
fn main() {
let args = os::args();
let args = if os::getenv(~"RUST_BENCH").is_some() {
~[~"", ~"30"]
} else if args.len() <= 1u {
~[~"", ~"10"]
} else
|
;
let children = uint::from_str(args[1]).get();
let (wait_port, wait_chan) = stream();
do task::spawn {
calc(children, &wait_chan);
};
let start_chan = wait_port.recv();
let (sum_port, sum_chan) = stream::<int>();
start_chan.send(sum_chan);
let sum = sum_port.recv();
error!("How many tasks? %d tasks.", sum);
}
|
{
args
}
|
conditional_block
|
task-perf-one-million.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
// Test for concurrent tasks
use core::comm::*;
fn calc(children: uint, parent_wait_chan: &Chan<Chan<Chan<int>>>) {
let wait_ports: ~[Port<Chan<Chan<int>>>] = do vec::from_fn(children) |_| {
let (wait_port, wait_chan) = stream::<Chan<Chan<int>>>();
do task::spawn {
calc(children / 2, &wait_chan);
}
wait_port
};
let child_start_chans: ~[Chan<Chan<int>>] = vec::map_consume(wait_ports, |port| port.recv());
let (start_port, start_chan) = stream::<Chan<int>>();
parent_wait_chan.send(start_chan);
let parent_result_chan: Chan<int> = start_port.recv();
let child_sum_ports: ~[Port<int>] = do vec::map_consume(child_start_chans) |child_start_chan| {
let (child_sum_port, child_sum_chan) = stream::<int>();
child_start_chan.send(child_sum_chan);
child_sum_port
};
let mut sum = 0;
vec::consume(child_sum_ports, |_, sum_port| sum += sum_port.recv() );
parent_result_chan.send(sum + 1);
}
fn main() {
let args = os::args();
let args = if os::getenv(~"RUST_BENCH").is_some() {
~[~"", ~"30"]
} else if args.len() <= 1u {
~[~"", ~"10"]
} else {
args
};
let children = uint::from_str(args[1]).get();
let (wait_port, wait_chan) = stream();
do task::spawn {
calc(children, &wait_chan);
};
let start_chan = wait_port.recv();
let (sum_port, sum_chan) = stream::<int>();
start_chan.send(sum_chan);
let sum = sum_port.recv();
error!("How many tasks? %d tasks.", sum);
}
|
random_line_split
|
|
mod.rs
|
//! A simple adapter designed solely to print messages on the console.
//!
//! Useful for logging.
use foxbox_taxonomy::api::{ Error, InternalError, User };
use foxbox_taxonomy::channel::*;
use foxbox_taxonomy::manager::*;
use foxbox_taxonomy::services::*;
use foxbox_taxonomy::values::{ Value };
use std::collections::HashMap;
use std::sync::Arc;
static ADAPTER_NAME: &'static str = "Console adapter (built-in)";
static ADAPTER_VENDOR: &'static str = "[email protected]";
static ADAPTER_VERSION: [u32;4] = [0, 0, 0, 0];
pub struct Console {
setter_stdout_id: Id<Channel>
}
impl Console {
pub fn id() -> Id<AdapterId> {
Id::new("[email protected]")
}
pub fn service_console_id() -> Id<ServiceId> {
Id::new("service:[email protected]")
}
pub fn setter_stdout_id() -> Id<Channel> {
Id::new("setter:[email protected]")
}
}
impl Adapter for Console {
fn id(&self) -> Id<AdapterId> {
Self::id()
}
fn name(&self) -> &str {
ADAPTER_NAME
}
fn vendor(&self) -> &str {
ADAPTER_VENDOR
}
fn version(&self) -> &[u32;4] {
&ADAPTER_VERSION
}
fn fetch_values(&self, mut set: Vec<Id<Channel>>, _: User) -> ResultMap<Id<Channel>, Option<Value>, Error> {
set.drain(..).map(|id| {
(id.clone(), Err(Error::InternalError(InternalError::NoSuchChannel(id))))
}).collect()
}
fn send_values(&self, mut values: HashMap<Id<Channel>, Value>, user: User) -> ResultMap<Id<Channel>, (), Error> {
values.drain()
.map(|(id, value)| {
let result = {
if id == self.setter_stdout_id {
if let Value::String(s) = value {
info!("[[email protected]] {} (user {:?})", s, user);
} else {
info!("[[email protected]] {:?} (user {:?})", value, user);
}
Ok(())
} else
|
};
(id, result)
})
.collect()
}
}
impl Console {
pub fn init(adapt: &Arc<AdapterManager>) -> Result<(), Error> {
let service_console_id = Console::service_console_id();
let setter_stdout_id = Console::setter_stdout_id();
let adapter_id = Console::id();
let console = Arc::new(Console {
setter_stdout_id: setter_stdout_id.clone()
});
try!(adapt.add_adapter(console));
let mut service = Service::empty(&service_console_id, &adapter_id);
service.properties.insert("model".to_owned(), "Mozilla console v1".to_owned());
try!(adapt.add_service(service));
try!(adapt.add_channel(Channel {
id: setter_stdout_id,
service: service_console_id,
adapter: adapter_id,
..LOG.clone()
}));
Ok(())
}
}
|
{
Err(Error::InternalError(InternalError::NoSuchChannel(id.clone())))
}
|
conditional_block
|
mod.rs
|
//! A simple adapter designed solely to print messages on the console.
//!
//! Useful for logging.
use foxbox_taxonomy::api::{ Error, InternalError, User };
use foxbox_taxonomy::channel::*;
use foxbox_taxonomy::manager::*;
use foxbox_taxonomy::services::*;
use foxbox_taxonomy::values::{ Value };
use std::collections::HashMap;
use std::sync::Arc;
static ADAPTER_NAME: &'static str = "Console adapter (built-in)";
static ADAPTER_VENDOR: &'static str = "[email protected]";
static ADAPTER_VERSION: [u32;4] = [0, 0, 0, 0];
pub struct Console {
setter_stdout_id: Id<Channel>
}
impl Console {
pub fn id() -> Id<AdapterId> {
Id::new("[email protected]")
}
pub fn service_console_id() -> Id<ServiceId> {
Id::new("service:[email protected]")
}
pub fn setter_stdout_id() -> Id<Channel> {
Id::new("setter:[email protected]")
}
}
impl Adapter for Console {
fn id(&self) -> Id<AdapterId> {
Self::id()
}
fn name(&self) -> &str {
ADAPTER_NAME
}
fn vendor(&self) -> &str {
ADAPTER_VENDOR
}
fn version(&self) -> &[u32;4] {
&ADAPTER_VERSION
}
fn fetch_values(&self, mut set: Vec<Id<Channel>>, _: User) -> ResultMap<Id<Channel>, Option<Value>, Error> {
set.drain(..).map(|id| {
(id.clone(), Err(Error::InternalError(InternalError::NoSuchChannel(id))))
}).collect()
}
fn
|
(&self, mut values: HashMap<Id<Channel>, Value>, user: User) -> ResultMap<Id<Channel>, (), Error> {
values.drain()
.map(|(id, value)| {
let result = {
if id == self.setter_stdout_id {
if let Value::String(s) = value {
info!("[[email protected]] {} (user {:?})", s, user);
} else {
info!("[[email protected]] {:?} (user {:?})", value, user);
}
Ok(())
} else {
Err(Error::InternalError(InternalError::NoSuchChannel(id.clone())))
}
};
(id, result)
})
.collect()
}
}
impl Console {
pub fn init(adapt: &Arc<AdapterManager>) -> Result<(), Error> {
let service_console_id = Console::service_console_id();
let setter_stdout_id = Console::setter_stdout_id();
let adapter_id = Console::id();
let console = Arc::new(Console {
setter_stdout_id: setter_stdout_id.clone()
});
try!(adapt.add_adapter(console));
let mut service = Service::empty(&service_console_id, &adapter_id);
service.properties.insert("model".to_owned(), "Mozilla console v1".to_owned());
try!(adapt.add_service(service));
try!(adapt.add_channel(Channel {
id: setter_stdout_id,
service: service_console_id,
adapter: adapter_id,
..LOG.clone()
}));
Ok(())
}
}
|
send_values
|
identifier_name
|
mod.rs
|
//! A simple adapter designed solely to print messages on the console.
//!
//! Useful for logging.
use foxbox_taxonomy::api::{ Error, InternalError, User };
use foxbox_taxonomy::channel::*;
use foxbox_taxonomy::manager::*;
use foxbox_taxonomy::services::*;
use foxbox_taxonomy::values::{ Value };
use std::collections::HashMap;
use std::sync::Arc;
static ADAPTER_NAME: &'static str = "Console adapter (built-in)";
static ADAPTER_VENDOR: &'static str = "[email protected]";
static ADAPTER_VERSION: [u32;4] = [0, 0, 0, 0];
pub struct Console {
setter_stdout_id: Id<Channel>
}
impl Console {
pub fn id() -> Id<AdapterId> {
Id::new("[email protected]")
}
pub fn service_console_id() -> Id<ServiceId> {
Id::new("service:[email protected]")
}
pub fn setter_stdout_id() -> Id<Channel> {
Id::new("setter:[email protected]")
}
}
impl Adapter for Console {
fn id(&self) -> Id<AdapterId> {
Self::id()
}
fn name(&self) -> &str {
ADAPTER_NAME
}
fn vendor(&self) -> &str {
ADAPTER_VENDOR
}
fn version(&self) -> &[u32;4] {
&ADAPTER_VERSION
}
fn fetch_values(&self, mut set: Vec<Id<Channel>>, _: User) -> ResultMap<Id<Channel>, Option<Value>, Error> {
|
fn send_values(&self, mut values: HashMap<Id<Channel>, Value>, user: User) -> ResultMap<Id<Channel>, (), Error> {
values.drain()
.map(|(id, value)| {
let result = {
if id == self.setter_stdout_id {
if let Value::String(s) = value {
info!("[[email protected]] {} (user {:?})", s, user);
} else {
info!("[[email protected]] {:?} (user {:?})", value, user);
}
Ok(())
} else {
Err(Error::InternalError(InternalError::NoSuchChannel(id.clone())))
}
};
(id, result)
})
.collect()
}
}
impl Console {
pub fn init(adapt: &Arc<AdapterManager>) -> Result<(), Error> {
let service_console_id = Console::service_console_id();
let setter_stdout_id = Console::setter_stdout_id();
let adapter_id = Console::id();
let console = Arc::new(Console {
setter_stdout_id: setter_stdout_id.clone()
});
try!(adapt.add_adapter(console));
let mut service = Service::empty(&service_console_id, &adapter_id);
service.properties.insert("model".to_owned(), "Mozilla console v1".to_owned());
try!(adapt.add_service(service));
try!(adapt.add_channel(Channel {
id: setter_stdout_id,
service: service_console_id,
adapter: adapter_id,
..LOG.clone()
}));
Ok(())
}
}
|
set.drain(..).map(|id| {
(id.clone(), Err(Error::InternalError(InternalError::NoSuchChannel(id))))
}).collect()
}
|
random_line_split
|
astconv.rs
|
<AC:AstConv,RS:RegionScope + Clone +'static>(
this: &AC,
rscope: &RS,
def_id: ast::DefId,
decl_generics: &ty::Generics,
self_ty: Option<ty::t>,
path: &ast::Path) -> ty::substs
{
/*!
*
* Given a path `path` that refers to an item `I` with the
* declared generics `decl_generics`, returns an appropriate
* set of substitutions for this particular reference to `I`.
*/
let tcx = this.tcx();
// If the type is parameterized by the this region, then replace this
// region with the current anon region binding (in other words,
// whatever & would get replaced with).
let regions = match (&decl_generics.region_param,
&path.segments.last().lifetime) {
(&None, &None) => {
opt_vec::Empty
}
(&None, &Some(_)) => {
tcx.sess.span_err(
path.span,
fmt!("no region bound is allowed on `%s`, \
which is not declared as containing region pointers",
ty::item_path_str(tcx, def_id)));
opt_vec::Empty
}
(&Some(_), &None) => {
let res = rscope.anon_region(path.span);
let r = get_region_reporting_err(this.tcx(), path.span, &None, res);
opt_vec::with(r)
}
(&Some(_), &Some(_)) => {
opt_vec::with(
ast_region_to_region(this,
rscope,
path.span,
&path.segments.last().lifetime))
}
};
// Convert the type parameters supplied by the user.
let supplied_type_parameter_count =
path.segments.iter().flat_map(|s| s.types.iter()).len();
if decl_generics.type_param_defs.len()!= supplied_type_parameter_count {
this.tcx().sess.span_fatal(
path.span,
fmt!("wrong number of type arguments: expected %u but found %u",
decl_generics.type_param_defs.len(),
supplied_type_parameter_count));
}
let tps = path.segments
.iter()
.flat_map(|s| s.types.iter())
.map(|a_t| ast_ty_to_ty(this, rscope, a_t))
.collect();
substs {
regions: ty::NonerasedRegions(regions),
self_ty: self_ty,
tps: tps
}
}
pub fn ast_path_to_substs_and_ty<AC:AstConv,
RS:RegionScope + Clone +'static>(
this: &AC,
rscope: &RS,
did: ast::DefId,
path: &ast::Path)
-> ty_param_substs_and_ty {
let tcx = this.tcx();
let ty::ty_param_bounds_and_ty {
generics: generics,
ty: decl_ty
} = this.get_item_ty(did);
let substs = ast_path_substs(this, rscope, did, &generics, None, path);
let ty = ty::subst(tcx, &substs, decl_ty);
ty_param_substs_and_ty { substs: substs, ty: ty }
}
pub fn ast_path_to_trait_ref<AC:AstConv,RS:RegionScope + Clone +'static>(
this: &AC,
rscope: &RS,
trait_def_id: ast::DefId,
self_ty: Option<ty::t>,
path: &ast::Path) -> @ty::TraitRef
{
let trait_def =
this.get_trait_def(trait_def_id);
let substs =
ast_path_substs(
this,
rscope,
trait_def.trait_ref.def_id,
&trait_def.generics,
self_ty,
path);
let trait_ref =
@ty::TraitRef {def_id: trait_def_id,
substs: substs};
return trait_ref;
}
pub fn ast_path_to_ty<AC:AstConv,RS:RegionScope + Clone +'static>(
this: &AC,
rscope: &RS,
did: ast::DefId,
path: &ast::Path)
-> ty_param_substs_and_ty
{
// Look up the polytype of the item and then substitute the provided types
// for any type/region parameters.
let ty::ty_param_substs_and_ty {
substs: substs,
ty: ty
} = ast_path_to_substs_and_ty(this, rscope, did, path);
ty_param_substs_and_ty { substs: substs, ty: ty }
}
pub static NO_REGIONS: uint = 1;
pub static NO_TPS: uint = 2;
// Parses the programmer's textual representation of a type into our
// internal notion of a type. `getter` is a function that returns the type
// corresponding to a definition ID:
pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope + Clone +'static>(
this: &AC, rscope: &RS, ast_ty: &ast::Ty) -> ty::t {
fn ast_mt_to_mt<AC:AstConv, RS:RegionScope + Clone +'static>(
this: &AC, rscope: &RS, mt: &ast::mt) -> ty::mt {
ty::mt {ty: ast_ty_to_ty(this, rscope, mt.ty), mutbl: mt.mutbl}
}
// Handle @, ~, and & being able to mean estrs and evecs.
// If a_seq_ty is a str or a vec, make it an estr/evec.
// Also handle first-class trait types.
fn mk_pointer<AC:AstConv,RS:RegionScope + Clone +'static>(
this: &AC,
rscope: &RS,
a_seq_ty: &ast::mt,
vst: ty::vstore,
constr: &fn(ty::mt) -> ty::t) -> ty::t
{
let tcx = this.tcx();
match a_seq_ty.ty.node {
ast::ty_vec(ref mt) => {
let mut mt = ast_mt_to_mt(this, rscope, mt);
if a_seq_ty.mutbl == ast::MutMutable {
mt = ty::mt { ty: mt.ty, mutbl: a_seq_ty.mutbl };
}
return ty::mk_evec(tcx, mt, vst);
}
ast::ty_path(ref path, ref bounds, id) => {
// Note that the "bounds must be empty if path is not a trait"
// restriction is enforced in the below case for ty_path, which
// will run after this as long as the path isn't a trait.
match tcx.def_map.find(&id) {
Some(&ast::DefPrimTy(ast::ty_str)) if a_seq_ty.mutbl == ast::MutImmutable => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
return ty::mk_estr(tcx, vst);
}
Some(&ast::DefTrait(trait_def_id)) => {
let result = ast_path_to_trait_ref(
this, rscope, trait_def_id, None, path);
let trait_store = match vst {
ty::vstore_box => ty::BoxTraitStore,
ty::vstore_uniq => ty::UniqTraitStore,
ty::vstore_slice(r) => {
ty::RegionTraitStore(r)
}
ty::vstore_fixed(*) => {
tcx.sess.span_err(
path.span,
"@trait, ~trait or &trait are the only supported \
forms of casting-to-trait");
ty::BoxTraitStore
}
};
let bounds = conv_builtin_bounds(this.tcx(), bounds, trait_store);
return ty::mk_trait(tcx,
result.def_id,
result.substs.clone(),
trait_store,
a_seq_ty.mutbl,
bounds);
}
_ => {}
}
}
_ => {}
}
let seq_ty = ast_mt_to_mt(this, rscope, a_seq_ty);
return constr(seq_ty);
}
fn check_path_args(tcx: ty::ctxt,
path: &ast::Path,
flags: uint) {
if (flags & NO_TPS)!= 0u {
if!path.segments.iter().all(|s| s.types.is_empty()) {
tcx.sess.span_err(
path.span,
"type parameters are not allowed on this type");
}
}
if (flags & NO_REGIONS)!= 0u {
if path.segments.last().lifetime.is_some() {
tcx.sess.span_err(
path.span,
"region parameters are not allowed on this type");
}
}
}
let tcx = this.tcx();
match tcx.ast_ty_to_ty_cache.find(&ast_ty.id) {
Some(&ty::atttce_resolved(ty)) => return ty,
Some(&ty::atttce_unresolved) => {
tcx.sess.span_fatal(ast_ty.span, "illegal recursive type; \
insert an enum in the cycle, if this is desired");
}
None => { /* go on */ }
}
tcx.ast_ty_to_ty_cache.insert(ast_ty.id, ty::atttce_unresolved);
let typ = match ast_ty.node {
ast::ty_nil => ty::mk_nil(),
ast::ty_bot => ty::mk_bot(),
ast::ty_box(ref mt) => {
mk_pointer(this, rscope, mt, ty::vstore_box,
|tmt| ty::mk_box(tcx, tmt))
}
ast::ty_uniq(ref mt) => {
mk_pointer(this, rscope, mt, ty::vstore_uniq,
|tmt| ty::mk_uniq(tcx, tmt))
}
ast::ty_vec(ref mt) => {
tcx.sess.span_err(ast_ty.span, "bare `[]` is not a type");
// return /something/ so they can at least get more errors
ty::mk_evec(tcx, ast_mt_to_mt(this, rscope, mt), ty::vstore_uniq)
}
ast::ty_ptr(ref mt) => {
ty::mk_ptr(tcx, ast_mt_to_mt(this, rscope, mt))
}
ast::ty_rptr(ref region, ref mt) => {
let r = ast_region_to_region(this, rscope, ast_ty.span, region);
mk_pointer(this, rscope, mt, ty::vstore_slice(r),
|tmt| ty::mk_rptr(tcx, r, tmt))
}
ast::ty_tup(ref fields) => {
let flds = fields.map(|t| ast_ty_to_ty(this, rscope, t));
ty::mk_tup(tcx, flds)
}
ast::ty_bare_fn(ref bf) => {
ty::mk_bare_fn(tcx, ty_of_bare_fn(this, rscope, bf.purity,
bf.abis, &bf.lifetimes, &bf.decl))
}
ast::ty_closure(ref f) => {
if f.sigil == ast::ManagedSigil {
tcx.sess.span_err(ast_ty.span,
"managed closures are not supported");
}
let bounds = conv_builtin_bounds(this.tcx(), &f.bounds, match f.sigil {
// Use corresponding trait store to figure out default bounds
// if none were specified.
ast::BorrowedSigil => ty::RegionTraitStore(ty::re_empty), // dummy region
ast::OwnedSigil => ty::UniqTraitStore,
ast::ManagedSigil => ty::BoxTraitStore,
});
let fn_decl = ty_of_closure(this,
rscope,
f.sigil,
f.purity,
f.onceness,
bounds,
&f.region,
&f.decl,
None,
&f.lifetimes,
ast_ty.span);
ty::mk_closure(tcx, fn_decl)
}
ast::ty_path(ref path, ref bounds, id) => {
let a_def = match tcx.def_map.find(&id) {
None => tcx.sess.span_fatal(
ast_ty.span, fmt!("unbound path %s",
path_to_str(path, tcx.sess.intr()))),
Some(&d) => d
};
// Kind bounds on path types are only supported for traits.
match a_def {
// But don't emit the error if the user meant to do a trait anyway.
ast::DefTrait(*) => { },
_ if bounds.is_some() =>
tcx.sess.span_err(ast_ty.span,
"kind bounds can only be used on trait types"),
_ => { },
}
match a_def {
ast::DefTrait(_) => {
let path_str = path_to_str(path, tcx.sess.intr());
tcx.sess.span_err(
ast_ty.span,
fmt!("reference to trait `%s` where a type is expected; \
try `@%s`, `~%s`, or `&%s`",
path_str, path_str, path_str, path_str));
ty::mk_err()
}
ast::DefTy(did) | ast::DefStruct(did) => {
ast_path_to_ty(this, rscope, did, path).ty
}
ast::DefPrimTy(nty) => {
match nty {
ast::ty_bool => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_bool()
}
ast::ty_char => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_char()
}
ast::ty_int(it) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_mach_int(it)
}
ast::ty_uint(uit) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_mach_uint(uit)
}
ast::ty_float(ft) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_mach_float(ft)
}
ast::ty_str => {
tcx.sess.span_err(ast_ty.span,
"bare `str` is not a type");
// return /something/ so they can at least get more errors
ty::mk_estr(tcx, ty::vstore_uniq)
}
}
}
ast::DefTyParam(id, n) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_param(tcx, n, id)
}
ast::DefSelfTy(id) => {
// n.b.: resolve guarantees that the this type only appears in a
// trait, which we rely upon in various places when creating
// substs
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
let did = ast_util::local_def(id);
ty::mk_self(tcx, did)
}
_ => {
tcx.sess.span_fatal(ast_ty.span,
fmt!("found value name used as a type: %?", a_def));
}
}
}
ast::ty_fixed_length_vec(ref a_mt, e) => {
match const_eval::eval_const_expr_partial(&tcx, e) {
Ok(ref r) => {
match *r {
|
ast_path_substs
|
identifier_name
|
|
astconv.rs
|
(&Some(_), &None) => {
let res = rscope.anon_region(path.span);
let r = get_region_reporting_err(this.tcx(), path.span, &None, res);
opt_vec::with(r)
}
(&Some(_), &Some(_)) => {
opt_vec::with(
ast_region_to_region(this,
rscope,
path.span,
&path.segments.last().lifetime))
}
};
// Convert the type parameters supplied by the user.
let supplied_type_parameter_count =
path.segments.iter().flat_map(|s| s.types.iter()).len();
if decl_generics.type_param_defs.len()!= supplied_type_parameter_count {
this.tcx().sess.span_fatal(
path.span,
fmt!("wrong number of type arguments: expected %u but found %u",
decl_generics.type_param_defs.len(),
supplied_type_parameter_count));
}
let tps = path.segments
.iter()
.flat_map(|s| s.types.iter())
.map(|a_t| ast_ty_to_ty(this, rscope, a_t))
.collect();
substs {
regions: ty::NonerasedRegions(regions),
self_ty: self_ty,
tps: tps
}
}
pub fn ast_path_to_substs_and_ty<AC:AstConv,
RS:RegionScope + Clone +'static>(
this: &AC,
rscope: &RS,
did: ast::DefId,
path: &ast::Path)
-> ty_param_substs_and_ty {
let tcx = this.tcx();
let ty::ty_param_bounds_and_ty {
generics: generics,
ty: decl_ty
} = this.get_item_ty(did);
let substs = ast_path_substs(this, rscope, did, &generics, None, path);
let ty = ty::subst(tcx, &substs, decl_ty);
ty_param_substs_and_ty { substs: substs, ty: ty }
}
pub fn ast_path_to_trait_ref<AC:AstConv,RS:RegionScope + Clone + 'static>(
this: &AC,
rscope: &RS,
trait_def_id: ast::DefId,
self_ty: Option<ty::t>,
path: &ast::Path) -> @ty::TraitRef
{
let trait_def =
this.get_trait_def(trait_def_id);
let substs =
ast_path_substs(
this,
rscope,
trait_def.trait_ref.def_id,
&trait_def.generics,
self_ty,
path);
let trait_ref =
@ty::TraitRef {def_id: trait_def_id,
substs: substs};
return trait_ref;
}
pub fn ast_path_to_ty<AC:AstConv,RS:RegionScope + Clone + 'static>(
this: &AC,
rscope: &RS,
did: ast::DefId,
path: &ast::Path)
-> ty_param_substs_and_ty
{
// Look up the polytype of the item and then substitute the provided types
// for any type/region parameters.
let ty::ty_param_substs_and_ty {
substs: substs,
ty: ty
} = ast_path_to_substs_and_ty(this, rscope, did, path);
ty_param_substs_and_ty { substs: substs, ty: ty }
}
pub static NO_REGIONS: uint = 1;
pub static NO_TPS: uint = 2;
// Parses the programmer's textual representation of a type into our
// internal notion of a type. `getter` is a function that returns the type
// corresponding to a definition ID:
pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope + Clone + 'static>(
this: &AC, rscope: &RS, ast_ty: &ast::Ty) -> ty::t {
fn ast_mt_to_mt<AC:AstConv, RS:RegionScope + Clone + 'static>(
this: &AC, rscope: &RS, mt: &ast::mt) -> ty::mt {
ty::mt {ty: ast_ty_to_ty(this, rscope, mt.ty), mutbl: mt.mutbl}
}
// Handle @, ~, and & being able to mean estrs and evecs.
// If a_seq_ty is a str or a vec, make it an estr/evec.
// Also handle first-class trait types.
fn mk_pointer<AC:AstConv,RS:RegionScope + Clone + 'static>(
this: &AC,
rscope: &RS,
a_seq_ty: &ast::mt,
vst: ty::vstore,
constr: &fn(ty::mt) -> ty::t) -> ty::t
{
let tcx = this.tcx();
match a_seq_ty.ty.node {
ast::ty_vec(ref mt) => {
let mut mt = ast_mt_to_mt(this, rscope, mt);
if a_seq_ty.mutbl == ast::MutMutable {
mt = ty::mt { ty: mt.ty, mutbl: a_seq_ty.mutbl };
}
return ty::mk_evec(tcx, mt, vst);
}
ast::ty_path(ref path, ref bounds, id) => {
// Note that the "bounds must be empty if path is not a trait"
// restriction is enforced in the below case for ty_path, which
// will run after this as long as the path isn't a trait.
match tcx.def_map.find(&id) {
Some(&ast::DefPrimTy(ast::ty_str)) if a_seq_ty.mutbl == ast::MutImmutable => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
return ty::mk_estr(tcx, vst);
}
Some(&ast::DefTrait(trait_def_id)) =>
|
result.substs.clone(),
trait_store,
a_seq_ty.mutbl,
bounds);
}
_ => {}
}
}
_ => {}
}
let seq_ty = ast_mt_to_mt(this, rscope, a_seq_ty);
return constr(seq_ty);
}
fn check_path_args(tcx: ty::ctxt,
path: &ast::Path,
flags: uint) {
if (flags & NO_TPS) != 0u {
if !path.segments.iter().all(|s| s.types.is_empty()) {
tcx.sess.span_err(
path.span,
"type parameters are not allowed on this type");
}
}
if (flags & NO_REGIONS) != 0u {
if path.segments.last().lifetime.is_some() {
tcx.sess.span_err(
path.span,
"region parameters are not allowed on this type");
}
}
}
let tcx = this.tcx();
match tcx.ast_ty_to_ty_cache.find(&ast_ty.id) {
Some(&ty::atttce_resolved(ty)) => return ty,
Some(&ty::atttce_unresolved) => {
tcx.sess.span_fatal(ast_ty.span, "illegal recursive type; \
insert an enum in the cycle, if this is desired");
}
None => { /* go on */ }
}
tcx.ast_ty_to_ty_cache.insert(ast_ty.id, ty::atttce_unresolved);
let typ = match ast_ty.node {
ast::ty_nil => ty::mk_nil(),
ast::ty_bot => ty::mk_bot(),
ast::ty_box(ref mt) => {
mk_pointer(this, rscope, mt, ty::vstore_box,
|tmt| ty::mk_box(tcx, tmt))
}
ast::ty_uniq(ref mt) => {
mk_pointer(this, rscope, mt, ty::vstore_uniq,
|tmt| ty::mk_uniq(tcx, tmt))
}
ast::ty_vec(ref mt) => {
tcx.sess.span_err(ast_ty.span, "bare `[]` is not a type");
// return /something/ so they can at least get more errors
ty::mk_evec(tcx, ast_mt_to_mt(this, rscope, mt), ty::vstore_uniq)
}
ast::ty_ptr(ref mt) => {
ty::mk_ptr(tcx, ast_mt_to_mt(this, rscope, mt))
}
ast::ty_rptr(ref region, ref mt) => {
let r = ast_region_to_region(this, rscope, ast_ty.span, region);
mk_pointer(this, rscope, mt, ty::vstore_slice(r),
|tmt| ty::mk_rptr(tcx, r, tmt))
}
ast::ty_tup(ref fields) => {
let flds = fields.map(|t| ast_ty_to_ty(this, rscope, t));
ty::mk_tup(tcx, flds)
}
ast::ty_bare_fn(ref bf) => {
ty::mk_bare_fn(tcx, ty_of_bare_fn(this, rscope, bf.purity,
bf.abis, &bf.lifetimes, &bf.decl))
}
ast::ty_closure(ref f) => {
if f.sigil == ast::ManagedSigil {
tcx.sess.span_err(ast_ty.span,
"managed closures are not supported");
}
let bounds = conv_builtin_bounds(this.tcx(), &f.bounds, match f.sigil {
// Use corresponding trait store to figure out default bounds
// if none were specified.
ast::BorrowedSigil => ty::RegionTraitStore(ty::re_empty), // dummy region
ast::OwnedSigil => ty::UniqTraitStore,
ast::ManagedSigil => ty::BoxTraitStore,
});
let fn_decl = ty_of_closure(this,
rscope,
f.sigil,
f.purity,
f.onceness,
bounds,
&f.region,
&f.decl,
None,
&f.lifetimes,
ast_ty.span);
ty::mk_closure(tcx, fn_decl)
}
ast::ty_path(ref path, ref bounds, id) => {
let a_def = match tcx.def_map.find(&id) {
None => tcx.sess.span_fatal(
ast_ty.span, fmt!("unbound path %s",
path_to_str(path, tcx.sess.intr()))),
Some(&d) => d
};
// Kind bounds on path types are only supported for traits.
match a_def {
// But don't emit the error if the user meant to do a trait anyway.
ast::DefTrait(*) => { },
_ if bounds.is_some() =>
tcx.sess.span_err(ast_ty.span,
"kind bounds can only be used on trait types"),
_ => { },
}
match a_def {
ast::DefTrait(_) => {
let path_str = path_to_str(path, tcx.sess.intr());
tcx.sess.span_err(
ast_ty.span,
fmt!("reference to trait `%s` where a type is expected; \
try `@%s`, `~%s`, or `&%s`",
path_str, path_str, path_str, path_str));
ty::mk_err()
}
ast::DefTy(did) | ast::DefStruct(did) => {
ast_path_to_ty(this, rscope, did, path).ty
}
ast::DefPrimTy(nty) => {
match nty {
ast::ty_bool => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_bool()
}
ast::ty_char => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_char()
}
ast::ty_int(it) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_mach_int(it)
}
ast::ty_uint(uit) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_mach_uint(uit)
}
ast::ty_float(ft) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_mach_float(ft)
}
ast::ty_str => {
tcx.sess.span_err(ast_ty.span,
"bare `str` is not a type");
// return /something/ so they can at least get more errors
ty::mk_estr(tcx, ty::vstore_uniq)
}
}
}
ast::DefTyParam(id, n) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_param(tcx, n, id)
}
ast::DefSelfTy(id) => {
// n.b.: resolve guarantees that the self type only appears in a
// trait, which we rely upon in various places when creating
// substs
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
let did = ast_util::local_def(id);
ty::mk_self(tcx, did)
}
_ => {
tcx.sess.span_fatal(ast_ty.span,
fmt!("found value name used as a type: %?", a_def));
}
}
}
ast::ty_fixed_length_vec(ref a_mt, e) => {
match const_eval::eval_const_expr_partial(&tcx, e) {
Ok(ref r) => {
match *r {
const_eval::const_int(i) =>
ty::mk_evec(tcx, ast_mt_to_mt(this, rscope, a_mt),
ty::vstore_fixed(i as uint)),
const_eval::const_uint(i) =>
ty::mk_evec(tcx, ast_mt_to_mt(this, rscope, a_mt),
ty::vstore_fixed(i as uint)),
_ => {
tcx.sess.span_fatal(
ast_ty.span, "expected constant expr for vector length");
}
}
}
Err(ref r) => {
tcx.sess.span_fatal(
ast_ty.span,
fmt!("expected constant expr for vector length: %s",
*r));
}
}
}
ast::ty_typeof(_e) => {
tcx.sess.span_bug(ast_ty.span,
|
{
let result = ast_path_to_trait_ref(
this, rscope, trait_def_id, None, path);
let trait_store = match vst {
ty::vstore_box => ty::BoxTraitStore,
ty::vstore_uniq => ty::UniqTraitStore,
ty::vstore_slice(r) => {
ty::RegionTraitStore(r)
}
ty::vstore_fixed(*) => {
tcx.sess.span_err(
path.span,
"@trait, ~trait or &trait are the only supported \
forms of casting-to-trait");
ty::BoxTraitStore
}
};
let bounds = conv_builtin_bounds(this.tcx(), bounds, trait_store);
return ty::mk_trait(tcx,
result.def_id,
|
conditional_block
|
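The astconv.rs rows above gate two independent validations in check_path_args behind the bit constants NO_TPS and NO_REGIONS. The following is a minimal, self-contained sketch of that flag-check pattern in modern Rust; the constant values match the rows above, but the Path struct and everything else here is illustrative, not rustc's actual AST type.
// Sketch of the NO_TPS / NO_REGIONS flag check used by check_path_args.
const NO_REGIONS: u32 = 1;
const NO_TPS: u32 = 2;

struct Path { has_types: bool, has_lifetime: bool }

fn check_path_args(path: &Path, flags: u32) -> Vec<&'static str> {
    let mut errors = Vec::new();
    if flags & NO_TPS != 0 && path.has_types {
        errors.push("type parameters are not allowed on this type");
    }
    if flags & NO_REGIONS != 0 && path.has_lifetime {
        errors.push("region parameters are not allowed on this type");
    }
    errors
}

fn main() {
    let p = Path { has_types: true, has_lifetime: false };
    // Both flags enabled, exactly as the primitive-type arms do above.
    assert_eq!(check_path_args(&p, NO_TPS | NO_REGIONS).len(), 1);
}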
astconv.rs
|
(&Some(_), &None) => {
let res = rscope.anon_region(path.span);
let r = get_region_reporting_err(this.tcx(), path.span, &None, res);
opt_vec::with(r)
}
(&Some(_), &Some(_)) => {
opt_vec::with(
ast_region_to_region(this,
rscope,
path.span,
&path.segments.last().lifetime))
}
};
// Convert the type parameters supplied by the user.
let supplied_type_parameter_count =
|
fmt!("wrong number of type arguments: expected %u but found %u",
decl_generics.type_param_defs.len(),
supplied_type_parameter_count));
}
let tps = path.segments
.iter()
.flat_map(|s| s.types.iter())
.map(|a_t| ast_ty_to_ty(this, rscope, a_t))
.collect();
substs {
regions: ty::NonerasedRegions(regions),
self_ty: self_ty,
tps: tps
}
}
pub fn ast_path_to_substs_and_ty<AC:AstConv,
RS:RegionScope + Clone + 'static>(
this: &AC,
rscope: &RS,
did: ast::DefId,
path: &ast::Path)
-> ty_param_substs_and_ty {
let tcx = this.tcx();
let ty::ty_param_bounds_and_ty {
generics: generics,
ty: decl_ty
} = this.get_item_ty(did);
let substs = ast_path_substs(this, rscope, did, &generics, None, path);
let ty = ty::subst(tcx, &substs, decl_ty);
ty_param_substs_and_ty { substs: substs, ty: ty }
}
pub fn ast_path_to_trait_ref<AC:AstConv,RS:RegionScope + Clone + 'static>(
this: &AC,
rscope: &RS,
trait_def_id: ast::DefId,
self_ty: Option<ty::t>,
path: &ast::Path) -> @ty::TraitRef
{
let trait_def =
this.get_trait_def(trait_def_id);
let substs =
ast_path_substs(
this,
rscope,
trait_def.trait_ref.def_id,
&trait_def.generics,
self_ty,
path);
let trait_ref =
@ty::TraitRef {def_id: trait_def_id,
substs: substs};
return trait_ref;
}
pub fn ast_path_to_ty<AC:AstConv,RS:RegionScope + Clone + 'static>(
this: &AC,
rscope: &RS,
did: ast::DefId,
path: &ast::Path)
-> ty_param_substs_and_ty
{
// Look up the polytype of the item and then substitute the provided types
// for any type/region parameters.
let ty::ty_param_substs_and_ty {
substs: substs,
ty: ty
} = ast_path_to_substs_and_ty(this, rscope, did, path);
ty_param_substs_and_ty { substs: substs, ty: ty }
}
pub static NO_REGIONS: uint = 1;
pub static NO_TPS: uint = 2;
// Parses the programmer's textual representation of a type into our
// internal notion of a type. `getter` is a function that returns the type
// corresponding to a definition ID:
pub fn ast_ty_to_ty<AC:AstConv, RS:RegionScope + Clone + 'static>(
this: &AC, rscope: &RS, ast_ty: &ast::Ty) -> ty::t {
fn ast_mt_to_mt<AC:AstConv, RS:RegionScope + Clone + 'static>(
this: &AC, rscope: &RS, mt: &ast::mt) -> ty::mt {
ty::mt {ty: ast_ty_to_ty(this, rscope, mt.ty), mutbl: mt.mutbl}
}
// Handle @, ~, and & being able to mean estrs and evecs.
// If a_seq_ty is a str or a vec, make it an estr/evec.
// Also handle first-class trait types.
fn mk_pointer<AC:AstConv,RS:RegionScope + Clone + 'static>(
this: &AC,
rscope: &RS,
a_seq_ty: &ast::mt,
vst: ty::vstore,
constr: &fn(ty::mt) -> ty::t) -> ty::t
{
let tcx = this.tcx();
match a_seq_ty.ty.node {
ast::ty_vec(ref mt) => {
let mut mt = ast_mt_to_mt(this, rscope, mt);
if a_seq_ty.mutbl == ast::MutMutable {
mt = ty::mt { ty: mt.ty, mutbl: a_seq_ty.mutbl };
}
return ty::mk_evec(tcx, mt, vst);
}
ast::ty_path(ref path, ref bounds, id) => {
// Note that the "bounds must be empty if path is not a trait"
// restriction is enforced in the below case for ty_path, which
// will run after this as long as the path isn't a trait.
match tcx.def_map.find(&id) {
Some(&ast::DefPrimTy(ast::ty_str)) if a_seq_ty.mutbl == ast::MutImmutable => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
return ty::mk_estr(tcx, vst);
}
Some(&ast::DefTrait(trait_def_id)) => {
let result = ast_path_to_trait_ref(
this, rscope, trait_def_id, None, path);
let trait_store = match vst {
ty::vstore_box => ty::BoxTraitStore,
ty::vstore_uniq => ty::UniqTraitStore,
ty::vstore_slice(r) => {
ty::RegionTraitStore(r)
}
ty::vstore_fixed(*) => {
tcx.sess.span_err(
path.span,
"@trait, ~trait or &trait are the only supported \
forms of casting-to-trait");
ty::BoxTraitStore
}
};
let bounds = conv_builtin_bounds(this.tcx(), bounds, trait_store);
return ty::mk_trait(tcx,
result.def_id,
result.substs.clone(),
trait_store,
a_seq_ty.mutbl,
bounds);
}
_ => {}
}
}
_ => {}
}
let seq_ty = ast_mt_to_mt(this, rscope, a_seq_ty);
return constr(seq_ty);
}
fn check_path_args(tcx: ty::ctxt,
path: &ast::Path,
flags: uint) {
if (flags & NO_TPS) != 0u {
if !path.segments.iter().all(|s| s.types.is_empty()) {
tcx.sess.span_err(
path.span,
"type parameters are not allowed on this type");
}
}
if (flags & NO_REGIONS) != 0u {
if path.segments.last().lifetime.is_some() {
tcx.sess.span_err(
path.span,
"region parameters are not allowed on this type");
}
}
}
let tcx = this.tcx();
match tcx.ast_ty_to_ty_cache.find(&ast_ty.id) {
Some(&ty::atttce_resolved(ty)) => return ty,
Some(&ty::atttce_unresolved) => {
tcx.sess.span_fatal(ast_ty.span, "illegal recursive type; \
insert an enum in the cycle, if this is desired");
}
None => { /* go on */ }
}
tcx.ast_ty_to_ty_cache.insert(ast_ty.id, ty::atttce_unresolved);
let typ = match ast_ty.node {
ast::ty_nil => ty::mk_nil(),
ast::ty_bot => ty::mk_bot(),
ast::ty_box(ref mt) => {
mk_pointer(this, rscope, mt, ty::vstore_box,
|tmt| ty::mk_box(tcx, tmt))
}
ast::ty_uniq(ref mt) => {
mk_pointer(this, rscope, mt, ty::vstore_uniq,
|tmt| ty::mk_uniq(tcx, tmt))
}
ast::ty_vec(ref mt) => {
tcx.sess.span_err(ast_ty.span, "bare `[]` is not a type");
// return /something/ so they can at least get more errors
ty::mk_evec(tcx, ast_mt_to_mt(this, rscope, mt), ty::vstore_uniq)
}
ast::ty_ptr(ref mt) => {
ty::mk_ptr(tcx, ast_mt_to_mt(this, rscope, mt))
}
ast::ty_rptr(ref region, ref mt) => {
let r = ast_region_to_region(this, rscope, ast_ty.span, region);
mk_pointer(this, rscope, mt, ty::vstore_slice(r),
|tmt| ty::mk_rptr(tcx, r, tmt))
}
ast::ty_tup(ref fields) => {
let flds = fields.map(|t| ast_ty_to_ty(this, rscope, t));
ty::mk_tup(tcx, flds)
}
ast::ty_bare_fn(ref bf) => {
ty::mk_bare_fn(tcx, ty_of_bare_fn(this, rscope, bf.purity,
bf.abis, &bf.lifetimes, &bf.decl))
}
ast::ty_closure(ref f) => {
if f.sigil == ast::ManagedSigil {
tcx.sess.span_err(ast_ty.span,
"managed closures are not supported");
}
let bounds = conv_builtin_bounds(this.tcx(), &f.bounds, match f.sigil {
// Use corresponding trait store to figure out default bounds
// if none were specified.
ast::BorrowedSigil => ty::RegionTraitStore(ty::re_empty), // dummy region
ast::OwnedSigil => ty::UniqTraitStore,
ast::ManagedSigil => ty::BoxTraitStore,
});
let fn_decl = ty_of_closure(this,
rscope,
f.sigil,
f.purity,
f.onceness,
bounds,
&f.region,
&f.decl,
None,
&f.lifetimes,
ast_ty.span);
ty::mk_closure(tcx, fn_decl)
}
ast::ty_path(ref path, ref bounds, id) => {
let a_def = match tcx.def_map.find(&id) {
None => tcx.sess.span_fatal(
ast_ty.span, fmt!("unbound path %s",
path_to_str(path, tcx.sess.intr()))),
Some(&d) => d
};
// Kind bounds on path types are only supported for traits.
match a_def {
// But don't emit the error if the user meant to do a trait anyway.
ast::DefTrait(*) => { },
_ if bounds.is_some() =>
tcx.sess.span_err(ast_ty.span,
"kind bounds can only be used on trait types"),
_ => { },
}
match a_def {
ast::DefTrait(_) => {
let path_str = path_to_str(path, tcx.sess.intr());
tcx.sess.span_err(
ast_ty.span,
fmt!("reference to trait `%s` where a type is expected; \
try `@%s`, `~%s`, or `&%s`",
path_str, path_str, path_str, path_str));
ty::mk_err()
}
ast::DefTy(did) | ast::DefStruct(did) => {
ast_path_to_ty(this, rscope, did, path).ty
}
ast::DefPrimTy(nty) => {
match nty {
ast::ty_bool => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_bool()
}
ast::ty_char => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_char()
}
ast::ty_int(it) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_mach_int(it)
}
ast::ty_uint(uit) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_mach_uint(uit)
}
ast::ty_float(ft) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_mach_float(ft)
}
ast::ty_str => {
tcx.sess.span_err(ast_ty.span,
"bare `str` is not a type");
// return /something/ so they can at least get more errors
ty::mk_estr(tcx, ty::vstore_uniq)
}
}
}
ast::DefTyParam(id, n) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_param(tcx, n, id)
}
ast::DefSelfTy(id) => {
// n.b.: resolve guarantees that the self type only appears in a
// trait, which we rely upon in various places when creating
// substs
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
let did = ast_util::local_def(id);
ty::mk_self(tcx, did)
}
_ => {
tcx.sess.span_fatal(ast_ty.span,
fmt!("found value name used as a type: %?", a_def));
}
}
}
ast::ty_fixed_length_vec(ref a_mt, e) => {
match const_eval::eval_const_expr_partial(&tcx, e) {
Ok(ref r) => {
match *r {
const_eval::const_int(i) =>
ty::mk_evec(tcx, ast_mt_to_mt(this, rscope, a_mt),
ty::vstore_fixed(i as uint)),
const_eval::const_uint(i) =>
ty::mk_evec(tcx, ast_mt_to_mt(this, rscope, a_mt),
ty::vstore_fixed(i as uint)),
_ => {
tcx.sess.span_fatal(
ast_ty.span, "expected constant expr for vector length");
}
}
}
Err(ref r) => {
tcx.sess.span_fatal(
ast_ty.span,
fmt!("expected constant expr for vector length: %s",
*r));
}
}
}
ast::ty_typeof(_e) => {
tcx.sess.span_bug(ast_ty.span, "typeof
|
path.segments.iter().flat_map(|s| s.types.iter()).len();
if decl_generics.type_param_defs.len() != supplied_type_parameter_count {
this.tcx().sess.span_fatal(
path.span,
|
random_line_split
|
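ast_ty_to_ty in the rows above memoizes conversions in ast_ty_to_ty_cache and inserts an atttce_unresolved sentinel before descending, so a type that reaches itself without an indirection is reported as an illegal recursive type. Below is a hedged, standalone sketch of that sentinel pattern in modern Rust; it uses a plain HashMap over made-up node ids and strings, not the real rustc cache or ty::t.
// Sketch of the "insert a sentinel, then resolve" cycle check used by ast_ty_to_ty.
use std::collections::HashMap;

enum CacheEntry { Unresolved, Resolved(String) }

fn convert(id: u32,
           deps: &HashMap<u32, Vec<u32>>,
           cache: &mut HashMap<u32, CacheEntry>) -> Result<String, String> {
    match cache.get(&id) {
        Some(CacheEntry::Resolved(t)) => return Ok(t.clone()),
        Some(CacheEntry::Unresolved) => return Err("illegal recursive type".to_string()),
        None => {}
    }
    cache.insert(id, CacheEntry::Unresolved);        // sentinel marks "in progress"
    let mut parts = Vec::new();
    for dep in deps.get(&id).map(|v| v.as_slice()).unwrap_or(&[]) {
        parts.push(convert(*dep, deps, cache)?);      // recursing into a sentinel fails
    }
    let ty = format!("ty{}({})", id, parts.join(","));
    cache.insert(id, CacheEntry::Resolved(ty.clone()));
    Ok(ty)
}

fn main() {
    let mut deps = HashMap::new();
    deps.insert(1u32, vec![1u32]); // a type that mentions itself directly
    let mut cache = HashMap::new();
    assert!(convert(1, &deps, &mut cache).is_err());
}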
astconv.rs
|
let trait_store = match vst {
ty::vstore_box => ty::BoxTraitStore,
ty::vstore_uniq => ty::UniqTraitStore,
ty::vstore_slice(r) => {
ty::RegionTraitStore(r)
}
ty::vstore_fixed(*) => {
tcx.sess.span_err(
path.span,
"@trait, ~trait or &trait are the only supported \
forms of casting-to-trait");
ty::BoxTraitStore
}
};
let bounds = conv_builtin_bounds(this.tcx(), bounds, trait_store);
return ty::mk_trait(tcx,
result.def_id,
result.substs.clone(),
trait_store,
a_seq_ty.mutbl,
bounds);
}
_ => {}
}
}
_ => {}
}
let seq_ty = ast_mt_to_mt(this, rscope, a_seq_ty);
return constr(seq_ty);
}
fn check_path_args(tcx: ty::ctxt,
path: &ast::Path,
flags: uint) {
if (flags & NO_TPS) != 0u {
if !path.segments.iter().all(|s| s.types.is_empty()) {
tcx.sess.span_err(
path.span,
"type parameters are not allowed on this type");
}
}
if (flags & NO_REGIONS) != 0u {
if path.segments.last().lifetime.is_some() {
tcx.sess.span_err(
path.span,
"region parameters are not allowed on this type");
}
}
}
let tcx = this.tcx();
match tcx.ast_ty_to_ty_cache.find(&ast_ty.id) {
Some(&ty::atttce_resolved(ty)) => return ty,
Some(&ty::atttce_unresolved) => {
tcx.sess.span_fatal(ast_ty.span, "illegal recursive type; \
insert an enum in the cycle, if this is desired");
}
None => { /* go on */ }
}
tcx.ast_ty_to_ty_cache.insert(ast_ty.id, ty::atttce_unresolved);
let typ = match ast_ty.node {
ast::ty_nil => ty::mk_nil(),
ast::ty_bot => ty::mk_bot(),
ast::ty_box(ref mt) => {
mk_pointer(this, rscope, mt, ty::vstore_box,
|tmt| ty::mk_box(tcx, tmt))
}
ast::ty_uniq(ref mt) => {
mk_pointer(this, rscope, mt, ty::vstore_uniq,
|tmt| ty::mk_uniq(tcx, tmt))
}
ast::ty_vec(ref mt) => {
tcx.sess.span_err(ast_ty.span, "bare `[]` is not a type");
// return /something/ so they can at least get more errors
ty::mk_evec(tcx, ast_mt_to_mt(this, rscope, mt), ty::vstore_uniq)
}
ast::ty_ptr(ref mt) => {
ty::mk_ptr(tcx, ast_mt_to_mt(this, rscope, mt))
}
ast::ty_rptr(ref region, ref mt) => {
let r = ast_region_to_region(this, rscope, ast_ty.span, region);
mk_pointer(this, rscope, mt, ty::vstore_slice(r),
|tmt| ty::mk_rptr(tcx, r, tmt))
}
ast::ty_tup(ref fields) => {
let flds = fields.map(|t| ast_ty_to_ty(this, rscope, t));
ty::mk_tup(tcx, flds)
}
ast::ty_bare_fn(ref bf) => {
ty::mk_bare_fn(tcx, ty_of_bare_fn(this, rscope, bf.purity,
bf.abis, &bf.lifetimes, &bf.decl))
}
ast::ty_closure(ref f) => {
if f.sigil == ast::ManagedSigil {
tcx.sess.span_err(ast_ty.span,
"managed closures are not supported");
}
let bounds = conv_builtin_bounds(this.tcx(), &f.bounds, match f.sigil {
// Use corresponding trait store to figure out default bounds
// if none were specified.
ast::BorrowedSigil => ty::RegionTraitStore(ty::re_empty), // dummy region
ast::OwnedSigil => ty::UniqTraitStore,
ast::ManagedSigil => ty::BoxTraitStore,
});
let fn_decl = ty_of_closure(this,
rscope,
f.sigil,
f.purity,
f.onceness,
bounds,
&f.region,
&f.decl,
None,
&f.lifetimes,
ast_ty.span);
ty::mk_closure(tcx, fn_decl)
}
ast::ty_path(ref path, ref bounds, id) => {
let a_def = match tcx.def_map.find(&id) {
None => tcx.sess.span_fatal(
ast_ty.span, fmt!("unbound path %s",
path_to_str(path, tcx.sess.intr()))),
Some(&d) => d
};
// Kind bounds on path types are only supported for traits.
match a_def {
// But don't emit the error if the user meant to do a trait anyway.
ast::DefTrait(*) => { },
_ if bounds.is_some() =>
tcx.sess.span_err(ast_ty.span,
"kind bounds can only be used on trait types"),
_ => { },
}
match a_def {
ast::DefTrait(_) => {
let path_str = path_to_str(path, tcx.sess.intr());
tcx.sess.span_err(
ast_ty.span,
fmt!("reference to trait `%s` where a type is expected; \
try `@%s`, `~%s`, or `&%s`",
path_str, path_str, path_str, path_str));
ty::mk_err()
}
ast::DefTy(did) | ast::DefStruct(did) => {
ast_path_to_ty(this, rscope, did, path).ty
}
ast::DefPrimTy(nty) => {
match nty {
ast::ty_bool => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_bool()
}
ast::ty_char => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_char()
}
ast::ty_int(it) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_mach_int(it)
}
ast::ty_uint(uit) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_mach_uint(uit)
}
ast::ty_float(ft) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_mach_float(ft)
}
ast::ty_str => {
tcx.sess.span_err(ast_ty.span,
"bare `str` is not a type");
// return /something/ so they can at least get more errors
ty::mk_estr(tcx, ty::vstore_uniq)
}
}
}
ast::DefTyParam(id, n) => {
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
ty::mk_param(tcx, n, id)
}
ast::DefSelfTy(id) => {
// n.b.: resolve guarantees that the self type only appears in a
// trait, which we rely upon in various places when creating
// substs
check_path_args(tcx, path, NO_TPS | NO_REGIONS);
let did = ast_util::local_def(id);
ty::mk_self(tcx, did)
}
_ => {
tcx.sess.span_fatal(ast_ty.span,
fmt!("found value name used as a type: %?", a_def));
}
}
}
ast::ty_fixed_length_vec(ref a_mt, e) => {
match const_eval::eval_const_expr_partial(&tcx, e) {
Ok(ref r) => {
match *r {
const_eval::const_int(i) =>
ty::mk_evec(tcx, ast_mt_to_mt(this, rscope, a_mt),
ty::vstore_fixed(i as uint)),
const_eval::const_uint(i) =>
ty::mk_evec(tcx, ast_mt_to_mt(this, rscope, a_mt),
ty::vstore_fixed(i as uint)),
_ => {
tcx.sess.span_fatal(
ast_ty.span, "expected constant expr for vector length");
}
}
}
Err(ref r) => {
tcx.sess.span_fatal(
ast_ty.span,
fmt!("expected constant expr for vector length: %s",
*r));
}
}
}
ast::ty_typeof(_e) => {
tcx.sess.span_bug(ast_ty.span, "typeof is reserved but unimplemented");
}
ast::ty_infer => {
// ty_infer should only appear as the type of arguments or return
// values in a fn_expr, or as the type of local variables. Both of
// these cases are handled specially and should not descend into this
// routine.
this.tcx().sess.span_bug(
ast_ty.span,
"found `ty_infer` in unexpected place");
}
ast::ty_mac(_) => {
tcx.sess.span_bug(ast_ty.span,
"found `ty_mac` in unexpected place");
}
};
tcx.ast_ty_to_ty_cache.insert(ast_ty.id, ty::atttce_resolved(typ));
return typ;
}
pub fn ty_of_arg<AC:AstConv,
RS:RegionScope + Clone + 'static>(
this: &AC,
rscope: &RS,
a: &ast::arg,
expected_ty: Option<ty::t>)
-> ty::t {
match a.ty.node {
ast::ty_infer if expected_ty.is_some() => expected_ty.unwrap(),
ast::ty_infer => this.ty_infer(a.ty.span),
_ => ast_ty_to_ty(this, rscope, &a.ty),
}
}
pub fn bound_lifetimes<AC:AstConv>(
this: &AC,
ast_lifetimes: &OptVec<ast::Lifetime>) -> OptVec<ast::Ident>
{
/*!
*
* Converts a list of lifetimes into a list of bound identifier
* names. Does not permit special names like 'static or 'self to
* be bound. Note that this function is for use in closures,
* methods, and fn definitions. It is legal to bind 'self in a
* type. Eventually this distinction should go away and the same
* rules should apply everywhere ('self would not be a special name
* at that point).
*/
let special_idents = [special_idents::statik, special_idents::self_];
let mut bound_lifetime_names = opt_vec::Empty;
ast_lifetimes.map_to_vec(|ast_lifetime| {
if special_idents.iter().any(|&i| i == ast_lifetime.ident) {
this.tcx().sess.span_err(
ast_lifetime.span,
fmt!("illegal lifetime parameter name: `%s`",
lifetime_to_str(ast_lifetime, this.tcx().sess.intr())));
} else {
bound_lifetime_names.push(ast_lifetime.ident);
}
});
bound_lifetime_names
}
struct SelfInfo {
untransformed_self_ty: ty::t,
explicit_self: ast::explicit_self
}
pub fn ty_of_method<AC:AstConv,RS:RegionScope + Clone + 'static>(
this: &AC,
rscope: &RS,
purity: ast::purity,
lifetimes: &OptVec<ast::Lifetime>,
untransformed_self_ty: ty::t,
explicit_self: ast::explicit_self,
decl: &ast::fn_decl) -> (Option<ty::t>, ty::BareFnTy)
{
let self_info = SelfInfo {
untransformed_self_ty: untransformed_self_ty,
explicit_self: explicit_self
};
let (a, b) = ty_of_method_or_bare_fn(
this, rscope, purity, AbiSet::Rust(), lifetimes, Some(&self_info), decl);
(a.unwrap(), b)
}
pub fn ty_of_bare_fn<AC:AstConv,RS:RegionScope + Clone + 'static>(
this: &AC,
rscope: &RS,
purity: ast::purity,
abi: AbiSet,
lifetimes: &OptVec<ast::Lifetime>,
decl: &ast::fn_decl) -> ty::BareFnTy
{
let (_, b) = ty_of_method_or_bare_fn(
this, rscope, purity, abi, lifetimes, None, decl);
b
}
fn ty_of_method_or_bare_fn<AC:AstConv,RS:RegionScope + Clone + 'static>(
this: &AC,
rscope: &RS,
purity: ast::purity,
abi: AbiSet,
lifetimes: &OptVec<ast::Lifetime>,
opt_self_info: Option<&SelfInfo>,
decl: &ast::fn_decl) -> (Option<Option<ty::t>>, ty::BareFnTy)
{
debug!("ty_of_bare_fn");
// new region names that appear inside of the fn decl are bound to
// that function type
let bound_lifetime_names = bound_lifetimes(this, lifetimes);
let rb =
in_binding_rscope(rscope,
RegionParamNames(bound_lifetime_names.clone()));
let opt_transformed_self_ty = do opt_self_info.map_move |self_info| {
transform_self_ty(this, &rb, self_info)
};
let input_tys = decl.inputs.map(|a| ty_of_arg(this, &rb, a, None));
let output_ty = match decl.output.node {
ast::ty_infer => this.ty_infer(decl.output.span),
_ => ast_ty_to_ty(this, &rb, &decl.output)
};
return (opt_transformed_self_ty,
ty::BareFnTy {
purity: purity,
abis: abi,
sig: ty::FnSig {bound_lifetime_names: bound_lifetime_names,
inputs: input_tys,
output: output_ty}
});
fn transform_self_ty<AC:AstConv,RS:RegionScope + Clone + 'static>(
this: &AC,
rscope: &RS,
self_info: &SelfInfo) -> Option<ty::t>
|
{
match self_info.explicit_self.node {
ast::sty_static => None,
ast::sty_value => {
Some(self_info.untransformed_self_ty)
}
ast::sty_region(ref lifetime, mutability) => {
let region =
ast_region_to_region(this, rscope,
self_info.explicit_self.span,
lifetime);
Some(ty::mk_rptr(this.tcx(), region,
ty::mt {ty: self_info.untransformed_self_ty,
mutbl: mutability}))
}
ast::sty_box(mutability) => {
Some(ty::mk_box(this.tcx(),
ty::mt {ty: self_info.untransformed_self_ty,
mutbl: mutability}))
}
|
identifier_body
|
|
lib.rs
|
//#![feature(iter_arith_traits)]
//#![feature(conservative_impl_trait)]
//#![feature(reflect_marker)]
#![feature(specialization)]
extern crate byteorder;
extern crate densearray;
extern crate float;
extern crate iter_utils;
#[cfg(feature = "mkldnn")]
extern crate mkl_dnn;
//extern crate neuralops_kernels;
//extern crate neuralops_omp_kernels;
//extern crate nnpack;
extern crate operator;
//extern crate rayon;
extern crate rng;
extern crate sharedmem;
extern crate stb_image;
extern crate turbojpeg;
//extern crate typemap;
extern crate varraydb;
extern crate libc;
extern crate rand;
|
pub mod archs_mkl;
pub mod checkpoint;
pub mod class_loss;
pub mod common;
pub mod conv;
pub mod conv_gemm;
#[cfg(feature = "mkldnn")]
pub mod conv_mkldnn;
#[cfg(feature = "mkl")]
pub mod conv_mkl;
#[cfg(not(feature = "mkl"))]
pub mod conv_nnpack;
pub mod data;
pub mod deconv;
pub mod dropout;
pub mod dummy;
pub mod input;
pub mod join;
pub mod kernels;
pub mod loss;
pub mod mux;
//pub mod param;
pub mod pool;
#[cfg(feature = "mkldnn")]
pub mod pool_mkldnn;
pub mod prelude;
pub mod regress_loss;
pub mod split;
pub mod util;
|
pub mod affine;
pub mod archs;
#[cfg(feature = "mkldnn")]
|
random_line_split
|
player.rs
|
use rand::Rng;
use super::{Gun, PowerupKind};
use geometry::{Advance, Collide, Point, Position, Size, Vector};
use geometry_derive::{Advance, Position};
/// The `Player` is the rocket controlled by the user
#[derive(Default, Advance, Position)]
pub struct Player {
pub vector: Vector,
pub is_dead: bool,
pub powerup: Option<PowerupKind>,
pub gun: Gun,
}
/// The player is represented as the polygon below
pub const POLYGON: &'static [[f32; 2]] = &[[-10.0, -8.0], [10.0, 0.0], [-10.0, 8.0]];
impl Player {
/// Create a new `Player` with a random position and direction
pub fn random<R: Rng>(rng: &mut R, bounds: Size) -> Player {
Player {
vector: Vector::random(rng, bounds),
is_dead: true,
powerup: None,
gun: Gun::new(),
}
}
/// Returns the front of the rocket
pub fn front(&self) -> Point {
Point::new(POLYGON[1][0], POLYGON[1][1])
.rotate(self.direction())
.translate(&self.position())
}
}
impl Collide for Player {
|
}
}
|
fn radius(&self) -> f32 {
6.0
|
random_line_split
|
player.rs
|
use rand::Rng;
use super::{Gun, PowerupKind};
use geometry::{Advance, Collide, Point, Position, Size, Vector};
use geometry_derive::{Advance, Position};
/// The `Player` is the rocket controlled by the user
#[derive(Default, Advance, Position)]
pub struct Player {
pub vector: Vector,
pub is_dead: bool,
pub powerup: Option<PowerupKind>,
pub gun: Gun,
}
/// The player is represented as the polygon below
pub const POLYGON: &'static [[f32; 2]] = &[[-10.0, -8.0], [10.0, 0.0], [-10.0, 8.0]];
impl Player {
/// Create a new `Player` with a random position and direction
pub fn random<R: Rng>(rng: &mut R, bounds: Size) -> Player {
Player {
vector: Vector::random(rng, bounds),
is_dead: true,
powerup: None,
gun: Gun::new(),
}
}
/// Returns the front of the rocket
pub fn front(&self) -> Point {
Point::new(POLYGON[1][0], POLYGON[1][1])
.rotate(self.direction())
.translate(&self.position())
}
}
impl Collide for Player {
fn
|
(&self) -> f32 {
6.0
}
}
|
radius
|
identifier_name
|
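Player::front() in the two player.rs rows above takes the nose vertex of POLYGON, rotates it by the player's direction, and translates it to the player's position. The following standalone numeric sketch assumes the rotate/translate methods of the game's geometry crate behave like a standard 2-D rotation followed by a translation; it is plain f32 math, not the crate's actual code.
// Standalone sketch: rotate the nose vertex of POLYGON by `direction`,
// then translate it by the player's position.
const POLYGON: &[[f32; 2]] = &[[-10.0, -8.0], [10.0, 0.0], [-10.0, 8.0]];

fn front(position: (f32, f32), direction: f32) -> (f32, f32) {
    let (x, y) = (POLYGON[1][0], POLYGON[1][1]);
    let (sin, cos) = direction.sin_cos();
    // 2-D rotation about the origin, then translation to the player's position.
    (x * cos - y * sin + position.0, x * sin + y * cos + position.1)
}

fn main() {
    // With direction = 0 the nose sits 10 units ahead of the position on the x axis.
    let (fx, fy) = front((100.0, 50.0), 0.0);
    assert!((fx - 110.0).abs() < 1e-5 && (fy - 50.0).abs() < 1e-5);
}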
reflect.rs
|
= self.bcx;
let str_ty = ty::mk_str_slice(bcx.tcx(), ty::ReStatic, ast::MutImmutable);
let scratch = rvalue_scratch_datum(bcx, str_ty, "");
let len = C_uint(bcx.ccx(), s.get().len());
let c_str = PointerCast(bcx, C_cstr(bcx.ccx(), s, false), Type::i8p(bcx.ccx()));
Store(bcx, c_str, GEPi(bcx, scratch.val, [ 0, 0 ]));
Store(bcx, len, GEPi(bcx, scratch.val, [ 0, 1 ]));
scratch.val
}
pub fn c_size_and_align(&mut self, t: ty::t) -> Vec<ValueRef> {
let tr = type_of(self.bcx.ccx(), t);
let s = machine::llsize_of_real(self.bcx.ccx(), tr);
let a = align_of(self.bcx.ccx(), t);
return vec!(self.c_uint(s as uint),
self.c_uint(a as uint));
}
pub fn c_tydesc(&mut self, t: ty::t) -> ValueRef {
let bcx = self.bcx;
let static_ti = get_tydesc(bcx.ccx(), t);
glue::lazily_emit_visit_glue(bcx.ccx(), &*static_ti);
PointerCast(bcx, static_ti.tydesc, self.tydesc_ty.ptr_to())
}
pub fn c_mt(&mut self, mt: &ty::mt) -> Vec<ValueRef> {
vec!(self.c_uint(mt.mutbl as uint),
self.c_tydesc(mt.ty))
}
pub fn visit(&mut self, ty_name: &str, args: &[ValueRef]) {
|
"visit_{}", ty_name).as_slice()),
self.visitor_items.as_slice()).expect(
format!("couldn't find visit method for {}", ty_name).as_slice());
let method = match self.visitor_items[mth_idx] {
ty::MethodTraitItem(ref method) => (*method).clone(),
};
let mth_ty = ty::mk_bare_fn(tcx, method.fty.clone());
debug!("Emit call visit method: visit_{}: {}", ty_name, ty_to_string(tcx, mth_ty));
let v = self.visitor_val;
debug!("passing {} args:", args.len());
let mut bcx = self.bcx;
for (i, a) in args.iter().enumerate() {
debug!("arg {}: {}", i, bcx.val_to_string(*a));
}
let result = unpack_result!(bcx, callee::trans_call_inner(
self.bcx, None, mth_ty,
|bcx, _| meth::trans_trait_callee_from_llval(bcx,
mth_ty,
mth_idx,
v),
ArgVals(args), None));
let next_bcx = fcx.new_temp_block("next");
CondBr(bcx, result, next_bcx.llbb, self.final_bcx.llbb);
self.bcx = next_bcx
}
pub fn bracketed(&mut self,
bracket_name: &str,
extra: &[ValueRef],
inner: |&mut Reflector|) {
self.visit(format!("enter_{}", bracket_name).as_slice(), extra);
inner(self);
self.visit(format!("leave_{}", bracket_name).as_slice(), extra);
}
pub fn leaf(&mut self, name: &str) {
self.visit(name, []);
}
// Entrypoint
pub fn visit_ty(&mut self, t: ty::t) {
let bcx = self.bcx;
let tcx = bcx.tcx();
debug!("reflect::visit_ty {}", ty_to_string(bcx.tcx(), t));
match ty::get(t).sty {
ty::ty_bot => self.leaf("bot"),
ty::ty_nil => self.leaf("nil"),
ty::ty_bool => self.leaf("bool"),
ty::ty_char => self.leaf("char"),
ty::ty_int(ast::TyI) => self.leaf("int"),
ty::ty_int(ast::TyI8) => self.leaf("i8"),
ty::ty_int(ast::TyI16) => self.leaf("i16"),
ty::ty_int(ast::TyI32) => self.leaf("i32"),
ty::ty_int(ast::TyI64) => self.leaf("i64"),
ty::ty_uint(ast::TyU) => self.leaf("uint"),
ty::ty_uint(ast::TyU8) => self.leaf("u8"),
ty::ty_uint(ast::TyU16) => self.leaf("u16"),
ty::ty_uint(ast::TyU32) => self.leaf("u32"),
ty::ty_uint(ast::TyU64) => self.leaf("u64"),
ty::ty_float(ast::TyF32) => self.leaf("f32"),
ty::ty_float(ast::TyF64) => self.leaf("f64"),
ty::ty_open(_) | ty::ty_str | ty::ty_vec(_, None) | ty::ty_trait(..) => {
// Unfortunately we can't do anything here because at runtime we
// pass around the value by pointer (*u8). But unsized pointers are
// fat and so we can't just cast them to *u8 and back. So we have
// to work with the pointer directly (see ty_rptr/ty_uniq).
fail!("Can't reflect unsized type")
}
// FIXME(15049) Reflection for unsized structs.
ty::ty_struct(..) if !ty::type_is_sized(bcx.tcx(), t) => {
fail!("Can't reflect unsized type")
}
// Should rename to vec_*.
ty::ty_vec(ty, Some(sz)) => {
let mut extra = (vec!(self.c_uint(sz))).append(self.c_size_and_align(t).as_slice());
extra.push(self.c_tydesc(ty));
self.visit("evec_fixed", extra.as_slice())
}
// Should remove mt from box and uniq.
ty::ty_box(typ) => {
let extra = self.c_mt(&ty::mt {
ty: typ,
mutbl: ast::MutImmutable,
});
self.visit("box", extra.as_slice())
}
ty::ty_ptr(ref mt) => {
let extra = self.c_mt(mt);
self.visit("ptr", extra.as_slice())
}
ty::ty_uniq(typ) => {
match ty::get(typ).sty {
ty::ty_trait(..) => {
let extra = [
self.c_slice(token::intern_and_get_ident(
ty_to_string(tcx, t).as_slice()))
];
self.visit("trait", extra);
}
// FIXME(15049) allow reflection of Box<[T]>. You'll need to
// restore visit_evec_uniq.
ty::ty_vec(_, None) => {
fail!("Box<[T]> theoretically doesn't exist, so don't try to reflect it")
}
ty::ty_str => fail!("Can't reflect Box<str> which shouldn't be used anyway"),
_ => {
let extra = self.c_mt(&ty::mt {
ty: typ,
mutbl: ast::MutImmutable,
});
self.visit("uniq", extra.as_slice())
}
}
}
ty::ty_rptr(_, ref mt) => {
match ty::get(mt.ty).sty {
ty::ty_vec(ty, None) => {
let extra = self.c_mt(&ty::mt{ty: ty, mutbl: mt.mutbl});
self.visit("evec_slice", extra.as_slice())
}
ty::ty_str => self.visit("estr_slice", &[]),
ty::ty_trait(..) => {
let extra = [
self.c_slice(token::intern_and_get_ident(
ty_to_string(tcx, t).as_slice()))
];
self.visit("trait", extra);
}
_ => {
let extra = self.c_mt(mt);
self.visit("rptr", extra.as_slice())
}
}
}
ty::ty_tup(ref tys) => {
let extra = (vec!(self.c_uint(tys.len())))
.append(self.c_size_and_align(t).as_slice());
self.bracketed("tup", extra.as_slice(), |this| {
for (i, t) in tys.iter().enumerate() {
let extra = vec!(this.c_uint(i), this.c_tydesc(*t));
this.visit("tup_field", extra.as_slice());
}
})
}
// FIXME (#2594): fetch constants out of intrinsic
// FIXME (#4809): visitor should break out bare fns from other fns
ty::ty_closure(ref fty) => {
let pureval = ast_fn_style_constant(fty.fn_style);
let sigilval = match fty.store {
ty::UniqTraitStore => 2u,
ty::RegionTraitStore(..) => 4u,
};
let retval = if ty::type_is_bot(fty.sig.output) {0u} else {1u};
let extra = vec!(self.c_uint(pureval),
self.c_uint(sigilval),
self.c_uint(fty.sig.inputs.len()),
self.c_uint(retval));
self.visit("enter_fn", extra.as_slice());
self.visit_sig(retval, &fty.sig);
self.visit("leave_fn", extra.as_slice());
}
// FIXME (#2594): fetch constants out of intrinsic:: for the
// numbers.
ty::ty_bare_fn(ref fty) => {
let pureval = ast_fn_style_constant(fty.fn_style);
let sigilval = 0u;
let retval = if ty::type_is_bot(fty.sig.output) {0u} else {1u};
let extra = vec!(self.c_uint(pureval),
self.c_uint(sigilval),
self.c_uint(fty.sig.inputs.len()),
self.c_uint(retval));
self.visit("enter_fn", extra.as_slice());
self.visit_sig(retval, &fty.sig);
self.visit("leave_fn", extra.as_slice());
}
ty::ty_struct(did, ref substs) => {
let fields = ty::struct_fields(tcx, did, substs);
let mut named_fields = false;
if !fields.is_empty() {
named_fields = fields.get(0).ident.name !=
special_idents::unnamed_field.name;
}
// This and the type_is_sized check on individual field types are
// because we cannot reflect unsized types (see note above). We
// just pretend the unsized field does not exist and print nothing.
// This is sub-optimal.
let len = fields.len();
let extra = (vec!(
self.c_slice(
token::intern_and_get_ident(ty_to_string(tcx,
t).as_slice())),
self.c_bool(named_fields),
self.c_uint(len)
)).append(self.c_size_and_align(t).as_slice());
self.bracketed("class", extra.as_slice(), |this| {
for (i, field) in fields.iter().enumerate() {
let extra = (vec!(
this.c_uint(i),
this.c_slice(token::get_ident(field.ident)),
this.c_bool(named_fields)
)).append(this.c_mt(&field.mt).as_slice());
this.visit("class_field", extra.as_slice());
}
})
}
// FIXME (#2595): visiting all the variants in turn is probably
// not ideal. It'll work but will get costly on big enums. Maybe
// let the visitor tell us if it wants to visit only a particular
// variant?
ty::ty_enum(did, ref substs) => {
let ccx = bcx.ccx();
let repr = adt::represent_type(bcx.ccx(), t);
let variants = ty::substd_enum_variants(ccx.tcx(), did, substs);
let llptrty = type_of(ccx, t).ptr_to();
let opaquety = ty::get_opaque_ty(ccx.tcx()).unwrap();
let opaqueptrty = ty::mk_ptr(ccx.tcx(), ty::mt { ty: opaquety,
mutbl: ast::MutImmutable });
let make_get_disr = || {
let sym = mangle_internal_name_by_path_and_seq(
ast_map::Values([].iter()).chain(None), "get_disr");
let fn_ty = ty::mk_ctor_fn(&ccx.tcx, ast::DUMMY_NODE_ID,
[opaqueptrty], ty::mk_u64());
let llfdecl = decl_internal_rust_fn(ccx,
fn_ty,
sym.as_slice());
let arena = TypedArena::new();
let empty_param_substs = param_substs::empty();
let fcx = new_fn_ctxt(ccx, llfdecl, ast::DUMMY_NODE_ID, false,
ty::mk_u64(), &empty_param_substs,
None, &arena);
let bcx = init_function(&fcx, false, ty::mk_u64());
// we know the return type of llfdecl is an int here, so
// no need for a special check to see if the return type
// is immediate.
let arg = get_param(llfdecl, fcx.arg_pos(0u) as c_uint);
let arg = BitCast(bcx, arg, llptrty);
let ret = adt::trans_get_discr(bcx, &*repr, arg, Some(Type::i64(ccx)));
assert!(!fcx.needs_ret_allocas);
let ret_slot = fcx.get_ret_slot(bcx, ty::mk_u64(), "ret_slot");
Store(bcx, ret, ret_slot);
match fcx.llreturn.get() {
Some(llreturn) => Br(bcx, llreturn),
None => {}
};
finish_fn(&fcx, bcx, ty::mk_u64());
llfdecl
};
let enum_args = (vec!(self.c_uint(variants.len()), make_get_disr()))
.append(self.c_size_and_align(t).as_slice());
self.bracketed("enum", enum_args.as_slice(), |this| {
for (i, v) in variants.iter().enumerate() {
let name = token::get_ident(v.name);
let variant_args = [this.c_uint(i),
C_u64(ccx, v.disr_val),
this.c_uint(v.args.len()),
this.c_slice(name)];
this.bracketed("enum_variant",
variant_args,
|this| {
for (j, a) in v.args.iter().enumerate() {
let bcx = this.bcx;
let null = C_null(llptrty);
let ptr = adt::trans_field_ptr(bcx, &*repr, null, v.disr_val, j);
|
let fcx = self.bcx.fcx;
let tcx = self.bcx.tcx();
let mth_idx = ty::impl_or_trait_item_idx(token::str_to_ident(format!(
|
random_line_split
|
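The comment in the reflect.rs row above explains why unsized types cannot be visited through a thin *u8: pointers to unsized data are fat (data pointer plus length or vtable), so casting them to a thin pointer and back would lose half the representation. A standalone check of that size difference in modern Rust:
// Fat vs. thin pointers: a slice or trait-object pointer carries extra metadata,
// so it does not fit in a single *const u8.
use std::mem::size_of;

fn main() {
    assert_eq!(size_of::<*const u8>(), size_of::<usize>());
    // *const [u8] stores (data pointer, length): twice the size of a thin pointer.
    assert_eq!(size_of::<*const [u8]>(), 2 * size_of::<usize>());
    // Trait-object pointers store (data pointer, vtable pointer).
    assert_eq!(size_of::<*const dyn std::fmt::Debug>(), 2 * size_of::<usize>());
}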
reflect.rs
|
self.bcx;
let str_ty = ty::mk_str_slice(bcx.tcx(), ty::ReStatic, ast::MutImmutable);
let scratch = rvalue_scratch_datum(bcx, str_ty, "");
let len = C_uint(bcx.ccx(), s.get().len());
let c_str = PointerCast(bcx, C_cstr(bcx.ccx(), s, false), Type::i8p(bcx.ccx()));
Store(bcx, c_str, GEPi(bcx, scratch.val, [ 0, 0 ]));
Store(bcx, len, GEPi(bcx, scratch.val, [ 0, 1 ]));
scratch.val
}
pub fn c_size_and_align(&mut self, t: ty::t) -> Vec<ValueRef> {
let tr = type_of(self.bcx.ccx(), t);
let s = machine::llsize_of_real(self.bcx.ccx(), tr);
let a = align_of(self.bcx.ccx(), t);
return vec!(self.c_uint(s as uint),
self.c_uint(a as uint));
}
pub fn c_tydesc(&mut self, t: ty::t) -> ValueRef
|
pub fn c_mt(&mut self, mt: &ty::mt) -> Vec<ValueRef> {
vec!(self.c_uint(mt.mutbl as uint),
self.c_tydesc(mt.ty))
}
pub fn visit(&mut self, ty_name: &str, args: &[ValueRef]) {
let fcx = self.bcx.fcx;
let tcx = self.bcx.tcx();
let mth_idx = ty::impl_or_trait_item_idx(token::str_to_ident(format!(
"visit_{}", ty_name).as_slice()),
self.visitor_items.as_slice()).expect(
format!("couldn't find visit method for {}", ty_name).as_slice());
let method = match self.visitor_items[mth_idx] {
ty::MethodTraitItem(ref method) => (*method).clone(),
};
let mth_ty = ty::mk_bare_fn(tcx, method.fty.clone());
debug!("Emit call visit method: visit_{}: {}", ty_name, ty_to_string(tcx, mth_ty));
let v = self.visitor_val;
debug!("passing {} args:", args.len());
let mut bcx = self.bcx;
for (i, a) in args.iter().enumerate() {
debug!("arg {}: {}", i, bcx.val_to_string(*a));
}
let result = unpack_result!(bcx, callee::trans_call_inner(
self.bcx, None, mth_ty,
|bcx, _| meth::trans_trait_callee_from_llval(bcx,
mth_ty,
mth_idx,
v),
ArgVals(args), None));
let next_bcx = fcx.new_temp_block("next");
CondBr(bcx, result, next_bcx.llbb, self.final_bcx.llbb);
self.bcx = next_bcx
}
pub fn bracketed(&mut self,
bracket_name: &str,
extra: &[ValueRef],
inner: |&mut Reflector|) {
self.visit(format!("enter_{}", bracket_name).as_slice(), extra);
inner(self);
self.visit(format!("leave_{}", bracket_name).as_slice(), extra);
}
pub fn leaf(&mut self, name: &str) {
self.visit(name, []);
}
// Entrypoint
pub fn visit_ty(&mut self, t: ty::t) {
let bcx = self.bcx;
let tcx = bcx.tcx();
debug!("reflect::visit_ty {}", ty_to_string(bcx.tcx(), t));
match ty::get(t).sty {
ty::ty_bot => self.leaf("bot"),
ty::ty_nil => self.leaf("nil"),
ty::ty_bool => self.leaf("bool"),
ty::ty_char => self.leaf("char"),
ty::ty_int(ast::TyI) => self.leaf("int"),
ty::ty_int(ast::TyI8) => self.leaf("i8"),
ty::ty_int(ast::TyI16) => self.leaf("i16"),
ty::ty_int(ast::TyI32) => self.leaf("i32"),
ty::ty_int(ast::TyI64) => self.leaf("i64"),
ty::ty_uint(ast::TyU) => self.leaf("uint"),
ty::ty_uint(ast::TyU8) => self.leaf("u8"),
ty::ty_uint(ast::TyU16) => self.leaf("u16"),
ty::ty_uint(ast::TyU32) => self.leaf("u32"),
ty::ty_uint(ast::TyU64) => self.leaf("u64"),
ty::ty_float(ast::TyF32) => self.leaf("f32"),
ty::ty_float(ast::TyF64) => self.leaf("f64"),
ty::ty_open(_) | ty::ty_str | ty::ty_vec(_, None) | ty::ty_trait(..) => {
// Unfortunately we can't do anything here because at runtime we
// pass around the value by pointer (*u8). But unsized pointers are
// fat and so we can't just cast them to *u8 and back. So we have
// to work with the pointer directly (see ty_rptr/ty_uniq).
fail!("Can't reflect unsized type")
}
// FIXME(15049) Reflection for unsized structs.
ty::ty_struct(..) if !ty::type_is_sized(bcx.tcx(), t) => {
fail!("Can't reflect unsized type")
}
// Should rename to vec_*.
ty::ty_vec(ty, Some(sz)) => {
let mut extra = (vec!(self.c_uint(sz))).append(self.c_size_and_align(t).as_slice());
extra.push(self.c_tydesc(ty));
self.visit("evec_fixed", extra.as_slice())
}
// Should remove mt from box and uniq.
ty::ty_box(typ) => {
let extra = self.c_mt(&ty::mt {
ty: typ,
mutbl: ast::MutImmutable,
});
self.visit("box", extra.as_slice())
}
ty::ty_ptr(ref mt) => {
let extra = self.c_mt(mt);
self.visit("ptr", extra.as_slice())
}
ty::ty_uniq(typ) => {
match ty::get(typ).sty {
ty::ty_trait(..) => {
let extra = [
self.c_slice(token::intern_and_get_ident(
ty_to_string(tcx, t).as_slice()))
];
self.visit("trait", extra);
}
// FIXME(15049) allow reflection of Box<[T]>. You'll need to
// restore visit_evec_uniq.
ty::ty_vec(_, None) => {
fail!("Box<[T]> theoretically doesn't exist, so don't try to reflect it")
}
ty::ty_str => fail!("Can't reflect Box<str> which shouldn't be used anyway"),
_ => {
let extra = self.c_mt(&ty::mt {
ty: typ,
mutbl: ast::MutImmutable,
});
self.visit("uniq", extra.as_slice())
}
}
}
ty::ty_rptr(_, ref mt) => {
match ty::get(mt.ty).sty {
ty::ty_vec(ty, None) => {
let extra = self.c_mt(&ty::mt{ty: ty, mutbl: mt.mutbl});
self.visit("evec_slice", extra.as_slice())
}
ty::ty_str => self.visit("estr_slice", &[]),
ty::ty_trait(..) => {
let extra = [
self.c_slice(token::intern_and_get_ident(
ty_to_string(tcx, t).as_slice()))
];
self.visit("trait", extra);
}
_ => {
let extra = self.c_mt(mt);
self.visit("rptr", extra.as_slice())
}
}
}
ty::ty_tup(ref tys) => {
let extra = (vec!(self.c_uint(tys.len())))
.append(self.c_size_and_align(t).as_slice());
self.bracketed("tup", extra.as_slice(), |this| {
for (i, t) in tys.iter().enumerate() {
let extra = vec!(this.c_uint(i), this.c_tydesc(*t));
this.visit("tup_field", extra.as_slice());
}
})
}
// FIXME (#2594): fetch constants out of intrinsic
// FIXME (#4809): visitor should break out bare fns from other fns
ty::ty_closure(ref fty) => {
let pureval = ast_fn_style_constant(fty.fn_style);
let sigilval = match fty.store {
ty::UniqTraitStore => 2u,
ty::RegionTraitStore(..) => 4u,
};
let retval = if ty::type_is_bot(fty.sig.output) {0u} else {1u};
let extra = vec!(self.c_uint(pureval),
self.c_uint(sigilval),
self.c_uint(fty.sig.inputs.len()),
self.c_uint(retval));
self.visit("enter_fn", extra.as_slice());
self.visit_sig(retval, &fty.sig);
self.visit("leave_fn", extra.as_slice());
}
// FIXME (#2594): fetch constants out of intrinsic:: for the
// numbers.
ty::ty_bare_fn(ref fty) => {
let pureval = ast_fn_style_constant(fty.fn_style);
let sigilval = 0u;
let retval = if ty::type_is_bot(fty.sig.output) {0u} else {1u};
let extra = vec!(self.c_uint(pureval),
self.c_uint(sigilval),
self.c_uint(fty.sig.inputs.len()),
self.c_uint(retval));
self.visit("enter_fn", extra.as_slice());
self.visit_sig(retval, &fty.sig);
self.visit("leave_fn", extra.as_slice());
}
ty::ty_struct(did, ref substs) => {
let fields = ty::struct_fields(tcx, did, substs);
let mut named_fields = false;
if !fields.is_empty() {
named_fields = fields.get(0).ident.name !=
special_idents::unnamed_field.name;
}
// This and the type_is_sized check on individual field types are
// because we cannot reflect unsized types (see note above). We
// just pretend the unsized field does not exist and print nothing.
// This is sub-optimal.
let len = fields.len();
let extra = (vec!(
self.c_slice(
token::intern_and_get_ident(ty_to_string(tcx,
t).as_slice())),
self.c_bool(named_fields),
self.c_uint(len)
)).append(self.c_size_and_align(t).as_slice());
self.bracketed("class", extra.as_slice(), |this| {
for (i, field) in fields.iter().enumerate() {
let extra = (vec!(
this.c_uint(i),
this.c_slice(token::get_ident(field.ident)),
this.c_bool(named_fields)
)).append(this.c_mt(&field.mt).as_slice());
this.visit("class_field", extra.as_slice());
}
})
}
// FIXME (#2595): visiting all the variants in turn is probably
// not ideal. It'll work but will get costly on big enums. Maybe
// let the visitor tell us if it wants to visit only a particular
// variant?
ty::ty_enum(did, ref substs) => {
let ccx = bcx.ccx();
let repr = adt::represent_type(bcx.ccx(), t);
let variants = ty::substd_enum_variants(ccx.tcx(), did, substs);
let llptrty = type_of(ccx, t).ptr_to();
let opaquety = ty::get_opaque_ty(ccx.tcx()).unwrap();
let opaqueptrty = ty::mk_ptr(ccx.tcx(), ty::mt { ty: opaquety,
mutbl: ast::MutImmutable });
let make_get_disr = || {
let sym = mangle_internal_name_by_path_and_seq(
ast_map::Values([].iter()).chain(None), "get_disr");
let fn_ty = ty::mk_ctor_fn(&ccx.tcx, ast::DUMMY_NODE_ID,
[opaqueptrty], ty::mk_u64());
let llfdecl = decl_internal_rust_fn(ccx,
fn_ty,
sym.as_slice());
let arena = TypedArena::new();
let empty_param_substs = param_substs::empty();
let fcx = new_fn_ctxt(ccx, llfdecl, ast::DUMMY_NODE_ID, false,
ty::mk_u64(), &empty_param_substs,
None, &arena);
let bcx = init_function(&fcx, false, ty::mk_u64());
// we know the return type of llfdecl is an int here, so
// no need for a special check to see if the return type
// is immediate.
let arg = get_param(llfdecl, fcx.arg_pos(0u) as c_uint);
let arg = BitCast(bcx, arg, llptrty);
let ret = adt::trans_get_discr(bcx, &*repr, arg, Some(Type::i64(ccx)));
assert!(!fcx.needs_ret_allocas);
let ret_slot = fcx.get_ret_slot(bcx, ty::mk_u64(), "ret_slot");
Store(bcx, ret, ret_slot);
match fcx.llreturn.get() {
Some(llreturn) => Br(bcx, llreturn),
None => {}
};
finish_fn(&fcx, bcx, ty::mk_u64());
llfdecl
};
let enum_args = (vec!(self.c_uint(variants.len()), make_get_disr()))
.append(self.c_size_and_align(t).as_slice());
self.bracketed("enum", enum_args.as_slice(), |this| {
for (i, v) in variants.iter().enumerate() {
let name = token::get_ident(v.name);
let variant_args = [this.c_uint(i),
C_u64(ccx, v.disr_val),
this.c_uint(v.args.len()),
this.c_slice(name)];
this.bracketed("enum_variant",
variant_args,
|this| {
for (j, a) in v.args.iter().enumerate() {
let bcx = this.bcx;
let null = C_null(llptrty);
let ptr = adt::trans_field_ptr(bcx, &*repr, null, v.disr_val, j
|
{
let bcx = self.bcx;
let static_ti = get_tydesc(bcx.ccx(), t);
glue::lazily_emit_visit_glue(bcx.ccx(), &*static_ti);
PointerCast(bcx, static_ti.tydesc, self.tydesc_ty.ptr_to())
}
|
identifier_body
|
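The bracketed helper in the reflect.rs rows wraps a nested visit between an enter_* and a leave_* call, passing the reflector back into the closure. Below is a minimal sketch of that bracketing pattern with a closure; the Visitor type and its log are invented for illustration and are not tied to rustc's Reflector or its visitor trait.
// Sketch of the enter_/leave_ bracketing used by Reflector::bracketed.
struct Visitor { log: Vec<String> }

impl Visitor {
    fn visit(&mut self, name: &str) {
        self.log.push(name.to_string());
    }
    fn bracketed<F: FnOnce(&mut Self)>(&mut self, bracket: &str, inner: F) {
        self.visit(&format!("enter_{}", bracket));
        inner(self);
        self.visit(&format!("leave_{}", bracket));
    }
}

fn main() {
    let mut v = Visitor { log: Vec::new() };
    // Mirrors how visit_ty brackets each tuple: enter_tup, the fields, leave_tup.
    v.bracketed("tup", |this| {
        this.visit("tup_field");
        this.visit("tup_field");
    });
    assert_eq!(v.log, ["enter_tup", "tup_field", "tup_field", "leave_tup"]);
}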
reflect.rs
|
bare_fn(tcx, method.fty.clone());
debug!("Emit call visit method: visit_{}: {}", ty_name, ty_to_string(tcx, mth_ty));
let v = self.visitor_val;
debug!("passing {} args:", args.len());
let mut bcx = self.bcx;
for (i, a) in args.iter().enumerate() {
debug!("arg {}: {}", i, bcx.val_to_string(*a));
}
let result = unpack_result!(bcx, callee::trans_call_inner(
self.bcx, None, mth_ty,
|bcx, _| meth::trans_trait_callee_from_llval(bcx,
mth_ty,
mth_idx,
v),
ArgVals(args), None));
let next_bcx = fcx.new_temp_block("next");
CondBr(bcx, result, next_bcx.llbb, self.final_bcx.llbb);
self.bcx = next_bcx
}
pub fn bracketed(&mut self,
bracket_name: &str,
extra: &[ValueRef],
inner: |&mut Reflector|) {
self.visit(format!("enter_{}", bracket_name).as_slice(), extra);
inner(self);
self.visit(format!("leave_{}", bracket_name).as_slice(), extra);
}
pub fn leaf(&mut self, name: &str) {
self.visit(name, []);
}
// Entrypoint
pub fn visit_ty(&mut self, t: ty::t) {
let bcx = self.bcx;
let tcx = bcx.tcx();
debug!("reflect::visit_ty {}", ty_to_string(bcx.tcx(), t));
match ty::get(t).sty {
ty::ty_bot => self.leaf("bot"),
ty::ty_nil => self.leaf("nil"),
ty::ty_bool => self.leaf("bool"),
ty::ty_char => self.leaf("char"),
ty::ty_int(ast::TyI) => self.leaf("int"),
ty::ty_int(ast::TyI8) => self.leaf("i8"),
ty::ty_int(ast::TyI16) => self.leaf("i16"),
ty::ty_int(ast::TyI32) => self.leaf("i32"),
ty::ty_int(ast::TyI64) => self.leaf("i64"),
ty::ty_uint(ast::TyU) => self.leaf("uint"),
ty::ty_uint(ast::TyU8) => self.leaf("u8"),
ty::ty_uint(ast::TyU16) => self.leaf("u16"),
ty::ty_uint(ast::TyU32) => self.leaf("u32"),
ty::ty_uint(ast::TyU64) => self.leaf("u64"),
ty::ty_float(ast::TyF32) => self.leaf("f32"),
ty::ty_float(ast::TyF64) => self.leaf("f64"),
ty::ty_open(_) | ty::ty_str | ty::ty_vec(_, None) | ty::ty_trait(..) => {
// Unfortunately we can't do anything here because at runtime we
// pass around the value by pointer (*u8). But unsized pointers are
// fat and so we can't just cast them to *u8 and back. So we have
// to work with the pointer directly (see ty_rptr/ty_uniq).
fail!("Can't reflect unsized type")
}
// FIXME(15049) Reflection for unsized structs.
ty::ty_struct(..) if !ty::type_is_sized(bcx.tcx(), t) => {
fail!("Can't reflect unsized type")
}
// Should rename to vec_*.
ty::ty_vec(ty, Some(sz)) => {
let mut extra = (vec!(self.c_uint(sz))).append(self.c_size_and_align(t).as_slice());
extra.push(self.c_tydesc(ty));
self.visit("evec_fixed", extra.as_slice())
}
// Should remove mt from box and uniq.
ty::ty_box(typ) => {
let extra = self.c_mt(&ty::mt {
ty: typ,
mutbl: ast::MutImmutable,
});
self.visit("box", extra.as_slice())
}
ty::ty_ptr(ref mt) => {
let extra = self.c_mt(mt);
self.visit("ptr", extra.as_slice())
}
ty::ty_uniq(typ) => {
match ty::get(typ).sty {
ty::ty_trait(..) => {
let extra = [
self.c_slice(token::intern_and_get_ident(
ty_to_string(tcx, t).as_slice()))
];
self.visit("trait", extra);
}
// FIXME(15049) allow reflection of Box<[T]>. You'll need to
// restore visit_evec_uniq.
ty::ty_vec(_, None) => {
fail!("Box<[T]> theoretically doesn't exist, so don't try to reflect it")
}
ty::ty_str => fail!("Can't reflect Box<str> which shouldn't be used anyway"),
_ => {
let extra = self.c_mt(&ty::mt {
ty: typ,
mutbl: ast::MutImmutable,
});
self.visit("uniq", extra.as_slice())
}
}
}
ty::ty_rptr(_, ref mt) => {
match ty::get(mt.ty).sty {
ty::ty_vec(ty, None) => {
let extra = self.c_mt(&ty::mt{ty: ty, mutbl: mt.mutbl});
self.visit("evec_slice", extra.as_slice())
}
ty::ty_str => self.visit("estr_slice", &[]),
ty::ty_trait(..) => {
let extra = [
self.c_slice(token::intern_and_get_ident(
ty_to_string(tcx, t).as_slice()))
];
self.visit("trait", extra);
}
_ => {
let extra = self.c_mt(mt);
self.visit("rptr", extra.as_slice())
}
}
}
ty::ty_tup(ref tys) => {
let extra = (vec!(self.c_uint(tys.len())))
.append(self.c_size_and_align(t).as_slice());
self.bracketed("tup", extra.as_slice(), |this| {
for (i, t) in tys.iter().enumerate() {
let extra = vec!(this.c_uint(i), this.c_tydesc(*t));
this.visit("tup_field", extra.as_slice());
}
})
}
// FIXME (#2594): fetch constants out of intrinsic
// FIXME (#4809): visitor should break out bare fns from other fns
ty::ty_closure(ref fty) => {
let pureval = ast_fn_style_constant(fty.fn_style);
let sigilval = match fty.store {
ty::UniqTraitStore => 2u,
ty::RegionTraitStore(..) => 4u,
};
let retval = if ty::type_is_bot(fty.sig.output) {0u} else {1u};
let extra = vec!(self.c_uint(pureval),
self.c_uint(sigilval),
self.c_uint(fty.sig.inputs.len()),
self.c_uint(retval));
self.visit("enter_fn", extra.as_slice());
self.visit_sig(retval, &fty.sig);
self.visit("leave_fn", extra.as_slice());
}
// FIXME (#2594): fetch constants out of intrinsic:: for the
// numbers.
ty::ty_bare_fn(ref fty) => {
let pureval = ast_fn_style_constant(fty.fn_style);
let sigilval = 0u;
let retval = if ty::type_is_bot(fty.sig.output) {0u} else {1u};
let extra = vec!(self.c_uint(pureval),
self.c_uint(sigilval),
self.c_uint(fty.sig.inputs.len()),
self.c_uint(retval));
self.visit("enter_fn", extra.as_slice());
self.visit_sig(retval, &fty.sig);
self.visit("leave_fn", extra.as_slice());
}
ty::ty_struct(did, ref substs) => {
let fields = ty::struct_fields(tcx, did, substs);
let mut named_fields = false;
if !fields.is_empty() {
named_fields = fields.get(0).ident.name !=
special_idents::unnamed_field.name;
}
// This and the type_is_sized check on individual field types are
// because we cannot reflect unsized types (see note above). We
// just pretend the unsized field does not exist and print nothing.
// This is sub-optimal.
let len = fields.len();
let extra = (vec!(
self.c_slice(
token::intern_and_get_ident(ty_to_string(tcx,
t).as_slice())),
self.c_bool(named_fields),
self.c_uint(len)
)).append(self.c_size_and_align(t).as_slice());
self.bracketed("class", extra.as_slice(), |this| {
for (i, field) in fields.iter().enumerate() {
let extra = (vec!(
this.c_uint(i),
this.c_slice(token::get_ident(field.ident)),
this.c_bool(named_fields)
)).append(this.c_mt(&field.mt).as_slice());
this.visit("class_field", extra.as_slice());
}
})
}
// FIXME (#2595): visiting all the variants in turn is probably
// not ideal. It'll work but will get costly on big enums. Maybe
// let the visitor tell us if it wants to visit only a particular
// variant?
ty::ty_enum(did, ref substs) => {
let ccx = bcx.ccx();
let repr = adt::represent_type(bcx.ccx(), t);
let variants = ty::substd_enum_variants(ccx.tcx(), did, substs);
let llptrty = type_of(ccx, t).ptr_to();
let opaquety = ty::get_opaque_ty(ccx.tcx()).unwrap();
let opaqueptrty = ty::mk_ptr(ccx.tcx(), ty::mt { ty: opaquety,
mutbl: ast::MutImmutable });
let make_get_disr = || {
let sym = mangle_internal_name_by_path_and_seq(
ast_map::Values([].iter()).chain(None), "get_disr");
let fn_ty = ty::mk_ctor_fn(&ccx.tcx, ast::DUMMY_NODE_ID,
[opaqueptrty], ty::mk_u64());
let llfdecl = decl_internal_rust_fn(ccx,
fn_ty,
sym.as_slice());
let arena = TypedArena::new();
let empty_param_substs = param_substs::empty();
let fcx = new_fn_ctxt(ccx, llfdecl, ast::DUMMY_NODE_ID, false,
ty::mk_u64(), &empty_param_substs,
None, &arena);
let bcx = init_function(&fcx, false, ty::mk_u64());
// we know the return type of llfdecl is an int here, so
// no need for a special check to see if the return type
// is immediate.
let arg = get_param(llfdecl, fcx.arg_pos(0u) as c_uint);
let arg = BitCast(bcx, arg, llptrty);
let ret = adt::trans_get_discr(bcx, &*repr, arg, Some(Type::i64(ccx)));
assert!(!fcx.needs_ret_allocas);
let ret_slot = fcx.get_ret_slot(bcx, ty::mk_u64(), "ret_slot");
Store(bcx, ret, ret_slot);
match fcx.llreturn.get() {
Some(llreturn) => Br(bcx, llreturn),
None => {}
};
finish_fn(&fcx, bcx, ty::mk_u64());
llfdecl
};
let enum_args = (vec!(self.c_uint(variants.len()), make_get_disr()))
.append(self.c_size_and_align(t).as_slice());
self.bracketed("enum", enum_args.as_slice(), |this| {
for (i, v) in variants.iter().enumerate() {
let name = token::get_ident(v.name);
let variant_args = [this.c_uint(i),
C_u64(ccx, v.disr_val),
this.c_uint(v.args.len()),
this.c_slice(name)];
this.bracketed("enum_variant",
variant_args,
|this| {
for (j, a) in v.args.iter().enumerate() {
let bcx = this.bcx;
let null = C_null(llptrty);
let ptr = adt::trans_field_ptr(bcx, &*repr, null, v.disr_val, j);
let offset = p2i(ccx, ptr);
let field_args = [this.c_uint(j),
offset,
this.c_tydesc(*a)];
this.visit("enum_variant_field",
field_args);
}
})
}
})
}
// Miscellaneous extra types
ty::ty_infer(_) => self.leaf("infer"),
ty::ty_err => self.leaf("err"),
ty::ty_unboxed_closure(..) => self.leaf("err"),
ty::ty_param(ref p) => {
let extra = vec!(self.c_uint(p.idx));
self.visit("param", extra.as_slice())
}
}
}
pub fn visit_sig(&mut self, retval: uint, sig: &ty::FnSig) {
for (i, arg) in sig.inputs.iter().enumerate() {
let modeval = 5u; // "by copy"
let extra = vec!(self.c_uint(i),
self.c_uint(modeval),
self.c_tydesc(*arg));
self.visit("fn_input", extra.as_slice());
}
let extra = vec!(self.c_uint(retval),
self.c_bool(sig.variadic),
self.c_tydesc(sig.output));
self.visit("fn_output", extra.as_slice());
}
}
// Emit a sequence of calls to visit_ty::visit_foo
pub fn
|
emit_calls_to_trait_visit_ty
|
identifier_name
|
|
lib.rs
|
#![crate_type = "dylib"]
extern crate libc;
extern crate qpick;
use std::ffi::{CStr, CString};
/// Get an immutable reference from a raw pointer
macro_rules! ref_from_ptr {
($p: ident) => {
unsafe {
assert!(!$p.is_null());
&*$p
}
};
}
/// Get the object referenced by the raw pointer
macro_rules! val_from_ptr {
($p: ident) => {
unsafe {
assert!(!$p.is_null());
Box::from_raw($p)
}
};
}
/// Declare a function that frees a struct's memory
macro_rules! make_free_fn {
($name: ident, $t: ty) => {
#[no_mangle]
pub extern "C" fn $name(ptr: $t) {
assert!(!ptr.is_null());
val_from_ptr!(ptr);
}
};
}
pub fn str_to_cstr(string: &str) -> *mut libc::c_char {
CString::new(string).unwrap().into_raw()
}
pub fn cstr_to_str<'a>(s: *mut libc::c_char) -> &'a str {
let cstr = unsafe { CStr::from_ptr(s) };
cstr.to_str().unwrap()
}
pub fn to_raw_ptr<T>(v: T) -> *mut T {
Box::into_raw(Box::new(v))
}
use qpick::Qpick;
// `#[no_mangle]` warns for lifetime parameters,
// a known issue: https://github.com/rust-lang/rust/issues/40342
#[no_mangle]
pub extern "C" fn qpick_init(path: *mut libc::c_char) -> *mut Qpick {
let path = cstr_to_str(path);
let qpick = Qpick::from_path(path.to_string());
to_raw_ptr(qpick)
}
#[no_mangle]
pub extern "C" fn
|
(
path: *mut libc::c_char,
start_shard: libc::uint32_t,
end_shard: libc::uint32_t,
) -> *mut Qpick {
let path = cstr_to_str(path);
let qpick = Qpick::from_path_with_shard_range(path.to_string(), start_shard..end_shard);
to_raw_ptr(qpick)
}
make_free_fn!(qpick_free, *mut Qpick);
#[no_mangle]
pub extern "C" fn string_free(s: *mut libc::c_char) {
unsafe { CString::from_raw(s) };
}
#[no_mangle]
pub extern "C" fn qpick_get_as_string(
ptr: *mut Qpick,
query: *mut libc::c_char,
count: libc::uint32_t,
with_tfidf: libc::uint8_t,
) -> *const libc::c_char {
let query = cstr_to_str(query);
    let s = ref_from_ptr!(ptr).get_search_results_as_string(query, count, with_tfidf != 0);
CString::new(s).unwrap().into_raw()
}
// Get a mutable reference from a raw pointer
macro_rules! mutref_from_ptr {
($p: ident) => {
unsafe {
assert!(!$p.is_null());
&mut *$p
}
};
}
// ------ iterators ---
#[repr(C)]
#[derive(Debug)]
#[allow(dead_code)]
pub struct QpickSearchItem {
qid: libc::uint64_t,
sc: libc::c_float, //f32
}
// Declare a function that returns the next item from a qpick vector
#[no_mangle]
pub extern "C" fn qpick_search_iter_next(ptr: *mut qpick::SearchResults) -> *mut QpickSearchItem {
let res = mutref_from_ptr!(ptr);
// let mut iter = res.items.iter();
match res.next() {
Some(qid_sc) => to_raw_ptr(QpickSearchItem {
qid: qid_sc.id,
sc: qid_sc.sc,
}),
None => ::std::ptr::null_mut(),
}
}
make_free_fn!(qpick_search_results_free, *mut qpick::SearchResults);
make_free_fn!(qpick_search_item_free, *mut QpickSearchItem);
// --- end iterators ---
#[no_mangle]
pub extern "C" fn qpick_get(
ptr: *mut Qpick,
query: *mut libc::c_char,
count: libc::uint32_t,
with_tfidf: libc::uint8_t,
) -> *mut qpick::SearchResults {
let query = cstr_to_str(query);
    let res = ref_from_ptr!(ptr).get_search_results(query, count, with_tfidf != 0);
to_raw_ptr(res)
}
// --- nget queries api
#[no_mangle]
pub extern "C" fn query_vec_init() -> *mut Vec<String> {
to_raw_ptr(vec![])
}
make_free_fn!(query_vec_free, *mut Vec<String>);
#[no_mangle]
pub extern "C" fn query_vec_push(ptr: *mut Vec<String>, query: *mut libc::c_char) {
let query = cstr_to_str(query);
mutref_from_ptr!(ptr).push(query.to_string());
}
|
qpick_init_with_shard_range
|
identifier_name
|
lib.rs
|
#![crate_type = "dylib"]
extern crate libc;
extern crate qpick;
use std::ffi::{CStr, CString};
/// Get an immutable reference from a raw pointer
macro_rules! ref_from_ptr {
($p: ident) => {
unsafe {
assert!(!$p.is_null());
&*$p
}
};
}
/// Get the object referenced by the raw pointer
macro_rules! val_from_ptr {
($p: ident) => {
unsafe {
assert!(!$p.is_null());
Box::from_raw($p)
}
};
}
/// Declare a function that frees a struct's memory
macro_rules! make_free_fn {
($name: ident, $t: ty) => {
#[no_mangle]
pub extern "C" fn $name(ptr: $t) {
assert!(!ptr.is_null());
val_from_ptr!(ptr);
}
};
}
pub fn str_to_cstr(string: &str) -> *mut libc::c_char {
CString::new(string).unwrap().into_raw()
}
pub fn cstr_to_str<'a>(s: *mut libc::c_char) -> &'a str {
let cstr = unsafe { CStr::from_ptr(s) };
cstr.to_str().unwrap()
}
pub fn to_raw_ptr<T>(v: T) -> *mut T {
Box::into_raw(Box::new(v))
}
use qpick::Qpick;
// `#[no_mangle]` warns for lifetime parameters,
// a known issue: https://github.com/rust-lang/rust/issues/40342
#[no_mangle]
pub extern "C" fn qpick_init(path: *mut libc::c_char) -> *mut Qpick {
let path = cstr_to_str(path);
let qpick = Qpick::from_path(path.to_string());
to_raw_ptr(qpick)
}
#[no_mangle]
pub extern "C" fn qpick_init_with_shard_range(
path: *mut libc::c_char,
start_shard: libc::uint32_t,
end_shard: libc::uint32_t,
) -> *mut Qpick {
let path = cstr_to_str(path);
let qpick = Qpick::from_path_with_shard_range(path.to_string(), start_shard..end_shard);
to_raw_ptr(qpick)
}
make_free_fn!(qpick_free, *mut Qpick);
#[no_mangle]
pub extern "C" fn string_free(s: *mut libc::c_char) {
unsafe { CString::from_raw(s) };
}
#[no_mangle]
pub extern "C" fn qpick_get_as_string(
ptr: *mut Qpick,
query: *mut libc::c_char,
count: libc::uint32_t,
with_tfidf: libc::uint8_t,
) -> *const libc::c_char {
let query = cstr_to_str(query);
    let s = ref_from_ptr!(ptr).get_search_results_as_string(query, count, with_tfidf != 0);
CString::new(s).unwrap().into_raw()
}
// Get a mutable reference from a raw pointer
macro_rules! mutref_from_ptr {
($p: ident) => {
unsafe {
assert!(!$p.is_null());
&mut *$p
}
};
}
// ------ iterators ---
#[repr(C)]
#[derive(Debug)]
#[allow(dead_code)]
pub struct QpickSearchItem {
qid: libc::uint64_t,
sc: libc::c_float, //f32
}
// Declare a function that returns the next item from a qpick vector
#[no_mangle]
pub extern "C" fn qpick_search_iter_next(ptr: *mut qpick::SearchResults) -> *mut QpickSearchItem
|
make_free_fn!(qpick_search_results_free, *mut qpick::SearchResults);
make_free_fn!(qpick_search_item_free, *mut QpickSearchItem);
// --- end iterators ---
#[no_mangle]
pub extern "C" fn qpick_get(
ptr: *mut Qpick,
query: *mut libc::c_char,
count: libc::uint32_t,
with_tfidf: libc::uint8_t,
) -> *mut qpick::SearchResults {
let query = cstr_to_str(query);
    let res = ref_from_ptr!(ptr).get_search_results(query, count, with_tfidf != 0);
to_raw_ptr(res)
}
// --- nget queries api
#[no_mangle]
pub extern "C" fn query_vec_init() -> *mut Vec<String> {
to_raw_ptr(vec![])
}
make_free_fn!(query_vec_free, *mut Vec<String>);
#[no_mangle]
pub extern "C" fn query_vec_push(ptr: *mut Vec<String>, query: *mut libc::c_char) {
let query = cstr_to_str(query);
mutref_from_ptr!(ptr).push(query.to_string());
}
|
{
let res = mutref_from_ptr!(ptr);
// let mut iter = res.items.iter();
match res.next() {
Some(qid_sc) => to_raw_ptr(QpickSearchItem {
qid: qid_sc.id,
sc: qid_sc.sc,
}),
None => ::std::ptr::null_mut(),
}
}
|
identifier_body
|
lib.rs
|
#![crate_type = "dylib"]
extern crate libc;
extern crate qpick;
use std::ffi::{CStr, CString};
/// Get an immutable reference from a raw pointer
macro_rules! ref_from_ptr {
($p: ident) => {
unsafe {
assert!(!$p.is_null());
&*$p
}
};
}
/// Get the object referenced by the raw pointer
macro_rules! val_from_ptr {
($p: ident) => {
unsafe {
assert!(!$p.is_null());
Box::from_raw($p)
}
};
}
/// Declare a function that frees a struct's memory
macro_rules! make_free_fn {
($name: ident, $t: ty) => {
#[no_mangle]
pub extern "C" fn $name(ptr: $t) {
assert!(!ptr.is_null());
val_from_ptr!(ptr);
}
};
}
pub fn str_to_cstr(string: &str) -> *mut libc::c_char {
CString::new(string).unwrap().into_raw()
}
pub fn cstr_to_str<'a>(s: *mut libc::c_char) -> &'a str {
let cstr = unsafe { CStr::from_ptr(s) };
cstr.to_str().unwrap()
|
use qpick::Qpick;
// `#[no_mangle]` warns for lifetime parameters,
// a known issue: https://github.com/rust-lang/rust/issues/40342
#[no_mangle]
pub extern "C" fn qpick_init(path: *mut libc::c_char) -> *mut Qpick {
let path = cstr_to_str(path);
let qpick = Qpick::from_path(path.to_string());
to_raw_ptr(qpick)
}
#[no_mangle]
pub extern "C" fn qpick_init_with_shard_range(
path: *mut libc::c_char,
start_shard: libc::uint32_t,
end_shard: libc::uint32_t,
) -> *mut Qpick {
let path = cstr_to_str(path);
let qpick = Qpick::from_path_with_shard_range(path.to_string(), start_shard..end_shard);
to_raw_ptr(qpick)
}
make_free_fn!(qpick_free, *mut Qpick);
#[no_mangle]
pub extern "C" fn string_free(s: *mut libc::c_char) {
unsafe { CString::from_raw(s) };
}
#[no_mangle]
pub extern "C" fn qpick_get_as_string(
ptr: *mut Qpick,
query: *mut libc::c_char,
count: libc::uint32_t,
with_tfidf: libc::uint8_t,
) -> *const libc::c_char {
let query = cstr_to_str(query);
    let s = ref_from_ptr!(ptr).get_search_results_as_string(query, count, with_tfidf != 0);
CString::new(s).unwrap().into_raw()
}
// Get a mutable reference from a raw pointer
macro_rules! mutref_from_ptr {
($p: ident) => {
unsafe {
assert!(!$p.is_null());
&mut *$p
}
};
}
// ------ iterators ---
#[repr(C)]
#[derive(Debug)]
#[allow(dead_code)]
pub struct QpickSearchItem {
qid: libc::uint64_t,
sc: libc::c_float, //f32
}
// Declare a function that returns the next item from a qpick vector
#[no_mangle]
pub extern "C" fn qpick_search_iter_next(ptr: *mut qpick::SearchResults) -> *mut QpickSearchItem {
let res = mutref_from_ptr!(ptr);
// let mut iter = res.items.iter();
match res.next() {
Some(qid_sc) => to_raw_ptr(QpickSearchItem {
qid: qid_sc.id,
sc: qid_sc.sc,
}),
None => ::std::ptr::null_mut(),
}
}
make_free_fn!(qpick_search_results_free, *mut qpick::SearchResults);
make_free_fn!(qpick_search_item_free, *mut QpickSearchItem);
// --- end iterators ---
#[no_mangle]
pub extern "C" fn qpick_get(
ptr: *mut Qpick,
query: *mut libc::c_char,
count: libc::uint32_t,
with_tfidf: libc::uint8_t,
) -> *mut qpick::SearchResults {
let query = cstr_to_str(query);
    let res = ref_from_ptr!(ptr).get_search_results(query, count, with_tfidf != 0);
to_raw_ptr(res)
}
// --- nget queries api
#[no_mangle]
pub extern "C" fn query_vec_init() -> *mut Vec<String> {
to_raw_ptr(vec![])
}
make_free_fn!(query_vec_free, *mut Vec<String>);
#[no_mangle]
pub extern "C" fn query_vec_push(ptr: *mut Vec<String>, query: *mut libc::c_char) {
let query = cstr_to_str(query);
mutref_from_ptr!(ptr).push(query.to_string());
}
|
}
pub fn to_raw_ptr<T>(v: T) -> *mut T {
Box::into_raw(Box::new(v))
}
|
random_line_split
|
client.rs
|
#![deny(warnings)]
#![feature(vec_push_all, test)]
extern crate hyper;
extern crate test;
use std::fmt;
use std::io::{self, Read, Write, Cursor};
use std::net::SocketAddr;
use std::time::Duration;
use hyper::net;
static README: &'static [u8] = include_bytes!("../README.md");
struct MockStream {
read: Cursor<Vec<u8>>
}
impl MockStream {
fn new() -> MockStream {
let head = b"HTTP/1.1 200 OK\r\nServer: Mock\r\n\r\n";
let mut res = head.to_vec();
res.push_all(README);
MockStream {
read: Cursor::new(res)
}
}
}
impl Clone for MockStream {
fn clone(&self) -> MockStream {
MockStream {
read: Cursor::new(self.read.get_ref().clone())
}
}
}
impl Read for MockStream {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize>
|
}
impl Write for MockStream {
fn write(&mut self, msg: &[u8]) -> io::Result<usize> {
// we're mocking, what do we care.
Ok(msg.len())
}
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
#[derive(Clone, Debug)]
struct Foo;
impl hyper::header::Header for Foo {
fn header_name() -> &'static str {
"x-foo"
}
fn parse_header(_: &[Vec<u8>]) -> hyper::Result<Foo> {
Err(hyper::Error::Header)
}
}
impl hyper::header::HeaderFormat for Foo {
fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write_str("Bar")
}
}
impl net::NetworkStream for MockStream {
fn peer_addr(&mut self) -> io::Result<SocketAddr> {
Ok("127.0.0.1:1337".parse().unwrap())
}
fn set_read_timeout(&self, _: Option<Duration>) -> io::Result<()> {
// can't time out
Ok(())
}
fn set_write_timeout(&self, _: Option<Duration>) -> io::Result<()> {
// can't time out
Ok(())
}
}
struct MockConnector;
impl net::NetworkConnector for MockConnector {
type Stream = MockStream;
fn connect(&self, _: &str, _: u16, _: &str) -> hyper::Result<MockStream> {
Ok(MockStream::new())
}
}
#[bench]
fn bench_mock_hyper(b: &mut test::Bencher) {
let url = "http://127.0.0.1:1337/";
b.iter(|| {
let mut req = hyper::client::Request::with_connector(
hyper::Get, hyper::Url::parse(url).unwrap(), &MockConnector
).unwrap();
req.headers_mut().set(Foo);
let mut s = String::new();
req
.start().unwrap()
.send().unwrap()
.read_to_string(&mut s).unwrap()
});
}
|
{
self.read.read(buf)
}
|
identifier_body
|
client.rs
|
#![deny(warnings)]
#![feature(vec_push_all, test)]
extern crate hyper;
extern crate test;
use std::fmt;
use std::io::{self, Read, Write, Cursor};
use std::net::SocketAddr;
use std::time::Duration;
use hyper::net;
static README: &'static [u8] = include_bytes!("../README.md");
struct MockStream {
read: Cursor<Vec<u8>>
}
impl MockStream {
fn new() -> MockStream {
let head = b"HTTP/1.1 200 OK\r\nServer: Mock\r\n\r\n";
let mut res = head.to_vec();
res.push_all(README);
MockStream {
read: Cursor::new(res)
}
}
}
impl Clone for MockStream {
fn clone(&self) -> MockStream {
MockStream {
read: Cursor::new(self.read.get_ref().clone())
}
}
}
impl Read for MockStream {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.read.read(buf)
}
}
impl Write for MockStream {
fn write(&mut self, msg: &[u8]) -> io::Result<usize> {
// we're mocking, what do we care.
Ok(msg.len())
}
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
#[derive(Clone, Debug)]
struct Foo;
impl hyper::header::Header for Foo {
fn header_name() -> &'static str {
"x-foo"
}
fn parse_header(_: &[Vec<u8>]) -> hyper::Result<Foo> {
Err(hyper::Error::Header)
}
}
impl hyper::header::HeaderFormat for Foo {
fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write_str("Bar")
}
}
impl net::NetworkStream for MockStream {
fn peer_addr(&mut self) -> io::Result<SocketAddr> {
Ok("127.0.0.1:1337".parse().unwrap())
}
fn set_read_timeout(&self, _: Option<Duration>) -> io::Result<()> {
// can't time out
Ok(())
}
fn set_write_timeout(&self, _: Option<Duration>) -> io::Result<()> {
// can't time out
Ok(())
}
}
struct
|
;
impl net::NetworkConnector for MockConnector {
type Stream = MockStream;
fn connect(&self, _: &str, _: u16, _: &str) -> hyper::Result<MockStream> {
Ok(MockStream::new())
}
}
#[bench]
fn bench_mock_hyper(b: &mut test::Bencher) {
let url = "http://127.0.0.1:1337/";
b.iter(|| {
let mut req = hyper::client::Request::with_connector(
hyper::Get, hyper::Url::parse(url).unwrap(), &MockConnector
).unwrap();
req.headers_mut().set(Foo);
let mut s = String::new();
req
.start().unwrap()
.send().unwrap()
.read_to_string(&mut s).unwrap()
});
}
|
MockConnector
|
identifier_name
|
client.rs
|
#![deny(warnings)]
#![feature(vec_push_all, test)]
extern crate hyper;
extern crate test;
use std::fmt;
use std::io::{self, Read, Write, Cursor};
use std::net::SocketAddr;
use std::time::Duration;
use hyper::net;
static README: &'static [u8] = include_bytes!("../README.md");
struct MockStream {
read: Cursor<Vec<u8>>
}
impl MockStream {
fn new() -> MockStream {
let head = b"HTTP/1.1 200 OK\r\nServer: Mock\r\n\r\n";
let mut res = head.to_vec();
res.push_all(README);
MockStream {
read: Cursor::new(res)
}
}
}
impl Clone for MockStream {
fn clone(&self) -> MockStream {
MockStream {
read: Cursor::new(self.read.get_ref().clone())
}
}
}
impl Read for MockStream {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.read.read(buf)
}
}
impl Write for MockStream {
fn write(&mut self, msg: &[u8]) -> io::Result<usize> {
// we're mocking, what do we care.
Ok(msg.len())
}
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
#[derive(Clone, Debug)]
struct Foo;
impl hyper::header::Header for Foo {
fn header_name() -> &'static str {
"x-foo"
}
fn parse_header(_: &[Vec<u8>]) -> hyper::Result<Foo> {
Err(hyper::Error::Header)
}
}
impl hyper::header::HeaderFormat for Foo {
fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write_str("Bar")
}
}
impl net::NetworkStream for MockStream {
fn peer_addr(&mut self) -> io::Result<SocketAddr> {
Ok("127.0.0.1:1337".parse().unwrap())
}
fn set_read_timeout(&self, _: Option<Duration>) -> io::Result<()> {
// can't time out
Ok(())
}
fn set_write_timeout(&self, _: Option<Duration>) -> io::Result<()> {
// can't time out
Ok(())
}
}
struct MockConnector;
|
Ok(MockStream::new())
}
}
#[bench]
fn bench_mock_hyper(b: &mut test::Bencher) {
let url = "http://127.0.0.1:1337/";
b.iter(|| {
let mut req = hyper::client::Request::with_connector(
hyper::Get, hyper::Url::parse(url).unwrap(), &MockConnector
).unwrap();
req.headers_mut().set(Foo);
let mut s = String::new();
req
.start().unwrap()
.send().unwrap()
.read_to_string(&mut s).unwrap()
});
}
|
impl net::NetworkConnector for MockConnector {
type Stream = MockStream;
fn connect(&self, _: &str, _: u16, _: &str) -> hyper::Result<MockStream> {
|
random_line_split
|
serial.rs
|
use kern::arch::port::Port;
use spin::Mutex;
const SERIAL_PORT: u16 = 0x3f8; /* COM1 */
#[derive(Debug)]
pub struct Serial {
ports: [Port<u8>; 8]
}
pub static COM1: Mutex<Serial> = Mutex::new(Serial::new(SERIAL_PORT));
impl Serial {
pub const fn new(base: u16) -> Serial {
Serial {
ports: [
Port::new(base),
Port::new(base + 1),
Port::new(base + 2),
Port::new(base + 3),
Port::new(base + 4),
Port::new(base + 5),
Port::new(base + 6),
Port::new(base + 7),
]
}
}
pub unsafe fn init(&mut self) {
self.ports[1].write(0x00); // Disable all interrupts
self.ports[3].write(0x80); // Enable DLAB (set baud rate divisor)
self.ports[0].write(0x03); // Set divisor to 3 (lo byte) 38400 baud
self.ports[1].write(0x00); // (hi byte)
self.ports[3].write(0x03); // 8 bits, no parity, one stop bit
self.ports[2].write(0xC7); // Enable FIFO, clear them, with 14-byte threshold
self.ports[4].write(0x0B); // IRQs enabled, RTS/DSR set
}
unsafe fn is_transmit_empty(&mut self) -> bool {
        self.ports[5].read() & 0x20 != 0
}
pub unsafe fn
|
(&mut self, a: u8) {
        while !self.is_transmit_empty() {
}
self.ports[0].write(a);
}
unsafe fn serial_received(&mut self) -> bool {
        self.ports[5].read() & 0x1 != 0
}
pub unsafe fn read(&mut self) -> u8 {
        while !self.serial_received() {
}
self.ports[0].read()
}
}
|
write
|
identifier_name
|
serial.rs
|
use kern::arch::port::Port;
use spin::Mutex;
const SERIAL_PORT: u16 = 0x3f8; /* COM1 */
#[derive(Debug)]
pub struct Serial {
|
impl Serial {
pub const fn new(base: u16) -> Serial {
Serial {
ports: [
Port::new(base),
Port::new(base + 1),
Port::new(base + 2),
Port::new(base + 3),
Port::new(base + 4),
Port::new(base + 5),
Port::new(base + 6),
Port::new(base + 7),
]
}
}
pub unsafe fn init(&mut self) {
self.ports[1].write(0x00); // Disable all interrupts
self.ports[3].write(0x80); // Enable DLAB (set baud rate divisor)
self.ports[0].write(0x03); // Set divisor to 3 (lo byte) 38400 baud
self.ports[1].write(0x00); // (hi byte)
self.ports[3].write(0x03); // 8 bits, no parity, one stop bit
self.ports[2].write(0xC7); // Enable FIFO, clear them, with 14-byte threshold
self.ports[4].write(0x0B); // IRQs enabled, RTS/DSR set
}
unsafe fn is_transmit_empty(&mut self) -> bool {
        self.ports[5].read() & 0x20 != 0
}
pub unsafe fn write(&mut self, a: u8) {
        while !self.is_transmit_empty() {
}
self.ports[0].write(a);
}
unsafe fn serial_received(&mut self) -> bool {
        self.ports[5].read() & 0x1 != 0
}
pub unsafe fn read(&mut self) -> u8 {
        while !self.serial_received() {
}
self.ports[0].read()
}
}
|
ports: [Port<u8>; 8]
}
pub static COM1: Mutex<Serial> = Mutex::new(Serial::new(SERIAL_PORT));
|
random_line_split
|
mod.rs
|
//! Unix-specific extension to the primitives in the `std::ffi` module.
//!
//! # Examples
//!
//! ```
//! use std::ffi::OsString;
//! use std::os::unix::ffi::OsStringExt;
//!
//! let bytes = b"foo".to_vec();
//!
|
//! let os_string = OsString::from_vec(bytes);
//! assert_eq!(os_string.to_str(), Some("foo"));
//!
//! // OsStringExt::into_vec
//! let bytes = os_string.into_vec();
//! assert_eq!(bytes, b"foo");
//! ```
//!
//! ```
//! use std::ffi::OsStr;
//! use std::os::unix::ffi::OsStrExt;
//!
//! let bytes = b"foo";
//!
//! // OsStrExt::from_bytes
//! let os_str = OsStr::from_bytes(bytes);
//! assert_eq!(os_str.to_str(), Some("foo"));
//!
//! // OsStrExt::as_bytes
//! let bytes = os_str.as_bytes();
//! assert_eq!(bytes, b"foo");
//! ```
#![stable(feature = "rust1", since = "1.0.0")]
mod os_str;
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::os_str::{OsStrExt, OsStringExt};
|
//! // OsStringExt::from_vec
|
random_line_split
|
http.rs
|
#[macro_use]
extern crate criterion;
#[macro_use]
extern crate combine;
use std::fmt;
use {
combine::{
many, many1,
parser::range::{range, take_while1},
stream::easy,
token, ParseError, Parser, RangeStream,
},
criterion::{black_box, Bencher, Criterion},
};
#[derive(Debug)]
struct Request<'a> {
method: &'a [u8],
uri: &'a [u8],
version: &'a [u8],
}
#[derive(Debug)]
struct Header<'a> {
name: &'a [u8],
value: Vec<&'a [u8]>,
}
fn is_token(c: u8) -> bool {
match c {
128..=255
| 0..=31
| b'('
| b')'
| b'<'
| b'>'
| b'@'
| b','
| b';'
| b':'
| b'\\'
| b'"'
| b'/'
| b'['
| b']'
| b'?'
| b'='
| b'{'
| b'}'
        | b' ' => false,
_ => true,
}
}
fn is_horizontal_space(c: u8) -> bool {
    c == b' ' || c == b'\t'
}
fn is_space(c: u8) -> bool {
c == b' '
}
fn is_not_space(c: u8) -> bool {
    c != b' '
}
fn is_http_version(c: u8) -> bool {
c >= b'0' && c <= b'9' || c == b'.'
}
fn end_of_line<'a, Input>() -> impl Parser<Input, Output = u8>
where
Input: RangeStream<Token = u8, Range = &'a [u8]>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
(token(b'\r'), token(b'\n')).map(|_| b'\r').or(token(b'\n'))
}
fn message_header<'a, Input>() -> impl Parser<Input, Output = Header<'a>>
where
Input: RangeStream<Token = u8, Range = &'a [u8]>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
let message_header_line = (
take_while1(is_horizontal_space),
        take_while1(|c| c != b'\r' && c != b'\n'),
end_of_line(),
)
.map(|(_, line, _)| line);
struct_parser!(Header {
name: take_while1(is_token),
_: token(b':'),
value: many1(message_header_line),
})
}
fn parse_http_request<'a, Input>(
input: Input,
) -> Result<((Request<'a>, Vec<Header<'a>>), Input), Input::Error>
where
Input: RangeStream<Token = u8, Range = &'a [u8]>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
let http_version = range(&b"HTTP/"[..]).with(take_while1(is_http_version));
let request_line = struct_parser!(Request {
method: take_while1(is_token),
_: take_while1(is_space),
uri: take_while1(is_not_space),
_: take_while1(is_space),
version: http_version,
});
let mut request = (
request_line,
end_of_line(),
many(message_header()),
end_of_line(),
)
.map(|(request, _, headers, _)| (request, headers));
request.parse(input)
}
static REQUESTS: &'static [u8] = include_bytes!("http-requests.txt");
fn http_requests_small(b: &mut Bencher<'_>) {
http_requests_bench(b, easy::Stream(REQUESTS))
}
fn http_requests_large(b: &mut Bencher<'_>) {
use std::iter;
let mut buffer = Vec::with_capacity(REQUESTS.len() * 5);
for buf in iter::repeat(REQUESTS).take(5) {
buffer.extend_from_slice(buf);
}
http_requests_bench(b, easy::Stream(&buffer[..]))
}
fn http_requests_large_cheap_error(b: &mut Bencher<'_>) {
use std::iter;
let mut buffer = Vec::with_capacity(REQUESTS.len() * 5);
for buf in iter::repeat(REQUESTS).take(5) {
buffer.extend_from_slice(buf);
}
http_requests_bench(b, &buffer[..])
}
fn http_requests_bench<'a, Input>(b: &mut Bencher<'_>, buffer: Input)
where
Input: RangeStream<Token = u8, Range = &'a [u8]> + Clone,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position> + fmt::Debug,
{
b.iter(|| {
let mut buf = black_box(buffer.clone());
while buf.clone().uncons().is_ok() {
match parse_http_request(buf) {
Ok(((_, _), b)) => {
buf = b;
}
Err(err) => panic!("{:?}", err),
}
}
});
}
fn http_requests(c: &mut Criterion)
|
criterion_group!(http, http_requests,);
criterion_main!(http);
|
{
c.bench_function("http_requests_small", http_requests_small);
c.bench_function("http_requests_large", http_requests_large);
c.bench_function(
"http_requests_large_cheap_error",
http_requests_large_cheap_error,
);
}
|
identifier_body
|
http.rs
|
#[macro_use]
extern crate criterion;
#[macro_use]
extern crate combine;
use std::fmt;
use {
combine::{
many, many1,
parser::range::{range, take_while1},
stream::easy,
token, ParseError, Parser, RangeStream,
},
criterion::{black_box, Bencher, Criterion},
};
#[derive(Debug)]
struct Request<'a> {
method: &'a [u8],
|
version: &'a [u8],
}
#[derive(Debug)]
struct Header<'a> {
name: &'a [u8],
value: Vec<&'a [u8]>,
}
fn is_token(c: u8) -> bool {
match c {
128..=255
| 0..=31
| b'('
| b')'
| b'<'
| b'>'
| b'@'
| b','
| b';'
| b':'
| b'\\'
| b'"'
| b'/'
| b'['
| b']'
| b'?'
| b'='
| b'{'
| b'}'
        | b' ' => false,
_ => true,
}
}
fn is_horizontal_space(c: u8) -> bool {
    c == b' ' || c == b'\t'
}
fn is_space(c: u8) -> bool {
c == b' '
}
fn is_not_space(c: u8) -> bool {
    c != b' '
}
fn is_http_version(c: u8) -> bool {
c >= b'0' && c <= b'9' || c == b'.'
}
fn end_of_line<'a, Input>() -> impl Parser<Input, Output = u8>
where
Input: RangeStream<Token = u8, Range = &'a [u8]>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
(token(b'\r'), token(b'\n')).map(|_| b'\r').or(token(b'\n'))
}
fn message_header<'a, Input>() -> impl Parser<Input, Output = Header<'a>>
where
Input: RangeStream<Token = u8, Range = &'a [u8]>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
let message_header_line = (
take_while1(is_horizontal_space),
        take_while1(|c| c != b'\r' && c != b'\n'),
end_of_line(),
)
.map(|(_, line, _)| line);
struct_parser!(Header {
name: take_while1(is_token),
_: token(b':'),
value: many1(message_header_line),
})
}
fn parse_http_request<'a, Input>(
input: Input,
) -> Result<((Request<'a>, Vec<Header<'a>>), Input), Input::Error>
where
Input: RangeStream<Token = u8, Range = &'a [u8]>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
let http_version = range(&b"HTTP/"[..]).with(take_while1(is_http_version));
let request_line = struct_parser!(Request {
method: take_while1(is_token),
_: take_while1(is_space),
uri: take_while1(is_not_space),
_: take_while1(is_space),
version: http_version,
});
let mut request = (
request_line,
end_of_line(),
many(message_header()),
end_of_line(),
)
.map(|(request, _, headers, _)| (request, headers));
request.parse(input)
}
static REQUESTS: &'static [u8] = include_bytes!("http-requests.txt");
fn http_requests_small(b: &mut Bencher<'_>) {
http_requests_bench(b, easy::Stream(REQUESTS))
}
fn http_requests_large(b: &mut Bencher<'_>) {
use std::iter;
let mut buffer = Vec::with_capacity(REQUESTS.len() * 5);
for buf in iter::repeat(REQUESTS).take(5) {
buffer.extend_from_slice(buf);
}
http_requests_bench(b, easy::Stream(&buffer[..]))
}
fn http_requests_large_cheap_error(b: &mut Bencher<'_>) {
use std::iter;
let mut buffer = Vec::with_capacity(REQUESTS.len() * 5);
for buf in iter::repeat(REQUESTS).take(5) {
buffer.extend_from_slice(buf);
}
http_requests_bench(b, &buffer[..])
}
fn http_requests_bench<'a, Input>(b: &mut Bencher<'_>, buffer: Input)
where
Input: RangeStream<Token = u8, Range = &'a [u8]> + Clone,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position> + fmt::Debug,
{
b.iter(|| {
let mut buf = black_box(buffer.clone());
while buf.clone().uncons().is_ok() {
match parse_http_request(buf) {
Ok(((_, _), b)) => {
buf = b;
}
Err(err) => panic!("{:?}", err),
}
}
});
}
fn http_requests(c: &mut Criterion) {
c.bench_function("http_requests_small", http_requests_small);
c.bench_function("http_requests_large", http_requests_large);
c.bench_function(
"http_requests_large_cheap_error",
http_requests_large_cheap_error,
);
}
criterion_group!(http, http_requests,);
criterion_main!(http);
|
uri: &'a [u8],
|
random_line_split
|
http.rs
|
#[macro_use]
extern crate criterion;
#[macro_use]
extern crate combine;
use std::fmt;
use {
combine::{
many, many1,
parser::range::{range, take_while1},
stream::easy,
token, ParseError, Parser, RangeStream,
},
criterion::{black_box, Bencher, Criterion},
};
#[derive(Debug)]
struct Request<'a> {
method: &'a [u8],
uri: &'a [u8],
version: &'a [u8],
}
#[derive(Debug)]
struct Header<'a> {
name: &'a [u8],
value: Vec<&'a [u8]>,
}
fn is_token(c: u8) -> bool {
match c {
128..=255
| 0..=31
| b'('
| b')'
| b'<'
| b'>'
| b'@'
| b','
| b';'
| b':'
| b'\\'
| b'"'
| b'/'
| b'['
| b']'
| b'?'
| b'='
| b'{'
| b'}'
        | b' ' => false,
_ => true,
}
}
fn is_horizontal_space(c: u8) -> bool {
    c == b' ' || c == b'\t'
}
fn is_space(c: u8) -> bool {
c == b' '
}
fn is_not_space(c: u8) -> bool {
    c != b' '
}
fn
|
(c: u8) -> bool {
c >= b'0' && c <= b'9' || c == b'.'
}
fn end_of_line<'a, Input>() -> impl Parser<Input, Output = u8>
where
Input: RangeStream<Token = u8, Range = &'a [u8]>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
(token(b'\r'), token(b'\n')).map(|_| b'\r').or(token(b'\n'))
}
fn message_header<'a, Input>() -> impl Parser<Input, Output = Header<'a>>
where
Input: RangeStream<Token = u8, Range = &'a [u8]>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
let message_header_line = (
take_while1(is_horizontal_space),
        take_while1(|c| c != b'\r' && c != b'\n'),
end_of_line(),
)
.map(|(_, line, _)| line);
struct_parser!(Header {
name: take_while1(is_token),
_: token(b':'),
value: many1(message_header_line),
})
}
fn parse_http_request<'a, Input>(
input: Input,
) -> Result<((Request<'a>, Vec<Header<'a>>), Input), Input::Error>
where
Input: RangeStream<Token = u8, Range = &'a [u8]>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
let http_version = range(&b"HTTP/"[..]).with(take_while1(is_http_version));
let request_line = struct_parser!(Request {
method: take_while1(is_token),
_: take_while1(is_space),
uri: take_while1(is_not_space),
_: take_while1(is_space),
version: http_version,
});
let mut request = (
request_line,
end_of_line(),
many(message_header()),
end_of_line(),
)
.map(|(request, _, headers, _)| (request, headers));
request.parse(input)
}
static REQUESTS: &'static [u8] = include_bytes!("http-requests.txt");
fn http_requests_small(b: &mut Bencher<'_>) {
http_requests_bench(b, easy::Stream(REQUESTS))
}
fn http_requests_large(b: &mut Bencher<'_>) {
use std::iter;
let mut buffer = Vec::with_capacity(REQUESTS.len() * 5);
for buf in iter::repeat(REQUESTS).take(5) {
buffer.extend_from_slice(buf);
}
http_requests_bench(b, easy::Stream(&buffer[..]))
}
fn http_requests_large_cheap_error(b: &mut Bencher<'_>) {
use std::iter;
let mut buffer = Vec::with_capacity(REQUESTS.len() * 5);
for buf in iter::repeat(REQUESTS).take(5) {
buffer.extend_from_slice(buf);
}
http_requests_bench(b, &buffer[..])
}
fn http_requests_bench<'a, Input>(b: &mut Bencher<'_>, buffer: Input)
where
Input: RangeStream<Token = u8, Range = &'a [u8]> + Clone,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position> + fmt::Debug,
{
b.iter(|| {
let mut buf = black_box(buffer.clone());
while buf.clone().uncons().is_ok() {
match parse_http_request(buf) {
Ok(((_, _), b)) => {
buf = b;
}
Err(err) => panic!("{:?}", err),
}
}
});
}
fn http_requests(c: &mut Criterion) {
c.bench_function("http_requests_small", http_requests_small);
c.bench_function("http_requests_large", http_requests_large);
c.bench_function(
"http_requests_large_cheap_error",
http_requests_large_cheap_error,
);
}
criterion_group!(http, http_requests,);
criterion_main!(http);
|
is_http_version
|
identifier_name
|
css_provider.rs
|
// Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use Error;
use CssProvider;
use ffi;
use glib::object::IsA;
use glib::translate::*;
use std::ptr;
use libc::ssize_t;
pub trait CssProviderExtManual {
fn load_from_data(&self, data: &str) -> Result<(), Error>;
}
//Other functions autogenerated in src\auto\css_provider.rs
impl<O: IsA<CssProvider>> CssProviderExtManual for O {
fn
|
(&self, data: &str) -> Result<(), Error> {
unsafe {
let mut error = ptr::null_mut();
let _ = ffi::gtk_css_provider_load_from_data(self.to_glib_none().0, mut_override(data.as_bytes().as_ptr()),
data.len() as ssize_t, &mut error);
if error.is_null() { Ok(()) } else { Err(from_glib_full(error)) }
}
}
}
|
load_from_data
|
identifier_name
|
css_provider.rs
|
// Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use Error;
use CssProvider;
use ffi;
use glib::object::IsA;
use glib::translate::*;
use std::ptr;
use libc::ssize_t;
pub trait CssProviderExtManual {
fn load_from_data(&self, data: &str) -> Result<(), Error>;
}
//Other functions autogenerated in src\auto\css_provider.rs
impl<O: IsA<CssProvider>> CssProviderExtManual for O {
fn load_from_data(&self, data: &str) -> Result<(), Error> {
unsafe {
let mut error = ptr::null_mut();
let _ = ffi::gtk_css_provider_load_from_data(self.to_glib_none().0, mut_override(data.as_bytes().as_ptr()),
data.len() as ssize_t, &mut error);
if error.is_null() { Ok(()) } else { Err(from_glib_full(error)) }
}
}
|
}
|
random_line_split
|
|
css_provider.rs
|
// Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use Error;
use CssProvider;
use ffi;
use glib::object::IsA;
use glib::translate::*;
use std::ptr;
use libc::ssize_t;
pub trait CssProviderExtManual {
fn load_from_data(&self, data: &str) -> Result<(), Error>;
}
//Other functions autogenerated in src\auto\css_provider.rs
impl<O: IsA<CssProvider>> CssProviderExtManual for O {
fn load_from_data(&self, data: &str) -> Result<(), Error> {
unsafe {
let mut error = ptr::null_mut();
let _ = ffi::gtk_css_provider_load_from_data(self.to_glib_none().0, mut_override(data.as_bytes().as_ptr()),
data.len() as ssize_t, &mut error);
if error.is_null()
|
else { Err(from_glib_full(error)) }
}
}
}
|
{ Ok(()) }
|
conditional_block
|
css_provider.rs
|
// Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use Error;
use CssProvider;
use ffi;
use glib::object::IsA;
use glib::translate::*;
use std::ptr;
use libc::ssize_t;
pub trait CssProviderExtManual {
fn load_from_data(&self, data: &str) -> Result<(), Error>;
}
//Other functions autogenerated in src\auto\css_provider.rs
impl<O: IsA<CssProvider>> CssProviderExtManual for O {
fn load_from_data(&self, data: &str) -> Result<(), Error>
|
}
|
{
unsafe {
let mut error = ptr::null_mut();
let _ = ffi::gtk_css_provider_load_from_data(self.to_glib_none().0, mut_override(data.as_bytes().as_ptr()),
data.len() as ssize_t, &mut error);
if error.is_null() { Ok(()) } else { Err(from_glib_full(error)) }
}
}
|
identifier_body
|
rm.rs
|
use config::Config;
use errors::*;
|
SubCommand::with_name("rm")
.about("Removes a user")
.arg(
Arg::with_name("USER")
.help("Specifies the user")
.required(true)
.index(1)
)
}
pub fn call(args: &ArgMatches) -> Result<()> {
// Load the config
let config_file = args.value_of("config").unwrap();
let mut config = Config::load(config_file)?;
// Remove the user, if existing
let user = args.value_of("USER").unwrap();
if config.users.remove(user).is_none() {
bail!(format!("A user named '{}' does not exist", user)); // TODO Use proper error!
}
// Store the config
config.store(config_file)?;
Ok(())
}
|
use clap::{App, Arg, ArgMatches, SubCommand};
pub fn setup<'a, 'b>() -> App<'a, 'b> {
|
random_line_split
|
rm.rs
|
use config::Config;
use errors::*;
use clap::{App, Arg, ArgMatches, SubCommand};
pub fn setup<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("rm")
.about("Removes a user")
.arg(
Arg::with_name("USER")
.help("Specifies the user")
.required(true)
.index(1)
)
}
pub fn call(args: &ArgMatches) -> Result<()> {
// Load the config
let config_file = args.value_of("config").unwrap();
let mut config = Config::load(config_file)?;
// Remove the user, if existing
let user = args.value_of("USER").unwrap();
if config.users.remove(user).is_none()
|
// Store the config
config.store(config_file)?;
Ok(())
}
|
{
bail!(format!("A user named '{}' does not exist", user)); // TODO Use proper error!
}
|
conditional_block
|
rm.rs
|
use config::Config;
use errors::*;
use clap::{App, Arg, ArgMatches, SubCommand};
pub fn
|
<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("rm")
.about("Removes a user")
.arg(
Arg::with_name("USER")
.help("Specifies the user")
.required(true)
.index(1)
)
}
pub fn call(args: &ArgMatches) -> Result<()> {
// Load the config
let config_file = args.value_of("config").unwrap();
let mut config = Config::load(config_file)?;
// Remove the user, if existing
let user = args.value_of("USER").unwrap();
if config.users.remove(user).is_none() {
bail!(format!("A user named '{}' does not exist", user)); // TODO Use proper error!
}
// Store the config
config.store(config_file)?;
Ok(())
}
|
setup
|
identifier_name
|
rm.rs
|
use config::Config;
use errors::*;
use clap::{App, Arg, ArgMatches, SubCommand};
pub fn setup<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("rm")
.about("Removes a user")
.arg(
Arg::with_name("USER")
.help("Specifies the user")
.required(true)
.index(1)
)
}
pub fn call(args: &ArgMatches) -> Result<()>
|
{
// Load the config
let config_file = args.value_of("config").unwrap();
let mut config = Config::load(config_file)?;
// Remove the user, if existing
let user = args.value_of("USER").unwrap();
if config.users.remove(user).is_none() {
bail!(format!("A user named '{}' does not exist", user)); // TODO Use proper error!
}
// Store the config
config.store(config_file)?;
Ok(())
}
|
identifier_body
|
|
struct_list.rs
|
// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//! List of structs.
use private::layout::{ListReader, ListBuilder, PointerReader, PointerBuilder, InlineComposite};
use traits::{FromPointerReader, FromPointerBuilder,
FromStructBuilder, FromStructReader, HasStructSize,
IndexMove, ListIter};
use Result;
#[derive(Copy, Clone)]
pub struct Owned<T> where T: for<'a> ::traits::OwnedStruct<'a> {
marker: ::std::marker::PhantomData<T>,
}
impl<'a, T> ::traits::Owned<'a> for Owned<T> where T: for<'b> ::traits::OwnedStruct<'b> {
type Reader = Reader<'a, T>;
type Builder = Builder<'a, T>;
}
pub struct Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
marker: ::std::marker::PhantomData<T>,
reader: ListReader<'a>
}
impl <'a, T> Clone for Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
fn clone(&self) -> Reader<'a, T> {
Reader { marker : self.marker, reader : self.reader }
}
}
impl <'a, T> Copy for Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {}
impl <'a, T> Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn new<'b>(reader : ListReader<'b>) -> Reader<'b, T> {
Reader::<'b, T> { reader : reader, marker : ::std::marker::PhantomData }
}
pub fn len(&self) -> u32 { self.reader.len() }
pub fn iter(self) -> ListIter<Reader<'a, T>, <T as ::traits::OwnedStruct<'a>>::Reader> {
ListIter::new(self, self.len())
}
}
impl <'a, T> Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn borrow<'b>(&'b self) -> Reader<'b, T> {
Reader {reader : self.reader, marker : ::std::marker::PhantomData}
}
}
impl <'a, T> FromPointerReader<'a> for Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
fn get_from_pointer(reader : &PointerReader<'a>) -> Result<Reader<'a, T>> {
Ok(Reader { reader : try!(reader.get_list(InlineComposite, ::std::ptr::null())),
marker : ::std::marker::PhantomData })
}
}
impl <'a, T> IndexMove<u32, <T as ::traits::OwnedStruct<'a>>::Reader> for Reader<'a, T>
where T: for<'b> ::traits::OwnedStruct<'b> {
fn index_move(&self, index : u32) -> <T as ::traits::OwnedStruct<'a>>::Reader {
self.get(index)
}
}
impl <'a, T> Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn get(self, index: u32) -> <T as ::traits::OwnedStruct<'a>>::Reader {
assert!(index < self.len());
FromStructReader::new(self.reader.get_struct_element(index))
}
}
pub struct Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
marker : ::std::marker::PhantomData<T>,
builder : ListBuilder<'a>
}
impl <'a, T> Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn new(builder : ListBuilder<'a>) -> Builder<'a, T>
|
pub fn len(&self) -> u32 { self.builder.len() }
// pub fn set(&self, index : uint, value : T) {
// }
}
impl <'a, T> Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn borrow<'b>(&'b mut self) -> Builder<'b, T> {
Builder {builder : self.builder, marker : ::std::marker::PhantomData}
}
}
impl <'a, T> FromPointerBuilder<'a> for Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
fn init_pointer(builder : PointerBuilder<'a>, size : u32) -> Builder<'a, T> {
Builder {
marker : ::std::marker::PhantomData,
builder : builder.init_struct_list(
size,
<<T as ::traits::OwnedStruct>::Builder as HasStructSize>::struct_size())
}
}
fn get_from_pointer(builder : PointerBuilder<'a>) -> Result<Builder<'a, T>> {
Ok(Builder {
marker : ::std::marker::PhantomData,
builder :
try!(builder.get_struct_list(<<T as ::traits::OwnedStruct>::Builder as HasStructSize>::struct_size(),
::std::ptr::null()))
})
}
}
impl <'a, T> Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn get(self, index: u32) -> <T as ::traits::OwnedStruct<'a>>::Builder {
assert!(index < self.len());
FromStructBuilder::new(self.builder.get_struct_element(index))
}
}
impl <'a, T> ::traits::SetPointerBuilder<Builder<'a, T>> for Reader<'a, T>
where T: for<'b> ::traits::OwnedStruct<'b>
{
fn set_pointer_builder<'b>(pointer : ::private::layout::PointerBuilder<'b>,
value : Reader<'a, T>) -> Result<()> {
pointer.set_list(&value.reader)
}
}
|
{
Builder { builder : builder, marker : ::std::marker::PhantomData }
}
|
identifier_body
|
struct_list.rs
|
// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//! List of structs.
use private::layout::{ListReader, ListBuilder, PointerReader, PointerBuilder, InlineComposite};
use traits::{FromPointerReader, FromPointerBuilder,
FromStructBuilder, FromStructReader, HasStructSize,
IndexMove, ListIter};
use Result;
#[derive(Copy, Clone)]
pub struct Owned<T> where T: for<'a> ::traits::OwnedStruct<'a> {
marker: ::std::marker::PhantomData<T>,
}
impl<'a, T> ::traits::Owned<'a> for Owned<T> where T: for<'b> ::traits::OwnedStruct<'b> {
type Reader = Reader<'a, T>;
type Builder = Builder<'a, T>;
}
pub struct Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
marker: ::std::marker::PhantomData<T>,
reader: ListReader<'a>
|
fn clone(&self) -> Reader<'a, T> {
Reader { marker : self.marker, reader : self.reader }
}
}
impl <'a, T> Copy for Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {}
impl <'a, T> Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn new<'b>(reader : ListReader<'b>) -> Reader<'b, T> {
Reader::<'b, T> { reader : reader, marker : ::std::marker::PhantomData }
}
pub fn len(&self) -> u32 { self.reader.len() }
pub fn iter(self) -> ListIter<Reader<'a, T>, <T as ::traits::OwnedStruct<'a>>::Reader> {
ListIter::new(self, self.len())
}
}
impl <'a, T> Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn borrow<'b>(&'b self) -> Reader<'b, T> {
Reader {reader : self.reader, marker : ::std::marker::PhantomData}
}
}
impl <'a, T> FromPointerReader<'a> for Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
fn get_from_pointer(reader : &PointerReader<'a>) -> Result<Reader<'a, T>> {
Ok(Reader { reader : try!(reader.get_list(InlineComposite, ::std::ptr::null())),
marker : ::std::marker::PhantomData })
}
}
impl <'a, T> IndexMove<u32, <T as ::traits::OwnedStruct<'a>>::Reader> for Reader<'a, T>
where T: for<'b> ::traits::OwnedStruct<'b> {
fn index_move(&self, index : u32) -> <T as ::traits::OwnedStruct<'a>>::Reader {
self.get(index)
}
}
impl <'a, T> Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn get(self, index: u32) -> <T as ::traits::OwnedStruct<'a>>::Reader {
assert!(index < self.len());
FromStructReader::new(self.reader.get_struct_element(index))
}
}
pub struct Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
marker : ::std::marker::PhantomData<T>,
builder : ListBuilder<'a>
}
impl <'a, T> Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn new(builder : ListBuilder<'a>) -> Builder<'a, T> {
Builder { builder : builder, marker : ::std::marker::PhantomData }
}
pub fn len(&self) -> u32 { self.builder.len() }
// pub fn set(&self, index : uint, value : T) {
// }
}
impl <'a, T> Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn borrow<'b>(&'b mut self) -> Builder<'b, T> {
Builder {builder : self.builder, marker : ::std::marker::PhantomData}
}
}
impl <'a, T> FromPointerBuilder<'a> for Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
fn init_pointer(builder : PointerBuilder<'a>, size : u32) -> Builder<'a, T> {
Builder {
marker : ::std::marker::PhantomData,
builder : builder.init_struct_list(
size,
<<T as ::traits::OwnedStruct>::Builder as HasStructSize>::struct_size())
}
}
fn get_from_pointer(builder : PointerBuilder<'a>) -> Result<Builder<'a, T>> {
Ok(Builder {
marker : ::std::marker::PhantomData,
builder :
try!(builder.get_struct_list(<<T as ::traits::OwnedStruct>::Builder as HasStructSize>::struct_size(),
::std::ptr::null()))
})
}
}
impl <'a, T> Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn get(self, index: u32) -> <T as ::traits::OwnedStruct<'a>>::Builder {
assert!(index < self.len());
FromStructBuilder::new(self.builder.get_struct_element(index))
}
}
impl <'a, T> ::traits::SetPointerBuilder<Builder<'a, T>> for Reader<'a, T>
where T: for<'b> ::traits::OwnedStruct<'b>
{
fn set_pointer_builder<'b>(pointer : ::private::layout::PointerBuilder<'b>,
value : Reader<'a, T>) -> Result<()> {
pointer.set_list(&value.reader)
}
}
|
}
impl <'a, T> Clone for Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
|
random_line_split
|
struct_list.rs
|
// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//! List of structs.
use private::layout::{ListReader, ListBuilder, PointerReader, PointerBuilder, InlineComposite};
use traits::{FromPointerReader, FromPointerBuilder,
FromStructBuilder, FromStructReader, HasStructSize,
IndexMove, ListIter};
use Result;
#[derive(Copy, Clone)]
pub struct Owned<T> where T: for<'a> ::traits::OwnedStruct<'a> {
marker: ::std::marker::PhantomData<T>,
}
impl<'a, T> ::traits::Owned<'a> for Owned<T> where T: for<'b> ::traits::OwnedStruct<'b> {
type Reader = Reader<'a, T>;
type Builder = Builder<'a, T>;
}
pub struct Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
marker: ::std::marker::PhantomData<T>,
reader: ListReader<'a>
}
impl <'a, T> Clone for Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
fn clone(&self) -> Reader<'a, T> {
Reader { marker : self.marker, reader : self.reader }
}
}
impl <'a, T> Copy for Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {}
impl <'a, T> Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn new<'b>(reader : ListReader<'b>) -> Reader<'b, T> {
Reader::<'b, T> { reader : reader, marker : ::std::marker::PhantomData }
}
pub fn len(&self) -> u32 { self.reader.len() }
pub fn iter(self) -> ListIter<Reader<'a, T>, <T as ::traits::OwnedStruct<'a>>::Reader> {
ListIter::new(self, self.len())
}
}
impl <'a, T> Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn borrow<'b>(&'b self) -> Reader<'b, T> {
Reader {reader : self.reader, marker : ::std::marker::PhantomData}
}
}
impl <'a, T> FromPointerReader<'a> for Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
fn get_from_pointer(reader : &PointerReader<'a>) -> Result<Reader<'a, T>> {
Ok(Reader { reader : try!(reader.get_list(InlineComposite, ::std::ptr::null())),
marker : ::std::marker::PhantomData })
}
}
impl <'a, T> IndexMove<u32, <T as ::traits::OwnedStruct<'a>>::Reader> for Reader<'a, T>
where T: for<'b> ::traits::OwnedStruct<'b> {
fn
|
(&self, index : u32) -> <T as ::traits::OwnedStruct<'a>>::Reader {
self.get(index)
}
}
impl <'a, T> Reader<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn get(self, index: u32) -> <T as ::traits::OwnedStruct<'a>>::Reader {
assert!(index < self.len());
FromStructReader::new(self.reader.get_struct_element(index))
}
}
pub struct Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
marker : ::std::marker::PhantomData<T>,
builder : ListBuilder<'a>
}
impl <'a, T> Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn new(builder : ListBuilder<'a>) -> Builder<'a, T> {
Builder { builder : builder, marker : ::std::marker::PhantomData }
}
pub fn len(&self) -> u32 { self.builder.len() }
// pub fn set(&self, index : uint, value : T) {
// }
}
impl <'a, T> Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn borrow<'b>(&'b mut self) -> Builder<'b, T> {
Builder {builder : self.builder, marker : ::std::marker::PhantomData}
}
}
impl <'a, T> FromPointerBuilder<'a> for Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
fn init_pointer(builder : PointerBuilder<'a>, size : u32) -> Builder<'a, T> {
Builder {
marker : ::std::marker::PhantomData,
builder : builder.init_struct_list(
size,
<<T as ::traits::OwnedStruct>::Builder as HasStructSize>::struct_size())
}
}
fn get_from_pointer(builder : PointerBuilder<'a>) -> Result<Builder<'a, T>> {
Ok(Builder {
marker : ::std::marker::PhantomData,
builder :
try!(builder.get_struct_list(<<T as ::traits::OwnedStruct>::Builder as HasStructSize>::struct_size(),
::std::ptr::null()))
})
}
}
impl <'a, T> Builder<'a, T> where T: for<'b> ::traits::OwnedStruct<'b> {
pub fn get(self, index: u32) -> <T as ::traits::OwnedStruct<'a>>::Builder {
assert!(index < self.len());
FromStructBuilder::new(self.builder.get_struct_element(index))
}
}
impl <'a, T> ::traits::SetPointerBuilder<Builder<'a, T>> for Reader<'a, T>
where T: for<'b> ::traits::OwnedStruct<'b>
{
fn set_pointer_builder<'b>(pointer : ::private::layout::PointerBuilder<'b>,
value : Reader<'a, T>) -> Result<()> {
pointer.set_list(&value.reader)
}
}
|
index_move
|
identifier_name
|
instruct_mq.rs
|
use crate::qaruntime::base::Instruct;
use crate::qaruntime::qamanagers::monitor_manager::MonitorManager;
use actix::{Actor, Addr, Context};
use amiquip::{
Channel, Connection, ConsumerMessage, ConsumerOptions, Exchange, ExchangeDeclareOptions,
ExchangeType, FieldTable, Publish, QueueDeclareOptions, Result,
};
use log::{error, info, warn};
// Instruction receiver
pub struct InstructMQ {
pub amqp: String,
pub exchange: String,
pub model: String,
pub routing_key: String,
// connection:
pub morm: Addr<MonitorManager>,
}
impl InstructMQ {
fn consume_
|
-> Result<()> {
let mut connection = Connection::insecure_open(&self.amqp)?;
let channel = connection.open_channel(None)?;
let exchange = channel.exchange_declare(
ExchangeType::Direct,
&self.exchange,
ExchangeDeclareOptions {
durable: false,
auto_delete: false,
internal: false,
arguments: Default::default(),
},
)?;
let queue = channel.queue_declare(
"",
QueueDeclareOptions {
exclusive: true,
..QueueDeclareOptions::default()
},
)?;
info!("[InstructMQ] Start at <{}> ", self.routing_key);
queue.bind(&exchange, self.routing_key.clone(), FieldTable::new())?;
let consumer = queue.consume(ConsumerOptions {
no_ack: true,
..ConsumerOptions::default()
})?;
for (_i, message) in consumer.receiver().iter().enumerate() {
match message {
ConsumerMessage::Delivery(delivery) => {
let msg = delivery.body.clone();
let foo = String::from_utf8(msg).unwrap();
let data = foo.to_string();
match serde_json::from_str(&data) {
Ok(v) => match self.morm.try_send::<Instruct>(v) {
Ok(_) => {}
Err(e) => {
error!("[Monitor Manager] send instruct fail {}", e.to_string())
}
},
Err(e) => error!("[Monitor Manager] Instruct parse fail {}", e.to_string()),
}
}
other => {
warn!("Consumer ended: {:?}", other);
break;
}
}
}
connection.close()
}
}
impl Actor for InstructMQ {
type Context = Context<Self>;
fn started(&mut self, ctx: &mut Self::Context) {
        ctx.set_mailbox_capacity(1000); // set the mailbox capacity
self.consume_direct();
}
}
|
direct(&self)
|
identifier_name
|
instruct_mq.rs
|
use crate::qaruntime::base::Instruct;
use crate::qaruntime::qamanagers::monitor_manager::MonitorManager;
use actix::{Actor, Addr, Context};
use amiquip::{
Channel, Connection, ConsumerMessage, ConsumerOptions, Exchange, ExchangeDeclareOptions,
ExchangeType, FieldTable, Publish, QueueDeclareOptions, Result,
};
use log::{error, info, warn};
// Instruction receiver
pub struct InstructMQ {
pub amqp: String,
pub exchange: String,
pub model: String,
pub routing_key: String,
// connection:
pub morm: Addr<MonitorManager>,
}
impl InstructMQ {
fn consume_direct(&self) -> Result<()> {
let mut connection = Connection::insecure_open(&self.amqp)?;
let channel = connection.open_channel(None)?;
let exchange = channel.exchange_declare(
ExchangeType::Direct,
&self.exchange,
ExchangeDeclareOptions {
durable: false,
auto_delete: false,
internal: false,
arguments: Default::default(),
},
)?;
let queue = channel.queue_declare(
"",
QueueDeclareOptions {
exclusive: true,
..QueueDeclareOptions::default()
},
)?;
info!("[InstructMQ] Start at <{}> ", self.routing_key);
queue.bind(&exchange, self.routing_key.clone(), FieldTable::new())?;
let consumer = queue.consume(ConsumerOptions {
no_ack: true,
..ConsumerOptions::default()
})?;
for (_i, message) in consumer.receiver().iter().enumerate() {
match message {
ConsumerMessage::Delivery(delivery) => {
let msg = delivery.body.clone();
let foo = String::from_utf8(msg).unwrap();
let data = foo.to_string();
|
error!("[Monitor Manager] send instruct fail {}", e.to_string())
}
},
Err(e) => error!("[Monitor Manager] Instruct parse fail {}", e.to_string()),
}
}
other => {
warn!("Consumer ended: {:?}", other);
break;
}
}
}
connection.close()
}
}
impl Actor for InstructMQ {
type Context = Context<Self>;
fn started(&mut self, ctx: &mut Self::Context) {
        ctx.set_mailbox_capacity(1000); // set the mailbox capacity
self.consume_direct();
}
}
|
match serde_json::from_str(&data) {
Ok(v) => match self.morm.try_send::<Instruct>(v) {
Ok(_) => {}
Err(e) => {
|
random_line_split
|
instruct_mq.rs
|
use crate::qaruntime::base::Instruct;
use crate::qaruntime::qamanagers::monitor_manager::MonitorManager;
use actix::{Actor, Addr, Context};
use amiquip::{
Channel, Connection, ConsumerMessage, ConsumerOptions, Exchange, ExchangeDeclareOptions,
ExchangeType, FieldTable, Publish, QueueDeclareOptions, Result,
};
use log::{error, info, warn};
// Instruction receiver
pub struct InstructMQ {
pub amqp: String,
pub exchange: String,
pub model: String,
pub routing_key: String,
// connection:
pub morm: Addr<MonitorManager>,
}
impl InstructMQ {
fn consume_direct(&self) -> Result<()> {
let mut connection = Connection::insecure_open(&self.amqp)?;
let channel = connection.open_channel(None)?;
let exchange = channel.exchange_declare(
ExchangeType::Direct,
&self.exchange,
ExchangeDeclareOptions {
durable: false,
auto_delete: false,
internal: false,
arguments: Default::default(),
},
)?;
let queue = channel.queue_declare(
"",
QueueDeclareOptions {
exclusive: true,
..QueueDeclareOptions::default()
},
)?;
info!("[InstructMQ] Start at <{}> ", self.routing_key);
queue.bind(&exchange, self.routing_key.clone(), FieldTable::new())?;
let consumer = queue.consume(ConsumerOptions {
no_ack: true,
..ConsumerOptions::default()
})?;
for (_i, message) in consumer.receiver().iter().enumerate() {
match message {
ConsumerMessage::Delivery(delivery) => {
let msg = delivery.body.clone();
let foo = String::from_utf8(msg).unwrap();
let data = foo.to_string();
match serde_json::from_str(&data) {
Ok(v) => match self.morm.try_send::<Instruct>(v) {
Ok(_) => {}
Err(e) => {
error!("[Monitor Manager] send instruct fail {}", e.to_string())
}
},
Err(e) => error!("[Monitor Manager] Instruct parse fail {}", e.to_string()),
}
}
other => {
warn!("Consumer ended: {:?}", other);
break;
}
}
}
connection.close()
}
}
impl Actor for InstructMQ {
type Context = Context<Self>;
fn started(&mut self, ctx: &mut Self::Context) {
|
        ctx.set_mailbox_capacity(1000); // set the mailbox capacity
self.consume_direct();
}
}
|
identifier_body
|
|
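A minimal usage sketch for the InstructMQ actor above, assuming a running actix System and an already-started MonitorManager address; the AMQP URL, exchange name and routing key below are placeholder values, not project configuration:

use actix::Actor;

fn start_instruct_mq(morm: actix::Addr<MonitorManager>) -> actix::Addr<InstructMQ> {
    InstructMQ {
        amqp: "amqp://guest:guest@localhost:5672".to_string(), // placeholder broker URL
        exchange: "qa_instruct".to_string(),                   // placeholder exchange name
        model: "direct".to_string(),
        routing_key: "account_001".to_string(),                // placeholder routing key
        morm,
    }
    .start() // the started() hook then opens the AMQP connection and consumes instructions
}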
windows.rs
|
use std::sync::mpsc::{channel, Sender, Receiver, TryRecvError, sync_channel, SyncSender};
use std::io::{Read, Write};
use std::{io, thread};
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use named_pipe::{PipeOptions, OpenMode, PipeClient};
use libc;
fn run<F: FnOnce(&SyncSender<()>) -> io::Result<()> + Send + 'static>(_: &'static str, f: F) {
let (sync_tx, sync_rx) = sync_channel(0);
thread::spawn(move || {
match f(&sync_tx) {
Ok(_) => {},
Err(_) => {
sync_tx.try_send(()).ok();
//println!("MIPC {} thread failed with {:?}", name, e);
}
};
});
sync_rx.recv().unwrap();
}
pub struct IpcClient {
send: Sender<Vec<u8>>,
recv: Receiver<Vec<u8>>,
}
struct S<T>(T);
unsafe impl<T> Send for S<T> {}
impl IpcClient {
pub fn send(&self, message: Vec<u8>) -> bool {
self.send.send(message).is_ok()
}
pub fn recv(&self) -> Option<Vec<u8>> {
self.recv.recv().ok()
}
pub fn try_recv(&self) -> Option<Option<Vec<u8>>> {
match self.recv.try_recv() {
Ok(buf) => Some(Some(buf)),
Err(TryRecvError::Empty) => Some(None),
Err(TryRecvError::Disconnected) => None
}
}
pub fn
|
(name: &str) -> io::Result<IpcClient> {
let (send_tx, send_rx) = channel::<Vec<u8>>();
let (recv_tx, recv_rx) = channel::<Vec<u8>>();
let pid = unsafe { libc::getpid() as u32 };
let path = format!("\\\\.\\pipe\\messageipc_{}_{}", name, pid);
let mut servers = try!(PipeOptions::new(path).open_mode(OpenMode::Duplex).multiple(2));
let read_server = S(servers.pop().unwrap());
let write_server = S(servers.pop().unwrap());
// Read thread
run("server-read", move |sync| {
sync.send(()).unwrap();
let mut read_server = try!(read_server.0.wait());
// Write thread
run("server-write", move |sync| {
let mut write_server = try!(write_server.0.wait());
sync.send(()).unwrap();
while let Ok(buffer) = send_rx.recv() {
let size = buffer.len() as u32;
try!(write_server.write_u32::<LittleEndian>(size));
try!(write_server.write_all(&buffer[..]));
}
Ok(())
});
loop {
let bytes = try!(read_server.read_u32::<LittleEndian>());
let mut buffer = vec![0; bytes as usize];
try!(read_server.read_exact(&mut buffer[..]));
if let Err(_) = recv_tx.send(buffer) {
return Ok(());
}
}
});
Ok(IpcClient {
send: send_tx,
recv: recv_rx,
})
}
pub fn open_client(name: &str, pid: u32) -> io::Result<IpcClient> {
let (send_tx, send_rx) = channel::<Vec<u8>>();
let (recv_tx, recv_rx) = channel::<Vec<u8>>();
let read_path = format!("\\\\.\\pipe\\messageipc_{}_{}", name, pid);
let write_path = format!("\\\\.\\pipe\\messageipc_{}_{}", name, pid);
// Read thread
run("client-read", move |sync| {
let mut read_client = try!(PipeClient::connect(read_path));
// Write thread
run("client-write", move |sync| {
let mut write_client = try!(PipeClient::connect(write_path));
sync.send(()).unwrap();
while let Ok(buffer) = send_rx.recv() {
let size = buffer.len() as u32;
try!(write_client.write_u32::<LittleEndian>(size));
try!(write_client.write_all(&buffer[..]));
}
Ok(())
});
sync.send(()).unwrap();
loop {
let bytes = try!(read_client.read_u32::<LittleEndian>());
let mut buffer = vec![0; bytes as usize];
try!(read_client.read_exact(&mut buffer[..]));
if let Err(_) = recv_tx.send(buffer) {
return Ok(());
}
}
});
Ok(IpcClient {
send: send_tx,
recv: recv_rx,
})
}
}
|
open_server
|
identifier_name
|
windows.rs
|
use std::sync::mpsc::{channel, Sender, Receiver, TryRecvError, sync_channel, SyncSender};
use std::io::{Read, Write};
use std::{io, thread};
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use named_pipe::{PipeOptions, OpenMode, PipeClient};
use libc;
fn run<F: FnOnce(&SyncSender<()>) -> io::Result<()> + Send + 'static>(_: &'static str, f: F)
|
pub struct IpcClient {
send: Sender<Vec<u8>>,
recv: Receiver<Vec<u8>>,
}
struct S<T>(T);
unsafe impl<T> Send for S<T> {}
impl IpcClient {
pub fn send(&self, message: Vec<u8>) -> bool {
self.send.send(message).is_ok()
}
pub fn recv(&self) -> Option<Vec<u8>> {
self.recv.recv().ok()
}
pub fn try_recv(&self) -> Option<Option<Vec<u8>>> {
match self.recv.try_recv() {
Ok(buf) => Some(Some(buf)),
Err(TryRecvError::Empty) => Some(None),
Err(TryRecvError::Disconnected) => None
}
}
pub fn open_server(name: &str) -> io::Result<IpcClient> {
let (send_tx, send_rx) = channel::<Vec<u8>>();
let (recv_tx, recv_rx) = channel::<Vec<u8>>();
let pid = unsafe { libc::getpid() as u32 };
let path = format!("\\\\.\\pipe\\messageipc_{}_{}", name, pid);
let mut servers = try!(PipeOptions::new(path).open_mode(OpenMode::Duplex).multiple(2));
let read_server = S(servers.pop().unwrap());
let write_server = S(servers.pop().unwrap());
// Read thread
run("server-read", move |sync| {
sync.send(()).unwrap();
let mut read_server = try!(read_server.0.wait());
// Write thread
run("server-write", move |sync| {
let mut write_server = try!(write_server.0.wait());
sync.send(()).unwrap();
while let Ok(buffer) = send_rx.recv() {
let size = buffer.len() as u32;
try!(write_server.write_u32::<LittleEndian>(size));
try!(write_server.write_all(&buffer[..]));
}
Ok(())
});
loop {
let bytes = try!(read_server.read_u32::<LittleEndian>());
let mut buffer = vec![0; bytes as usize];
try!(read_server.read_exact(&mut buffer[..]));
if let Err(_) = recv_tx.send(buffer) {
return Ok(());
}
}
});
Ok(IpcClient {
send: send_tx,
recv: recv_rx,
})
}
pub fn open_client(name: &str, pid: u32) -> io::Result<IpcClient> {
let (send_tx, send_rx) = channel::<Vec<u8>>();
let (recv_tx, recv_rx) = channel::<Vec<u8>>();
let read_path = format!("\\\\.\\pipe\\messageipc_{}_{}", name, pid);
let write_path = format!("\\\\.\\pipe\\messageipc_{}_{}", name, pid);
// Read thread
run("client-read", move |sync| {
let mut read_client = try!(PipeClient::connect(read_path));
// Write thread
run("client-write", move |sync| {
let mut write_client = try!(PipeClient::connect(write_path));
sync.send(()).unwrap();
while let Ok(buffer) = send_rx.recv() {
let size = buffer.len() as u32;
try!(write_client.write_u32::<LittleEndian>(size));
try!(write_client.write_all(&buffer[..]));
}
Ok(())
});
sync.send(()).unwrap();
loop {
let bytes = try!(read_client.read_u32::<LittleEndian>());
let mut buffer = vec![0; bytes as usize];
try!(read_client.read_exact(&mut buffer[..]));
if let Err(_) = recv_tx.send(buffer) {
return Ok(());
}
}
});
Ok(IpcClient {
send: send_tx,
recv: recv_rx,
})
}
}
|
{
let (sync_tx, sync_rx) = sync_channel(0);
thread::spawn(move || {
match f(&sync_tx) {
Ok(_) => {},
Err(_) => {
sync_tx.try_send(()).ok();
//println!("MIPC {} thread failed with {:?}", name, e);
}
};
});
sync_rx.recv().unwrap();
}
|
identifier_body
|
windows.rs
|
use std::sync::mpsc::{channel, Sender, Receiver, TryRecvError, sync_channel, SyncSender};
use std::io::{Read, Write};
use std::{io, thread};
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use named_pipe::{PipeOptions, OpenMode, PipeClient};
use libc;
fn run<F: FnOnce(&SyncSender<()>) -> io::Result<()> + Send + 'static>(_: &'static str, f: F) {
let (sync_tx, sync_rx) = sync_channel(0);
thread::spawn(move || {
match f(&sync_tx) {
Ok(_) => {},
Err(_) => {
sync_tx.try_send(()).ok();
//println!("MIPC {} thread failed with {:?}", name, e);
}
};
});
sync_rx.recv().unwrap();
}
pub struct IpcClient {
send: Sender<Vec<u8>>,
recv: Receiver<Vec<u8>>,
}
struct S<T>(T);
unsafe impl<T> Send for S<T> {}
impl IpcClient {
pub fn send(&self, message: Vec<u8>) -> bool {
self.send.send(message).is_ok()
}
pub fn recv(&self) -> Option<Vec<u8>> {
self.recv.recv().ok()
}
|
pub fn try_recv(&self) -> Option<Option<Vec<u8>>> {
match self.recv.try_recv() {
Ok(buf) => Some(Some(buf)),
Err(TryRecvError::Empty) => Some(None),
Err(TryRecvError::Disconnected) => None
}
}
pub fn open_server(name: &str) -> io::Result<IpcClient> {
let (send_tx, send_rx) = channel::<Vec<u8>>();
let (recv_tx, recv_rx) = channel::<Vec<u8>>();
let pid = unsafe { libc::getpid() as u32 };
let path = format!("\\\\.\\pipe\\messageipc_{}_{}", name, pid);
let mut servers = try!(PipeOptions::new(path).open_mode(OpenMode::Duplex).multiple(2));
let read_server = S(servers.pop().unwrap());
let write_server = S(servers.pop().unwrap());
// Read thread
run("server-read", move |sync| {
sync.send(()).unwrap();
let mut read_server = try!(read_server.0.wait());
// Write thread
run("server-write", move |sync| {
let mut write_server = try!(write_server.0.wait());
sync.send(()).unwrap();
while let Ok(buffer) = send_rx.recv() {
let size = buffer.len() as u32;
try!(write_server.write_u32::<LittleEndian>(size));
try!(write_server.write_all(&buffer[..]));
}
Ok(())
});
loop {
let bytes = try!(read_server.read_u32::<LittleEndian>());
let mut buffer = vec![0; bytes as usize];
try!(read_server.read_exact(&mut buffer[..]));
if let Err(_) = recv_tx.send(buffer) {
return Ok(());
}
}
});
Ok(IpcClient {
send: send_tx,
recv: recv_rx,
})
}
pub fn open_client(name: &str, pid: u32) -> io::Result<IpcClient> {
let (send_tx, send_rx) = channel::<Vec<u8>>();
let (recv_tx, recv_rx) = channel::<Vec<u8>>();
let read_path = format!("\\\\.\\pipe\\messageipc_{}_{}", name, pid);
let write_path = format!("\\\\.\\pipe\\messageipc_{}_{}", name, pid);
// Read thread
run("client-read", move |sync| {
let mut read_client = try!(PipeClient::connect(read_path));
// Write thread
run("client-write", move |sync| {
let mut write_client = try!(PipeClient::connect(write_path));
sync.send(()).unwrap();
while let Ok(buffer) = send_rx.recv() {
let size = buffer.len() as u32;
try!(write_client.write_u32::<LittleEndian>(size));
try!(write_client.write_all(&buffer[..]));
}
Ok(())
});
sync.send(()).unwrap();
loop {
let bytes = try!(read_client.read_u32::<LittleEndian>());
let mut buffer = vec![0; bytes as usize];
try!(read_client.read_exact(&mut buffer[..]));
if let Err(_) = recv_tx.send(buffer) {
return Ok(());
}
}
});
Ok(IpcClient {
send: send_tx,
recv: recv_rx,
})
}
}
|
random_line_split
|
|
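A minimal usage sketch for the named-pipe IpcClient above, assuming two cooperating Windows processes; the channel name "demo" and the way the server's pid reaches the client are placeholders:

use std::io;

// Server process: creates the pipes under its own pid and echoes messages back.
fn run_server() -> io::Result<()> {
    let server = IpcClient::open_server("demo")?;
    while let Some(request) = server.recv() {
        server.send(request);
    }
    Ok(())
}

// Client process: connects using the server's pid (e.g. passed on the command line).
fn run_client(server_pid: u32) -> io::Result<()> {
    let client = IpcClient::open_client("demo", server_pid)?;
    client.send(b"ping".to_vec());
    if let Some(reply) = client.recv() {
        assert_eq!(reply, b"ping");
    }
    Ok(())
}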
command.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A thin wrapper around `Command` in the standard library which allows us to
//! read the arguments that are built up.
use std::ffi::{OsStr, OsString};
use std::fmt;
use std::io;
use std::mem;
use std::process::{self, Output};
use rustc_target::spec::LldFlavor;
#[derive(Clone)]
pub struct Command {
program: Program,
args: Vec<OsString>,
env: Vec<(OsString, OsString)>,
}
#[derive(Clone)]
enum Program {
Normal(OsString),
CmdBatScript(OsString),
Lld(OsString, LldFlavor)
}
impl Command {
pub fn new<P: AsRef<OsStr>>(program: P) -> Command {
Command::_new(Program::Normal(program.as_ref().to_owned()))
}
pub fn bat_script<P: AsRef<OsStr>>(program: P) -> Command {
Command::_new(Program::CmdBatScript(program.as_ref().to_owned()))
}
pub fn lld<P: AsRef<OsStr>>(program: P, flavor: LldFlavor) -> Command {
Command::_new(Program::Lld(program.as_ref().to_owned(), flavor))
}
fn _new(program: Program) -> Command {
Command {
program,
args: Vec::new(),
env: Vec::new(),
}
}
pub fn arg<P: AsRef<OsStr>>(&mut self, arg: P) -> &mut Command {
self._arg(arg.as_ref());
self
}
pub fn args<I>(&mut self, args: I) -> &mut Command
where I: IntoIterator,
I::Item: AsRef<OsStr>,
{
for arg in args {
self._arg(arg.as_ref());
}
self
}
fn _arg(&mut self, arg: &OsStr)
|
pub fn env<K, V>(&mut self, key: K, value: V) -> &mut Command
where K: AsRef<OsStr>,
V: AsRef<OsStr>
{
self._env(key.as_ref(), value.as_ref());
self
}
fn _env(&mut self, key: &OsStr, value: &OsStr) {
self.env.push((key.to_owned(), value.to_owned()));
}
pub fn output(&mut self) -> io::Result<Output> {
self.command().output()
}
pub fn command(&self) -> process::Command {
let mut ret = match self.program {
Program::Normal(ref p) => process::Command::new(p),
Program::CmdBatScript(ref p) => {
let mut c = process::Command::new("cmd");
c.arg("/c").arg(p);
c
}
Program::Lld(ref p, flavor) => {
let mut c = process::Command::new(p);
c.arg("-flavor").arg(match flavor {
LldFlavor::Wasm => "wasm",
LldFlavor::Ld => "gnu",
LldFlavor::Link => "link",
LldFlavor::Ld64 => "darwin",
});
c
}
};
ret.args(&self.args);
ret.envs(self.env.clone());
return ret
}
// extensions
pub fn get_args(&self) -> &[OsString] {
&self.args
}
pub fn take_args(&mut self) -> Vec<OsString> {
mem::replace(&mut self.args, Vec::new())
}
    /// Returns `true` if we're pretty sure that this'll blow OS spawn limits,
/// or `false` if we should attempt to spawn and see what the OS says.
pub fn very_likely_to_exceed_some_spawn_limit(&self) -> bool {
// We mostly only care about Windows in this method, on Unix the limits
// can be gargantuan anyway so we're pretty unlikely to hit them
if cfg!(unix) {
return false
}
// Right now LLD doesn't support the `@` syntax of passing an argument
// through files, so regardless of the platform we try to go to the OS
// on this one.
if let Program::Lld(..) = self.program {
return false
}
// Ok so on Windows to spawn a process is 32,768 characters in its
// command line [1]. Unfortunately we don't actually have access to that
// as it's calculated just before spawning. Instead we perform a
// poor-man's guess as to how long our command line will be. We're
// assuming here that we don't have to escape every character...
//
// Turns out though that `cmd.exe` has even smaller limits, 8192
// characters [2]. Linkers can often be batch scripts (for example
// Emscripten, Gecko's current build system) which means that we're
// running through batch scripts. These linkers often just forward
// arguments elsewhere (and maybe tack on more), so if we blow 8192
// bytes we'll typically cause them to blow as well.
//
// Basically as a result just perform an inflated estimate of what our
// command line will look like and test if it's > 8192 (we actually
// test against 6k to artificially inflate our estimate). If all else
// fails we'll fall back to the normal unix logic of testing the OS
// error code if we fail to spawn and automatically re-spawning the
// linker with smaller arguments.
//
// [1]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms682425(v=vs.85).aspx
// [2]: https://blogs.msdn.microsoft.com/oldnewthing/20031210-00/?p=41553
let estimated_command_line_len =
self.args.iter().map(|a| a.len()).sum::<usize>();
estimated_command_line_len > 1024 * 6
}
}
impl fmt::Debug for Command {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.command().fmt(f)
}
}
|
{
self.args.push(arg.to_owned());
}
|
identifier_body
|
command.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
//! A thin wrapper around `Command` in the standard library which allows us to
//! read the arguments that are built up.
use std::ffi::{OsStr, OsString};
use std::fmt;
use std::io;
use std::mem;
use std::process::{self, Output};
use rustc_target::spec::LldFlavor;
#[derive(Clone)]
pub struct Command {
program: Program,
args: Vec<OsString>,
env: Vec<(OsString, OsString)>,
}
#[derive(Clone)]
enum Program {
Normal(OsString),
CmdBatScript(OsString),
Lld(OsString, LldFlavor)
}
impl Command {
pub fn new<P: AsRef<OsStr>>(program: P) -> Command {
Command::_new(Program::Normal(program.as_ref().to_owned()))
}
pub fn bat_script<P: AsRef<OsStr>>(program: P) -> Command {
Command::_new(Program::CmdBatScript(program.as_ref().to_owned()))
}
pub fn lld<P: AsRef<OsStr>>(program: P, flavor: LldFlavor) -> Command {
Command::_new(Program::Lld(program.as_ref().to_owned(), flavor))
}
fn _new(program: Program) -> Command {
Command {
program,
args: Vec::new(),
env: Vec::new(),
}
}
pub fn arg<P: AsRef<OsStr>>(&mut self, arg: P) -> &mut Command {
self._arg(arg.as_ref());
self
}
pub fn args<I>(&mut self, args: I) -> &mut Command
where I: IntoIterator,
I::Item: AsRef<OsStr>,
{
for arg in args {
self._arg(arg.as_ref());
}
self
}
fn _arg(&mut self, arg: &OsStr) {
self.args.push(arg.to_owned());
}
pub fn env<K, V>(&mut self, key: K, value: V) -> &mut Command
where K: AsRef<OsStr>,
V: AsRef<OsStr>
{
self._env(key.as_ref(), value.as_ref());
self
}
fn _env(&mut self, key: &OsStr, value: &OsStr) {
self.env.push((key.to_owned(), value.to_owned()));
}
pub fn output(&mut self) -> io::Result<Output> {
self.command().output()
}
pub fn command(&self) -> process::Command {
let mut ret = match self.program {
Program::Normal(ref p) => process::Command::new(p),
Program::CmdBatScript(ref p) => {
let mut c = process::Command::new("cmd");
c.arg("/c").arg(p);
c
}
Program::Lld(ref p, flavor) => {
let mut c = process::Command::new(p);
c.arg("-flavor").arg(match flavor {
LldFlavor::Wasm => "wasm",
LldFlavor::Ld => "gnu",
LldFlavor::Link => "link",
LldFlavor::Ld64 => "darwin",
});
c
}
};
ret.args(&self.args);
ret.envs(self.env.clone());
return ret
}
// extensions
pub fn get_args(&self) -> &[OsString] {
&self.args
}
pub fn take_args(&mut self) -> Vec<OsString> {
mem::replace(&mut self.args, Vec::new())
}
    /// Returns `true` if we're pretty sure that this'll blow OS spawn limits,
/// or `false` if we should attempt to spawn and see what the OS says.
pub fn very_likely_to_exceed_some_spawn_limit(&self) -> bool {
// We mostly only care about Windows in this method, on Unix the limits
// can be gargantuan anyway so we're pretty unlikely to hit them
if cfg!(unix) {
return false
}
// Right now LLD doesn't support the `@` syntax of passing an argument
// through files, so regardless of the platform we try to go to the OS
// on this one.
if let Program::Lld(..) = self.program {
return false
}
// Ok so on Windows to spawn a process is 32,768 characters in its
// command line [1]. Unfortunately we don't actually have access to that
// as it's calculated just before spawning. Instead we perform a
// poor-man's guess as to how long our command line will be. We're
// assuming here that we don't have to escape every character...
//
// Turns out though that `cmd.exe` has even smaller limits, 8192
// characters [2]. Linkers can often be batch scripts (for example
// Emscripten, Gecko's current build system) which means that we're
// running through batch scripts. These linkers often just forward
// arguments elsewhere (and maybe tack on more), so if we blow 8192
// bytes we'll typically cause them to blow as well.
//
// Basically as a result just perform an inflated estimate of what our
// command line will look like and test if it's > 8192 (we actually
// test against 6k to artificially inflate our estimate). If all else
// fails we'll fall back to the normal unix logic of testing the OS
// error code if we fail to spawn and automatically re-spawning the
// linker with smaller arguments.
//
// [1]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms682425(v=vs.85).aspx
// [2]: https://blogs.msdn.microsoft.com/oldnewthing/20031210-00/?p=41553
let estimated_command_line_len =
self.args.iter().map(|a| a.len()).sum::<usize>();
estimated_command_line_len > 1024 * 6
}
}
impl fmt::Debug for Command {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.command().fmt(f)
}
}
|
random_line_split
|
|
command.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A thin wrapper around `Command` in the standard library which allows us to
//! read the arguments that are built up.
use std::ffi::{OsStr, OsString};
use std::fmt;
use std::io;
use std::mem;
use std::process::{self, Output};
use rustc_target::spec::LldFlavor;
#[derive(Clone)]
pub struct Command {
program: Program,
args: Vec<OsString>,
env: Vec<(OsString, OsString)>,
}
#[derive(Clone)]
enum Program {
Normal(OsString),
CmdBatScript(OsString),
Lld(OsString, LldFlavor)
}
impl Command {
pub fn new<P: AsRef<OsStr>>(program: P) -> Command {
Command::_new(Program::Normal(program.as_ref().to_owned()))
}
pub fn bat_script<P: AsRef<OsStr>>(program: P) -> Command {
Command::_new(Program::CmdBatScript(program.as_ref().to_owned()))
}
pub fn lld<P: AsRef<OsStr>>(program: P, flavor: LldFlavor) -> Command {
Command::_new(Program::Lld(program.as_ref().to_owned(), flavor))
}
fn _new(program: Program) -> Command {
Command {
program,
args: Vec::new(),
env: Vec::new(),
}
}
pub fn arg<P: AsRef<OsStr>>(&mut self, arg: P) -> &mut Command {
self._arg(arg.as_ref());
self
}
pub fn args<I>(&mut self, args: I) -> &mut Command
where I: IntoIterator,
I::Item: AsRef<OsStr>,
{
for arg in args {
self._arg(arg.as_ref());
}
self
}
fn _arg(&mut self, arg: &OsStr) {
self.args.push(arg.to_owned());
}
pub fn env<K, V>(&mut self, key: K, value: V) -> &mut Command
where K: AsRef<OsStr>,
V: AsRef<OsStr>
{
self._env(key.as_ref(), value.as_ref());
self
}
fn
|
(&mut self, key: &OsStr, value: &OsStr) {
self.env.push((key.to_owned(), value.to_owned()));
}
pub fn output(&mut self) -> io::Result<Output> {
self.command().output()
}
pub fn command(&self) -> process::Command {
let mut ret = match self.program {
Program::Normal(ref p) => process::Command::new(p),
Program::CmdBatScript(ref p) => {
let mut c = process::Command::new("cmd");
c.arg("/c").arg(p);
c
}
Program::Lld(ref p, flavor) => {
let mut c = process::Command::new(p);
c.arg("-flavor").arg(match flavor {
LldFlavor::Wasm => "wasm",
LldFlavor::Ld => "gnu",
LldFlavor::Link => "link",
LldFlavor::Ld64 => "darwin",
});
c
}
};
ret.args(&self.args);
ret.envs(self.env.clone());
return ret
}
// extensions
pub fn get_args(&self) -> &[OsString] {
&self.args
}
pub fn take_args(&mut self) -> Vec<OsString> {
mem::replace(&mut self.args, Vec::new())
}
    /// Returns `true` if we're pretty sure that this'll blow OS spawn limits,
/// or `false` if we should attempt to spawn and see what the OS says.
pub fn very_likely_to_exceed_some_spawn_limit(&self) -> bool {
// We mostly only care about Windows in this method, on Unix the limits
// can be gargantuan anyway so we're pretty unlikely to hit them
if cfg!(unix) {
return false
}
// Right now LLD doesn't support the `@` syntax of passing an argument
// through files, so regardless of the platform we try to go to the OS
// on this one.
if let Program::Lld(..) = self.program {
return false
}
// Ok so on Windows to spawn a process is 32,768 characters in its
// command line [1]. Unfortunately we don't actually have access to that
// as it's calculated just before spawning. Instead we perform a
// poor-man's guess as to how long our command line will be. We're
// assuming here that we don't have to escape every character...
//
// Turns out though that `cmd.exe` has even smaller limits, 8192
// characters [2]. Linkers can often be batch scripts (for example
// Emscripten, Gecko's current build system) which means that we're
// running through batch scripts. These linkers often just forward
// arguments elsewhere (and maybe tack on more), so if we blow 8192
// bytes we'll typically cause them to blow as well.
//
// Basically as a result just perform an inflated estimate of what our
// command line will look like and test if it's > 8192 (we actually
// test against 6k to artificially inflate our estimate). If all else
// fails we'll fall back to the normal unix logic of testing the OS
// error code if we fail to spawn and automatically re-spawning the
// linker with smaller arguments.
//
// [1]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms682425(v=vs.85).aspx
// [2]: https://blogs.msdn.microsoft.com/oldnewthing/20031210-00/?p=41553
let estimated_command_line_len =
self.args.iter().map(|a| a.len()).sum::<usize>();
estimated_command_line_len > 1024 * 6
}
}
impl fmt::Debug for Command {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.command().fmt(f)
}
}
|
_env
|
identifier_name
|
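A standalone sketch of the spawn-limit heuristic described in the comments above: sum the byte lengths of all arguments and compare against a 6 KiB threshold to stay well under cmd.exe's 8192-character limit. This is illustrative only, with the same caveats as those comments:

use std::ffi::OsString;

fn likely_to_exceed_spawn_limit(args: &[OsString]) -> bool {
    // Unix argv limits are large enough that no estimate is needed there.
    if cfg!(unix) {
        return false;
    }
    // Poor man's estimate: total bytes across all arguments.
    let estimated: usize = args.iter().map(|a| a.len()).sum();
    estimated > 1024 * 6
}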
dir_action.rs
|
//! Parsing the options for `DirAction`.
use crate::options::parser::MatchedFlags;
use crate::options::{flags, OptionsError, NumberSource};
use crate::fs::dir_action::{DirAction, RecurseOptions};
impl DirAction {
/// Determine which action to perform when trying to list a directory.
/// There are three possible actions, and they overlap somewhat: the
/// `--tree` flag is another form of recursion, so those two are allowed
/// to both be present, but the `--list-dirs` flag is used separately.
pub fn deduce(matches: &MatchedFlags<'_>, can_tree: bool) -> Result<Self, OptionsError> {
let recurse = matches.has(&flags::RECURSE)?;
let as_file = matches.has(&flags::LIST_DIRS)?;
let tree = matches.has(&flags::TREE)?;
if matches.is_strict() {
// Early check for --level when it wouldn’t do anything
            if ! recurse && ! tree && matches.count(&flags::LEVEL) > 0 {
return Err(OptionsError::Useless2(&flags::LEVEL, &flags::RECURSE, &flags::TREE));
}
else if recurse && as_file {
return Err(OptionsError::Conflict(&flags::RECURSE, &flags::LIST_DIRS));
}
else if tree && as_file {
return Err(OptionsError::Conflict(&flags::TREE, &flags::LIST_DIRS));
}
}
if tree && can_tree {
// Tree is only appropriate in details mode, so this has to
// examine the View, which should have already been deduced by now
Ok(Self::Recurse(RecurseOptions::deduce(matches, true)?))
}
else if recurse {
Ok(Self::Recurse(RecurseOptions::deduce(matches, false)?))
}
else if as_file {
Ok(Self::AsFile)
}
else {
Ok(Self::List)
}
}
}
impl RecurseOptions {
/// Determine which files should be recursed into, based on the `--level`
/// flag’s value, and whether the `--tree` flag was passed, which was
/// determined earlier. The maximum level should be a number, and this
/// will fail with an `Err` if it isn’t.
pub fn deduce(matches: &MatchedFlags<'_>, tree: bool) -> Result<Self, OptionsError> {
if let Some(level) = matches.get(&flags::LEVEL)? {
let arg_str = level.to_string_lossy();
match arg_str.parse() {
Ok(l) => {
Ok(Self { tree, max_depth: Some(l) })
}
Err(e) => {
|
}
}
else {
Ok(Self { tree, max_depth: None })
}
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::options::flags;
use crate::options::parser::Flag;
macro_rules! test {
($name:ident: $type:ident <- $inputs:expr; $stricts:expr => $result:expr) => {
#[test]
fn $name() {
use crate::options::parser::Arg;
use crate::options::test::parse_for_test;
use crate::options::test::Strictnesses::*;
static TEST_ARGS: &[&Arg] = &[&flags::RECURSE, &flags::LIST_DIRS, &flags::TREE, &flags::LEVEL ];
for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf, true)) {
assert_eq!(result, $result);
}
}
};
}
// Default behaviour
test!(empty: DirAction <- []; Both => Ok(DirAction::List));
// Listing files as directories
test!(dirs_short: DirAction <- ["-d"]; Both => Ok(DirAction::AsFile));
test!(dirs_long: DirAction <- ["--list-dirs"]; Both => Ok(DirAction::AsFile));
// Recursing
use self::DirAction::Recurse;
test!(rec_short: DirAction <- ["-R"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
test!(rec_long: DirAction <- ["--recurse"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
test!(rec_lim_short: DirAction <- ["-RL4"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(4) })));
test!(rec_lim_short_2: DirAction <- ["-RL=5"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(5) })));
test!(rec_lim_long: DirAction <- ["--recurse", "--level", "666"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(666) })));
test!(rec_lim_long_2: DirAction <- ["--recurse", "--level=0118"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(118) })));
test!(tree: DirAction <- ["--tree"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
test!(rec_tree: DirAction <- ["--recurse", "--tree"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
test!(rec_short_tree: DirAction <- ["-TR"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
// Overriding --list-dirs, --recurse, and --tree
test!(dirs_recurse: DirAction <- ["--list-dirs", "--recurse"]; Last => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
test!(dirs_tree: DirAction <- ["--list-dirs", "--tree"]; Last => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
test!(just_level: DirAction <- ["--level=4"]; Last => Ok(DirAction::List));
test!(dirs_recurse_2: DirAction <- ["--list-dirs", "--recurse"]; Complain => Err(OptionsError::Conflict(&flags::RECURSE, &flags::LIST_DIRS)));
test!(dirs_tree_2: DirAction <- ["--list-dirs", "--tree"]; Complain => Err(OptionsError::Conflict(&flags::TREE, &flags::LIST_DIRS)));
test!(just_level_2: DirAction <- ["--level=4"]; Complain => Err(OptionsError::Useless2(&flags::LEVEL, &flags::RECURSE, &flags::TREE)));
// Overriding levels
test!(overriding_1: DirAction <- ["-RL=6", "-L=7"]; Last => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(7) })));
test!(overriding_2: DirAction <- ["-RL=6", "-L=7"]; Complain => Err(OptionsError::Duplicate(Flag::Short(b'L'), Flag::Short(b'L'))));
}
|
let source = NumberSource::Arg(&flags::LEVEL);
Err(OptionsError::FailedParse(arg_str.to_string(), source, e))
}
|
conditional_block
|
dir_action.rs
|
//! Parsing the options for `DirAction`.
use crate::options::parser::MatchedFlags;
use crate::options::{flags, OptionsError, NumberSource};
use crate::fs::dir_action::{DirAction, RecurseOptions};
impl DirAction {
|
/// Determine which action to perform when trying to list a directory.
/// There are three possible actions, and they overlap somewhat: the
/// `--tree` flag is another form of recursion, so those two are allowed
/// to both be present, but the `--list-dirs` flag is used separately.
pub fn deduce(matches: &MatchedFlags<'_>, can_tree: bool) -> Result<Self, OptionsError> {
let recurse = matches.has(&flags::RECURSE)?;
let as_file = matches.has(&flags::LIST_DIRS)?;
let tree = matches.has(&flags::TREE)?;
if matches.is_strict() {
// Early check for --level when it wouldn’t do anything
            if ! recurse && ! tree && matches.count(&flags::LEVEL) > 0 {
return Err(OptionsError::Useless2(&flags::LEVEL, &flags::RECURSE, &flags::TREE));
}
else if recurse && as_file {
return Err(OptionsError::Conflict(&flags::RECURSE, &flags::LIST_DIRS));
}
else if tree && as_file {
return Err(OptionsError::Conflict(&flags::TREE, &flags::LIST_DIRS));
}
}
if tree && can_tree {
// Tree is only appropriate in details mode, so this has to
// examine the View, which should have already been deduced by now
Ok(Self::Recurse(RecurseOptions::deduce(matches, true)?))
}
else if recurse {
Ok(Self::Recurse(RecurseOptions::deduce(matches, false)?))
}
else if as_file {
Ok(Self::AsFile)
}
else {
Ok(Self::List)
}
}
}
impl RecurseOptions {
/// Determine which files should be recursed into, based on the `--level`
/// flag’s value, and whether the `--tree` flag was passed, which was
/// determined earlier. The maximum level should be a number, and this
/// will fail with an `Err` if it isn’t.
pub fn deduce(matches: &MatchedFlags<'_>, tree: bool) -> Result<Self, OptionsError> {
if let Some(level) = matches.get(&flags::LEVEL)? {
let arg_str = level.to_string_lossy();
match arg_str.parse() {
Ok(l) => {
Ok(Self { tree, max_depth: Some(l) })
}
Err(e) => {
let source = NumberSource::Arg(&flags::LEVEL);
Err(OptionsError::FailedParse(arg_str.to_string(), source, e))
}
}
}
else {
Ok(Self { tree, max_depth: None })
}
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::options::flags;
use crate::options::parser::Flag;
macro_rules! test {
($name:ident: $type:ident <- $inputs:expr; $stricts:expr => $result:expr) => {
#[test]
fn $name() {
use crate::options::parser::Arg;
use crate::options::test::parse_for_test;
use crate::options::test::Strictnesses::*;
static TEST_ARGS: &[&Arg] = &[&flags::RECURSE, &flags::LIST_DIRS, &flags::TREE, &flags::LEVEL ];
for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf, true)) {
assert_eq!(result, $result);
}
}
};
}
// Default behaviour
test!(empty: DirAction <- []; Both => Ok(DirAction::List));
// Listing files as directories
test!(dirs_short: DirAction <- ["-d"]; Both => Ok(DirAction::AsFile));
test!(dirs_long: DirAction <- ["--list-dirs"]; Both => Ok(DirAction::AsFile));
// Recursing
use self::DirAction::Recurse;
test!(rec_short: DirAction <- ["-R"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
test!(rec_long: DirAction <- ["--recurse"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
test!(rec_lim_short: DirAction <- ["-RL4"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(4) })));
test!(rec_lim_short_2: DirAction <- ["-RL=5"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(5) })));
test!(rec_lim_long: DirAction <- ["--recurse", "--level", "666"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(666) })));
test!(rec_lim_long_2: DirAction <- ["--recurse", "--level=0118"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(118) })));
test!(tree: DirAction <- ["--tree"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
test!(rec_tree: DirAction <- ["--recurse", "--tree"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
test!(rec_short_tree: DirAction <- ["-TR"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
// Overriding --list-dirs, --recurse, and --tree
test!(dirs_recurse: DirAction <- ["--list-dirs", "--recurse"]; Last => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
test!(dirs_tree: DirAction <- ["--list-dirs", "--tree"]; Last => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
test!(just_level: DirAction <- ["--level=4"]; Last => Ok(DirAction::List));
test!(dirs_recurse_2: DirAction <- ["--list-dirs", "--recurse"]; Complain => Err(OptionsError::Conflict(&flags::RECURSE, &flags::LIST_DIRS)));
test!(dirs_tree_2: DirAction <- ["--list-dirs", "--tree"]; Complain => Err(OptionsError::Conflict(&flags::TREE, &flags::LIST_DIRS)));
test!(just_level_2: DirAction <- ["--level=4"]; Complain => Err(OptionsError::Useless2(&flags::LEVEL, &flags::RECURSE, &flags::TREE)));
// Overriding levels
test!(overriding_1: DirAction <- ["-RL=6", "-L=7"]; Last => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(7) })));
test!(overriding_2: DirAction <- ["-RL=6", "-L=7"]; Complain => Err(OptionsError::Duplicate(Flag::Short(b'L'), Flag::Short(b'L'))));
}
|
random_line_split
|
|
dir_action.rs
|
//! Parsing the options for `DirAction`.
use crate::options::parser::MatchedFlags;
use crate::options::{flags, OptionsError, NumberSource};
use crate::fs::dir_action::{DirAction, RecurseOptions};
impl DirAction {
/// Determine which action to perform when trying to list a directory.
/// There are three possible actions, and they overlap somewhat: the
/// `--tree` flag is another form of recursion, so those two are allowed
/// to both be present, but the `--list-dirs` flag is used separately.
pub fn deduce(matches: &MatchedFlags<'_>, can_tree: bool) -> Result<Self, OptionsError>
|
// examine the View, which should have already been deduced by now
Ok(Self::Recurse(RecurseOptions::deduce(matches, true)?))
}
else if recurse {
Ok(Self::Recurse(RecurseOptions::deduce(matches, false)?))
}
else if as_file {
Ok(Self::AsFile)
}
else {
Ok(Self::List)
}
}
}
impl RecurseOptions {
/// Determine which files should be recursed into, based on the `--level`
/// flag’s value, and whether the `--tree` flag was passed, which was
/// determined earlier. The maximum level should be a number, and this
/// will fail with an `Err` if it isn’t.
pub fn deduce(matches: &MatchedFlags<'_>, tree: bool) -> Result<Self, OptionsError> {
if let Some(level) = matches.get(&flags::LEVEL)? {
let arg_str = level.to_string_lossy();
match arg_str.parse() {
Ok(l) => {
Ok(Self { tree, max_depth: Some(l) })
}
Err(e) => {
let source = NumberSource::Arg(&flags::LEVEL);
Err(OptionsError::FailedParse(arg_str.to_string(), source, e))
}
}
}
else {
Ok(Self { tree, max_depth: None })
}
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::options::flags;
use crate::options::parser::Flag;
macro_rules! test {
($name:ident: $type:ident <- $inputs:expr; $stricts:expr => $result:expr) => {
#[test]
fn $name() {
use crate::options::parser::Arg;
use crate::options::test::parse_for_test;
use crate::options::test::Strictnesses::*;
static TEST_ARGS: &[&Arg] = &[&flags::RECURSE, &flags::LIST_DIRS, &flags::TREE, &flags::LEVEL ];
for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf, true)) {
assert_eq!(result, $result);
}
}
};
}
// Default behaviour
test!(empty: DirAction <- []; Both => Ok(DirAction::List));
// Listing files as directories
test!(dirs_short: DirAction <- ["-d"]; Both => Ok(DirAction::AsFile));
test!(dirs_long: DirAction <- ["--list-dirs"]; Both => Ok(DirAction::AsFile));
// Recursing
use self::DirAction::Recurse;
test!(rec_short: DirAction <- ["-R"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
test!(rec_long: DirAction <- ["--recurse"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
test!(rec_lim_short: DirAction <- ["-RL4"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(4) })));
test!(rec_lim_short_2: DirAction <- ["-RL=5"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(5) })));
test!(rec_lim_long: DirAction <- ["--recurse", "--level", "666"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(666) })));
test!(rec_lim_long_2: DirAction <- ["--recurse", "--level=0118"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(118) })));
test!(tree: DirAction <- ["--tree"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
test!(rec_tree: DirAction <- ["--recurse", "--tree"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
test!(rec_short_tree: DirAction <- ["-TR"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
// Overriding --list-dirs, --recurse, and --tree
test!(dirs_recurse: DirAction <- ["--list-dirs", "--recurse"]; Last => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
test!(dirs_tree: DirAction <- ["--list-dirs", "--tree"]; Last => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
test!(just_level: DirAction <- ["--level=4"]; Last => Ok(DirAction::List));
test!(dirs_recurse_2: DirAction <- ["--list-dirs", "--recurse"]; Complain => Err(OptionsError::Conflict(&flags::RECURSE, &flags::LIST_DIRS)));
test!(dirs_tree_2: DirAction <- ["--list-dirs", "--tree"]; Complain => Err(OptionsError::Conflict(&flags::TREE, &flags::LIST_DIRS)));
test!(just_level_2: DirAction <- ["--level=4"]; Complain => Err(OptionsError::Useless2(&flags::LEVEL, &flags::RECURSE, &flags::TREE)));
// Overriding levels
test!(overriding_1: DirAction <- ["-RL=6", "-L=7"]; Last => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(7) })));
test!(overriding_2: DirAction <- ["-RL=6", "-L=7"]; Complain => Err(OptionsError::Duplicate(Flag::Short(b'L'), Flag::Short(b'L'))));
}
|
{
let recurse = matches.has(&flags::RECURSE)?;
let as_file = matches.has(&flags::LIST_DIRS)?;
let tree = matches.has(&flags::TREE)?;
if matches.is_strict() {
// Early check for --level when it wouldn’t do anything
if ! recurse && ! tree && matches.count(&flags::LEVEL) > 0 {
return Err(OptionsError::Useless2(&flags::LEVEL, &flags::RECURSE, &flags::TREE));
}
else if recurse && as_file {
return Err(OptionsError::Conflict(&flags::RECURSE, &flags::LIST_DIRS));
}
else if tree && as_file {
return Err(OptionsError::Conflict(&flags::TREE, &flags::LIST_DIRS));
}
}
if tree && can_tree {
// Tree is only appropriate in details mode, so this has to
|
identifier_body
|
dir_action.rs
|
//! Parsing the options for `DirAction`.
use crate::options::parser::MatchedFlags;
use crate::options::{flags, OptionsError, NumberSource};
use crate::fs::dir_action::{DirAction, RecurseOptions};
impl DirAction {
/// Determine which action to perform when trying to list a directory.
/// There are three possible actions, and they overlap somewhat: the
/// `--tree` flag is another form of recursion, so those two are allowed
/// to both be present, but the `--list-dirs` flag is used separately.
pub fn
|
(matches: &MatchedFlags<'_>, can_tree: bool) -> Result<Self, OptionsError> {
let recurse = matches.has(&flags::RECURSE)?;
let as_file = matches.has(&flags::LIST_DIRS)?;
let tree = matches.has(&flags::TREE)?;
if matches.is_strict() {
// Early check for --level when it wouldn’t do anything
            if ! recurse && ! tree && matches.count(&flags::LEVEL) > 0 {
return Err(OptionsError::Useless2(&flags::LEVEL, &flags::RECURSE, &flags::TREE));
}
else if recurse && as_file {
return Err(OptionsError::Conflict(&flags::RECURSE, &flags::LIST_DIRS));
}
else if tree && as_file {
return Err(OptionsError::Conflict(&flags::TREE, &flags::LIST_DIRS));
}
}
if tree && can_tree {
// Tree is only appropriate in details mode, so this has to
// examine the View, which should have already been deduced by now
Ok(Self::Recurse(RecurseOptions::deduce(matches, true)?))
}
else if recurse {
Ok(Self::Recurse(RecurseOptions::deduce(matches, false)?))
}
else if as_file {
Ok(Self::AsFile)
}
else {
Ok(Self::List)
}
}
}
impl RecurseOptions {
/// Determine which files should be recursed into, based on the `--level`
/// flag’s value, and whether the `--tree` flag was passed, which was
/// determined earlier. The maximum level should be a number, and this
/// will fail with an `Err` if it isn’t.
pub fn deduce(matches: &MatchedFlags<'_>, tree: bool) -> Result<Self, OptionsError> {
if let Some(level) = matches.get(&flags::LEVEL)? {
let arg_str = level.to_string_lossy();
match arg_str.parse() {
Ok(l) => {
Ok(Self { tree, max_depth: Some(l) })
}
Err(e) => {
let source = NumberSource::Arg(&flags::LEVEL);
Err(OptionsError::FailedParse(arg_str.to_string(), source, e))
}
}
}
else {
Ok(Self { tree, max_depth: None })
}
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::options::flags;
use crate::options::parser::Flag;
macro_rules! test {
($name:ident: $type:ident <- $inputs:expr; $stricts:expr => $result:expr) => {
#[test]
fn $name() {
use crate::options::parser::Arg;
use crate::options::test::parse_for_test;
use crate::options::test::Strictnesses::*;
static TEST_ARGS: &[&Arg] = &[&flags::RECURSE, &flags::LIST_DIRS, &flags::TREE, &flags::LEVEL ];
for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf, true)) {
assert_eq!(result, $result);
}
}
};
}
// Default behaviour
test!(empty: DirAction <- []; Both => Ok(DirAction::List));
// Listing files as directories
test!(dirs_short: DirAction <- ["-d"]; Both => Ok(DirAction::AsFile));
test!(dirs_long: DirAction <- ["--list-dirs"]; Both => Ok(DirAction::AsFile));
// Recursing
use self::DirAction::Recurse;
test!(rec_short: DirAction <- ["-R"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
test!(rec_long: DirAction <- ["--recurse"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
test!(rec_lim_short: DirAction <- ["-RL4"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(4) })));
test!(rec_lim_short_2: DirAction <- ["-RL=5"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(5) })));
test!(rec_lim_long: DirAction <- ["--recurse", "--level", "666"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(666) })));
test!(rec_lim_long_2: DirAction <- ["--recurse", "--level=0118"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(118) })));
test!(tree: DirAction <- ["--tree"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
test!(rec_tree: DirAction <- ["--recurse", "--tree"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
test!(rec_short_tree: DirAction <- ["-TR"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
// Overriding --list-dirs, --recurse, and --tree
test!(dirs_recurse: DirAction <- ["--list-dirs", "--recurse"]; Last => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
test!(dirs_tree: DirAction <- ["--list-dirs", "--tree"]; Last => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
test!(just_level: DirAction <- ["--level=4"]; Last => Ok(DirAction::List));
test!(dirs_recurse_2: DirAction <- ["--list-dirs", "--recurse"]; Complain => Err(OptionsError::Conflict(&flags::RECURSE, &flags::LIST_DIRS)));
test!(dirs_tree_2: DirAction <- ["--list-dirs", "--tree"]; Complain => Err(OptionsError::Conflict(&flags::TREE, &flags::LIST_DIRS)));
test!(just_level_2: DirAction <- ["--level=4"]; Complain => Err(OptionsError::Useless2(&flags::LEVEL, &flags::RECURSE, &flags::TREE)));
// Overriding levels
test!(overriding_1: DirAction <- ["-RL=6", "-L=7"]; Last => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(7) })));
test!(overriding_2: DirAction <- ["-RL=6", "-L=7"]; Complain => Err(OptionsError::Duplicate(Flag::Short(b'L'), Flag::Short(b'L'))));
}
|
deduce
|
identifier_name
|
constellation_msg.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The high-level interface from script to constellation. Using this abstract interface helps
//! reduce coupling between these two components.
use canvas_traits::CanvasMsg;
use compositor_msg::Epoch;
use euclid::scale_factor::ScaleFactor;
use euclid::size::{Size2D, TypedSize2D};
use hyper::header::Headers;
use hyper::method::Method;
use ipc_channel::ipc::IpcSender;
use layers::geometry::DevicePixel;
use offscreen_gl_context::GLContextAttributes;
use png::Image;
use std::cell::Cell;
use std::collections::HashMap;
use std::fmt;
use std::sync::mpsc::{Receiver, Sender, channel};
use style_traits::viewport::ViewportConstraints;
use url::Url;
use util::cursor::Cursor;
use util::geometry::{PagePx, ViewportPx};
use util::mem::HeapSizeOf;
use webdriver_msg::{LoadStatus, WebDriverScriptCommand};
#[derive(Clone)]
pub struct ConstellationChan(pub Sender<Msg>);
impl ConstellationChan {
pub fn new() -> (Receiver<Msg>, ConstellationChan) {
let (chan, port) = channel();
(port, ConstellationChan(chan))
|
}
}
#[derive(PartialEq, Eq, Copy, Clone, Debug, Deserialize, Serialize)]
pub enum IFrameSandboxState {
IFrameSandboxed,
IFrameUnsandboxed
}
// We pass this info to various tasks, so it lives in a separate, cloneable struct.
#[derive(Clone, Copy, Deserialize, Serialize)]
pub struct Failure {
pub pipeline_id: PipelineId,
pub parent_info: Option<(PipelineId, SubpageId)>,
}
#[derive(Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub struct WindowSizeData {
/// The size of the initial layout viewport, before parsing an
/// http://www.w3.org/TR/css-device-adapt/#initial-viewport
pub initial_viewport: TypedSize2D<ViewportPx, f32>,
/// The "viewing area" in page px. See `PagePx` documentation for details.
pub visible_viewport: TypedSize2D<PagePx, f32>,
/// The resolution of the window in dppx, not including any "pinch zoom" factor.
pub device_pixel_ratio: ScaleFactor<ViewportPx, DevicePixel, f32>,
}
#[derive(PartialEq, Eq, Copy, Clone, Deserialize, Serialize)]
pub enum KeyState {
Pressed,
Released,
Repeated,
}
//N.B. Based on the glutin key enum
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub enum Key {
Space,
Apostrophe,
Comma,
Minus,
Period,
Slash,
Num0,
Num1,
Num2,
Num3,
Num4,
Num5,
Num6,
Num7,
Num8,
Num9,
Semicolon,
Equal,
A,
B,
C,
D,
E,
F,
G,
H,
I,
J,
K,
L,
M,
N,
O,
P,
Q,
R,
S,
T,
U,
V,
W,
X,
Y,
Z,
LeftBracket,
Backslash,
RightBracket,
GraveAccent,
World1,
World2,
Escape,
Enter,
Tab,
Backspace,
Insert,
Delete,
Right,
Left,
Down,
Up,
PageUp,
PageDown,
Home,
End,
CapsLock,
ScrollLock,
NumLock,
PrintScreen,
Pause,
F1,
F2,
F3,
F4,
F5,
F6,
F7,
F8,
F9,
F10,
F11,
F12,
F13,
F14,
F15,
F16,
F17,
F18,
F19,
F20,
F21,
F22,
F23,
F24,
F25,
Kp0,
Kp1,
Kp2,
Kp3,
Kp4,
Kp5,
Kp6,
Kp7,
Kp8,
Kp9,
KpDecimal,
KpDivide,
KpMultiply,
KpSubtract,
KpAdd,
KpEnter,
KpEqual,
LeftShift,
LeftControl,
LeftAlt,
LeftSuper,
RightShift,
RightControl,
RightAlt,
RightSuper,
Menu,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags KeyModifiers: u8 {
const NONE = 0x00,
const SHIFT = 0x01,
const CONTROL = 0x02,
const ALT = 0x04,
const SUPER = 0x08,
}
}
/// Specifies the type of focus event that is sent to a pipeline
#[derive(Copy, Clone, PartialEq)]
pub enum FocusType {
Element, // The first focus message - focus the element itself
Parent, // Focusing a parent element (an iframe)
}
/// Specifies the information required to load a URL in an iframe.
#[derive(Deserialize, Serialize)]
pub struct IframeLoadInfo {
/// Url to load
pub url: Url,
/// Pipeline ID of the parent of this iframe
pub containing_pipeline_id: PipelineId,
/// The new subpage ID for this load
pub new_subpage_id: SubpageId,
/// The old subpage ID for this iframe, if a page was previously loaded.
pub old_subpage_id: Option<SubpageId>,
/// The new pipeline ID that the iframe has generated.
pub new_pipeline_id: PipelineId,
/// Sandbox type of this iframe
pub sandbox: IFrameSandboxState,
}
/// Messages from the compositor and script to the constellation.
#[derive(Deserialize, Serialize)]
pub enum Msg {
Exit,
Failure(Failure),
InitLoadUrl(Url),
LoadComplete(PipelineId),
/// Dispatched after the DOM load event has fired on a document
DOMLoad(PipelineId),
FrameSize(PipelineId, SubpageId, Size2D<f32>),
LoadUrl(PipelineId, LoadData),
ScriptLoadedURLInIFrame(IframeLoadInfo),
Navigate(Option<(PipelineId, SubpageId)>, NavigationDirection),
PainterReady(PipelineId),
ResizedWindow(WindowSizeData),
KeyEvent(Key, KeyState, KeyModifiers),
/// Requests that the constellation inform the compositor of the title of the pipeline
/// immediately.
GetPipelineTitle(PipelineId),
    /// Requests that the constellation inform the compositor of a cursor change.
SetCursor(Cursor),
/// Dispatch a mozbrowser event to a given iframe. Only available in experimental mode.
MozBrowserEvent(PipelineId, SubpageId, MozBrowserEvent),
/// Indicates whether this pipeline is currently running animations.
ChangeRunningAnimationsState(PipelineId, AnimationState),
/// Requests that the constellation instruct layout to begin a new tick of the animation.
TickAnimation(PipelineId),
/// Request that the constellation send the current pipeline id for the provided frame
/// id, or for the root frame if this is None, over a provided channel
GetPipeline(Option<FrameId>, IpcSender<Option<PipelineId>>),
/// Request that the constellation send the FrameId corresponding to the document
/// with the provided parent pipeline id and subpage id
GetFrame(PipelineId, SubpageId, IpcSender<Option<FrameId>>),
/// Notifies the constellation that this frame has received focus.
Focus(PipelineId),
/// Requests that the constellation retrieve the current contents of the clipboard
GetClipboardContents(IpcSender<String>),
/// Requests that the constellation set the contents of the clipboard
SetClipboardContents(String),
/// Dispatch a webdriver command
WebDriverCommand(WebDriverCommandMsg),
/// Notifies the constellation that the viewport has been constrained in some manner
ViewportConstrained(PipelineId, ViewportConstraints),
/// Query the constellation to see if the current compositor output is stable
IsReadyToSaveImage(HashMap<PipelineId, Epoch>),
/// Notification that this iframe should be removed.
RemoveIFrame(PipelineId, SubpageId),
/// Favicon detected
NewFavicon(Url),
/// <head> tag finished parsing
HeadParsed,
/// Requests that a new 2D canvas thread be created. (This is done in the constellation because
/// 2D canvases may use the GPU and we don't want to give untrusted content access to the GPU.)
CreateCanvasPaintTask(Size2D<i32>, IpcSender<(IpcSender<CanvasMsg>, usize)>),
/// Requests that a new WebGL thread be created. (This is done in the constellation because
/// WebGL uses the GPU and we don't want to give untrusted content access to the GPU.)
CreateWebGLPaintTask(Size2D<i32>,
GLContextAttributes,
IpcSender<Result<(IpcSender<CanvasMsg>, usize), String>>),
    /// Status message to be displayed in the chrome, e.g. a link URL on mouseover.
NodeStatus(Option<String>),
/// Requests that the pipeline ID of the subpage identified by a (pipeline ID, subpage ID)
/// pair be sent to the compositor via a `CreateLayerForSubpage` message.
PrepareForSubpageLayerCreation(PipelineId, SubpageId),
}
#[derive(Clone, Eq, PartialEq, Deserialize, Serialize, Debug)]
pub enum AnimationState {
AnimationsPresent,
AnimationCallbacksPresent,
NoAnimationsPresent,
NoAnimationCallbacksPresent,
}
// https://developer.mozilla.org/en-US/docs/Web/API/Using_the_Browser_API#Events
#[derive(Deserialize, Serialize)]
pub enum MozBrowserEvent {
/// Sent when the scroll position within a browser <iframe> changes.
AsyncScroll,
/// Sent when window.close() is called within a browser <iframe>.
Close,
/// Sent when a browser <iframe> tries to open a context menu. This allows
/// handling <menuitem> element available within the browser <iframe>'s content.
ContextMenu,
/// Sent when an error occurred while trying to load content within a browser <iframe>.
Error,
/// Sent when the favicon of a browser <iframe> changes.
IconChange,
/// Sent when the browser <iframe> has finished loading all its assets.
LoadEnd,
/// Sent when the browser <iframe> starts to load a new page.
LoadStart,
/// Sent when a browser <iframe>'s location changes.
LocationChange(String),
/// Sent when window.open() is called within a browser <iframe>.
OpenWindow,
/// Sent when the SSL state changes within a browser <iframe>.
SecurityChange,
/// Sent when alert(), confirm(), or prompt() is called within a browser <iframe>.
ShowModalPrompt,
/// Sent when the document.title changes within a browser <iframe>.
TitleChange(String),
    /// Sent when an HTTP authentication is requested.
UsernameAndPasswordRequired,
/// Sent when a link to a search engine is found.
OpenSearch,
}
impl MozBrowserEvent {
pub fn name(&self) -> &'static str {
match *self {
MozBrowserEvent::AsyncScroll => "mozbrowserasyncscroll",
MozBrowserEvent::Close => "mozbrowserclose",
MozBrowserEvent::ContextMenu => "mozbrowsercontextmenu",
MozBrowserEvent::Error => "mozbrowsererror",
MozBrowserEvent::IconChange => "mozbrowsericonchange",
MozBrowserEvent::LoadEnd => "mozbrowserloadend",
MozBrowserEvent::LoadStart => "mozbrowserloadstart",
MozBrowserEvent::LocationChange(_) => "mozbrowserlocationchange",
MozBrowserEvent::OpenWindow => "mozbrowseropenwindow",
MozBrowserEvent::SecurityChange => "mozbrowsersecuritychange",
MozBrowserEvent::ShowModalPrompt => "mozbrowsershowmodalprompt",
MozBrowserEvent::TitleChange(_) => "mozbrowsertitlechange",
MozBrowserEvent::UsernameAndPasswordRequired => "mozbrowserusernameandpasswordrequired",
MozBrowserEvent::OpenSearch => "mozbrowseropensearch"
}
}
pub fn detail(&self) -> Option<String> {
match *self {
MozBrowserEvent::AsyncScroll | MozBrowserEvent::Close | MozBrowserEvent::ContextMenu |
MozBrowserEvent::Error | MozBrowserEvent::IconChange | MozBrowserEvent::LoadEnd |
MozBrowserEvent::LoadStart | MozBrowserEvent::OpenWindow | MozBrowserEvent::SecurityChange |
MozBrowserEvent::ShowModalPrompt | MozBrowserEvent::UsernameAndPasswordRequired |
MozBrowserEvent::OpenSearch => None,
MozBrowserEvent::LocationChange(ref new_location) => Some(new_location.clone()),
MozBrowserEvent::TitleChange(ref new_title) => Some(new_title.clone()),
}
}
}
#[derive(Deserialize, Serialize)]
pub enum WebDriverCommandMsg {
LoadUrl(PipelineId, LoadData, IpcSender<LoadStatus>),
Refresh(PipelineId, IpcSender<LoadStatus>),
ScriptCommand(PipelineId, WebDriverScriptCommand),
TakeScreenshot(PipelineId, IpcSender<Option<Image>>)
}
/// Similar to net::resource_task::LoadData
/// can be passed to LoadUrl to load a page with GET/POST
/// parameters or headers
#[derive(Clone, Deserialize, Serialize)]
pub struct LoadData {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub data: Option<Vec<u8>>,
}
impl LoadData {
pub fn new(url: Url) -> LoadData {
LoadData {
url: url,
method: Method::Get,
headers: Headers::new(),
data: None,
}
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub enum NavigationDirection {
Forward,
Back,
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct FrameId(pub u32);
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct WorkerId(pub u32);
/// Each pipeline ID needs to be unique. However, it also needs to be possible to
/// generate the pipeline ID from an iframe element (this simplifies a lot of other
/// code that makes use of pipeline IDs).
///
/// To achieve this, each pipeline index belongs to a particular namespace. There is
/// a namespace for the constellation thread, and also one for every script thread.
/// This allows pipeline IDs to be generated by any of those threads without conflicting
/// with pipeline IDs created by other script threads or the constellation. The
/// constellation is the only code that is responsible for creating new *namespaces*.
/// This ensures that namespaces are always unique, even when using multi-process mode.
///
/// It may help conceptually to think of the namespace ID as an identifier for the
/// thread that created this pipeline ID - however this is really an implementation
/// detail so shouldn't be relied upon in code logic. It's best to think of the
/// pipeline ID as a simple unique identifier that doesn't convey any more information.
#[derive(Clone, Copy)]
pub struct PipelineNamespace {
id: PipelineNamespaceId,
next_index: PipelineIndex,
}
impl PipelineNamespace {
pub fn install(namespace_id: PipelineNamespaceId) {
PIPELINE_NAMESPACE.with(|tls| {
assert!(tls.get().is_none());
tls.set(Some(PipelineNamespace {
id: namespace_id,
next_index: PipelineIndex(0),
}));
});
}
fn next(&mut self) -> PipelineId {
let pipeline_id = PipelineId {
namespace_id: self.id,
index: self.next_index,
};
let PipelineIndex(current_index) = self.next_index;
self.next_index = PipelineIndex(current_index + 1);
pipeline_id
}
}
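// Illustrative sketch (editor's addition, not part of the original source):
// once a namespace has been installed on the current thread, pipeline IDs
// are minted as (namespace, index) pairs with a monotonically increasing
// index. The namespace value used below is arbitrary.
#[cfg(test)]
mod pipeline_namespace_sketch {
    use super::{PipelineId, PipelineNamespace, PipelineNamespaceId};

    #[test]
    fn ids_are_unique_within_a_namespace() {
        PipelineNamespace::install(PipelineNamespaceId(7));
        let first = PipelineId::new();   // displays as "(7,0)"
        let second = PipelineId::new();  // displays as "(7,1)"
        assert!(first != second);
    }
}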
thread_local!(pub static PIPELINE_NAMESPACE: Cell<Option<PipelineNamespace>> = Cell::new(None));
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineNamespaceId(pub u32);
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineIndex(u32);
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineId {
namespace_id: PipelineNamespaceId,
index: PipelineIndex
}
impl PipelineId {
pub fn new() -> PipelineId {
PIPELINE_NAMESPACE.with(|tls| {
let mut namespace = tls.get().expect("No namespace set for this thread!");
let new_pipeline_id = namespace.next();
tls.set(Some(namespace));
new_pipeline_id
})
}
// TODO(gw): This should be removed. It's only required because of the code
// that uses it in the devtools lib.rs file (which itself is a TODO). Once
// that is fixed, this should be removed. It also relies on the first
// call to PipelineId::new() returning (0,0), which is checked with an
// assert in handle_init_load().
pub fn fake_root_pipeline_id() -> PipelineId {
PipelineId {
namespace_id: PipelineNamespaceId(0),
index: PipelineIndex(0),
}
}
}
impl fmt::Display for PipelineId {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let PipelineIndex(index) = self.index;
write!(fmt, "({},{})", namespace_id, index)
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct SubpageId(pub u32);
// The type of pipeline exit. During complete shutdowns, pipelines do not have to
// release resources automatically released on process termination.
#[derive(Copy, Clone, Debug, Deserialize, Serialize)]
pub enum PipelineExitType {
PipelineOnly,
Complete,
}
|
random_line_split
|
|
constellation_msg.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The high-level interface from script to constellation. Using this abstract interface helps
//! reduce coupling between these two components.
use canvas_traits::CanvasMsg;
use compositor_msg::Epoch;
use euclid::scale_factor::ScaleFactor;
use euclid::size::{Size2D, TypedSize2D};
use hyper::header::Headers;
use hyper::method::Method;
use ipc_channel::ipc::IpcSender;
use layers::geometry::DevicePixel;
use offscreen_gl_context::GLContextAttributes;
use png::Image;
use std::cell::Cell;
use std::collections::HashMap;
use std::fmt;
use std::sync::mpsc::{Receiver, Sender, channel};
use style_traits::viewport::ViewportConstraints;
use url::Url;
use util::cursor::Cursor;
use util::geometry::{PagePx, ViewportPx};
use util::mem::HeapSizeOf;
use webdriver_msg::{LoadStatus, WebDriverScriptCommand};
#[derive(Clone)]
pub struct ConstellationChan(pub Sender<Msg>);
impl ConstellationChan {
pub fn new() -> (Receiver<Msg>, ConstellationChan) {
let (chan, port) = channel();
(port, ConstellationChan(chan))
}
}
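// Illustrative sketch (editor's addition, not part of the original source):
// the constellation keeps the Receiver, while clones of ConstellationChan are
// handed to other components, which send `Msg` values over it.
#[cfg(test)]
mod constellation_chan_sketch {
    use super::{ConstellationChan, Msg};

    #[test]
    fn send_and_receive_a_message() {
        let (port, chan) = ConstellationChan::new();
        let ConstellationChan(sender) = chan;
        sender.send(Msg::HeadParsed).unwrap();
        match port.recv().unwrap() {
            Msg::HeadParsed => {}
            _ => panic!("unexpected message"),
        }
    }
}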
#[derive(PartialEq, Eq, Copy, Clone, Debug, Deserialize, Serialize)]
pub enum IFrameSandboxState {
IFrameSandboxed,
IFrameUnsandboxed
}
// We pass this info to various tasks, so it lives in a separate, cloneable struct.
#[derive(Clone, Copy, Deserialize, Serialize)]
pub struct Failure {
pub pipeline_id: PipelineId,
pub parent_info: Option<(PipelineId, SubpageId)>,
}
#[derive(Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub struct WindowSizeData {
/// The size of the initial layout viewport, before parsing an
/// http://www.w3.org/TR/css-device-adapt/#initial-viewport
pub initial_viewport: TypedSize2D<ViewportPx, f32>,
/// The "viewing area" in page px. See `PagePx` documentation for details.
pub visible_viewport: TypedSize2D<PagePx, f32>,
/// The resolution of the window in dppx, not including any "pinch zoom" factor.
pub device_pixel_ratio: ScaleFactor<ViewportPx, DevicePixel, f32>,
}
#[derive(PartialEq, Eq, Copy, Clone, Deserialize, Serialize)]
pub enum KeyState {
Pressed,
Released,
Repeated,
}
//N.B. Based on the glutin key enum
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub enum Key {
Space,
Apostrophe,
Comma,
Minus,
Period,
Slash,
Num0,
Num1,
Num2,
Num3,
Num4,
Num5,
Num6,
Num7,
Num8,
Num9,
Semicolon,
Equal,
A,
B,
C,
D,
E,
F,
G,
H,
I,
J,
K,
L,
M,
N,
O,
P,
Q,
R,
S,
T,
U,
V,
W,
X,
Y,
Z,
LeftBracket,
Backslash,
RightBracket,
GraveAccent,
World1,
World2,
Escape,
Enter,
Tab,
Backspace,
Insert,
Delete,
Right,
Left,
Down,
Up,
PageUp,
PageDown,
Home,
End,
CapsLock,
ScrollLock,
NumLock,
PrintScreen,
Pause,
F1,
F2,
F3,
F4,
F5,
F6,
F7,
F8,
F9,
F10,
F11,
F12,
F13,
F14,
F15,
F16,
F17,
F18,
F19,
F20,
F21,
F22,
F23,
F24,
F25,
Kp0,
Kp1,
Kp2,
Kp3,
Kp4,
Kp5,
Kp6,
Kp7,
Kp8,
Kp9,
KpDecimal,
KpDivide,
KpMultiply,
KpSubtract,
KpAdd,
KpEnter,
KpEqual,
LeftShift,
LeftControl,
LeftAlt,
LeftSuper,
RightShift,
RightControl,
RightAlt,
RightSuper,
Menu,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags KeyModifiers: u8 {
const NONE = 0x00,
const SHIFT = 0x01,
const CONTROL = 0x02,
const ALT = 0x04,
const SUPER = 0x08,
}
}
/// Specifies the type of focus event that is sent to a pipeline
#[derive(Copy, Clone, PartialEq)]
pub enum FocusType {
Element, // The first focus message - focus the element itself
Parent, // Focusing a parent element (an iframe)
}
/// Specifies the information required to load a URL in an iframe.
#[derive(Deserialize, Serialize)]
pub struct IframeLoadInfo {
/// Url to load
pub url: Url,
/// Pipeline ID of the parent of this iframe
pub containing_pipeline_id: PipelineId,
/// The new subpage ID for this load
pub new_subpage_id: SubpageId,
/// The old subpage ID for this iframe, if a page was previously loaded.
pub old_subpage_id: Option<SubpageId>,
/// The new pipeline ID that the iframe has generated.
pub new_pipeline_id: PipelineId,
/// Sandbox type of this iframe
pub sandbox: IFrameSandboxState,
}
/// Messages from the compositor and script to the constellation.
#[derive(Deserialize, Serialize)]
pub enum Msg {
Exit,
Failure(Failure),
InitLoadUrl(Url),
LoadComplete(PipelineId),
/// Dispatched after the DOM load event has fired on a document
DOMLoad(PipelineId),
FrameSize(PipelineId, SubpageId, Size2D<f32>),
LoadUrl(PipelineId, LoadData),
ScriptLoadedURLInIFrame(IframeLoadInfo),
Navigate(Option<(PipelineId, SubpageId)>, NavigationDirection),
PainterReady(PipelineId),
ResizedWindow(WindowSizeData),
KeyEvent(Key, KeyState, KeyModifiers),
/// Requests that the constellation inform the compositor of the title of the pipeline
/// immediately.
GetPipelineTitle(PipelineId),
    /// Requests that the constellation inform the compositor of a cursor change.
SetCursor(Cursor),
/// Dispatch a mozbrowser event to a given iframe. Only available in experimental mode.
MozBrowserEvent(PipelineId, SubpageId, MozBrowserEvent),
/// Indicates whether this pipeline is currently running animations.
ChangeRunningAnimationsState(PipelineId, AnimationState),
/// Requests that the constellation instruct layout to begin a new tick of the animation.
TickAnimation(PipelineId),
/// Request that the constellation send the current pipeline id for the provided frame
/// id, or for the root frame if this is None, over a provided channel
GetPipeline(Option<FrameId>, IpcSender<Option<PipelineId>>),
/// Request that the constellation send the FrameId corresponding to the document
/// with the provided parent pipeline id and subpage id
GetFrame(PipelineId, SubpageId, IpcSender<Option<FrameId>>),
/// Notifies the constellation that this frame has received focus.
Focus(PipelineId),
/// Requests that the constellation retrieve the current contents of the clipboard
GetClipboardContents(IpcSender<String>),
/// Requests that the constellation set the contents of the clipboard
SetClipboardContents(String),
/// Dispatch a webdriver command
WebDriverCommand(WebDriverCommandMsg),
/// Notifies the constellation that the viewport has been constrained in some manner
ViewportConstrained(PipelineId, ViewportConstraints),
/// Query the constellation to see if the current compositor output is stable
IsReadyToSaveImage(HashMap<PipelineId, Epoch>),
/// Notification that this iframe should be removed.
RemoveIFrame(PipelineId, SubpageId),
/// Favicon detected
NewFavicon(Url),
/// <head> tag finished parsing
HeadParsed,
/// Requests that a new 2D canvas thread be created. (This is done in the constellation because
/// 2D canvases may use the GPU and we don't want to give untrusted content access to the GPU.)
CreateCanvasPaintTask(Size2D<i32>, IpcSender<(IpcSender<CanvasMsg>, usize)>),
/// Requests that a new WebGL thread be created. (This is done in the constellation because
/// WebGL uses the GPU and we don't want to give untrusted content access to the GPU.)
CreateWebGLPaintTask(Size2D<i32>,
GLContextAttributes,
IpcSender<Result<(IpcSender<CanvasMsg>, usize), String>>),
    /// Status message to be displayed in the chrome, e.g. a link URL on mouseover.
NodeStatus(Option<String>),
/// Requests that the pipeline ID of the subpage identified by a (pipeline ID, subpage ID)
/// pair be sent to the compositor via a `CreateLayerForSubpage` message.
PrepareForSubpageLayerCreation(PipelineId, SubpageId),
}
#[derive(Clone, Eq, PartialEq, Deserialize, Serialize, Debug)]
pub enum AnimationState {
AnimationsPresent,
AnimationCallbacksPresent,
NoAnimationsPresent,
NoAnimationCallbacksPresent,
}
// https://developer.mozilla.org/en-US/docs/Web/API/Using_the_Browser_API#Events
#[derive(Deserialize, Serialize)]
pub enum MozBrowserEvent {
/// Sent when the scroll position within a browser <iframe> changes.
AsyncScroll,
/// Sent when window.close() is called within a browser <iframe>.
Close,
/// Sent when a browser <iframe> tries to open a context menu. This allows
/// handling <menuitem> element available within the browser <iframe>'s content.
ContextMenu,
/// Sent when an error occurred while trying to load content within a browser <iframe>.
Error,
/// Sent when the favicon of a browser <iframe> changes.
IconChange,
/// Sent when the browser <iframe> has finished loading all its assets.
LoadEnd,
/// Sent when the browser <iframe> starts to load a new page.
LoadStart,
/// Sent when a browser <iframe>'s location changes.
LocationChange(String),
/// Sent when window.open() is called within a browser <iframe>.
OpenWindow,
/// Sent when the SSL state changes within a browser <iframe>.
SecurityChange,
/// Sent when alert(), confirm(), or prompt() is called within a browser <iframe>.
ShowModalPrompt,
/// Sent when the document.title changes within a browser <iframe>.
TitleChange(String),
    /// Sent when an HTTP authentication is requested.
UsernameAndPasswordRequired,
/// Sent when a link to a search engine is found.
OpenSearch,
}
impl MozBrowserEvent {
pub fn name(&self) -> &'static str {
match *self {
MozBrowserEvent::AsyncScroll => "mozbrowserasyncscroll",
MozBrowserEvent::Close => "mozbrowserclose",
MozBrowserEvent::ContextMenu => "mozbrowsercontextmenu",
MozBrowserEvent::Error => "mozbrowsererror",
MozBrowserEvent::IconChange => "mozbrowsericonchange",
MozBrowserEvent::LoadEnd => "mozbrowserloadend",
MozBrowserEvent::LoadStart => "mozbrowserloadstart",
MozBrowserEvent::LocationChange(_) => "mozbrowserlocationchange",
MozBrowserEvent::OpenWindow => "mozbrowseropenwindow",
MozBrowserEvent::SecurityChange => "mozbrowsersecuritychange",
MozBrowserEvent::ShowModalPrompt => "mozbrowsershowmodalprompt",
MozBrowserEvent::TitleChange(_) => "mozbrowsertitlechange",
MozBrowserEvent::UsernameAndPasswordRequired => "mozbrowserusernameandpasswordrequired",
MozBrowserEvent::OpenSearch => "mozbrowseropensearch"
}
}
pub fn detail(&self) -> Option<String> {
match *self {
MozBrowserEvent::AsyncScroll | MozBrowserEvent::Close | MozBrowserEvent::ContextMenu |
MozBrowserEvent::Error | MozBrowserEvent::IconChange | MozBrowserEvent::LoadEnd |
MozBrowserEvent::LoadStart | MozBrowserEvent::OpenWindow | MozBrowserEvent::SecurityChange |
MozBrowserEvent::ShowModalPrompt | MozBrowserEvent::UsernameAndPasswordRequired |
MozBrowserEvent::OpenSearch => None,
MozBrowserEvent::LocationChange(ref new_location) => Some(new_location.clone()),
MozBrowserEvent::TitleChange(ref new_title) => Some(new_title.clone()),
}
}
}
#[derive(Deserialize, Serialize)]
pub enum WebDriverCommandMsg {
LoadUrl(PipelineId, LoadData, IpcSender<LoadStatus>),
Refresh(PipelineId, IpcSender<LoadStatus>),
ScriptCommand(PipelineId, WebDriverScriptCommand),
TakeScreenshot(PipelineId, IpcSender<Option<Image>>)
}
/// Similar to net::resource_task::LoadData
/// can be passed to LoadUrl to load a page with GET/POST
/// parameters or headers
#[derive(Clone, Deserialize, Serialize)]
pub struct LoadData {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub data: Option<Vec<u8>>,
}
impl LoadData {
pub fn new(url: Url) -> LoadData {
LoadData {
url: url,
method: Method::Get,
headers: Headers::new(),
data: None,
}
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub enum NavigationDirection {
Forward,
Back,
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct FrameId(pub u32);
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct WorkerId(pub u32);
/// Each pipeline ID needs to be unique. However, it also needs to be possible to
/// generate the pipeline ID from an iframe element (this simplifies a lot of other
/// code that makes use of pipeline IDs).
///
/// To achieve this, each pipeline index belongs to a particular namespace. There is
/// a namespace for the constellation thread, and also one for every script thread.
/// This allows pipeline IDs to be generated by any of those threads without conflicting
/// with pipeline IDs created by other script threads or the constellation. The
/// constellation is the only code that is responsible for creating new *namespaces*.
/// This ensures that namespaces are always unique, even when using multi-process mode.
///
/// It may help conceptually to think of the namespace ID as an identifier for the
/// thread that created this pipeline ID - however this is really an implementation
/// detail so shouldn't be relied upon in code logic. It's best to think of the
/// pipeline ID as a simple unique identifier that doesn't convey any more information.
#[derive(Clone, Copy)]
pub struct PipelineNamespace {
id: PipelineNamespaceId,
next_index: PipelineIndex,
}
impl PipelineNamespace {
pub fn install(namespace_id: PipelineNamespaceId) {
PIPELINE_NAMESPACE.with(|tls| {
assert!(tls.get().is_none());
tls.set(Some(PipelineNamespace {
id: namespace_id,
next_index: PipelineIndex(0),
}));
});
}
fn next(&mut self) -> PipelineId {
let pipeline_id = PipelineId {
namespace_id: self.id,
index: self.next_index,
};
let PipelineIndex(current_index) = self.next_index;
self.next_index = PipelineIndex(current_index + 1);
pipeline_id
}
}
thread_local!(pub static PIPELINE_NAMESPACE: Cell<Option<PipelineNamespace>> = Cell::new(None));
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineNamespaceId(pub u32);
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineIndex(u32);
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct
|
{
namespace_id: PipelineNamespaceId,
index: PipelineIndex
}
impl PipelineId {
pub fn new() -> PipelineId {
PIPELINE_NAMESPACE.with(|tls| {
let mut namespace = tls.get().expect("No namespace set for this thread!");
let new_pipeline_id = namespace.next();
tls.set(Some(namespace));
new_pipeline_id
})
}
// TODO(gw): This should be removed. It's only required because of the code
// that uses it in the devtools lib.rs file (which itself is a TODO). Once
// that is fixed, this should be removed. It also relies on the first
// call to PipelineId::new() returning (0,0), which is checked with an
// assert in handle_init_load().
pub fn fake_root_pipeline_id() -> PipelineId {
PipelineId {
namespace_id: PipelineNamespaceId(0),
index: PipelineIndex(0),
}
}
}
impl fmt::Display for PipelineId {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let PipelineIndex(index) = self.index;
write!(fmt, "({},{})", namespace_id, index)
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct SubpageId(pub u32);
// The type of pipeline exit. During complete shutdowns, pipelines do not have to
// release resources automatically released on process termination.
#[derive(Copy, Clone, Debug, Deserialize, Serialize)]
pub enum PipelineExitType {
PipelineOnly,
Complete,
}
|
PipelineId
|
identifier_name
|
tests.rs
|
use State;
use GLOBALSINDEX;
use Type;
use raw;
use libc;
use std::task;
use std::any::AnyRefExt;
#[test]
fn test_state_init() {
let mut _s = State::new();
}
#[test]
#[should_fail]
fn test_error()
|
#[test]
fn test_errorstr() {
let res = task::try::<()>(proc() {
let mut s = State::new();
s.errorstr("some err");
});
let err = res.unwrap_err();
let expected = "unprotected error in call to Lua API (some err)";
let s = err.as_ref::<String>();
if s.is_some() {
assert_eq!(s.unwrap().as_slice(), expected);
} else {
let s = err.as_ref::<&'static str>();
if s.is_some() {
assert_eq!(*s.unwrap(), expected);
} else {
fail!("unexpected failure result");
}
}
}
#[test]
fn test_describe() {
let mut s = State::new();
assert_eq!(s.typename(1), "no value");
s.pushnil();
assert_eq!(s.typename(-1), "nil");
s.pushinteger(42);
assert_eq!(s.typename(-1), "number");
s.pushstring("test");
assert_eq!(s.typename(-1), "string");
s.pushboolean(true);
assert_eq!(s.typename(-1), "boolean");
s.pushcfunction(dummy);
assert_eq!(s.typename(-1), "function");
extern "C" fn dummy(_L: *mut ::raw::lua_State) -> ::libc::c_int {
0
}
}
#[test]
fn test_openlibs() {
let mut s = State::new();
s.openlibs();
s.getfield(GLOBALSINDEX, "table");
assert_eq!(s.type_(-1), Some(Type::Table));
}
#[deriving(PartialEq,Eq,Show)]
enum CheckOptionEnum {
COEOne,
COETwo,
COEThree
}
#[test]
fn test_checkoption() {
let lst = [("one", COEOne), ("two", COETwo), ("three", COEThree)];
let mut s = State::new();
for &(k,ref v) in lst.iter() {
s.pushstring(k);
assert_eq!(*s.checkoption(1, None, lst), *v);
s.pop(1);
}
assert_eq!(*s.checkoption(1, Some("three"), lst), COEThree);
let res = task::try(proc() {
let mut s = State::new();
s.checkoption(1, None, lst);
});
assert!(res.is_err(), "expected error from checkoption");
let res = task::try(proc() {
let mut s = State::new();
s.checkoption(1, Some("four"), lst);
});
assert!(res.is_err(), "expected error from checkoption");
}
#[test]
fn test_tocfunction() {
let mut s = State::new();
// extern "C" fns don't implement Eq, so cast them to a pointer instead
s.pushstring("foo");
assert_eq!(s.tocfunction(1).map(|f| f as *()), None);
s.pushcfunction(cfunc);
assert_eq!(s.tocfunction(2).map(|f| f as *()), Some(cfunc as *()));
extern "C" fn cfunc(_L: *mut raw::lua_State) -> libc::c_int { 0 }
}
#[test]
fn test_gsub() {
// do some pretty basic gsub tests
let mut L = State::new();
assert_eq!(L.gsub("foobar", "bar", "quux"), "fooquux");
assert_eq!(L.gsub("foo", "o", "ö"), "föö");
assert_eq!(L.gsub("test", "a", "b"), "test");
assert_eq!(L.gsub("a b c d e", " ", "."), "a.b.c.d.e");
}
|
{
let mut s = State::new();
s.pushinteger(42);
s.error()
}
|
identifier_body
|
tests.rs
|
use State;
use GLOBALSINDEX;
use Type;
use raw;
use libc;
|
fn test_state_init() {
let mut _s = State::new();
}
#[test]
#[should_fail]
fn test_error() {
let mut s = State::new();
s.pushinteger(42);
s.error()
}
#[test]
fn test_errorstr() {
let res = task::try::<()>(proc() {
let mut s = State::new();
s.errorstr("some err");
});
let err = res.unwrap_err();
let expected = "unprotected error in call to Lua API (some err)";
let s = err.as_ref::<String>();
if s.is_some() {
assert_eq!(s.unwrap().as_slice(), expected);
} else {
let s = err.as_ref::<&'static str>();
if s.is_some() {
assert_eq!(*s.unwrap(), expected);
} else {
fail!("unexpected failure result");
}
}
}
#[test]
fn test_describe() {
let mut s = State::new();
assert_eq!(s.typename(1), "no value");
s.pushnil();
assert_eq!(s.typename(-1), "nil");
s.pushinteger(42);
assert_eq!(s.typename(-1), "number");
s.pushstring("test");
assert_eq!(s.typename(-1), "string");
s.pushboolean(true);
assert_eq!(s.typename(-1), "boolean");
s.pushcfunction(dummy);
assert_eq!(s.typename(-1), "function");
extern "C" fn dummy(_L: *mut ::raw::lua_State) -> ::libc::c_int {
0
}
}
#[test]
fn test_openlibs() {
let mut s = State::new();
s.openlibs();
s.getfield(GLOBALSINDEX, "table");
assert_eq!(s.type_(-1), Some(Type::Table));
}
#[deriving(PartialEq,Eq,Show)]
enum CheckOptionEnum {
COEOne,
COETwo,
COEThree
}
#[test]
fn test_checkoption() {
let lst = [("one", COEOne), ("two", COETwo), ("three", COEThree)];
let mut s = State::new();
for &(k,ref v) in lst.iter() {
s.pushstring(k);
assert_eq!(*s.checkoption(1, None, lst), *v);
s.pop(1);
}
assert_eq!(*s.checkoption(1, Some("three"), lst), COEThree);
let res = task::try(proc() {
let mut s = State::new();
s.checkoption(1, None, lst);
});
assert!(res.is_err(), "expected error from checkoption");
let res = task::try(proc() {
let mut s = State::new();
s.checkoption(1, Some("four"), lst);
});
assert!(res.is_err(), "expected error from checkoption");
}
#[test]
fn test_tocfunction() {
let mut s = State::new();
// extern "C" fns don't implement Eq, so cast them to a pointer instead
s.pushstring("foo");
assert_eq!(s.tocfunction(1).map(|f| f as *()), None);
s.pushcfunction(cfunc);
assert_eq!(s.tocfunction(2).map(|f| f as *()), Some(cfunc as *()));
extern "C" fn cfunc(_L: *mut raw::lua_State) -> libc::c_int { 0 }
}
#[test]
fn test_gsub() {
// do some pretty basic gsub tests
let mut L = State::new();
assert_eq!(L.gsub("foobar", "bar", "quux"), "fooquux");
assert_eq!(L.gsub("foo", "o", "ö"), "föö");
assert_eq!(L.gsub("test", "a", "b"), "test");
assert_eq!(L.gsub("a b c d e", " ", "."), "a.b.c.d.e");
}
|
use std::task;
use std::any::AnyRefExt;
#[test]
|
random_line_split
|
tests.rs
|
use State;
use GLOBALSINDEX;
use Type;
use raw;
use libc;
use std::task;
use std::any::AnyRefExt;
#[test]
fn test_state_init() {
let mut _s = State::new();
}
#[test]
#[should_fail]
fn test_error() {
let mut s = State::new();
s.pushinteger(42);
s.error()
}
#[test]
fn test_errorstr() {
let res = task::try::<()>(proc() {
let mut s = State::new();
s.errorstr("some err");
});
let err = res.unwrap_err();
let expected = "unprotected error in call to Lua API (some err)";
let s = err.as_ref::<String>();
if s.is_some() {
assert_eq!(s.unwrap().as_slice(), expected);
} else
|
}
#[test]
fn test_describe() {
let mut s = State::new();
assert_eq!(s.typename(1), "no value");
s.pushnil();
assert_eq!(s.typename(-1), "nil");
s.pushinteger(42);
assert_eq!(s.typename(-1), "number");
s.pushstring("test");
assert_eq!(s.typename(-1), "string");
s.pushboolean(true);
assert_eq!(s.typename(-1), "boolean");
s.pushcfunction(dummy);
assert_eq!(s.typename(-1), "function");
extern "C" fn dummy(_L: *mut ::raw::lua_State) -> ::libc::c_int {
0
}
}
#[test]
fn test_openlibs() {
let mut s = State::new();
s.openlibs();
s.getfield(GLOBALSINDEX, "table");
assert_eq!(s.type_(-1), Some(Type::Table));
}
#[deriving(PartialEq,Eq,Show)]
enum CheckOptionEnum {
COEOne,
COETwo,
COEThree
}
#[test]
fn test_checkoption() {
let lst = [("one", COEOne), ("two", COETwo), ("three", COEThree)];
let mut s = State::new();
for &(k,ref v) in lst.iter() {
s.pushstring(k);
assert_eq!(*s.checkoption(1, None, lst), *v);
s.pop(1);
}
assert_eq!(*s.checkoption(1, Some("three"), lst), COEThree);
let res = task::try(proc() {
let mut s = State::new();
s.checkoption(1, None, lst);
});
assert!(res.is_err(), "expected error from checkoption");
let res = task::try(proc() {
let mut s = State::new();
s.checkoption(1, Some("four"), lst);
});
assert!(res.is_err(), "expected error from checkoption");
}
#[test]
fn test_tocfunction() {
let mut s = State::new();
// extern "C" fns don't implement Eq, so cast them to a pointer instead
s.pushstring("foo");
assert_eq!(s.tocfunction(1).map(|f| f as *()), None);
s.pushcfunction(cfunc);
assert_eq!(s.tocfunction(2).map(|f| f as *()), Some(cfunc as *()));
extern "C" fn cfunc(_L: *mut raw::lua_State) -> libc::c_int { 0 }
}
#[test]
fn test_gsub() {
// do some pretty basic gsub tests
let mut L = State::new();
assert_eq!(L.gsub("foobar", "bar", "quux"), "fooquux");
assert_eq!(L.gsub("foo", "o", "ö"), "föö");
assert_eq!(L.gsub("test", "a", "b"), "test");
assert_eq!(L.gsub("a b c d e", " ", "."), "a.b.c.d.e");
}
|
{
let s = err.as_ref::<&'static str>();
if s.is_some() {
assert_eq!(*s.unwrap(), expected);
} else {
fail!("unexpected failure result");
}
}
|
conditional_block
|
tests.rs
|
use State;
use GLOBALSINDEX;
use Type;
use raw;
use libc;
use std::task;
use std::any::AnyRefExt;
#[test]
fn test_state_init() {
let mut _s = State::new();
}
#[test]
#[should_fail]
fn test_error() {
let mut s = State::new();
s.pushinteger(42);
s.error()
}
#[test]
fn test_errorstr() {
let res = task::try::<()>(proc() {
let mut s = State::new();
s.errorstr("some err");
});
let err = res.unwrap_err();
let expected = "unprotected error in call to Lua API (some err)";
let s = err.as_ref::<String>();
if s.is_some() {
assert_eq!(s.unwrap().as_slice(), expected);
} else {
let s = err.as_ref::<&'static str>();
if s.is_some() {
assert_eq!(*s.unwrap(), expected);
} else {
fail!("unexpected failure result");
}
}
}
#[test]
fn test_describe() {
let mut s = State::new();
assert_eq!(s.typename(1), "no value");
s.pushnil();
assert_eq!(s.typename(-1), "nil");
s.pushinteger(42);
assert_eq!(s.typename(-1), "number");
s.pushstring("test");
assert_eq!(s.typename(-1), "string");
s.pushboolean(true);
assert_eq!(s.typename(-1), "boolean");
s.pushcfunction(dummy);
assert_eq!(s.typename(-1), "function");
extern "C" fn dummy(_L: *mut ::raw::lua_State) -> ::libc::c_int {
0
}
}
#[test]
fn
|
() {
let mut s = State::new();
s.openlibs();
s.getfield(GLOBALSINDEX, "table");
assert_eq!(s.type_(-1), Some(Type::Table));
}
#[deriving(PartialEq,Eq,Show)]
enum CheckOptionEnum {
COEOne,
COETwo,
COEThree
}
#[test]
fn test_checkoption() {
let lst = [("one", COEOne), ("two", COETwo), ("three", COEThree)];
let mut s = State::new();
for &(k,ref v) in lst.iter() {
s.pushstring(k);
assert_eq!(*s.checkoption(1, None, lst), *v);
s.pop(1);
}
assert_eq!(*s.checkoption(1, Some("three"), lst), COEThree);
let res = task::try(proc() {
let mut s = State::new();
s.checkoption(1, None, lst);
});
assert!(res.is_err(), "expected error from checkoption");
let res = task::try(proc() {
let mut s = State::new();
s.checkoption(1, Some("four"), lst);
});
assert!(res.is_err(), "expected error from checkoption");
}
#[test]
fn test_tocfunction() {
let mut s = State::new();
// extern "C" fns don't implement Eq, so cast them to a pointer instead
s.pushstring("foo");
assert_eq!(s.tocfunction(1).map(|f| f as *()), None);
s.pushcfunction(cfunc);
assert_eq!(s.tocfunction(2).map(|f| f as *()), Some(cfunc as *()));
extern "C" fn cfunc(_L: *mut raw::lua_State) -> libc::c_int { 0 }
}
#[test]
fn test_gsub() {
// do some pretty basic gsub tests
let mut L = State::new();
assert_eq!(L.gsub("foobar", "bar", "quux"), "fooquux");
assert_eq!(L.gsub("foo", "o", "ö"), "föö");
assert_eq!(L.gsub("test", "a", "b"), "test");
assert_eq!(L.gsub("a b c d e", " ", "."), "a.b.c.d.e");
}
|
test_openlibs
|
identifier_name
|
stylesheets.rs
|
frame_list};
use media_queries::{Device, MediaQueryList, parse_media_query_list};
use parking_lot::RwLock;
use parser::{ParserContext, ParserContextExtraData, log_css_error};
use properties::{PropertyDeclarationBlock, parse_property_declaration_list};
use selector_impl::TheSelectorImpl;
use selectors::parser::{Selector, parse_selector_list};
use smallvec::SmallVec;
use std::cell::Cell;
use std::iter::Iterator;
use std::slice;
use std::sync::Arc;
use string_cache::{Atom, Namespace};
use url::Url;
use viewport::ViewportRule;
/// Each style rule has an origin, which determines where it enters the cascade.
///
/// http://dev.w3.org/csswg/css-cascade/#cascading-origins
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum Origin {
/// http://dev.w3.org/csswg/css-cascade/#cascade-origin-ua
UserAgent,
/// http://dev.w3.org/csswg/css-cascade/#cascade-origin-author
Author,
/// http://dev.w3.org/csswg/css-cascade/#cascade-origin-user
User,
}
#[derive(Debug)]
pub struct Stylesheet {
/// List of rules in the order they were found (important for
/// cascading order)
pub rules: Vec<CSSRule>,
/// List of media associated with the Stylesheet, if any.
pub media: Option<MediaQueryList>,
pub origin: Origin,
pub dirty_on_viewport_size_change: bool,
}
/// This structure holds the user-agent and user stylesheets.
pub struct UserAgentStylesheets {
pub user_or_user_agent_stylesheets: Vec<Stylesheet>,
pub quirks_mode_stylesheet: Stylesheet,
}
#[derive(Debug)]
pub enum CSSRule {
// No Charset here, CSSCharsetRule has been removed from CSSOM
// https://drafts.csswg.org/cssom/#changes-from-5-december-2013
Namespace(Arc<NamespaceRule>),
Style(Arc<StyleRule>),
Media(Arc<MediaRule>),
FontFace(Arc<FontFaceRule>),
Viewport(Arc<ViewportRule>),
Keyframes(Arc<KeyframesRule>),
}
#[derive(Debug, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct NamespaceRule {
/// `None` for the default Namespace
pub prefix: Option<Atom>,
pub url: Namespace,
}
#[derive(Debug)]
pub struct KeyframesRule {
pub name: Atom,
pub keyframes: Vec<Arc<Keyframe>>,
}
#[derive(Debug)]
pub struct MediaRule {
pub media_queries: Arc<MediaQueryList>,
pub rules: Vec<CSSRule>,
}
impl MediaRule {
#[inline]
pub fn evaluate(&self, device: &Device) -> bool {
self.media_queries.evaluate(device)
}
}
#[derive(Debug)]
pub struct StyleRule {
pub selectors: Vec<Selector<TheSelectorImpl>>,
pub block: Arc<RwLock<PropertyDeclarationBlock>>,
}
impl Stylesheet {
pub fn from_bytes_iter<I: Iterator<Item=Vec<u8>>>(
input: I, base_url: Url, protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>, origin: Origin,
error_reporter: Box<ParseErrorReporter + Send>,
extra_data: ParserContextExtraData) -> Stylesheet {
let mut bytes = vec![];
// TODO: incremental decoding and tokenization/parsing
for chunk in input {
bytes.extend_from_slice(&chunk)
}
Stylesheet::from_bytes(&bytes, base_url, protocol_encoding_label,
environment_encoding, origin, error_reporter,
extra_data)
}
pub fn from_bytes(bytes: &[u8],
base_url: Url,
protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>,
origin: Origin, error_reporter: Box<ParseErrorReporter + Send>,
extra_data: ParserContextExtraData)
-> Stylesheet {
// TODO: bytes.as_slice could be bytes.container_as_bytes()
let (string, _) = decode_stylesheet_bytes(
bytes, protocol_encoding_label, environment_encoding);
Stylesheet::from_str(&string, base_url, origin, error_reporter, extra_data)
}
pub fn from_str(css: &str, base_url: Url, origin: Origin,
error_reporter: Box<ParseErrorReporter + Send>,
extra_data: ParserContextExtraData) -> Stylesheet {
let rule_parser = TopLevelRuleParser {
context: ParserContext::new_with_extra_data(origin, &base_url, error_reporter.clone(),
extra_data),
state: Cell::new(State::Start),
};
let mut input = Parser::new(css);
input.look_for_viewport_percentages();
let mut rules = vec![];
{
let mut iter = RuleListParser::new_for_stylesheet(&mut input, rule_parser);
while let Some(result) = iter.next() {
match result {
Ok(rule) => rules.push(rule),
Err(range) => {
let pos = range.start;
let message = format!("Invalid rule: '{}'", iter.input.slice(range));
let context = ParserContext::new(origin, &base_url, error_reporter.clone());
log_css_error(iter.input, pos, &*message, &context);
}
}
}
}
Stylesheet {
origin: origin,
rules: rules,
media: None,
dirty_on_viewport_size_change:
input.seen_viewport_percentages(),
}
}
/// Set the MediaQueryList associated with the style-sheet.
pub fn set_media(&mut self, media: Option<MediaQueryList>) {
self.media = media;
}
/// Returns whether the style-sheet applies for the current device depending
/// on the associated MediaQueryList.
///
/// Always true if no associated MediaQueryList exists.
pub fn is_effective_for_device(&self, device: &Device) -> bool {
self.media.as_ref().map_or(true, |ref media| media.evaluate(device))
}
/// Return an iterator over all the rules within the style-sheet.
#[inline]
pub fn rules(&self) -> Rules {
Rules::new(self.rules.iter(), None)
}
/// Return an iterator over the effective rules within the style-sheet, as
/// according to the supplied `Device`.
///
/// If a condition does not hold, its associated conditional group rule and
/// nested rules will be skipped. Use `rules` if all rules need to be
/// examined.
#[inline]
pub fn effective_rules<'a>(&'a self, device: &'a Device) -> Rules<'a> {
Rules::new(self.rules.iter(), Some(device))
}
}
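// Illustrative sketch (editor's addition, not part of the original source):
// constructing a stylesheet from a string and walking only the rules that
// apply to a given device. The url, error-reporter and extra-data arguments
// below are placeholders; real call sites supply Servo-specific values.
//
//     let sheet = Stylesheet::from_str(
//         "p { color: red } @media print { p { color: black } }",
//         base_url,            // some url::Url
//         Origin::Author,
//         error_reporter,      // a Box<ParseErrorReporter + Send>
//         extra_data);         // a ParserContextExtraData
//     for rule in sheet.effective_rules(&device) {
//         // pre-order walk; @media groups whose queries fail are skipped
//     }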
/// `CSSRule` iterator.
///
/// The iteration order is pre-order. Specifically, this implies that a
/// conditional group rule will come before its nested rules.
pub struct Rules<'a> {
// 2 because normal case is likely to be just one level of nesting (@media)
stack: SmallVec<[slice::Iter<'a, CSSRule>; 2]>,
device: Option<&'a Device>
}
impl<'a> Rules<'a> {
fn new(iter: slice::Iter<'a, CSSRule>, device: Option<&'a Device>) -> Rules<'a> {
let mut stack: SmallVec<[slice::Iter<'a, CSSRule>; 2]> = SmallVec::new();
stack.push(iter);
Rules { stack: stack, device: device }
}
}
impl<'a> Iterator for Rules<'a> {
type Item = &'a CSSRule;
fn next(&mut self) -> Option<&'a CSSRule> {
        while !self.stack.is_empty() {
let top = self.stack.len() - 1;
while let Some(rule) = self.stack[top].next() {
// handle conditional group rules
if let &CSSRule::Media(ref rule) = rule {
if let Some(device) = self.device {
if rule.evaluate(device) {
self.stack.push(rule.rules.iter());
} else {
continue
}
} else {
self.stack.push(rule.rules.iter());
}
}
return Some(rule)
}
self.stack.pop();
}
None
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
// TODO: track total number of rules in style-sheet for upper bound?
(0, None)
}
}
pub mod rule_filter {
//! Specific `CSSRule` variant iterators.
use std::marker::PhantomData;
use super::{CSSRule, KeyframesRule, MediaRule, StyleRule};
use super::super::font_face::FontFaceRule;
use super::super::viewport::ViewportRule;
macro_rules! rule_filter {
($variant:ident -> $value:ty) => {
/// An iterator that only yields rules that are of the synonymous `CSSRule` variant.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct $variant<'a, I> {
iter: I,
_lifetime: PhantomData<&'a ()>
}
impl<'a, I> $variant<'a, I>
where I: Iterator<Item=&'a CSSRule> {
#[inline]
pub fn new(iter: I) -> $variant<'a, I> {
$variant {
iter: iter,
_lifetime: PhantomData
}
}
}
impl<'a, I> Iterator for $variant<'a, I>
where I: Iterator<Item=&'a CSSRule> {
type Item = &'a $value;
fn next(&mut self) -> Option<&'a $value> {
while let Some(rule) = self.iter.next() {
match *rule {
CSSRule::$variant(ref value) => return Some(value),
_ => continue
}
}
None
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(0, self.iter.size_hint().1)
}
}
}
}
rule_filter!(Media -> MediaRule);
rule_filter!(Style -> StyleRule);
rule_filter!(FontFace -> FontFaceRule);
rule_filter!(Viewport -> ViewportRule);
rule_filter!(Keyframes -> KeyframesRule);
}
/// Extension methods for `CSSRule` iterators.
pub trait CSSRuleIteratorExt<'a>: Iterator<Item=&'a CSSRule> + Sized {
/// Yield only @font-face rules.
fn font_face(self) -> rule_filter::FontFace<'a, Self>;
/// Yield only @media rules.
fn media(self) -> rule_filter::Media<'a, Self>;
/// Yield only style rules.
fn style(self) -> rule_filter::Style<'a, Self>;
/// Yield only @viewport rules.
fn viewport(self) -> rule_filter::Viewport<'a, Self>;
/// Yield only @keyframes rules.
fn keyframes(self) -> rule_filter::Keyframes<'a, Self>;
}
impl<'a, I> CSSRuleIteratorExt<'a> for I where I: Iterator<Item=&'a CSSRule> {
#[inline]
fn font_face(self) -> rule_filter::FontFace<'a, I> {
rule_filter::FontFace::new(self)
}
#[inline]
fn media(self) -> rule_filter::Media<'a, I> {
rule_filter::Media::new(self)
|
}
#[inline]
fn style(self) -> rule_filter::Style<'a, I> {
rule_filter::Style::new(self)
}
#[inline]
fn viewport(self) -> rule_filter::Viewport<'a, I> {
rule_filter::Viewport::new(self)
}
#[inline]
fn keyframes(self) -> rule_filter::Keyframes<'a, I> {
rule_filter::Keyframes::new(self)
}
}
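// Illustrative sketch (editor's addition, not part of the original source):
// because the extension trait is implemented for any `&CSSRule` iterator,
// a variant filter can be chained directly onto `rules()` or
// `effective_rules()`, e.g. to visit only the effective @font-face rules:
//
//     for font_face_rule in stylesheet.effective_rules(&device).font_face() {
//         // `font_face_rule` is an &FontFaceRule
//     }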
fn parse_nested_rules(context: &ParserContext, input: &mut Parser) -> Vec<CSSRule> {
let mut iter = RuleListParser::new_for_nested_rule(input,
NestedRuleParser { context: context });
let mut rules = Vec::new();
while let Some(result) = iter.next() {
match result {
Ok(rule) => rules.push(rule),
Err(range) => {
let pos = range.start;
let message = format!("Unsupported rule: '{}'", iter.input.slice(range));
log_css_error(iter.input, pos, &*message, &context);
}
}
}
rules
}
struct TopLevelRuleParser<'a> {
context: ParserContext<'a>,
state: Cell<State>,
}
#[derive(Eq, PartialEq, Ord, PartialOrd, Copy, Clone)]
enum State {
Start = 1,
Imports = 2,
Namespaces = 3,
Body = 4,
}
enum AtRulePrelude {
/// A @font-face rule prelude.
FontFace,
/// A @media rule prelude, with its media queries.
Media(Arc<MediaQueryList>),
/// A @viewport rule prelude.
Viewport,
/// A @keyframes rule, with its animation name.
Keyframes(Atom),
}
impl<'a> AtRuleParser for TopLevelRuleParser<'a> {
type Prelude = AtRulePrelude;
type AtRule = CSSRule;
fn parse_prelude(&mut self, name: &str, input: &mut Parser)
-> Result<AtRuleType<AtRulePrelude, CSSRule>, ()> {
match_ignore_ascii_case! { name,
"import" => {
if self.state.get() <= State::Imports {
self.state.set(State::Imports);
// TODO: support @import
return Err(()) // "@import is not supported yet"
} else {
return Err(()) // "@import must be before any rule but @charset"
}
},
"namespace" => {
if self.state.get() <= State::Namespaces {
self.state.set(State::Namespaces);
let prefix_result = input.try(|input| input.expect_ident());
let url = Namespace(Atom::from(try!(input.expect_url_or_string())));
let opt_prefix = if let Ok(prefix) = prefix_result {
let prefix: Atom = prefix.into();
self.context.selector_context.namespace_prefixes.insert(
prefix.clone(), url.clone());
Some(prefix)
} else {
self.context.selector_context.default_namespace = Some(url.clone());
None
};
return Ok(AtRuleType::WithoutBlock(CSSRule::Namespace(Arc::new(NamespaceRule {
prefix: opt_prefix,
url: url,
}))))
} else {
return Err(()) // "@namespace must be before any rule but @charset and @import"
}
},
// @charset is removed by rust-cssparser if it’s the first rule in the stylesheet
// anything left is invalid.
"charset" => return Err(()), // (insert appropriate error message)
_ => {}
}
self.state.set(State::Body);
AtRuleParser::parse_prelude(&mut NestedRuleParser { context: &self.context }, name, input)
}
#[inline]
fn parse_block(&mut self, prelude: AtRulePrelude, input: &mut Parser) -> Result<CSSRule, ()> {
AtRuleParser::parse_block(&mut NestedRuleParser { context: &self.context }, prelude, input)
}
}
impl<'a> QualifiedRuleParser for TopLevelRuleParser<'a> {
type Prelude = Vec<Selector<TheSelectorImpl>>;
type QualifiedRule = CSSRule;
#[inline]
fn parse_prelude(&mut self, input: &mut Parser) -> Result<Vec<Selector<TheSelectorImpl>>, ()> {
self.state.set(State::Body);
QualifiedRuleParser::parse_prelude(&mut NestedRuleParser { context: &self.context }, input)
}
#[inline]
fn parse_block(&mut self, prelude: Vec<Selector<TheSelectorImpl>>, input: &mut Parser)
-> Result<CSSRule, ()> {
QualifiedRuleParser::parse_block(&mut NestedRuleParser { context: &self.context },
prelude, input)
}
}
struct NestedRuleParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
}
impl<'a, 'b> AtRuleParser for NestedRuleParser<'a, 'b> {
type Prelude = AtRulePrelude;
type AtRule = CSSRule;
fn parse_prelude(&mut self, name: &str, input: &mut Parser)
-> Result<AtRuleType<AtRulePrelude, CSSRule>, ()> {
match_ignore_ascii_case! { name,
"media" => {
let media_queries = parse_media_query_list(input);
Ok(AtRuleType::WithBlock(AtRulePrelude::Media(Arc::new(media_queries))))
},
"font-face" => {
Ok(AtRuleType::WithBlock(AtRulePrelude::FontFace))
},
"viewport" => {
if ::util::prefs::PREFS.get("layout.viewport.enabled").as_boolean().unwrap_or(false) {
Ok(AtRuleType::WithBlock(AtRulePrelude::Viewport))
} else {
Err(())
}
},
"keyframes" => {
let name = match input.next() {
                    Ok(Token::Ident(ref value)) if value != "none" => Atom::from(&**value),
Ok(Token::QuotedString(value)) => Atom::from(&*value),
_ => return Err(())
};
Ok(AtRuleType::WithBlock(AtRulePrelude::Keyframes(Atom::from(name))))
},
_ => Err(())
}
}
fn parse_block(&mut self, prelude: AtRulePrelude, input: &mut Parser) -> Result<CSSRule, ()> {
match prelude {
AtRulePrelude::FontFace => {
Ok(CSSRule::FontFace(Arc::new(try!(parse_font_face_block(self.context, input)))))
}
AtRulePrelude::Media(media_queries) => {
Ok(CSSRule::Media(Arc::new(MediaRule {
media_queries: media_queries,
rules: parse_nested_rules(self.context, input),
})))
}
AtRulePrelude::Viewport => {
Ok(CSSRule::Viewport(Arc::new(try!(ViewportRule::parse(input, self.context)))))
}
AtRulePrelude::Keyframes(name) => {
Ok(CSSRule::Keyframes(Arc::new(KeyframesRule {
|
random_line_split
|
|
stylesheets.rs
|
_list};
use media_queries::{Device, MediaQueryList, parse_media_query_list};
use parking_lot::RwLock;
use parser::{ParserContext, ParserContextExtraData, log_css_error};
use properties::{PropertyDeclarationBlock, parse_property_declaration_list};
use selector_impl::TheSelectorImpl;
use selectors::parser::{Selector, parse_selector_list};
use smallvec::SmallVec;
use std::cell::Cell;
use std::iter::Iterator;
use std::slice;
use std::sync::Arc;
use string_cache::{Atom, Namespace};
use url::Url;
use viewport::ViewportRule;
/// Each style rule has an origin, which determines where it enters the cascade.
///
/// http://dev.w3.org/csswg/css-cascade/#cascading-origins
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum Origin {
/// http://dev.w3.org/csswg/css-cascade/#cascade-origin-ua
UserAgent,
/// http://dev.w3.org/csswg/css-cascade/#cascade-origin-author
Author,
/// http://dev.w3.org/csswg/css-cascade/#cascade-origin-user
User,
}
#[derive(Debug)]
pub struct Stylesheet {
/// List of rules in the order they were found (important for
/// cascading order)
pub rules: Vec<CSSRule>,
/// List of media associated with the Stylesheet, if any.
pub media: Option<MediaQueryList>,
pub origin: Origin,
pub dirty_on_viewport_size_change: bool,
}
/// This structure holds the user-agent and user stylesheets.
pub struct UserAgentStylesheets {
pub user_or_user_agent_stylesheets: Vec<Stylesheet>,
pub quirks_mode_stylesheet: Stylesheet,
}
#[derive(Debug)]
pub enum CSSRule {
// No Charset here, CSSCharsetRule has been removed from CSSOM
// https://drafts.csswg.org/cssom/#changes-from-5-december-2013
Namespace(Arc<NamespaceRule>),
Style(Arc<StyleRule>),
Media(Arc<MediaRule>),
FontFace(Arc<FontFaceRule>),
Viewport(Arc<ViewportRule>),
Keyframes(Arc<KeyframesRule>),
}
#[derive(Debug, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct NamespaceRule {
/// `None` for the default Namespace
pub prefix: Option<Atom>,
pub url: Namespace,
}
#[derive(Debug)]
pub struct KeyframesRule {
pub name: Atom,
pub keyframes: Vec<Arc<Keyframe>>,
}
#[derive(Debug)]
pub struct MediaRule {
pub media_queries: Arc<MediaQueryList>,
pub rules: Vec<CSSRule>,
}
impl MediaRule {
#[inline]
pub fn evaluate(&self, device: &Device) -> bool {
self.media_queries.evaluate(device)
}
}
#[derive(Debug)]
pub struct StyleRule {
pub selectors: Vec<Selector<TheSelectorImpl>>,
pub block: Arc<RwLock<PropertyDeclarationBlock>>,
}
impl Stylesheet {
pub fn from_bytes_iter<I: Iterator<Item=Vec<u8>>>(
input: I, base_url: Url, protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>, origin: Origin,
error_reporter: Box<ParseErrorReporter + Send>,
extra_data: ParserContextExtraData) -> Stylesheet {
let mut bytes = vec![];
// TODO: incremental decoding and tokenization/parsing
for chunk in input {
bytes.extend_from_slice(&chunk)
}
Stylesheet::from_bytes(&bytes, base_url, protocol_encoding_label,
environment_encoding, origin, error_reporter,
extra_data)
}
pub fn from_bytes(bytes: &[u8],
base_url: Url,
protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>,
origin: Origin, error_reporter: Box<ParseErrorReporter + Send>,
extra_data: ParserContextExtraData)
-> Stylesheet {
// TODO: bytes.as_slice could be bytes.container_as_bytes()
let (string, _) = decode_stylesheet_bytes(
bytes, protocol_encoding_label, environment_encoding);
Stylesheet::from_str(&string, base_url, origin, error_reporter, extra_data)
}
pub fn from_str(css: &str, base_url: Url, origin: Origin,
error_reporter: Box<ParseErrorReporter + Send>,
extra_data: ParserContextExtraData) -> Stylesheet {
let rule_parser = TopLevelRuleParser {
context: ParserContext::new_with_extra_data(origin, &base_url, error_reporter.clone(),
extra_data),
state: Cell::new(State::Start),
};
let mut input = Parser::new(css);
input.look_for_viewport_percentages();
let mut rules = vec![];
{
let mut iter = RuleListParser::new_for_stylesheet(&mut input, rule_parser);
while let Some(result) = iter.next() {
match result {
Ok(rule) => rules.push(rule),
Err(range) => {
let pos = range.start;
let message = format!("Invalid rule: '{}'", iter.input.slice(range));
let context = ParserContext::new(origin, &base_url, error_reporter.clone());
log_css_error(iter.input, pos, &*message, &context);
}
}
}
}
Stylesheet {
origin: origin,
rules: rules,
media: None,
dirty_on_viewport_size_change:
input.seen_viewport_percentages(),
}
}
/// Set the MediaQueryList associated with the style-sheet.
pub fn set_media(&mut self, media: Option<MediaQueryList>) {
self.media = media;
}
/// Returns whether the style-sheet applies for the current device depending
/// on the associated MediaQueryList.
///
/// Always true if no associated MediaQueryList exists.
pub fn is_effective_for_device(&self, device: &Device) -> bool {
self.media.as_ref().map_or(true, |ref media| media.evaluate(device))
}
/// Return an iterator over all the rules within the style-sheet.
#[inline]
pub fn rules(&self) -> Rules {
Rules::new(self.rules.iter(), None)
}
/// Return an iterator over the effective rules within the style-sheet, as
/// according to the supplied `Device`.
///
/// If a condition does not hold, its associated conditional group rule and
/// nested rules will be skipped. Use `rules` if all rules need to be
/// examined.
#[inline]
pub fn effective_rules<'a>(&'a self, device: &'a Device) -> Rules<'a> {
Rules::new(self.rules.iter(), Some(device))
}
}
/// `CSSRule` iterator.
///
/// The iteration order is pre-order. Specifically, this implies that a
/// conditional group rule will come before its nested rules.
pub struct Rules<'a> {
// 2 because normal case is likely to be just one level of nesting (@media)
stack: SmallVec<[slice::Iter<'a, CSSRule>; 2]>,
device: Option<&'a Device>
}
impl<'a> Rules<'a> {
fn new(iter: slice::Iter<'a, CSSRule>, device: Option<&'a Device>) -> Rules<'a> {
let mut stack: SmallVec<[slice::Iter<'a, CSSRule>; 2]> = SmallVec::new();
stack.push(iter);
Rules { stack: stack, device: device }
}
}
impl<'a> Iterator for Rules<'a> {
type Item = &'a CSSRule;
fn next(&mut self) -> Option<&'a CSSRule> {
        while !self.stack.is_empty() {
let top = self.stack.len() - 1;
while let Some(rule) = self.stack[top].next() {
// handle conditional group rules
if let &CSSRule::Media(ref rule) = rule {
if let Some(device) = self.device {
if rule.evaluate(device) {
self.stack.push(rule.rules.iter());
} else {
continue
}
} else {
self.stack.push(rule.rules.iter());
}
}
return Some(rule)
}
self.stack.pop();
}
None
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
// TODO: track total number of rules in style-sheet for upper bound?
(0, None)
}
}
pub mod rule_filter {
//! Specific `CSSRule` variant iterators.
use std::marker::PhantomData;
use super::{CSSRule, KeyframesRule, MediaRule, StyleRule};
use super::super::font_face::FontFaceRule;
use super::super::viewport::ViewportRule;
macro_rules! rule_filter {
($variant:ident -> $value:ty) => {
/// An iterator that only yields rules that are of the synonymous `CSSRule` variant.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct $variant<'a, I> {
iter: I,
_lifetime: PhantomData<&'a ()>
}
impl<'a, I> $variant<'a, I>
where I: Iterator<Item=&'a CSSRule> {
#[inline]
pub fn new(iter: I) -> $variant<'a, I> {
$variant {
iter: iter,
_lifetime: PhantomData
}
}
}
impl<'a, I> Iterator for $variant<'a, I>
where I: Iterator<Item=&'a CSSRule> {
type Item = &'a $value;
fn next(&mut self) -> Option<&'a $value> {
while let Some(rule) = self.iter.next() {
match *rule {
CSSRule::$variant(ref value) => return Some(value),
_ => continue
}
}
None
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(0, self.iter.size_hint().1)
}
}
}
}
rule_filter!(Media -> MediaRule);
rule_filter!(Style -> StyleRule);
rule_filter!(FontFace -> FontFaceRule);
rule_filter!(Viewport -> ViewportRule);
rule_filter!(Keyframes -> KeyframesRule);
}
/// Extension methods for `CSSRule` iterators.
pub trait CSSRuleIteratorExt<'a>: Iterator<Item=&'a CSSRule> + Sized {
/// Yield only @font-face rules.
fn font_face(self) -> rule_filter::FontFace<'a, Self>;
/// Yield only @media rules.
fn media(self) -> rule_filter::Media<'a, Self>;
/// Yield only style rules.
fn style(self) -> rule_filter::Style<'a, Self>;
/// Yield only @viewport rules.
fn viewport(self) -> rule_filter::Viewport<'a, Self>;
/// Yield only @keyframes rules.
fn keyframes(self) -> rule_filter::Keyframes<'a, Self>;
}
impl<'a, I> CSSRuleIteratorExt<'a> for I where I: Iterator<Item=&'a CSSRule> {
#[inline]
fn font_face(self) -> rule_filter::FontFace<'a, I> {
rule_filter::FontFace::new(self)
}
#[inline]
fn media(self) -> rule_filter::Media<'a, I> {
rule_filter::Media::new(self)
}
#[inline]
fn
|
(self) -> rule_filter::Style<'a, I> {
rule_filter::Style::new(self)
}
#[inline]
fn viewport(self) -> rule_filter::Viewport<'a, I> {
rule_filter::Viewport::new(self)
}
#[inline]
fn keyframes(self) -> rule_filter::Keyframes<'a, I> {
rule_filter::Keyframes::new(self)
}
}
fn parse_nested_rules(context: &ParserContext, input: &mut Parser) -> Vec<CSSRule> {
let mut iter = RuleListParser::new_for_nested_rule(input,
NestedRuleParser { context: context });
let mut rules = Vec::new();
while let Some(result) = iter.next() {
match result {
Ok(rule) => rules.push(rule),
Err(range) => {
let pos = range.start;
let message = format!("Unsupported rule: '{}'", iter.input.slice(range));
log_css_error(iter.input, pos, &*message, &context);
}
}
}
rules
}
struct TopLevelRuleParser<'a> {
context: ParserContext<'a>,
state: Cell<State>,
}
#[derive(Eq, PartialEq, Ord, PartialOrd, Copy, Clone)]
enum State {
Start = 1,
Imports = 2,
Namespaces = 3,
Body = 4,
}
enum AtRulePrelude {
/// A @font-face rule prelude.
FontFace,
/// A @media rule prelude, with its media queries.
Media(Arc<MediaQueryList>),
/// A @viewport rule prelude.
Viewport,
/// A @keyframes rule, with its animation name.
Keyframes(Atom),
}
impl<'a> AtRuleParser for TopLevelRuleParser<'a> {
type Prelude = AtRulePrelude;
type AtRule = CSSRule;
fn parse_prelude(&mut self, name: &str, input: &mut Parser)
-> Result<AtRuleType<AtRulePrelude, CSSRule>, ()> {
match_ignore_ascii_case! { name,
"import" => {
if self.state.get() <= State::Imports {
self.state.set(State::Imports);
// TODO: support @import
return Err(()) // "@import is not supported yet"
} else {
return Err(()) // "@import must be before any rule but @charset"
}
},
"namespace" => {
if self.state.get() <= State::Namespaces {
self.state.set(State::Namespaces);
let prefix_result = input.try(|input| input.expect_ident());
let url = Namespace(Atom::from(try!(input.expect_url_or_string())));
let opt_prefix = if let Ok(prefix) = prefix_result {
let prefix: Atom = prefix.into();
self.context.selector_context.namespace_prefixes.insert(
prefix.clone(), url.clone());
Some(prefix)
} else {
self.context.selector_context.default_namespace = Some(url.clone());
None
};
return Ok(AtRuleType::WithoutBlock(CSSRule::Namespace(Arc::new(NamespaceRule {
prefix: opt_prefix,
url: url,
}))))
} else {
return Err(()) // "@namespace must be before any rule but @charset and @import"
}
},
// @charset is removed by rust-cssparser if it’s the first rule in the stylesheet
// anything left is invalid.
"charset" => return Err(()), // (insert appropriate error message)
_ => {}
}
self.state.set(State::Body);
AtRuleParser::parse_prelude(&mut NestedRuleParser { context: &self.context }, name, input)
}
#[inline]
fn parse_block(&mut self, prelude: AtRulePrelude, input: &mut Parser) -> Result<CSSRule, ()> {
AtRuleParser::parse_block(&mut NestedRuleParser { context: &self.context }, prelude, input)
}
}
impl<'a> QualifiedRuleParser for TopLevelRuleParser<'a> {
type Prelude = Vec<Selector<TheSelectorImpl>>;
type QualifiedRule = CSSRule;
#[inline]
fn parse_prelude(&mut self, input: &mut Parser) -> Result<Vec<Selector<TheSelectorImpl>>, ()> {
self.state.set(State::Body);
QualifiedRuleParser::parse_prelude(&mut NestedRuleParser { context: &self.context }, input)
}
#[inline]
fn parse_block(&mut self, prelude: Vec<Selector<TheSelectorImpl>>, input: &mut Parser)
-> Result<CSSRule, ()> {
QualifiedRuleParser::parse_block(&mut NestedRuleParser { context: &self.context },
prelude, input)
}
}
struct NestedRuleParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
}
impl<'a, 'b> AtRuleParser for NestedRuleParser<'a, 'b> {
type Prelude = AtRulePrelude;
type AtRule = CSSRule;
fn parse_prelude(&mut self, name: &str, input: &mut Parser)
-> Result<AtRuleType<AtRulePrelude, CSSRule>, ()> {
match_ignore_ascii_case! { name,
"media" => {
let media_queries = parse_media_query_list(input);
Ok(AtRuleType::WithBlock(AtRulePrelude::Media(Arc::new(media_queries))))
},
"font-face" => {
Ok(AtRuleType::WithBlock(AtRulePrelude::FontFace))
},
"viewport" => {
if ::util::prefs::PREFS.get("layout.viewport.enabled").as_boolean().unwrap_or(false) {
Ok(AtRuleType::WithBlock(AtRulePrelude::Viewport))
} else {
Err(())
}
},
"keyframes" => {
let name = match input.next() {
Ok(Token::Ident(ref value)) if value != "none" => Atom::from(&**value),
Ok(Token::QuotedString(value)) => Atom::from(&*value),
_ => return Err(())
};
Ok(AtRuleType::WithBlock(AtRulePrelude::Keyframes(Atom::from(name))))
},
_ => Err(())
}
}
fn parse_block(&mut self, prelude: AtRulePrelude, input: &mut Parser) -> Result<CSSRule, ()> {
match prelude {
AtRulePrelude::FontFace => {
Ok(CSSRule::FontFace(Arc::new(try!(parse_font_face_block(self.context, input)))))
}
AtRulePrelude::Media(media_queries) => {
Ok(CSSRule::Media(Arc::new(MediaRule {
media_queries: media_queries,
rules: parse_nested_rules(self.context, input),
})))
}
AtRulePrelude::Viewport => {
Ok(CSSRule::Viewport(Arc::new(try!(ViewportRule::parse(input, self.context)))))
}
AtRulePrelude::Keyframes(name) => {
Ok(CSSRule::Keyframes(Arc::new(KeyframesRule {
|
style
|
identifier_name
|
stylesheets.rs
|
_list};
use media_queries::{Device, MediaQueryList, parse_media_query_list};
use parking_lot::RwLock;
use parser::{ParserContext, ParserContextExtraData, log_css_error};
use properties::{PropertyDeclarationBlock, parse_property_declaration_list};
use selector_impl::TheSelectorImpl;
use selectors::parser::{Selector, parse_selector_list};
use smallvec::SmallVec;
use std::cell::Cell;
use std::iter::Iterator;
use std::slice;
use std::sync::Arc;
use string_cache::{Atom, Namespace};
use url::Url;
use viewport::ViewportRule;
/// Each style rule has an origin, which determines where it enters the cascade.
///
/// http://dev.w3.org/csswg/css-cascade/#cascading-origins
#[derive(Clone, PartialEq, Eq, Copy, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum Origin {
/// http://dev.w3.org/csswg/css-cascade/#cascade-origin-ua
UserAgent,
/// http://dev.w3.org/csswg/css-cascade/#cascade-origin-author
Author,
/// http://dev.w3.org/csswg/css-cascade/#cascade-origin-user
User,
}
#[derive(Debug)]
pub struct Stylesheet {
/// List of rules in the order they were found (important for
/// cascading order)
pub rules: Vec<CSSRule>,
/// List of media associated with the Stylesheet, if any.
pub media: Option<MediaQueryList>,
pub origin: Origin,
pub dirty_on_viewport_size_change: bool,
}
/// This structure holds the user-agent and user stylesheets.
pub struct UserAgentStylesheets {
pub user_or_user_agent_stylesheets: Vec<Stylesheet>,
pub quirks_mode_stylesheet: Stylesheet,
}
#[derive(Debug)]
pub enum CSSRule {
// No Charset here, CSSCharsetRule has been removed from CSSOM
// https://drafts.csswg.org/cssom/#changes-from-5-december-2013
Namespace(Arc<NamespaceRule>),
Style(Arc<StyleRule>),
Media(Arc<MediaRule>),
FontFace(Arc<FontFaceRule>),
Viewport(Arc<ViewportRule>),
Keyframes(Arc<KeyframesRule>),
}
#[derive(Debug, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct NamespaceRule {
/// `None` for the default Namespace
pub prefix: Option<Atom>,
pub url: Namespace,
}
#[derive(Debug)]
pub struct KeyframesRule {
pub name: Atom,
pub keyframes: Vec<Arc<Keyframe>>,
}
#[derive(Debug)]
pub struct MediaRule {
pub media_queries: Arc<MediaQueryList>,
pub rules: Vec<CSSRule>,
}
impl MediaRule {
#[inline]
pub fn evaluate(&self, device: &Device) -> bool {
self.media_queries.evaluate(device)
}
}
#[derive(Debug)]
pub struct StyleRule {
pub selectors: Vec<Selector<TheSelectorImpl>>,
pub block: Arc<RwLock<PropertyDeclarationBlock>>,
}
impl Stylesheet {
pub fn from_bytes_iter<I: Iterator<Item=Vec<u8>>>(
input: I, base_url: Url, protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>, origin: Origin,
error_reporter: Box<ParseErrorReporter + Send>,
extra_data: ParserContextExtraData) -> Stylesheet {
let mut bytes = vec![];
// TODO: incremental decoding and tokenization/parsing
for chunk in input {
bytes.extend_from_slice(&chunk)
}
Stylesheet::from_bytes(&bytes, base_url, protocol_encoding_label,
environment_encoding, origin, error_reporter,
extra_data)
}
pub fn from_bytes(bytes: &[u8],
base_url: Url,
protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>,
origin: Origin, error_reporter: Box<ParseErrorReporter + Send>,
extra_data: ParserContextExtraData)
-> Stylesheet {
// TODO: bytes.as_slice could be bytes.container_as_bytes()
let (string, _) = decode_stylesheet_bytes(
bytes, protocol_encoding_label, environment_encoding);
Stylesheet::from_str(&string, base_url, origin, error_reporter, extra_data)
}
pub fn from_str(css: &str, base_url: Url, origin: Origin,
error_reporter: Box<ParseErrorReporter + Send>,
extra_data: ParserContextExtraData) -> Stylesheet {
let rule_parser = TopLevelRuleParser {
context: ParserContext::new_with_extra_data(origin, &base_url, error_reporter.clone(),
extra_data),
state: Cell::new(State::Start),
};
let mut input = Parser::new(css);
input.look_for_viewport_percentages();
let mut rules = vec![];
{
let mut iter = RuleListParser::new_for_stylesheet(&mut input, rule_parser);
while let Some(result) = iter.next() {
match result {
Ok(rule) => rules.push(rule),
Err(range) => {
let pos = range.start;
let message = format!("Invalid rule: '{}'", iter.input.slice(range));
let context = ParserContext::new(origin, &base_url, error_reporter.clone());
log_css_error(iter.input, pos, &*message, &context);
}
}
}
}
Stylesheet {
origin: origin,
rules: rules,
media: None,
dirty_on_viewport_size_change:
input.seen_viewport_percentages(),
}
}
/// Set the MediaQueryList associated with the style-sheet.
pub fn set_media(&mut self, media: Option<MediaQueryList>) {
self.media = media;
}
/// Returns whether the style-sheet applies for the current device depending
/// on the associated MediaQueryList.
///
/// Always true if no associated MediaQueryList exists.
pub fn is_effective_for_device(&self, device: &Device) -> bool {
self.media.as_ref().map_or(true, |ref media| media.evaluate(device))
}
/// Return an iterator over all the rules within the style-sheet.
#[inline]
pub fn rules(&self) -> Rules {
Rules::new(self.rules.iter(), None)
}
/// Return an iterator over the effective rules within the style-sheet, as
/// according to the supplied `Device`.
///
/// If a condition does not hold, its associated conditional group rule and
/// nested rules will be skipped. Use `rules` if all rules need to be
/// examined.
#[inline]
pub fn effective_rules<'a>(&'a self, device: &'a Device) -> Rules<'a> {
Rules::new(self.rules.iter(), Some(device))
}
}
/// `CSSRule` iterator.
///
/// The iteration order is pre-order. Specifically, this implies that a
/// conditional group rule will come before its nested rules.
pub struct Rules<'a> {
// 2 because normal case is likely to be just one level of nesting (@media)
stack: SmallVec<[slice::Iter<'a, CSSRule>; 2]>,
device: Option<&'a Device>
}
impl<'a> Rules<'a> {
fn new(iter: slice::Iter<'a, CSSRule>, device: Option<&'a Device>) -> Rules<'a> {
let mut stack: SmallVec<[slice::Iter<'a, CSSRule>; 2]> = SmallVec::new();
stack.push(iter);
Rules { stack: stack, device: device }
}
}
impl<'a> Iterator for Rules<'a> {
type Item = &'a CSSRule;
fn next(&mut self) -> Option<&'a CSSRule> {
while !self.stack.is_empty() {
let top = self.stack.len() - 1;
while let Some(rule) = self.stack[top].next() {
// handle conditional group rules
if let &CSSRule::Media(ref rule) = rule
|
return Some(rule)
}
self.stack.pop();
}
None
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
// TODO: track total number of rules in style-sheet for upper bound?
(0, None)
}
}
pub mod rule_filter {
//! Specific `CSSRule` variant iterators.
use std::marker::PhantomData;
use super::{CSSRule, KeyframesRule, MediaRule, StyleRule};
use super::super::font_face::FontFaceRule;
use super::super::viewport::ViewportRule;
macro_rules! rule_filter {
($variant:ident -> $value:ty) => {
/// An iterator that only yields rules that are of the synonymous `CSSRule` variant.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct $variant<'a, I> {
iter: I,
_lifetime: PhantomData<&'a ()>
}
impl<'a, I> $variant<'a, I>
where I: Iterator<Item=&'a CSSRule> {
#[inline]
pub fn new(iter: I) -> $variant<'a, I> {
$variant {
iter: iter,
_lifetime: PhantomData
}
}
}
impl<'a, I> Iterator for $variant<'a, I>
where I: Iterator<Item=&'a CSSRule> {
type Item = &'a $value;
fn next(&mut self) -> Option<&'a $value> {
while let Some(rule) = self.iter.next() {
match *rule {
CSSRule::$variant(ref value) => return Some(value),
_ => continue
}
}
None
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
(0, self.iter.size_hint().1)
}
}
}
}
rule_filter!(Media -> MediaRule);
rule_filter!(Style -> StyleRule);
rule_filter!(FontFace -> FontFaceRule);
rule_filter!(Viewport -> ViewportRule);
rule_filter!(Keyframes -> KeyframesRule);
}
/// Extension methods for `CSSRule` iterators.
pub trait CSSRuleIteratorExt<'a>: Iterator<Item=&'a CSSRule> + Sized {
/// Yield only @font-face rules.
fn font_face(self) -> rule_filter::FontFace<'a, Self>;
/// Yield only @media rules.
fn media(self) -> rule_filter::Media<'a, Self>;
/// Yield only style rules.
fn style(self) -> rule_filter::Style<'a, Self>;
/// Yield only @viewport rules.
fn viewport(self) -> rule_filter::Viewport<'a, Self>;
/// Yield only @keyframes rules.
fn keyframes(self) -> rule_filter::Keyframes<'a, Self>;
}
impl<'a, I> CSSRuleIteratorExt<'a> for I where I: Iterator<Item=&'a CSSRule> {
#[inline]
fn font_face(self) -> rule_filter::FontFace<'a, I> {
rule_filter::FontFace::new(self)
}
#[inline]
fn media(self) -> rule_filter::Media<'a, I> {
rule_filter::Media::new(self)
}
#[inline]
fn style(self) -> rule_filter::Style<'a, I> {
rule_filter::Style::new(self)
}
#[inline]
fn viewport(self) -> rule_filter::Viewport<'a, I> {
rule_filter::Viewport::new(self)
}
#[inline]
fn keyframes(self) -> rule_filter::Keyframes<'a, I> {
rule_filter::Keyframes::new(self)
}
}
fn parse_nested_rules(context: &ParserContext, input: &mut Parser) -> Vec<CSSRule> {
let mut iter = RuleListParser::new_for_nested_rule(input,
NestedRuleParser { context: context });
let mut rules = Vec::new();
while let Some(result) = iter.next() {
match result {
Ok(rule) => rules.push(rule),
Err(range) => {
let pos = range.start;
let message = format!("Unsupported rule: '{}'", iter.input.slice(range));
log_css_error(iter.input, pos, &*message, &context);
}
}
}
rules
}
struct TopLevelRuleParser<'a> {
context: ParserContext<'a>,
state: Cell<State>,
}
#[derive(Eq, PartialEq, Ord, PartialOrd, Copy, Clone)]
enum State {
Start = 1,
Imports = 2,
Namespaces = 3,
Body = 4,
}
enum AtRulePrelude {
/// A @font-face rule prelude.
FontFace,
/// A @media rule prelude, with its media queries.
Media(Arc<MediaQueryList>),
/// A @viewport rule prelude.
Viewport,
/// A @keyframes rule, with its animation name.
Keyframes(Atom),
}
impl<'a> AtRuleParser for TopLevelRuleParser<'a> {
type Prelude = AtRulePrelude;
type AtRule = CSSRule;
fn parse_prelude(&mut self, name: &str, input: &mut Parser)
-> Result<AtRuleType<AtRulePrelude, CSSRule>, ()> {
match_ignore_ascii_case! { name,
"import" => {
if self.state.get() <= State::Imports {
self.state.set(State::Imports);
// TODO: support @import
return Err(()) // "@import is not supported yet"
} else {
return Err(()) // "@import must be before any rule but @charset"
}
},
"namespace" => {
if self.state.get() <= State::Namespaces {
self.state.set(State::Namespaces);
let prefix_result = input.try(|input| input.expect_ident());
let url = Namespace(Atom::from(try!(input.expect_url_or_string())));
let opt_prefix = if let Ok(prefix) = prefix_result {
let prefix: Atom = prefix.into();
self.context.selector_context.namespace_prefixes.insert(
prefix.clone(), url.clone());
Some(prefix)
} else {
self.context.selector_context.default_namespace = Some(url.clone());
None
};
return Ok(AtRuleType::WithoutBlock(CSSRule::Namespace(Arc::new(NamespaceRule {
prefix: opt_prefix,
url: url,
}))))
} else {
return Err(()) // "@namespace must be before any rule but @charset and @import"
}
},
// @charset is removed by rust-cssparser if it’s the first rule in the stylesheet
// anything left is invalid.
"charset" => return Err(()), // (insert appropriate error message)
_ => {}
}
self.state.set(State::Body);
AtRuleParser::parse_prelude(&mut NestedRuleParser { context: &self.context }, name, input)
}
#[inline]
fn parse_block(&mut self, prelude: AtRulePrelude, input: &mut Parser) -> Result<CSSRule, ()> {
AtRuleParser::parse_block(&mut NestedRuleParser { context: &self.context }, prelude, input)
}
}
impl<'a> QualifiedRuleParser for TopLevelRuleParser<'a> {
type Prelude = Vec<Selector<TheSelectorImpl>>;
type QualifiedRule = CSSRule;
#[inline]
fn parse_prelude(&mut self, input: &mut Parser) -> Result<Vec<Selector<TheSelectorImpl>>, ()> {
self.state.set(State::Body);
QualifiedRuleParser::parse_prelude(&mut NestedRuleParser { context: &self.context }, input)
}
#[inline]
fn parse_block(&mut self, prelude: Vec<Selector<TheSelectorImpl>>, input: &mut Parser)
-> Result<CSSRule, ()> {
QualifiedRuleParser::parse_block(&mut NestedRuleParser { context: &self.context },
prelude, input)
}
}
struct NestedRuleParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
}
impl<'a, 'b> AtRuleParser for NestedRuleParser<'a, 'b> {
type Prelude = AtRulePrelude;
type AtRule = CSSRule;
fn parse_prelude(&mut self, name: &str, input: &mut Parser)
-> Result<AtRuleType<AtRulePrelude, CSSRule>, ()> {
match_ignore_ascii_case! { name,
"media" => {
let media_queries = parse_media_query_list(input);
Ok(AtRuleType::WithBlock(AtRulePrelude::Media(Arc::new(media_queries))))
},
"font-face" => {
Ok(AtRuleType::WithBlock(AtRulePrelude::FontFace))
},
"viewport" => {
if ::util::prefs::PREFS.get("layout.viewport.enabled").as_boolean().unwrap_or(false) {
Ok(AtRuleType::WithBlock(AtRulePrelude::Viewport))
} else {
Err(())
}
},
"keyframes" => {
let name = match input.next() {
Ok(Token::Ident(ref value)) if value != "none" => Atom::from(&**value),
Ok(Token::QuotedString(value)) => Atom::from(&*value),
_ => return Err(())
};
Ok(AtRuleType::WithBlock(AtRulePrelude::Keyframes(Atom::from(name))))
},
_ => Err(())
}
}
fn parse_block(&mut self, prelude: AtRulePrelude, input: &mut Parser) -> Result<CSSRule, ()> {
match prelude {
AtRulePrelude::FontFace => {
Ok(CSSRule::FontFace(Arc::new(try!(parse_font_face_block(self.context, input)))))
}
AtRulePrelude::Media(media_queries) => {
Ok(CSSRule::Media(Arc::new(MediaRule {
media_queries: media_queries,
rules: parse_nested_rules(self.context, input),
})))
}
AtRulePrelude::Viewport => {
Ok(CSSRule::Viewport(Arc::new(try!(ViewportRule::parse(input, self.context)))))
}
AtRulePrelude::Keyframes(name) => {
Ok(CSSRule::Keyframes(Arc::new(KeyframesRule {
|
{
if let Some(device) = self.device {
if rule.evaluate(device) {
self.stack.push(rule.rules.iter());
} else {
continue
}
} else {
self.stack.push(rule.rules.iter());
}
}
|
conditional_block
|
main.rs
|
lazy_static;
#[macro_use]
mod sorceries;
mod space;
mod identity;
mod motion;
mod landmark;
mod life;
mod mind;
mod substrate;
mod uci;
// Unlikely Command Integration
mod test_landmark;
use std::fs::OpenOptions;
use std::io;
use std::io::Write;
use std::process;
use ansi_term::Colour as Color;
use argparse::{ArgumentParser, Print, Store, StoreOption, StoreTrue};
use log::{LogLevelFilter, LogMetadata, LogRecord, SetLoggerError};
use time::{Duration, get_time};
use identity::{Agent, Team};
use life::{Commit, Patch, TransitPatch, WorldState};
use mind::{Variation, fixed_depth_sequence_kickoff, iterative_deepening_kickoff,
kickoff, pagan_variation_format, Memory};
use substrate::memory_free;
use serde_json::to_string;
use serde::Serialize;
fn encode<T>(value: &T) -> String
where T: ?Sized + serde::ser::Serialize {
to_string(value).unwrap()
}
struct DebugLogger;
impl DebugLogger {
pub fn init() -> Result<(), SetLoggerError> {
log::set_logger(|max_log_level| {
max_log_level.set(LogLevelFilter::Debug);
Box::new(DebugLogger)
})
}
}
impl log::Log for DebugLogger {
fn enabled(&self, _metadata: &LogMetadata) -> bool {
true
}
fn log(&self, record: &LogRecord) {
// XXX: can't the open file handle live inside the DebugLogger struct?!
let mut log_file = OpenOptions::new()
.write(true)
.append(true)
.create(true)
.open("leafline.log")
.expect("couldn't open log file?!");
let log_message = format!("[{}] {}\n",
time::now()
.strftime("%Y-%m-%d %H:%M:%S.%f")
.unwrap(),
record.args());
log_file.write_all(&log_message.into_bytes())
.expect("couldn't write to log file?!");
}
}
#[derive(Debug, Clone)]
enum LookaheadBound {
Depth(u8, Option<u8>),
DepthSequence(Vec<u8>),
Seconds(u8),
}
impl LookaheadBound {
pub fn duration(&self) -> Duration {
match *self {
LookaheadBound::Seconds(secs) => Duration::seconds(i64::from(secs)),
_ => {
moral_panic!("`duration()` called on non-Seconds LookaheadBound \
variant")
}
}
}
pub fn new_from_sequence_depiction(depiction: &str) -> Self {
let depth_runes = depiction.split(',');
let depth_sequence = depth_runes.map(|dd| {
dd.parse::<u8>()
.expect("couldn't parse depth \
sequence")
})
.collect::<Vec<_>>();
LookaheadBound::DepthSequence(depth_sequence)
}
pub fn from_args(lookahead_depth: Option<u8>,
lookahead_extension: Option<u8>,
lookahead_depth_sequence: Option<String>,
lookahead_seconds: Option<u8>)
-> Result<Option<Self>, String> {
let mut bound = None;
let confirm_bound_is_none =
|
`--seconds` was passed"
.to_owned())
} else {
Ok(true)
}
};
if let Some(depth) = lookahead_depth {
confirm_bound_is_none(&bound)?;
bound = Some(LookaheadBound::Depth(depth, lookahead_extension));
}
if let Some(sequence_depiction) = lookahead_depth_sequence {
confirm_bound_is_none(&bound)?;
bound = Some(LookaheadBound::new_from_sequence_depiction(
&sequence_depiction));
}
if let Some(seconds) = lookahead_seconds {
confirm_bound_is_none(&bound)?;
bound = Some(LookaheadBound::Seconds(seconds));
}
Ok(bound)
}
}
fn forecast<T: 'static + Memory>(world: WorldState, bound: LookaheadBound, déjà_vu_bound: f32)
-> (Vec<(Commit, f32, T)>, u8, Duration) {
let start_thinking = get_time();
let forecasts;
let depth;
match bound {
LookaheadBound::Depth(ds, es) => {
forecasts = kickoff::<T>(&world, ds, es, false, déjà_vu_bound);
depth = ds;
}
LookaheadBound::DepthSequence(ds) => {
depth = *ds.last().unwrap();
forecasts = fixed_depth_sequence_kickoff::<T>(
&world, ds, false, déjà_vu_bound);
// XXX TODO: if we're just returning a number, it should be the
// lowest depth, but we should really report all of them
}
LookaheadBound::Seconds(_) => {
let (fs, ds) = iterative_deepening_kickoff::<T>(
&world, bound.duration(), false, déjà_vu_bound);
forecasts = fs;
depth = ds;
}
}
let stop_thinking = get_time();
let thinking_time = stop_thinking - start_thinking;
(forecasts, depth, thinking_time)
}
#[derive(Serialize)]
struct Postcard {
world: String,
patch: TransitPatch,
hospitalization: Option<Agent>,
thinking_time: u64,
depth: u8,
counterreplies: Vec<TransitPatch>,
rosetta_stone: String,
}
#[derive(Serialize)]
struct LastMissive {
the_triumphant: Option<Team>,
}
#[allow(clippy::collapsible_if)]
fn correspondence(reminder: &str, bound: LookaheadBound, déjà_vu_bound: f32)
-> String {
let in_medias_res = WorldState::reconstruct(reminder);
let (mut forecasts, depth, sidereal) = forecast::<Patch>(in_medias_res,
bound,
déjà_vu_bound);
if !forecasts.is_empty() {
let (determination, _karma, _variation) = forecasts.swap_remove(0);
// XXX TODO FIXME: this doesn't distinguish amongst ascensions
// (and we can imagine somewhat contrived situations where only
// some of them are admissible movements)
let counterreplies = determination.tree
.lookahead()
.iter()
.map(|c| TransitPatch::from(c.patch))
.collect::<Vec<_>>();
if counterreplies.is_empty() {
if determination.tree.in_critical_endangerment(Team::Orange) {
return encode(&LastMissive {
the_triumphant: Some(Team::Blue),
})
} else {
return encode(&LastMissive { the_triumphant: None });
}
}
let postcard = Postcard {
world: determination.tree.preserve(),
patch: TransitPatch::from(determination.patch),
hospitalization: determination.hospitalization,
thinking_time: sidereal.num_milliseconds() as u64,
depth,
counterreplies,
rosetta_stone: determination.patch.abbreviated_pagan_movement_rune(),
};
encode(&postcard)
} else if in_medias_res.in_critical_endangerment(Team::Blue) {
encode(&LastMissive { the_triumphant: Some(Team::Orange) })
} else {
encode(&LastMissive { the_triumphant: None })
}
}
fn the_end() {
println!("THE END");
process::exit(0);
}
fn main() {
// Does argparse not offer an analogue of Python's argparse's
// `add_mutually_exclusive_group`
// (https://docs.python.org/3/library/argparse.html#mutual-exclusion)?
// Contribution opportunity if so??
let mut lookahead_depth: Option<u8> = None;
let mut lookahead_extension: Option<u8> = None;
// TODO CONSIDER: would argparse's Collect action be cleaner?
let mut lookahead_depth_sequence: Option<String> = None;
let mut lookahead_seconds: Option<u8> = None;
let mut from_runes: Option<String> = None;
let mut correspond: bool = false;
let mut uci_dæmon: bool = false;
let mut déjà_vu_bound: f32 = 2.0;
let mut debug_logging: bool = false;
{
let mut parser = ArgumentParser::new();
parser.set_description("Leafline: an oppositional strategy game engine");
parser.refer(&mut lookahead_depth).add_option(
&["--depth"],
StoreOption,
"rank moves using AI minimax lookahead this deep");
parser.refer(&mut lookahead_extension).add_option(
&["--quiet"],
StoreOption,
"search with quietness extension this deep");
parser.refer(&mut lookahead_depth_sequence).add_option(
&["--depth-sequence"],
StoreOption,
"rank moves using AI minimax lookahead to these depths");
parser.refer(&mut lookahead_seconds).add_option(
&["--seconds"],
StoreOption,
"rank moves using AI minimax for about this many seconds");
parser.refer(&mut correspond).add_option(
&["--correspond"],
StoreTrue,
"just output the serialization of the AI's top response and \
legal replies thereto");
parser.refer(&mut uci_dæmon).add_option(
&["--uci", "--deamon", "--dæmon"],
StoreTrue,
"run Unlikely Command Integration dæmon for external driver play");
parser.refer(&mut from_runes).add_option(
&["--from"],
StoreOption,
"start a game from the given book of preservation runes");
parser.refer(&mut déjà_vu_bound).add_option(
&["--déjà-vu-bound", "--deja-vu-bound"],
Store,
"try to not store more entries in the déjà vu table than fit in \
this many GiB of memory",
);
parser.refer(&mut debug_logging).add_option(
&["--debug"],
StoreTrue,
"run with debug logging to file",
);
parser.add_option(&["--version", "-v"],
Print(env!("CARGO_PKG_VERSION").to_owned()), "display the version");
parser.parse_args_or_exit();
}
if debug_logging {
DebugLogger::init().expect("couldn't initialize logging?!")
}
if correspond {
let bound_maybe_result = LookaheadBound::from_args(lookahead_depth,
lookahead_extension,
lookahead_depth_sequence,
lookahead_seconds);
let bound = match bound_maybe_result {
Ok(bound_maybe) => {
match bound_maybe {
Some(bound) => bound,
None => {
moral_panic!("`--correspond` passed without exactly one \
of `--depth`, `--depth-sequence`, or \
`--seconds`")
}
}
}
Err(error) => {
moral_panic!(error)
}
};
let from = from_runes.expect("`--correspond` requires `--from`");
println!("{}", correspondence(&from, bound, déjà_vu_bound));
process::exit(0);
}
if uci_dæmon {
uci::dæmon();
// ↑ dæmon will loop
process::exit(0);
}
println!("Welcome to Leafline v. {}!", env!("CARGO_PKG_VERSION"));
match memory_free() {
Some(bytes) => {
println!("Leafline substrate accountant detected {:.3} \
GiB of free memory.",
bytes.in_gib());
}
None => {
println!("Could not detect amount of free memory! \
It is possible \
that you are struggling with an inferior nonfree \
operating system forced on you by your masters in \
Cupertino or Redmond");
}
}
let mut world = match from_runes {
Some(runes) => WorldState::reconstruct(&runes),
None => WorldState::new(),
};
let mut premonitions: Vec<Commit>;
let bound_maybe = LookaheadBound::from_args(lookahead_depth,
lookahead_extension,
lookahead_depth_sequence,
lookahead_seconds)
.unwrap();
loop {
match bound_maybe {
None => {
premonitions = world.lookahead();
if premonitions.is_empty() {
// XXX TODO distinguish between deadlock and
// ultimate endangerment
the_end();
}
println!("{}", world);
for (index, premonition) in premonitions.iter().enumerate() {
println!("{:>2}. {}", index, premonition)
}
}
Some(ref bound) => {
let (our_forecasts, depth, thinking_time) =
forecast::<Variation>(world, bound.clone(), déjà_vu_bound);
let forecasts = our_forecasts;
println!("{}", world);
let depth_report = match *bound {
LookaheadBound::Depth(standard, Some(quietening)) => {
format!(
"at least {} and up to {}",
standard,
standard + quietening)
}
_ => format!("{}", depth),
};
println!("(scoring alternatives {} levels deep took {} ms)",
depth_report,
thinking_time.num_milliseconds());
premonitions = Vec::new();
for (index, sight) in forecasts.into_iter().enumerate() {
let (commit, score, variation) = sight;
println!("{:>2}: {} — score {} ‣ representative variation: {}",
index,
commit,
Color::Purple.bold()
.paint(&format!("{:.1}", score)),
pagan_variation_format(&variation));
premonitions.push(commit);
}
if premonitions.is_empty() {
the_end();
}
}
}
loop {
print!("\nSelect a move>> ");
io::stdout().flush().expect("couldn't flush stdout");
let mut input_buffer = String::new();
io::stdin()
.read_line(&mut input_buffer)
.expect("couldn't read input");
if input_buffer.trim() == "quit" {
the_end();
}
let choice: usize = match input_buffer.trim().parse() {
Ok(i) => i,
Err(e) => {
println!("Error parsing choice: {:?}. Try again.", e);
continue;
}
};
if choice < premonitions.len() {
world = premonitions[choice].tree;
break;
} else {
println!("{} isn't among the choices. Try again.", choice);
}
}
}
}
#[cfg(test)]
mod tests {
use super::{LookaheadBound, correspondence};
use ::{Postcard, encode};
use life::TransitPatch;
use space::{RelaxedLocale, Locale};
use identity::{Agent, JobDescription, Team};
use LastMissive;
#[test]
fn concerning_correspondence_victory_conditions() {
let blue_concession = correspondence("R6k/6pp/8/8/8/8/8/8 b - -",
LookaheadBound::Depth(2, None),
1.0);
assert_eq!("{\"the_triumphant\":\"Orange\"}".to_owned(),
blue_concession);
}
#[test]
fn test_serialize_postcard() {
let p = Postcard {
world: "rnbqkbnr/pp1ppppp/8/2p5/4P3/8/PPPP1PPP/RNBQKBNR w KQkq c6 0 2".to_string(),
patch: TransitPatch {
star: Agent { team: Team::Orange, job_description: JobDescription::Scholar },
whence: RelaxedLocale
|
|b: &Option<LookaheadBound>| -> Result<bool, String> {
if b.is_some() {
Err("more than one of `--depth`, `--depth-sequence`, or \
|
random_line_split
|
main.rs
|
_static;
#[macro_use]
mod sorceries;
mod space;
mod identity;
mod motion;
mod landmark;
mod life;
mod mind;
mod substrate;
mod uci;
// Unlikely Command Integration
mod test_landmark;
use std::fs::OpenOptions;
use std::io;
use std::io::Write;
use std::process;
use ansi_term::Colour as Color;
use argparse::{ArgumentParser, Print, Store, StoreOption, StoreTrue};
use log::{LogLevelFilter, LogMetadata, LogRecord, SetLoggerError};
use time::{Duration, get_time};
use identity::{Agent, Team};
use life::{Commit, Patch, TransitPatch, WorldState};
use mind::{Variation, fixed_depth_sequence_kickoff, iterative_deepening_kickoff,
kickoff, pagan_variation_format, Memory};
use substrate::memory_free;
use serde_json::to_string;
use serde::Serialize;
fn encode<T>(value: &T) -> String
where T: ?Sized + serde::ser::Serialize {
to_string(value).unwrap()
}
struct DebugLogger;
impl DebugLogger {
pub fn init() -> Result<(), SetLoggerError> {
log::set_logger(|max_log_level| {
max_log_level.set(LogLevelFilter::Debug);
Box::new(DebugLogger)
})
}
}
impl log::Log for DebugLogger {
fn enabled(&self, _metadata: &LogMetadata) -> bool {
true
}
fn log(&self, record: &LogRecord) {
// XXX: can't the open file handle live inside the DebugLogger struct?!
let mut log_file = OpenOptions::new()
.write(true)
.append(true)
.create(true)
.open("leafline.log")
.expect("couldn't open log file?!");
let log_message = format!("[{}] {}\n",
time::now()
.strftime("%Y-%m-%d %H:%M:%S.%f")
.unwrap(),
record.args());
log_file.write_all(&log_message.into_bytes())
.expect("couldn't write to log file?!");
}
}
#[derive(Debug, Clone)]
enum LookaheadBound {
Depth(u8, Option<u8>),
DepthSequence(Vec<u8>),
Seconds(u8),
}
impl LookaheadBound {
pub fn duration(&self) -> Duration {
match *self {
LookaheadBound::Seconds(secs) => Duration::seconds(i64::from(secs)),
_ => {
moral_panic!("`duration()` called on non-Seconds LookaheadBound \
variant")
}
}
}
pub fn new_from_sequence_depiction(depiction: &str) -> Self {
let depth_runes = depiction.split(',');
let depth_sequence = depth_runes.map(|dd| {
dd.parse::<u8>()
.expect("couldn't parse depth \
sequence")
})
.collect::<Vec<_>>();
LookaheadBound::DepthSequence(depth_sequence)
}
pub fn from_args(lookahead_depth: Option<u8>,
lookahead_extension: Option<u8>,
lookahead_depth_sequence: Option<String>,
lookahead_seconds: Option<u8>)
-> Result<Option<Self>, String> {
let mut bound = None;
let confirm_bound_is_none =
|b: &Option<LookaheadBound>| -> Result<bool, String> {
if b.is_some() {
Err("more than one of `--depth`, `--depth-sequence`, or \
`--seconds` was passed"
.to_owned())
} else {
Ok(true)
}
};
if let Some(depth) = lookahead_depth {
confirm_bound_is_none(&bound)?;
bound = Some(LookaheadBound::Depth(depth, lookahead_extension));
}
if let Some(sequence_depiction) = lookahead_depth_sequence {
confirm_bound_is_none(&bound)?;
bound = Some(LookaheadBound::new_from_sequence_depiction(
&sequence_depiction));
}
if let Some(seconds) = lookahead_seconds {
confirm_bound_is_none(&bound)?;
bound = Some(LookaheadBound::Seconds(seconds));
}
Ok(bound)
}
}
fn forecast<T: 'static + Memory>(world: WorldState, bound: LookaheadBound, déjà_vu_bound: f32)
-> (Vec<(Commit, f32, T)>, u8, Duration) {
let start_thinking = get_time();
let forecasts;
let depth;
match bound {
LookaheadBound::Depth(ds, es) => {
forecasts = kickoff::<T>(&world, ds, es, false, déjà_vu_bound);
depth = ds;
}
LookaheadBound::DepthSequence(ds) => {
depth = *ds.last().unwrap();
forecasts = fixed_depth_sequence_kickoff::<T>(
&world, ds, false, déjà_vu_bound);
// XXX TODO: if we're just returning a number, it should be the
// lowest depth, but we should really report all of them
}
LookaheadBound::Seconds(_) => {
|
let stop_thinking = get_time();
let thinking_time = stop_thinking - start_thinking;
(forecasts, depth, thinking_time)
}
#[derive(Serialize)]
struct Postcard {
world: String,
patch: TransitPatch,
hospitalization: Option<Agent>,
thinking_time: u64,
depth: u8,
counterreplies: Vec<TransitPatch>,
rosetta_stone: String,
}
#[derive(Serialize)]
struct LastMissive {
the_triumphant: Option<Team>,
}
#[allow(clippy::collapsible_if)]
fn correspondence(reminder: &str, bound: LookaheadBound, déjà_vu_bound: f32)
-> String {
let in_medias_res = WorldState::reconstruct(reminder);
let (mut forecasts, depth, sidereal) = forecast::<Patch>(in_medias_res,
bound,
déjà_vu_bound);
if !forecasts.is_empty() {
let (determination, _karma, _variation) = forecasts.swap_remove(0);
// XXX TODO FIXME: this doesn't distinguish amongst ascensions
// (and we can imagine somewhat contrived situations where only
// some of them are admissible movements)
let counterreplies = determination.tree
.lookahead()
.iter()
.map(|c| TransitPatch::from(c.patch))
.collect::<Vec<_>>();
if counterreplies.is_empty() {
if determination.tree.in_critical_endangerment(Team::Orange) {
return encode(&LastMissive {
the_triumphant: Some(Team::Blue),
})
} else {
return encode(&LastMissive { the_triumphant: None });
}
}
let postcard = Postcard {
world: determination.tree.preserve(),
patch: TransitPatch::from(determination.patch),
hospitalization: determination.hospitalization,
thinking_time: sidereal.num_milliseconds() as u64,
depth,
counterreplies,
rosetta_stone: determination.patch.abbreviated_pagan_movement_rune(),
};
encode(&postcard)
} else if in_medias_res.in_critical_endangerment(Team::Blue) {
encode(&LastMissive { the_triumphant: Some(Team::Orange) })
} else {
encode(&LastMissive { the_triumphant: None })
}
}
fn the_end() {
println!("THE END");
process::exit(0);
}
fn main() {
// Does argparse not offer an analogue of Python's argparse's
// `add_mutually_exclusive_group`
// (https://docs.python.org/3/library/argparse.html#mutual-exclusion)?
// Contribution opportunity if so??
let mut lookahead_depth: Option<u8> = None;
let mut lookahead_extension: Option<u8> = None;
// TODO CONSIDER: would argparse's Collect action be cleaner?
let mut lookahead_depth_sequence: Option<String> = None;
let mut lookahead_seconds: Option<u8> = None;
let mut from_runes: Option<String> = None;
let mut correspond: bool = false;
let mut uci_dæmon: bool = false;
let mut déjà_vu_bound: f32 = 2.0;
let mut debug_logging: bool = false;
{
let mut parser = ArgumentParser::new();
parser.set_description("Leafline: an oppositional strategy game engine");
parser.refer(&mut lookahead_depth).add_option(
&["--depth"],
StoreOption,
"rank moves using AI minimax lookahead this deep");
parser.refer(&mut lookahead_extension).add_option(
&["--quiet"],
StoreOption,
"search with quietness extension this deep");
parser.refer(&mut lookahead_depth_sequence).add_option(
&["--depth-sequence"],
StoreOption,
"rank moves using AI minimax lookahead to these depths");
parser.refer(&mut lookahead_seconds).add_option(
&["--seconds"],
StoreOption,
"rank moves using AI minimax for about this many seconds");
parser.refer(&mut correspond).add_option(
&["--correspond"],
StoreTrue,
"just output the serialization of the AI's top response and \
legal replies thereto");
parser.refer(&mut uci_dæmon).add_option(
&["--uci", "--deamon", "--dæmon"],
StoreTrue,
"run Unlikely Command Integration dæmon for external driver play");
parser.refer(&mut from_runes).add_option(
&["--from"],
StoreOption,
"start a game from the given book of preservation runes");
parser.refer(&mut déjà_vu_bound).add_option(
&["--déjà-vu-bound", "--deja-vu-bound"],
Store,
"try to not store more entries in the déjà vu table than fit in \
this many GiB of memory",
);
parser.refer(&mut debug_logging).add_option(
&["--debug"],
StoreTrue,
"run with debug logging to file",
);
parser.add_option(&["--version", "-v"],
Print(env!("CARGO_PKG_VERSION").to_owned()), "display the version");
parser.parse_args_or_exit();
}
if debug_logging {
DebugLogger::init().expect("couldn't initialize logging?!")
}
if correspond {
let bound_maybe_result = LookaheadBound::from_args(lookahead_depth,
lookahead_extension,
lookahead_depth_sequence,
lookahead_seconds);
let bound = match bound_maybe_result {
Ok(bound_maybe) => {
match bound_maybe {
Some(bound) => bound,
None => {
moral_panic!("`--correspond` passed without exactly one \
of `--depth`, `--depth-sequence`, or \
`--seconds`")
}
}
}
Err(error) => {
moral_panic!(error)
}
};
let from = from_runes.expect("`--correspond` requires `--from`");
println!("{}", correspondence(&from, bound, déjà_vu_bound));
process::exit(0);
}
if uci_dæmon {
uci::dæmon();
// ↑ dæmon will loop
process::exit(0);
}
println!("Welcome to Leafline v. {}!", env!("CARGO_PKG_VERSION"));
match memory_free() {
Some(bytes) => {
println!("Leafline substrate accountant detected {:.3} \
GiB of free memory.",
bytes.in_gib());
}
None => {
println!("Could not detect amount of free memory! \
It is possible \
that you are struggling with an inferior nonfree \
operating system forced on you by your masters in \
Cupertino or Redmond");
}
}
let mut world = match from_runes {
Some(runes) => WorldState::reconstruct(&runes),
None => WorldState::new(),
};
let mut premonitions: Vec<Commit>;
let bound_maybe = LookaheadBound::from_args(lookahead_depth,
lookahead_extension,
lookahead_depth_sequence,
lookahead_seconds)
.unwrap();
loop {
match bound_maybe {
None => {
premonitions = world.lookahead();
if premonitions.is_empty() {
// XXX TODO distinguish between deadlock and
// ultimate endangerment
the_end();
}
println!("{}", world);
for (index, premonition) in premonitions.iter().enumerate() {
println!("{:>2}. {}", index, premonition)
}
}
Some(ref bound) => {
let (our_forecasts, depth, thinking_time) =
forecast::<Variation>(world, bound.clone(), déjà_vu_bound);
let forecasts = our_forecasts;
println!("{}", world);
let depth_report = match *bound {
LookaheadBound::Depth(standard, Some(quietening)) => {
format!(
"at least {} and up to {}",
standard,
standard + quietening)
}
_ => format!("{}", depth),
};
println!("(scoring alternatives {} levels deep took {} ms)",
depth_report,
thinking_time.num_milliseconds());
premonitions = Vec::new();
for (index, sight) in forecasts.into_iter().enumerate() {
let (commit, score, variation) = sight;
println!("{:>2}: {} — score {} ‣ representative variation: {}",
index,
commit,
Color::Purple.bold()
.paint(&format!("{:.1}", score)),
pagan_variation_format(&variation));
premonitions.push(commit);
}
if premonitions.is_empty() {
the_end();
}
}
}
loop {
print!("\nSelect a move>> ");
io::stdout().flush().expect("couldn't flush stdout");
let mut input_buffer = String::new();
io::stdin()
.read_line(&mut input_buffer)
.expect("couldn't read input");
if input_buffer.trim() == "quit" {
the_end();
}
let choice: usize = match input_buffer.trim().parse() {
Ok(i) => i,
Err(e) => {
println!("Error parsing choice: {:?}. Try again.", e);
continue;
}
};
if choice < premonitions.len() {
world = premonitions[choice].tree;
break;
} else {
println!("{} isn't among the choices. Try again.", choice);
}
}
}
}
#[cfg(test)]
mod tests {
use super::{LookaheadBound, correspondence};
use ::{Postcard, encode};
use life::TransitPatch;
use space::{RelaxedLocale, Locale};
use identity::{Agent, JobDescription, Team};
use LastMissive;
#[test]
fn concerning_correspondence_victory_conditions() {
let blue_concession = correspondence("R6k/6pp/8/8/8/8/8/8 b - -",
LookaheadBound::Depth(2, None),
1.0);
assert_eq!("{\"the_triumphant\":\"Orange\"}".to_owned(),
blue_concession);
}
#[test]
fn test_serialize_postcard() {
let p = Postcard {
world: "rnbqkbnr/pp1ppppp/8/2p5/4P3/8/PPPP1PPP/RNBQKBNR w KQkq c6 0 2".to_string(),
patch: TransitPatch {
star: Agent { team: Team::Orange, job_description: JobDescription::Scholar },
whence: Relaxed
|
let (fs, ds) = iterative_deepening_kickoff::<T>(
&world, bound.duration(), false, déjà_vu_bound);
forecasts = fs;
depth = ds;
}
}
|
conditional_block
|
main.rs
|
static;
#[macro_use]
mod sorceries;
mod space;
mod identity;
mod motion;
mod landmark;
mod life;
mod mind;
mod substrate;
mod uci;
// Unlikely Command Integration
mod test_landmark;
use std::fs::OpenOptions;
use std::io;
use std::io::Write;
use std::process;
use ansi_term::Colour as Color;
use argparse::{ArgumentParser, Print, Store, StoreOption, StoreTrue};
use log::{LogLevelFilter, LogMetadata, LogRecord, SetLoggerError};
use time::{Duration, get_time};
use identity::{Agent, Team};
use life::{Commit, Patch, TransitPatch, WorldState};
use mind::{Variation, fixed_depth_sequence_kickoff, iterative_deepening_kickoff,
kickoff, pagan_variation_format, Memory};
use substrate::memory_free;
use serde_json::to_string;
use serde::Serialize;
fn encode<T>(value: &T) -> String
where T: ?Sized + serde::ser::Serialize {
to_string(value).unwrap()
}
struct DebugLogger;
impl DebugLogger {
pub fn init() -> Result<(), SetLoggerError> {
log::set_logger(|max_log_level| {
max_log_level.set(LogLevelFilter::Debug);
Box::new(DebugLogger)
})
}
}
impl log::Log for DebugLogger {
fn enabled(&self, _metadata: &LogMetadata) -> bool {
true
}
fn log(&self, record: &LogRecord) {
// XXX: can't the open file handle live inside the DebugLogger struct?!
let mut log_file = OpenOptions::new()
.write(true)
.append(true)
.create(true)
.open("leafline.log")
.expect("couldn't open log file?!");
let log_message = format!("[{}] {}\n",
time::now()
.strftime("%Y-%m-%d %H:%M:%S.%f")
.unwrap(),
record.args());
log_file.write_all(&log_message.into_bytes())
.expect("couldn't write to log file?!");
}
}
#[derive(Debug, Clone)]
enum LookaheadBound {
Depth(u8, Option<u8>),
DepthSequence(Vec<u8>),
Seconds(u8),
}
impl LookaheadBound {
pub fn duration(&self) -> Duration {
match *self {
LookaheadBound::Seconds(secs) => Duration::seconds(i64::from(secs)),
_ => {
moral_panic!("`duration()` called on non-Seconds LookaheadBound \
variant")
}
}
}
pub fn new_from_sequence_depiction(depiction: &str) -> Self {
let depth_runes = depiction.split(',');
let depth_sequence = depth_runes.map(|dd| {
dd.parse::<u8>()
.expect("couldn't parse depth \
sequence")
})
.collect::<Vec<_>>();
LookaheadBound::DepthSequence(depth_sequence)
}
pub fn from_args(lookahead_depth: Option<u8>,
lookahead_extension: Option<u8>,
lookahead_depth_sequence: Option<String>,
lookahead_seconds: Option<u8>)
-> Result<Option<Self>, String> {
let mut bound = None;
let confirm_bound_is_none =
|b: &Option<LookaheadBound>| -> Result<bool, String> {
if b.is_some() {
Err("more than one of `--depth`, `--depth-sequence`, or \
`--seconds` was passed"
.to_owned())
} else {
Ok(true)
}
};
if let Some(depth) = lookahead_depth {
confirm_bound_is_none(&bound)?;
bound = Some(LookaheadBound::Depth(depth, lookahead_extension));
}
if let Some(sequence_depiction) = lookahead_depth_sequence {
confirm_bound_is_none(&bound)?;
bound = Some(LookaheadBound::new_from_sequence_depiction(
&sequence_depiction));
}
if let Some(seconds) = lookahead_seconds {
confirm_bound_is_none(&bound)?;
bound = Some(LookaheadBound::Seconds(seconds));
}
Ok(bound)
}
}
fn forecast<T: 'static + Memory>(world: WorldState, bound: LookaheadBound, déjà_vu_bound: f32)
-> (Vec<(Commit, f32, T)>, u8, Duration) {
let start_thinking = get_time();
let forecasts;
let depth;
match bound {
LookaheadBound::Depth(ds, es) => {
forecasts = kickoff::<T>(&world, ds, es, false, déjà_vu_bound);
depth = ds;
}
LookaheadBound::DepthSequence(ds) => {
depth = *ds.last().unwrap();
forecasts = fixed_depth_sequence_kickoff::<T>(
&world, ds, false, déjà_vu_bound);
// XXX TODO: if we're just returning a number, it should be the
// lowest depth, but we should really report all of them
}
LookaheadBound::Seconds(_) => {
let (fs, ds) = iterative_deepening_kickoff::<T>(
&world, bound.duration(), false, déjà_vu_bound);
forecasts = fs;
depth = ds;
}
}
let stop_thinking = get_time();
let thinking_time = stop_thinking - start_thinking;
(forecasts, depth, thinking_time)
}
#[derive(Serialize)]
struct Postcard {
world: String,
patch: TransitPatch,
hospitalization: Option<Agent>,
thinking_time: u64,
depth: u8,
counterreplies: Vec<TransitPatch>,
rosetta_stone: String,
}
#[derive(Serialize)]
struct LastMissive {
the_triumphant: Option<Team>,
}
#[allow(clippy::collapsible_if)]
fn correspo
|
r: &str, bound: LookaheadBound, déjà_vu_bound: f32)
-> String {
let in_medias_res = WorldState::reconstruct(reminder);
let (mut forecasts, depth, sidereal) = forecast::<Patch>(in_medias_res,
bound,
déjà_vu_bound);
if !forecasts.is_empty() {
let (determination, _karma, _variation) = forecasts.swap_remove(0);
// XXX TODO FIXME: this doesn't distinguish amongst ascensions
// (and we can imagine somewhat contrived situations where only
// some of them are admissible movements)
let counterreplies = determination.tree
.lookahead()
.iter()
.map(|c| TransitPatch::from(c.patch))
.collect::<Vec<_>>();
if counterreplies.is_empty() {
if determination.tree.in_critical_endangerment(Team::Orange) {
return encode(&LastMissive {
the_triumphant: Some(Team::Blue),
})
} else {
return encode(&LastMissive { the_triumphant: None });
}
}
let postcard = Postcard {
world: determination.tree.preserve(),
patch: TransitPatch::from(determination.patch),
hospitalization: determination.hospitalization,
thinking_time: sidereal.num_milliseconds() as u64,
depth,
counterreplies,
rosetta_stone: determination.patch.abbreviated_pagan_movement_rune(),
};
encode(&postcard)
} else if in_medias_res.in_critical_endangerment(Team::Blue) {
encode(&LastMissive { the_triumphant: Some(Team::Orange) })
} else {
encode(&LastMissive { the_triumphant: None })
}
}
fn the_end() {
println!("THE END");
process::exit(0);
}
fn main() {
// Does argparse not offer an analogue of Python's argparse's
// `add_mutually_exclusive_group`
// (https://docs.python.org/3/library/argparse.html#mutual-exclusion)?
// Contribution opportunity if so??
let mut lookahead_depth: Option<u8> = None;
let mut lookahead_extension: Option<u8> = None;
// TODO CONSIDER: would argparse's Collect action be cleaner?
let mut lookahead_depth_sequence: Option<String> = None;
let mut lookahead_seconds: Option<u8> = None;
let mut from_runes: Option<String> = None;
let mut correspond: bool = false;
let mut uci_dæmon: bool = false;
let mut déjà_vu_bound: f32 = 2.0;
let mut debug_logging: bool = false;
{
let mut parser = ArgumentParser::new();
parser.set_description("Leafline: an oppositional strategy game engine");
parser.refer(&mut lookahead_depth).add_option(
&["--depth"],
StoreOption,
"rank moves using AI minimax lookahead this deep");
parser.refer(&mut lookahead_extension).add_option(
&["--quiet"],
StoreOption,
"search with quietness extension this deep");
parser.refer(&mut lookahead_depth_sequence).add_option(
&["--depth-sequence"],
StoreOption,
"rank moves using AI minimax lookahead to these depths");
parser.refer(&mut lookahead_seconds).add_option(
&["--seconds"],
StoreOption,
"rank moves using AI minimax for about this many seconds");
parser.refer(&mut correspond).add_option(
&["--correspond"],
StoreTrue,
"just output the serialization of the AI's top response and \
legal replies thereto");
parser.refer(&mut uci_dæmon).add_option(
&["--uci", "--deamon", "--dæmon"],
StoreTrue,
"run Unlikely Command Integration dæmon for external driver play");
parser.refer(&mut from_runes).add_option(
&["--from"],
StoreOption,
"start a game from the given book of preservation runes");
parser.refer(&mut déjà_vu_bound).add_option(
&["--déjà-vu-bound", "--deja-vu-bound"],
Store,
"try to not store more entries in the déjà vu table than fit in \
this many GiB of memory",
);
parser.refer(&mut debug_logging).add_option(
&["--debug"],
StoreTrue,
"run with debug logging to file",
);
parser.add_option(&["--version", "-v"],
Print(env!("CARGO_PKG_VERSION").to_owned()), "display the version");
parser.parse_args_or_exit();
}
if debug_logging {
DebugLogger::init().expect("couldn't initialize logging?!")
}
if correspond {
let bound_maybe_result = LookaheadBound::from_args(lookahead_depth,
lookahead_extension,
lookahead_depth_sequence,
lookahead_seconds);
let bound = match bound_maybe_result {
Ok(bound_maybe) => {
match bound_maybe {
Some(bound) => bound,
None => {
moral_panic!("`--correspond` passed without exactly one \
of `--depth`, `--depth-sequence`, or \
`--seconds`")
}
}
}
Err(error) => {
moral_panic!(error)
}
};
let from = from_runes.expect("`--correspond` requires `--from`");
println!("{}", correspondence(&from, bound, déjà_vu_bound));
process::exit(0);
}
if uci_dæmon {
uci::dæmon();
// ↑ dæmon will loop
process::exit(0);
}
println!("Welcome to Leafline v. {}!", env!("CARGO_PKG_VERSION"));
match memory_free() {
Some(bytes) => {
println!("Leafline substrate accountant detected {:.3} \
GiB of free memory.",
bytes.in_gib());
}
None => {
println!("Could not detect amount of free memory! \
It is possible \
that you are struggling with an inferior nonfree \
operating system forced on you by your masters in \
Cupertino or Redmond");
}
}
let mut world = match from_runes {
Some(runes) => WorldState::reconstruct(&runes),
None => WorldState::new(),
};
let mut premonitions: Vec<Commit>;
let bound_maybe = LookaheadBound::from_args(lookahead_depth,
lookahead_extension,
lookahead_depth_sequence,
lookahead_seconds)
.unwrap();
loop {
match bound_maybe {
None => {
premonitions = world.lookahead();
if premonitions.is_empty() {
// XXX TODO distinguish between deadlock and
// ultimate endangerment
the_end();
}
println!("{}", world);
for (index, premonition) in premonitions.iter().enumerate() {
println!("{:>2}. {}", index, premonition)
}
}
Some(ref bound) => {
let (our_forecasts, depth, thinking_time) =
forecast::<Variation>(world, bound.clone(), déjà_vu_bound);
let forecasts = our_forecasts;
println!("{}", world);
let depth_report = match *bound {
LookaheadBound::Depth(standard, Some(quietening)) => {
format!(
"at least {} and up to {}",
standard,
standard + quietening)
}
_ => format!("{}", depth),
};
println!("(scoring alternatives {} levels deep took {} ms)",
depth_report,
thinking_time.num_milliseconds());
premonitions = Vec::new();
for (index, sight) in forecasts.into_iter().enumerate() {
let (commit, score, variation) = sight;
println!("{:>2}: {} — score {} ‣ representative variation: {}",
index,
commit,
Color::Purple.bold()
.paint(&format!("{:.1}", score)),
pagan_variation_format(&variation));
premonitions.push(commit);
}
if premonitions.is_empty() {
the_end();
}
}
}
loop {
print!("\nSelect a move>> ");
io::stdout().flush().expect("couldn't flush stdout");
let mut input_buffer = String::new();
io::stdin()
.read_line(&mut input_buffer)
.expect("couldn't read input");
if input_buffer.trim() == "quit" {
the_end();
}
let choice: usize = match input_buffer.trim().parse() {
Ok(i) => i,
Err(e) => {
println!("Error parsing choice: {:?}. Try again.", e);
continue;
}
};
if choice < premonitions.len() {
world = premonitions[choice].tree;
break;
} else {
println!("{} isn't among the choices. Try again.", choice);
}
}
}
}
#[cfg(test)]
mod tests {
use super::{LookaheadBound, correspondence};
use ::{Postcard, encode};
use life::TransitPatch;
use space::{RelaxedLocale, Locale};
use identity::{Agent, JobDescription, Team};
use LastMissive;
#[test]
fn concerning_correspondence_victory_conditions() {
let blue_concession = correspondence("R6k/6pp/8/8/8/8/8/8 b - -",
LookaheadBound::Depth(2, None),
1.0);
assert_eq!("{\"the_triumphant\":\"Orange\"}".to_owned(),
blue_concession);
}
#[test]
fn test_serialize_postcard() {
let p = Postcard {
world: "rnbqkbnr/pp1ppppp/8/2p5/4P3/8/PPPP1PPP/RNBQKBNR w KQkq c6 0 2".to_string(),
patch: TransitPatch {
star: Agent { team: Team::Orange, job_description: JobDescription::Scholar },
whence:
|
ndence(reminde
|
identifier_name
|
lib.rs
|
#![allow(dead_code)]
//! # tray\_rust - A Toy Ray Tracer in Rust
//!
//! tray\_rust is a toy physically based ray tracer built off of the techniques
//! discussed in [Physically Based Rendering](http://pbrt.org/). It began life as a port of
//! [tray](https://github.com/Twinklebear/tray) to [Rust](http://www.rust-lang.org) to check out the language.
//! The renderer is currently capable of path tracing, supports triangle meshes (MTL support coming soon),
//! and various physically based material models (including measured data from the
//! [MERL BRDF Database](http://www.merl.com/brdf/)). tray\_rust also supports rigid body animation along
//! B-spline paths and distributed rendering.
//!
//! [](https://travis-ci.org/Twinklebear/tray_rust)
//!
//! ## Running
//!
//! Running and passing `--help` or `-h` will print out options you can pass to the renderer which are documented in the help.
//! For the more complicated use cases I hope to do some write ups and guides on how to use them (e.g. distributed rendering,
//! animation) but this may take a while. I strongly recommend running the release build as the debug version will be very slow.
//!
//! ## Building Your Own Scenes
//!
//! To position and animate objects, the camera and so on, the
//! [Blender plugin](https://github.com/Twinklebear/tray_rust_blender) is the easiest to use. However, the plugin
//! is still in development and missing some features like setting materials and changing light properties, so
//! you'll still need to do those by hand in the exported JSON file. For materials, take a look at
//! the [materials documentation](http://www.willusher.io/tray_rust/tray_rust/material/index.html); for lights
//! you'll likely just want to change the emission color, which is an RGB color plus a strength term.
//!
//! Start at the documentation for the [scene module](http://www.willusher.io/tray_rust/tray_rust/scene/index.html),
//! there are also a few example [scenes](https://github.com/Twinklebear/tray_rust/tree/master/scenes) included but not all
//! the models are provided. From a clean `git clone` you should be able to run
//! [cornell\_box.json](https://github.com/Twinklebear/tray_rust/blob/master/scenes/cornell_box.json) and
//! [smallpt.json](https://github.com/Twinklebear/tray_rust/blob/master/scenes/smallpt.json). I plan to add some
//! more simple scenes that show usage of other features, like animation, to provide examples. The rigid body animation
//! feature is relatively new, though, so I haven't had time to document it properly yet.
//!
//! ## TODO
//!
//! - More material models (e.g. more microfacet models, rough glass, etc.)
//! - Textures
//! - Support for using an OBJ's associated MTL files
//! - Bump mapping
//! - [Subsurface scattering?](http://en.wikipedia.org/wiki/Subsurface_scattering)
//! - [Vertex Connection and Merging?](http://iliyan.com/publications/VertexMerging)
//!
//! ## Sample Renders
//!
//! In the samples, the Buddha, Dragon, Bunny and Lucy models are from
//! [The Stanford Scanning Repository](http://graphics.stanford.edu/data/3Dscanrep/).
//! The Rust logo model was made by
//! [Nylithius on BlenderArtists](http://blenderartists.org/forum/showthread.php?362836-Rust-language-3D-logo).
//! The Utah teapot used is from [Morgan McGuire's page](http://graphics.cs.williams.edu/data/meshes.xml) and
//! the monkey head is Blender's Suzanne. I've made minor tweaks to some of the models, so for convenience
//! you can find versions that can be easily loaded into the sample scenes [here](https://drive.google.com/folderview?id=0B-l_lLEMo1YeflUzUndCd01hOHhRNUhrQUowM3hVd2pCc3JrSXRiS3FQSzRYLWtGcGM0eGc&usp=sharing), though the
//! cube model for the Cornell box scene is included.
//! The materials on the Rust logo, Buddha, Dragon and Lucy are from the
//! [MERL BRDF Database](http://www.merl.com/brdf/).
|
//! Render times are formatted as hh:mm:ss and were measured using 144 threads on a machine with four
//! [Xeon E7-8890 v3](http://ark.intel.com/products/84685/Intel-Xeon-Processor-E7-8890-v3-45M-Cache-2_50-GHz)
//! CPUs. The machine is an early/engineering sample from Intel so your results may differ, but massive thanks to
//! Intel for the hardware! Some older renders are shown as well without timing since they were
//! run on a different machine.
//!
//! Some more sample renders can be found [here](http://imgur.com/a/3qNBc).
//!
//! <a href="http://i.imgur.com/X5y8oIq.png">
//! <img src="http://i.imgur.com/X5y8oIq.png" alt="Model gallery"
//! style="display:block; max-width:100%; height:auto">
//! </a>
//!
//! 1920x1080, 4096 samples/pixel. Rendering: 00:43:36.45.
//!
//! <a href="http://i.imgur.com/E1ylrZW.png">
//! <img src="http://i.imgur.com/E1ylrZW.png" alt="Rust Logo with friends, disk"
//! style="display:block; max-width:100%; height:auto">
//! </a>
//!
//! 1920x1080, 4096 samples/pixel. Rendering: 00:49:33.514.
//!
extern crate enum_set as enum_set;
extern crate rand;
extern crate byteorder;
extern crate serde_json;
extern crate bspline;
extern crate docopt;
#[macro_use]
extern crate serde_derive;
extern crate scoped_threadpool;
extern crate image;
extern crate bincode;
extern crate mio;
extern crate la;
extern crate light_arena;
pub mod linalg;
pub mod film;
pub mod geometry;
pub mod sampler;
pub mod integrator;
pub mod scene;
pub mod bxdf;
pub mod material;
pub mod light;
pub mod mc;
pub mod partition;
pub mod exec;
pub mod texture;
|
//!
|
random_line_split
|
main.rs
|
use std::net::{TcpListener,TcpStream};
use std::io::{BufReader,BufRead,Write,Result as IoResult};
use std::fs::File;
use std::collections::HashMap;
fn handle_connection(map: &HashMap<String,String>,mut stream: &mut TcpStream) -> IoResult<()>
{
let mut line = String::new();
let mut reader = BufReader::new(try!(stream.try_clone()));
while let Ok(_) = reader.read_line(&mut line)
{
line.pop();
if line == "quit"
{
break;
}
if let Some(data) = map.get(&line)
|
{
try!(stream.write(format!("{}\n",data).as_bytes()));
}
else
{
try!(stream.write(b"not found\n"));
}
line.clear();
}
Ok(())
}
fn main()
{
let mut map = HashMap::new();
for line in BufReader::new(File::open("input.csv").unwrap()).lines()
{
if let Ok(line) = line
{
map.insert(String::from(line.split(";").nth(1).unwrap()),line);
}
}
for stream in TcpListener::bind("0.0.0.0:1337").unwrap().incoming()
{
if let Ok(mut stream) = stream
{
let _ = handle_connection(&map,&mut stream);
}
}
}
|
random_line_split
|
|
main.rs
|
use std::net::{TcpListener,TcpStream};
use std::io::{BufReader,BufRead,Write,Result as IoResult};
use std::fs::File;
use std::collections::HashMap;
fn handle_connection(map: &HashMap<String,String>,mut stream: &mut TcpStream) -> IoResult<()>
{
let mut line = String::new();
let mut reader = BufReader::new(try!(stream.try_clone()));
while let Ok(_) = reader.read_line(&mut line)
{
line.pop();
if line == "quit"
{
break;
}
if let Some(data) = map.get(&line)
{
try!(stream.write(format!("{}\n",data).as_bytes()));
}
else
{
try!(stream.write(b"not found\n"));
}
line.clear();
}
Ok(())
}
fn main()
|
{
let mut map = HashMap::new();
for line in BufReader::new(File::open("input.csv").unwrap()).lines()
{
if let Ok(line) = line
{
map.insert(String::from(line.split(";").nth(1).unwrap()),line);
}
}
for stream in TcpListener::bind("0.0.0.0:1337").unwrap().incoming()
{
if let Ok(mut stream) = stream
{
let _ = handle_connection(&map,&mut stream);
}
}
}
|
identifier_body
|
|
main.rs
|
use std::net::{TcpListener,TcpStream};
use std::io::{BufReader,BufRead,Write,Result as IoResult};
use std::fs::File;
use std::collections::HashMap;
fn handle_connection(map: &HashMap<String,String>,mut stream: &mut TcpStream) -> IoResult<()>
{
let mut line = String::new();
let mut reader = BufReader::new(try!(stream.try_clone()));
while let Ok(_) = reader.read_line(&mut line)
{
line.pop();
if line == "quit"
{
break;
}
if let Some(data) = map.get(&line)
{
try!(stream.write(format!("{}\n",data).as_bytes()));
}
else
{
try!(stream.write(b"not found\n"));
}
line.clear();
}
Ok(())
}
fn
|
()
{
let mut map = HashMap::new();
for line in BufReader::new(File::open("input.csv").unwrap()).lines()
{
if let Ok(line) = line
{
map.insert(String::from(line.split(";").nth(1).unwrap()),line);
}
}
for stream in TcpListener::bind("0.0.0.0:1337").unwrap().incoming()
{
if let Ok(mut stream) = stream
{
let _ = handle_connection(&map,&mut stream);
}
}
}
|
main
|
identifier_name
|
main.rs
|
use std::net::{TcpListener,TcpStream};
use std::io::{BufReader,BufRead,Write,Result as IoResult};
use std::fs::File;
use std::collections::HashMap;
fn handle_connection(map: &HashMap<String,String>,mut stream: &mut TcpStream) -> IoResult<()>
{
let mut line = String::new();
let mut reader = BufReader::new(try!(stream.try_clone()));
while let Ok(_) = reader.read_line(&mut line)
{
line.pop();
if line == "quit"
{
break;
}
if let Some(data) = map.get(&line)
{
try!(stream.write(format!("{}\n",data).as_bytes()));
}
else
|
line.clear();
}
Ok(())
}
fn main()
{
let mut map = HashMap::new();
for line in BufReader::new(File::open("input.csv").unwrap()).lines()
{
if let Ok(line) = line
{
map.insert(String::from(line.split(";").nth(1).unwrap()),line);
}
}
for stream in TcpListener::bind("0.0.0.0:1337").unwrap().incoming()
{
if let Ok(mut stream) = stream
{
let _ = handle_connection(&map,&mut stream);
}
}
}
|
{
try!(stream.write(b"not found\n"));
}
|
conditional_block
|
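The main.rs rows above all contain the same small line-oriented TCP lookup server: it loads `input.csv` into a `HashMap` keyed on the second `;`-separated field, answers one lookup per line with either the matching line or `not found`, and closes when the client sends `quit`. The sketch below is a hypothetical client for that protocol; the address and key are placeholders, and it is not part of the original file.

use std::io::{BufRead, BufReader, Result as IoResult, Write};
use std::net::TcpStream;

// Hypothetical client for the line-oriented lookup protocol served above:
// send a key terminated by '\n', read one response line, then send "quit".
// The address and key below are placeholders for illustration.
fn lookup(addr: &str, key: &str) -> IoResult<String> {
    let mut stream = TcpStream::connect(addr)?;
    stream.write_all(format!("{}\n", key).as_bytes())?;

    let mut reader = BufReader::new(stream.try_clone()?);
    let mut response = String::new();
    reader.read_line(&mut response)?;

    stream.write_all(b"quit\n")?;
    Ok(response.trim_end().to_string())
}

fn main() -> IoResult<()> {
    let reply = lookup("127.0.0.1:1337", "some-key")?;
    println!("server replied: {}", reply);
    Ok(())
}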
unpark_mutex.rs
|
use std::cell::UnsafeCell;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::SeqCst;
/// A "lock" around data `D`, which employs a *helping* strategy.
///
/// Used to ensure that concurrent `unpark` invocations lead to (1) `poll` being
/// invoked on only a single thread at a time (2) `poll` being invoked at least
/// once after each `unpark` (unless the future has completed).
pub struct UnparkMutex<D> {
// The state of task execution (state machine described below)
status: AtomicUsize,
// The actual task data, accessible only in the POLLING state
inner: UnsafeCell<Option<D>>,
}
// `UnparkMutex<D>` functions in many ways like a `Mutex<D>`, except that on
// acquisition failure, the current lockholder performs the desired work --
// re-polling.
//
// As such, these impls mirror those for `Mutex<D>`. In particular, a reference
// to `UnparkMutex` can be used to gain `&mut` access to the inner data, which
// must therefore be `Send`.
unsafe impl<D: Send> Send for UnparkMutex<D> {}
unsafe impl<D: Send> Sync for UnparkMutex<D> {}
// There are four possible task states, listed below with their possible
// transitions:
// The task is blocked, waiting on an event
const WAITING: usize = 0; // --> POLLING
// The task is actively being polled by a thread; arrival of additional events
// of interest should move it to the REPOLL state
const POLLING: usize = 1; // --> WAITING, REPOLL, or COMPLETE
// The task is actively being polled, but will need to be re-polled upon
// completion to ensure that all events were observed.
const REPOLL: usize = 2; // --> POLLING
// The task has finished executing (either successfully or with an error/panic)
const COMPLETE: usize = 3; // No transitions out
impl<D> UnparkMutex<D> {
pub fn new() -> UnparkMutex<D> {
UnparkMutex {
status: AtomicUsize::new(WAITING),
inner: UnsafeCell::new(None),
}
}
/// Attempt to "notify" the mutex that a poll should occur.
///
/// An `Ok` result indicates that the `POLLING` state has been entered, and
/// the caller can proceed to poll the future. An `Err` result indicates
/// that polling is not necessary (because the task is finished or the
/// polling has been delegated).
pub fn notify(&self) -> Result<D, ()> {
let mut status = self.status.load(SeqCst);
loop {
match status {
// The task is idle, so try to run it immediately.
WAITING => {
match self.status.compare_exchange(WAITING, POLLING,
SeqCst, SeqCst) {
Ok(_) => {
let data = unsafe {
// SAFETY: we've ensured mutual exclusion via
// the status protocol; we are the only thread
// that has transitioned to the POLLING state,
// and we won't transition back to QUEUED until
// the lock is "released" by this thread. See
// the protocol diagram above.
(*self.inner.get()).take().unwrap()
};
return Ok(data);
}
Err(cur) => status = cur,
}
}
// The task is being polled, so we need to record that it should
// be *repolled* when complete.
POLLING => {
match self.status.compare_exchange(POLLING, REPOLL,
SeqCst, SeqCst) {
Ok(_) => return Err(()),
Err(cur) => status = cur,
}
}
// The task is already scheduled for polling, or is complete, so
// we've got nothing to do.
_ => return Err(()),
}
}
}
/// Alert the mutex that polling is about to begin, clearing any accumulated
/// re-poll requests.
///
/// # Safety
///
/// Callable only from the `POLLING`/`REPOLL` states, i.e. between
/// successful calls to `notify` and `wait`/`complete`.
pub unsafe fn start_poll(&self)
|
/// Alert the mutex that polling completed with NotReady.
///
/// # Safety
///
/// Callable only from the `POLLING`/`REPOLL` states, i.e. between
/// successful calls to `notify` and `wait`/`complete`.
pub unsafe fn wait(&self, data: D) -> Result<(), D> {
*self.inner.get() = Some(data);
match self.status.compare_exchange(POLLING, WAITING, SeqCst, SeqCst) {
// no unparks came in while we were running
Ok(_) => Ok(()),
// guaranteed to be in REPOLL state; just clobber the
// state and run again.
Err(status) => {
assert_eq!(status, REPOLL);
self.status.store(POLLING, SeqCst);
Err((*self.inner.get()).take().unwrap())
}
}
}
/// Alert the mutex that the task has completed execution and should not be
/// notified again.
///
/// # Safety
///
/// Callable only from the `POLLING`/`REPOLL` states, i.e. between
/// successful calls to `notify` and `wait`/`complete`.
pub unsafe fn complete(&self) {
self.status.store(COMPLETE, SeqCst);
}
}
|
{
self.status.store(POLLING, SeqCst);
}
|
identifier_body
|
unpark_mutex.rs
|
use std::cell::UnsafeCell;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::SeqCst;
/// A "lock" around data `D`, which employs a *helping* strategy.
///
/// Used to ensure that concurrent `unpark` invocations lead to (1) `poll` being
/// invoked on only a single thread at a time (2) `poll` being invoked at least
/// once after each `unpark` (unless the future has completed).
pub struct UnparkMutex<D> {
// The state of task execution (state machine described below)
status: AtomicUsize,
// The actual task data, accessible only in the POLLING state
inner: UnsafeCell<Option<D>>,
}
// `UnparkMutex<D>` functions in many ways like a `Mutex<D>`, except that on
// acquisition failure, the current lockholder performs the desired work --
// re-polling.
//
// As such, these impls mirror those for `Mutex<D>`. In particular, a reference
// to `UnparkMutex` can be used to gain `&mut` access to the inner data, which
// must therefore be `Send`.
unsafe impl<D: Send> Send for UnparkMutex<D> {}
unsafe impl<D: Send> Sync for UnparkMutex<D> {}
// There are four possible task states, listed below with their possible
// transitions:
// The task is blocked, waiting on an event
const WAITING: usize = 0; // --> POLLING
// The task is actively being polled by a thread; arrival of additional events
|
const REPOLL: usize = 2; // --> POLLING
// The task has finished executing (either successfully or with an error/panic)
const COMPLETE: usize = 3; // No transitions out
impl<D> UnparkMutex<D> {
pub fn new() -> UnparkMutex<D> {
UnparkMutex {
status: AtomicUsize::new(WAITING),
inner: UnsafeCell::new(None),
}
}
/// Attempt to "notify" the mutex that a poll should occur.
///
/// An `Ok` result indicates that the `POLLING` state has been entered, and
/// the caller can proceed to poll the future. An `Err` result indicates
/// that polling is not necessary (because the task is finished or the
/// polling has been delegated).
pub fn notify(&self) -> Result<D, ()> {
let mut status = self.status.load(SeqCst);
loop {
match status {
// The task is idle, so try to run it immediately.
WAITING => {
match self.status.compare_exchange(WAITING, POLLING,
SeqCst, SeqCst) {
Ok(_) => {
let data = unsafe {
// SAFETY: we've ensured mutual exclusion via
// the status protocol; we are the only thread
// that has transitioned to the POLLING state,
// and we won't transition back to QUEUED until
// the lock is "released" by this thread. See
// the protocol diagram above.
(*self.inner.get()).take().unwrap()
};
return Ok(data);
}
Err(cur) => status = cur,
}
}
// The task is being polled, so we need to record that it should
// be *repolled* when complete.
POLLING => {
match self.status.compare_exchange(POLLING, REPOLL,
SeqCst, SeqCst) {
Ok(_) => return Err(()),
Err(cur) => status = cur,
}
}
// The task is already scheduled for polling, or is complete, so
// we've got nothing to do.
_ => return Err(()),
}
}
}
/// Alert the mutex that polling is about to begin, clearing any accumulated
/// re-poll requests.
///
/// # Safety
///
/// Callable only from the `POLLING`/`REPOLL` states, i.e. between
/// successful calls to `notify` and `wait`/`complete`.
pub unsafe fn start_poll(&self) {
self.status.store(POLLING, SeqCst);
}
/// Alert the mutex that polling completed with NotReady.
///
/// # Safety
///
/// Callable only from the `POLLING`/`REPOLL` states, i.e. between
/// successful calls to `notify` and `wait`/`complete`.
pub unsafe fn wait(&self, data: D) -> Result<(), D> {
*self.inner.get() = Some(data);
match self.status.compare_exchange(POLLING, WAITING, SeqCst, SeqCst) {
// no unparks came in while we were running
Ok(_) => Ok(()),
// guaranteed to be in REPOLL state; just clobber the
// state and run again.
Err(status) => {
assert_eq!(status, REPOLL);
self.status.store(POLLING, SeqCst);
Err((*self.inner.get()).take().unwrap())
}
}
}
/// Alert the mutex that the task has completed execution and should not be
/// notified again.
///
/// # Safety
///
/// Callable only from the `POLLING`/`REPOLL` states, i.e. between
/// successful calls to `notify` and `wait`/`complete`.
pub unsafe fn complete(&self) {
self.status.store(COMPLETE, SeqCst);
}
}
|
// of interest should move it to the REPOLL state
const POLLING: usize = 1; // --> WAITING, REPOLL, or COMPLETE
// The task is actively being polled, but will need to be re-polled upon
// completion to ensure that all events were observed.
|
random_line_split
|
unpark_mutex.rs
|
use std::cell::UnsafeCell;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::SeqCst;
/// A "lock" around data `D`, which employs a *helping* strategy.
///
/// Used to ensure that concurrent `unpark` invocations lead to (1) `poll` being
/// invoked on only a single thread at a time (2) `poll` being invoked at least
/// once after each `unpark` (unless the future has completed).
pub struct UnparkMutex<D> {
// The state of task execution (state machine described below)
status: AtomicUsize,
// The actual task data, accessible only in the POLLING state
inner: UnsafeCell<Option<D>>,
}
// `UnparkMutex<D>` functions in many ways like a `Mutex<D>`, except that on
// acquisition failure, the current lockholder performs the desired work --
// re-polling.
//
// As such, these impls mirror those for `Mutex<D>`. In particular, a reference
// to `UnparkMutex` can be used to gain `&mut` access to the inner data, which
// must therefore be `Send`.
unsafe impl<D: Send> Send for UnparkMutex<D> {}
unsafe impl<D: Send> Sync for UnparkMutex<D> {}
// There are four possible task states, listed below with their possible
// transitions:
// The task is blocked, waiting on an event
const WAITING: usize = 0; // --> POLLING
// The task is actively being polled by a thread; arrival of additional events
// of interest should move it to the REPOLL state
const POLLING: usize = 1; // --> WAITING, REPOLL, or COMPLETE
// The task is actively being polled, but will need to be re-polled upon
// completion to ensure that all events were observed.
const REPOLL: usize = 2; // --> POLLING
// The task has finished executing (either successfully or with an error/panic)
const COMPLETE: usize = 3; // No transitions out
impl<D> UnparkMutex<D> {
pub fn new() -> UnparkMutex<D> {
UnparkMutex {
status: AtomicUsize::new(WAITING),
inner: UnsafeCell::new(None),
}
}
/// Attempt to "notify" the mutex that a poll should occur.
///
/// An `Ok` result indicates that the `POLLING` state has been entered, and
/// the caller can proceed to poll the future. An `Err` result indicates
/// that polling is not necessary (because the task is finished or the
/// polling has been delegated).
pub fn notify(&self) -> Result<D, ()> {
let mut status = self.status.load(SeqCst);
loop {
match status {
// The task is idle, so try to run it immediately.
WAITING => {
match self.status.compare_exchange(WAITING, POLLING,
SeqCst, SeqCst) {
Ok(_) => {
let data = unsafe {
// SAFETY: we've ensured mutual exclusion via
// the status protocol; we are the only thread
// that has transitioned to the POLLING state,
// and we won't transition back to QUEUED until
// the lock is "released" by this thread. See
// the protocol diagram above.
(*self.inner.get()).take().unwrap()
};
return Ok(data);
}
Err(cur) => status = cur,
}
}
// The task is being polled, so we need to record that it should
// be *repolled* when complete.
POLLING => {
match self.status.compare_exchange(POLLING, REPOLL,
SeqCst, SeqCst) {
Ok(_) => return Err(()),
Err(cur) => status = cur,
}
}
// The task is already scheduled for polling, or is complete, so
// we've got nothing to do.
_ => return Err(()),
}
}
}
/// Alert the mutex that polling is about to begin, clearing any accumulated
/// re-poll requests.
///
/// # Safety
///
/// Callable only from the `POLLING`/`REPOLL` states, i.e. between
/// successful calls to `notify` and `wait`/`complete`.
pub unsafe fn start_poll(&self) {
self.status.store(POLLING, SeqCst);
}
/// Alert the mutex that polling completed with NotReady.
///
/// # Safety
///
/// Callable only from the `POLLING`/`REPOLL` states, i.e. between
/// successful calls to `notify` and `wait`/`complete`.
pub unsafe fn
|
(&self, data: D) -> Result<(), D> {
*self.inner.get() = Some(data);
match self.status.compare_exchange(POLLING, WAITING, SeqCst, SeqCst) {
// no unparks came in while we were running
Ok(_) => Ok(()),
// guaranteed to be in REPOLL state; just clobber the
// state and run again.
Err(status) => {
assert_eq!(status, REPOLL);
self.status.store(POLLING, SeqCst);
Err((*self.inner.get()).take().unwrap())
}
}
}
/// Alert the mutex that the task has completed execution and should not be
/// notified again.
///
/// # Safety
///
/// Callable only from the `POLLING`/`REPOLL` states, i.e. between
/// successful calls to `notify` and `wait`/`complete`.
pub unsafe fn complete(&self) {
self.status.store(COMPLETE, SeqCst);
}
}
|
wait
|
identifier_name
|
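The `UnparkMutex` comments above describe a four-state compare-exchange protocol (WAITING, POLLING, REPOLL, COMPLETE). The standalone sketch below re-implements only the notify-side decision with a plain `AtomicUsize` to illustrate how a notification either claims the poll or records a re-poll request; it is not the library type itself and omits the `UnsafeCell` data hand-off.

use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

// Standalone sketch of the notify-side state machine described above.
// It mirrors the WAITING/POLLING/REPOLL/COMPLETE constants but is not the
// library type; the data hand-off through UnsafeCell is elided.
const WAITING: usize = 0;
const POLLING: usize = 1;
const REPOLL: usize = 2;
const COMPLETE: usize = 3;

/// Returns true if the caller should poll now, false if the poll was
/// delegated (REPOLL recorded) or the task is already complete.
fn notify(status: &AtomicUsize) -> bool {
    let mut cur = status.load(SeqCst);
    loop {
        match cur {
            WAITING => match status.compare_exchange(WAITING, POLLING, SeqCst, SeqCst) {
                Ok(_) => return true,        // we own the poll
                Err(actual) => cur = actual, // raced; retry with the observed state
            },
            POLLING => match status.compare_exchange(POLLING, REPOLL, SeqCst, SeqCst) {
                Ok(_) => return false,       // current poller will re-poll
                Err(actual) => cur = actual,
            },
            _ => return false,               // REPOLL or COMPLETE: nothing to do
        }
    }
}

fn main() {
    let status = AtomicUsize::new(WAITING);
    assert!(notify(&status));                // first notify claims the poll
    assert!(!notify(&status));               // second notify just records REPOLL
    assert_eq!(status.load(SeqCst), REPOLL);

    status.store(COMPLETE, SeqCst);          // poller finished the task
    assert!(!notify(&status));               // further notifies are no-ops
}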
constant-debug-locs.rs
|
// Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// min-lldb-version: 310
// compile-flags:-g
#![allow(dead_code, unused_variables)]
#![feature(omit_gdb_pretty_printer_section)]
#![omit_gdb_pretty_printer_section]
// This test makes sure that the compiler doesn't crash when trying to assign
// debug locations to const-expressions.
use std::cell::UnsafeCell;
const CONSTANT: u64 = 3 + 4;
struct Struct {
a: isize,
b: usize,
}
const STRUCT: Struct = Struct { a: 1, b: 2 };
struct TupleStruct(u32);
const TUPLE_STRUCT: TupleStruct = TupleStruct(4);
enum Enum {
Variant1(char),
Variant2 { a: u8 },
Variant3
}
const VARIANT1: Enum = Enum::Variant1('v');
const VARIANT2: Enum = Enum::Variant2 { a: 2 };
const VARIANT3: Enum = Enum::Variant3;
const STRING: &'static str = "String";
const VEC: [u32; 8] = [0; 8];
const NESTED: (Struct, TupleStruct) = (STRUCT, TUPLE_STRUCT);
const UNSAFE_CELL: UnsafeCell<bool> = UnsafeCell::new(false);
fn main() {
let mut _constant = CONSTANT;
let mut _struct = STRUCT;
let mut _tuple_struct = TUPLE_STRUCT;
let mut _variant1 = VARIANT1;
let mut _variant2 = VARIANT2;
let mut _variant3 = VARIANT3;
let mut _string = STRING;
let mut _vec = VEC;
let mut _nested = NESTED;
let mut _unsafe_cell = UNSAFE_CELL;
}
|
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
|
random_line_split
|
constant-debug-locs.rs
|
// Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// min-lldb-version: 310
// compile-flags:-g
#![allow(dead_code, unused_variables)]
#![feature(omit_gdb_pretty_printer_section)]
#![omit_gdb_pretty_printer_section]
// This test makes sure that the compiler doesn't crash when trying to assign
// debug locations to const-expressions.
use std::cell::UnsafeCell;
const CONSTANT: u64 = 3 + 4;
struct Struct {
a: isize,
b: usize,
}
const STRUCT: Struct = Struct { a: 1, b: 2 };
struct TupleStruct(u32);
const TUPLE_STRUCT: TupleStruct = TupleStruct(4);
enum Enum {
Variant1(char),
Variant2 { a: u8 },
Variant3
}
const VARIANT1: Enum = Enum::Variant1('v');
const VARIANT2: Enum = Enum::Variant2 { a: 2 };
const VARIANT3: Enum = Enum::Variant3;
const STRING: &'static str = "String";
const VEC: [u32; 8] = [0; 8];
const NESTED: (Struct, TupleStruct) = (STRUCT, TUPLE_STRUCT);
const UNSAFE_CELL: UnsafeCell<bool> = UnsafeCell::new(false);
fn
|
() {
let mut _constant = CONSTANT;
let mut _struct = STRUCT;
let mut _tuple_struct = TUPLE_STRUCT;
let mut _variant1 = VARIANT1;
let mut _variant2 = VARIANT2;
let mut _variant3 = VARIANT3;
let mut _string = STRING;
let mut _vec = VEC;
let mut _nested = NESTED;
let mut _unsafe_cell = UNSAFE_CELL;
}
|
main
|
identifier_name
|
iterator.rs
|
// This file is part of libfringe, a low-level green threading library.
// Copyright (c) whitequark <[email protected]>
|
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
extern crate fringe;
use fringe::OsStack;
use fringe::generator::Generator;
#[test]
fn producer() {
let stack = OsStack::new(0).unwrap();
let mut gen = Generator::new(stack, move |yielder, ()| {
for i in 0.. { yielder.suspend(i) }
});
assert_eq!(gen.next(), Some(0));
assert_eq!(gen.next(), Some(1));
assert_eq!(gen.next(), Some(2));
unsafe { gen.unsafe_unwrap(); }
}
|
random_line_split
|
|
iterator.rs
|
// This file is part of libfringe, a low-level green threading library.
// Copyright (c) whitequark <[email protected]>
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
extern crate fringe;
use fringe::OsStack;
use fringe::generator::Generator;
#[test]
fn producer()
|
{
let stack = OsStack::new(0).unwrap();
let mut gen = Generator::new(stack, move |yielder, ()| {
for i in 0.. { yielder.suspend(i) }
});
assert_eq!(gen.next(), Some(0));
assert_eq!(gen.next(), Some(1));
assert_eq!(gen.next(), Some(2));
unsafe { gen.unsafe_unwrap(); }
}
|
identifier_body
|
|
iterator.rs
|
// This file is part of libfringe, a low-level green threading library.
// Copyright (c) whitequark <[email protected]>
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
extern crate fringe;
use fringe::OsStack;
use fringe::generator::Generator;
#[test]
fn
|
() {
let stack = OsStack::new(0).unwrap();
let mut gen = Generator::new(stack, move |yielder, ()| {
for i in 0.. { yielder.suspend(i) }
});
assert_eq!(gen.next(), Some(0));
assert_eq!(gen.next(), Some(1));
assert_eq!(gen.next(), Some(2));
unsafe { gen.unsafe_unwrap(); }
}
|
producer
|
identifier_name
|
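The libfringe test above drives a stackful generator through the iterator protocol (`next()` returning `Some(0)`, `Some(1)`, `Some(2)`). For contrast, the dependency-free sketch below produces the same sequence with `std::iter::from_fn`; it only illustrates the consuming side and says nothing about how libfringe switches stacks.

// Dependency-free sketch of the same 0, 1, 2, ... producer using
// std::iter::from_fn, shown only to contrast the consuming side with the
// stackful generator exercised by the test above.
fn main() {
    let mut i = 0u32;
    let mut counter = std::iter::from_fn(move || {
        let v = i;
        i += 1;
        Some(v)
    });

    assert_eq!(counter.next(), Some(0));
    assert_eq!(counter.next(), Some(1));
    assert_eq!(counter.next(), Some(2));
}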
match-arm-statics.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(struct_variant)]
struct NewBool(bool);
enum Direction {
North,
East,
South,
West
}
struct Foo {
bar: Option<Direction>,
baz: NewBool
}
enum EnumWithStructVariants {
Variant1(bool),
Variant2 {
dir: Direction
}
}
static TRUE_TRUE: (bool, bool) = (true, true);
static NONE: Option<Direction> = None;
static EAST: Direction = East;
static NEW_FALSE: NewBool = NewBool(false);
static STATIC_FOO: Foo = Foo { bar: Some(South), baz: NEW_FALSE };
static VARIANT2_NORTH: EnumWithStructVariants = Variant2 { dir: North };
pub mod glfw {
pub struct InputState(uint);
pub static RELEASE : InputState = InputState(0);
pub static PRESS : InputState = InputState(1);
pub static REPEAT : InputState = InputState(2);
}
fn issue_6533() {
use glfw;
fn action_to_str(state: glfw::InputState) -> &'static str {
use glfw::{RELEASE, PRESS, REPEAT};
match state {
RELEASE => { "Released" }
PRESS => { "Pressed" }
REPEAT => { "Repeated" }
_ => { "Unknown" }
}
}
assert_eq!(action_to_str(glfw::RELEASE), "Released");
assert_eq!(action_to_str(glfw::PRESS), "Pressed");
assert_eq!(action_to_str(glfw::REPEAT), "Repeated");
}
fn issue_13626() {
static VAL: [u8,..1] = [0];
match [1] {
VAL => unreachable!(),
_ => ()
}
}
fn issue_14576() {
type Foo = (i32, i32);
static ON: Foo = (1, 1);
static OFF: Foo = (0, 0);
match (1, 1) {
OFF => unreachable!(),
ON => (),
_ => unreachable!()
}
enum C { D = 3, E = 4 }
static F : C = D;
assert_eq!(match D { F => 1i, _ => 2, }, 1);
}
fn issue_13731() {
enum A { A(()) }
static B: A = A(());
match A(()) {
B => ()
}
}
fn issue_15393() {
#![allow(dead_code)]
struct Flags {
bits: uint
}
static FOO: Flags = Flags { bits: 0x01 };
static BAR: Flags = Flags { bits: 0x02 };
match (Flags { bits: 0x02 }) {
FOO => unreachable!(),
BAR => (),
_ => unreachable!()
}
}
fn main() {
assert_eq!(match (true, false) {
TRUE_TRUE => 1i,
(false, false) => 2,
(false, true) => 3,
(true, false) => 4
}, 4);
assert_eq!(match Some(Some(North)) {
Some(NONE) => 1i,
Some(Some(North)) => 2,
Some(Some(EAST)) => 3,
Some(Some(South)) => 4,
Some(Some(West)) => 5,
None => 6
}, 2);
assert_eq!(match (Foo { bar: Some(West), baz: NewBool(true) }) {
Foo { bar: None, baz: NewBool(true) } => 1i,
Foo { bar: NONE, baz: NEW_FALSE } => 2,
STATIC_FOO => 3,
Foo { bar: _, baz: NEW_FALSE } => 4,
Foo { bar: Some(West), baz: NewBool(true) } => 5,
Foo { bar: Some(South), baz: NewBool(true) } => 6,
Foo { bar: Some(EAST),.. } => 7,
Foo { bar: Some(North), baz: NewBool(true) } => 8
}, 5);
assert_eq!(match (Variant2 { dir: North }) {
Variant1(true) => 1i,
Variant1(false) => 2,
Variant2 { dir: West } => 3,
VARIANT2_NORTH => 4,
Variant2 { dir: South } => 5,
Variant2 { dir: East } => 6
}, 4);
issue_6533();
issue_13626();
issue_13731();
|
}
|
issue_14576();
issue_15393();
|
random_line_split
|
match-arm-statics.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(struct_variant)]
struct NewBool(bool);
enum Direction {
North,
East,
South,
West
}
struct Foo {
bar: Option<Direction>,
baz: NewBool
}
enum EnumWithStructVariants {
Variant1(bool),
Variant2 {
dir: Direction
}
}
static TRUE_TRUE: (bool, bool) = (true, true);
static NONE: Option<Direction> = None;
static EAST: Direction = East;
static NEW_FALSE: NewBool = NewBool(false);
static STATIC_FOO: Foo = Foo { bar: Some(South), baz: NEW_FALSE };
static VARIANT2_NORTH: EnumWithStructVariants = Variant2 { dir: North };
pub mod glfw {
pub struct InputState(uint);
pub static RELEASE : InputState = InputState(0);
pub static PRESS : InputState = InputState(1);
pub static REPEAT : InputState = InputState(2);
}
fn issue_6533() {
use glfw;
fn action_to_str(state: glfw::InputState) -> &'static str {
use glfw::{RELEASE, PRESS, REPEAT};
match state {
RELEASE => { "Released" }
PRESS => { "Pressed" }
REPEAT => { "Repeated" }
_ => { "Unknown" }
}
}
assert_eq!(action_to_str(glfw::RELEASE), "Released");
assert_eq!(action_to_str(glfw::PRESS), "Pressed");
assert_eq!(action_to_str(glfw::REPEAT), "Repeated");
}
fn issue_13626() {
static VAL: [u8,..1] = [0];
match [1] {
VAL => unreachable!(),
_ => ()
}
}
fn issue_14576() {
type Foo = (i32, i32);
static ON: Foo = (1, 1);
static OFF: Foo = (0, 0);
match (1, 1) {
OFF => unreachable!(),
ON => (),
_ => unreachable!()
}
enum C { D = 3, E = 4 }
static F : C = D;
assert_eq!(match D { F => 1i, _ => 2, }, 1);
}
fn issue_13731() {
enum A { A(()) }
static B: A = A(());
match A(()) {
B => ()
}
}
fn issue_15393() {
#![allow(dead_code)]
struct
|
{
bits: uint
}
static FOO: Flags = Flags { bits: 0x01 };
static BAR: Flags = Flags { bits: 0x02 };
match (Flags { bits: 0x02 }) {
FOO => unreachable!(),
BAR => (),
_ => unreachable!()
}
}
fn main() {
assert_eq!(match (true, false) {
TRUE_TRUE => 1i,
(false, false) => 2,
(false, true) => 3,
(true, false) => 4
}, 4);
assert_eq!(match Some(Some(North)) {
Some(NONE) => 1i,
Some(Some(North)) => 2,
Some(Some(EAST)) => 3,
Some(Some(South)) => 4,
Some(Some(West)) => 5,
None => 6
}, 2);
assert_eq!(match (Foo { bar: Some(West), baz: NewBool(true) }) {
Foo { bar: None, baz: NewBool(true) } => 1i,
Foo { bar: NONE, baz: NEW_FALSE } => 2,
STATIC_FOO => 3,
Foo { bar: _, baz: NEW_FALSE } => 4,
Foo { bar: Some(West), baz: NewBool(true) } => 5,
Foo { bar: Some(South), baz: NewBool(true) } => 6,
Foo { bar: Some(EAST),.. } => 7,
Foo { bar: Some(North), baz: NewBool(true) } => 8
}, 5);
assert_eq!(match (Variant2 { dir: North }) {
Variant1(true) => 1i,
Variant1(false) => 2,
Variant2 { dir: West } => 3,
VARIANT2_NORTH => 4,
Variant2 { dir: South } => 5,
Variant2 { dir: East } => 6
}, 4);
issue_6533();
issue_13626();
issue_13731();
issue_14576();
issue_15393();
}
|
Flags
|
identifier_name
|
match-arm-statics.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(struct_variant)]
struct NewBool(bool);
enum Direction {
North,
East,
South,
West
}
struct Foo {
bar: Option<Direction>,
baz: NewBool
}
enum EnumWithStructVariants {
Variant1(bool),
Variant2 {
dir: Direction
}
}
static TRUE_TRUE: (bool, bool) = (true, true);
static NONE: Option<Direction> = None;
static EAST: Direction = East;
static NEW_FALSE: NewBool = NewBool(false);
static STATIC_FOO: Foo = Foo { bar: Some(South), baz: NEW_FALSE };
static VARIANT2_NORTH: EnumWithStructVariants = Variant2 { dir: North };
pub mod glfw {
pub struct InputState(uint);
pub static RELEASE : InputState = InputState(0);
pub static PRESS : InputState = InputState(1);
pub static REPEAT : InputState = InputState(2);
}
fn issue_6533() {
use glfw;
fn action_to_str(state: glfw::InputState) -> &'static str {
use glfw::{RELEASE, PRESS, REPEAT};
match state {
RELEASE => { "Released" }
PRESS => { "Pressed" }
REPEAT => { "Repeated" }
_ => { "Unknown" }
}
}
assert_eq!(action_to_str(glfw::RELEASE), "Released");
assert_eq!(action_to_str(glfw::PRESS), "Pressed");
assert_eq!(action_to_str(glfw::REPEAT), "Repeated");
}
fn issue_13626() {
static VAL: [u8,..1] = [0];
match [1] {
VAL => unreachable!(),
_ => ()
}
}
fn issue_14576()
|
fn issue_13731() {
enum A { A(()) }
static B: A = A(());
match A(()) {
B => ()
}
}
fn issue_15393() {
#![allow(dead_code)]
struct Flags {
bits: uint
}
static FOO: Flags = Flags { bits: 0x01 };
static BAR: Flags = Flags { bits: 0x02 };
match (Flags { bits: 0x02 }) {
FOO => unreachable!(),
BAR => (),
_ => unreachable!()
}
}
fn main() {
assert_eq!(match (true, false) {
TRUE_TRUE => 1i,
(false, false) => 2,
(false, true) => 3,
(true, false) => 4
}, 4);
assert_eq!(match Some(Some(North)) {
Some(NONE) => 1i,
Some(Some(North)) => 2,
Some(Some(EAST)) => 3,
Some(Some(South)) => 4,
Some(Some(West)) => 5,
None => 6
}, 2);
assert_eq!(match (Foo { bar: Some(West), baz: NewBool(true) }) {
Foo { bar: None, baz: NewBool(true) } => 1i,
Foo { bar: NONE, baz: NEW_FALSE } => 2,
STATIC_FOO => 3,
Foo { bar: _, baz: NEW_FALSE } => 4,
Foo { bar: Some(West), baz: NewBool(true) } => 5,
Foo { bar: Some(South), baz: NewBool(true) } => 6,
Foo { bar: Some(EAST),.. } => 7,
Foo { bar: Some(North), baz: NewBool(true) } => 8
}, 5);
assert_eq!(match (Variant2 { dir: North }) {
Variant1(true) => 1i,
Variant1(false) => 2,
Variant2 { dir: West } => 3,
VARIANT2_NORTH => 4,
Variant2 { dir: South } => 5,
Variant2 { dir: East } => 6
}, 4);
issue_6533();
issue_13626();
issue_13731();
issue_14576();
issue_15393();
}
|
{
type Foo = (i32, i32);
static ON: Foo = (1, 1);
static OFF: Foo = (0, 0);
match (1, 1) {
OFF => unreachable!(),
ON => (),
_ => unreachable!()
}
enum C { D = 3, E = 4 }
static F : C = D;
assert_eq!(match D { F => 1i, _ => 2, }, 1);
}
|
identifier_body
|
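match-arm-statics.rs is pre-1.0 Rust (`uint`, `1i`, and `static` items used as patterns). Current Rust only accepts `const` items with structural equality in patterns, so a rough modern equivalent of the same "named constants as patterns" idea looks like the sketch below; it is an illustrative rewrite, not part of the original test suite.

// Illustrative modern-Rust version of the "named constants as patterns" idea
// from the old test above. Today only `const` items with structural equality
// (hence the derives) can be used in patterns; `static` items are rejected.
#[derive(PartialEq, Eq)]
struct Flags {
    bits: usize,
}

const FOO: Flags = Flags { bits: 0x01 };
const BAR: Flags = Flags { bits: 0x02 };
const TRUE_TRUE: (bool, bool) = (true, true);

fn main() {
    match (Flags { bits: 0x02 }) {
        FOO => unreachable!(),
        BAR => (),
        _ => unreachable!(),
    }

    let n = match (true, false) {
        TRUE_TRUE => 1,
        (true, false) => 4,
        _ => 0,
    };
    assert_eq!(n, 4);
}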
creader.rs
|
dump_crates(&sess.cstore);
warn_if_multiple_versions(sess.diagnostic(), &sess.cstore)
}
impl<'a, 'v> visit::Visitor<'v> for Env<'a> {
fn visit_view_item(&mut self, a: &ast::ViewItem) {
visit_view_item(self, a);
visit::walk_view_item(self, a);
}
fn visit_item(&mut self, a: &ast::Item) {
visit_item(self, a);
visit::walk_item(self, a);
}
}
fn dump_crates(cstore: &CStore) {
debug!("resolved crates:");
cstore.iter_crate_data_origins(|_, data, opt_source| {
debug!(" name: {}", data.name());
debug!(" cnum: {}", data.cnum);
debug!(" hash: {}", data.hash());
opt_source.map(|cs| {
let CrateSource { dylib, rlib, cnum: _ } = cs;
dylib.map(|dl| debug!(" dylib: {}", dl.display()));
rlib.map(|rl| debug!(" rlib: {}", rl.display()));
});
})
}
fn warn_if_multiple_versions(diag: &SpanHandler, cstore: &CStore) {
let mut map = HashMap::new();
cstore.iter_crate_data(|cnum, data| {
match map.entry(data.name()) {
Vacant(entry) => { entry.set(vec![cnum]); },
Occupied(mut entry) => { entry.get_mut().push(cnum); },
}
});
for (name, dupes) in map.into_iter() {
if dupes.len() == 1 { continue }
diag.handler().warn(
format!("using multiple versions of crate `{}`", name).as_slice());
for dupe in dupes.into_iter() {
let data = cstore.get_crate_data(dupe);
diag.span_note(data.span, "used here");
loader::note_crate_name(diag, data.name().as_slice());
}
}
}
fn visit_crate(e: &Env, c: &ast::Crate) {
for a in c.attrs.iter().filter(|m| m.name().equiv(&("link_args"))) {
match a.value_str() {
Some(ref linkarg) => e.sess.cstore.add_used_link_args(linkarg.get()),
None => { /* fallthrough */ }
}
}
}
fn should_link(i: &ast::ViewItem) -> bool {
i.attrs.iter().all(|attr| {
        attr.name().get() != "phase" ||
attr.meta_item_list().map_or(false, |phases| {
attr::contains_name(phases.as_slice(), "link")
})
})
}
fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
    if !should_link(i) {
return;
}
match extract_crate_info(e, i) {
Some(info) => {
let (cnum, _, _) = resolve_crate(e,
&None,
info.ident.as_slice(),
info.name.as_slice(),
None,
i.span);
e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
}
None => ()
}
}
struct CrateInfo {
ident: String,
name: String,
id: ast::NodeId,
should_link: bool,
}
fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option<CrateInfo> {
match i.node {
ast::ViewItemExternCrate(ident, ref path_opt, id) => {
let ident = token::get_ident(ident);
debug!("resolving extern crate stmt. ident: {} path_opt: {}",
ident, path_opt);
let name = match *path_opt {
Some((ref path_str, _)) => {
let name = path_str.get().to_string();
validate_crate_name(Some(e.sess), name.as_slice(),
Some(i.span));
name
}
None => ident.get().to_string(),
};
Some(CrateInfo {
ident: ident.get().to_string(),
name: name,
id: id,
should_link: should_link(i),
})
}
_ => None
}
}
pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option<Span>) {
let err = |s: &str| {
match (sp, sess) {
(_, None) => fail!("{}", s),
(Some(sp), Some(sess)) => sess.span_err(sp, s),
(None, Some(sess)) => sess.err(s),
}
};
if s.len() == 0 {
err("crate name must not be empty");
}
for c in s.chars() {
if c.is_alphanumeric() { continue }
if c == '_' || c == '-' { continue }
err(format!("invalid character `{}` in crate name: `{}`", c, s).as_slice());
}
match sess {
Some(sess) => sess.abort_if_errors(),
None => {}
}
}
fn visit_item(e: &Env, i: &ast::Item) {
match i.node {
ast::ItemForeignMod(ref fm) => {
if fm.abi == abi::Rust || fm.abi == abi::RustIntrinsic {
return;
}
// First, add all of the custom link_args attributes
let link_args = i.attrs.iter()
.filter_map(|at| if at.name().equiv(&("link_args")) {
Some(at)
} else {
None
})
.collect::<Vec<&ast::Attribute>>();
for m in link_args.iter() {
match m.value_str() {
Some(linkarg) => e.sess.cstore.add_used_link_args(linkarg.get()),
None => { /* fallthrough */ }
}
}
// Next, process all of the #[link(..)]-style arguments
let link_args = i.attrs.iter()
.filter_map(|at| if at.name().equiv(&("link")) {
Some(at)
} else {
None
})
.collect::<Vec<&ast::Attribute>>();
for m in link_args.iter() {
match m.meta_item_list() {
Some(items) => {
let kind = items.iter().find(|k| {
k.name().equiv(&("kind"))
}).and_then(|a| a.value_str());
let kind = match kind {
Some(k) => {
if k.equiv(&("static")) {
cstore::NativeStatic
} else if (e.sess.targ_cfg.os == abi::OsMacos ||
e.sess.targ_cfg.os == abi::OsiOS) &&
k.equiv(&("framework")) {
cstore::NativeFramework
} else if k.equiv(&("framework")) {
e.sess.span_err(m.span,
"native frameworks are only available \
on OSX targets");
cstore::NativeUnknown
} else {
e.sess.span_err(m.span,
format!("unknown kind: `{}`",
k).as_slice());
cstore::NativeUnknown
}
}
None => cstore::NativeUnknown
};
let n = items.iter().find(|n| {
n.name().equiv(&("name"))
}).and_then(|a| a.value_str());
let n = match n {
Some(n) => n,
None => {
e.sess.span_err(m.span,
"#[link(...)] specified without \
`name = \"foo\"`");
InternedString::new("foo")
}
};
if n.get().is_empty() {
e.sess.span_err(m.span,
"#[link(name = \"\")] given with \
empty name");
} else {
e.sess
.cstore
.add_used_library(n.get().to_string(), kind);
}
}
None => {}
}
}
}
_ => { }
}
}
fn existing_match(e: &Env, name: &str,
hash: Option<&Svh>) -> Option<ast::CrateNum> {
let mut ret = None;
e.sess.cstore.iter_crate_data(|cnum, data| {
        if data.name.as_slice() != name { return }
match hash {
Some(hash) if *hash == data.hash() => { ret = Some(cnum); return }
Some(..) => return,
None => {}
}
// When the hash is None we're dealing with a top-level dependency in
// which case we may have a specification on the command line for this
// library. Even though an upstream library may have loaded something of
// the same name, we have to make sure it was loaded from the exact same
// location as well.
//
// We're also sure to compare *paths*, not actual byte slices. The
// `source` stores paths which are normalized which may be different
// from the strings on the command line.
let source = e.sess.cstore.get_used_crate_source(cnum).unwrap();
match e.sess.opts.externs.find_equiv(&name) {
Some(locs) => {
let found = locs.iter().any(|l| {
let l = fs::realpath(&Path::new(l.as_slice())).ok();
l == source.dylib || l == source.rlib
});
if found {
ret = Some(cnum);
}
}
None => ret = Some(cnum),
}
});
return ret;
}
|
lib: loader::Library)
-> (ast::CrateNum, Rc<cstore::crate_metadata>,
cstore::CrateSource) {
// Claim this crate number and cache it
let cnum = e.next_crate_num;
e.next_crate_num += 1;
// Stash paths for top-most crate locally if necessary.
let crate_paths = if root.is_none() {
Some(CratePaths {
ident: ident.to_string(),
dylib: lib.dylib.clone(),
rlib: lib.rlib.clone(),
})
} else {
None
};
// Maintain a reference to the top most crate.
let root = if root.is_some() { root } else { &crate_paths };
let cnum_map = resolve_crate_deps(e, root, lib.metadata.as_slice(), span);
let loader::Library{ dylib, rlib, metadata } = lib;
let cmeta = Rc::new( cstore::crate_metadata {
name: name.to_string(),
data: metadata,
cnum_map: cnum_map,
cnum: cnum,
span: span,
});
let source = cstore::CrateSource {
dylib: dylib,
rlib: rlib,
cnum: cnum,
};
e.sess.cstore.set_crate_data(cnum, cmeta.clone());
e.sess.cstore.add_used_crate_source(source.clone());
(cnum, cmeta, source)
}
fn resolve_crate<'a>(e: &mut Env,
root: &Option<CratePaths>,
ident: &str,
name: &str,
hash: Option<&Svh>,
span: Span)
-> (ast::CrateNum, Rc<cstore::crate_metadata>,
cstore::CrateSource) {
match existing_match(e, name, hash) {
None => {
let mut load_ctxt = loader::Context {
sess: e.sess,
span: span,
ident: ident,
crate_name: name,
hash: hash.map(|a| &*a),
filesearch: e.sess.target_filesearch(),
os: e.sess.targ_cfg.os,
triple: e.sess.targ_cfg.target_strs.target_triple.as_slice(),
root: root,
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
should_match_name: true,
};
let library = load_ctxt.load_library_crate();
register_crate(e, root, ident, name, span, library)
}
Some(cnum) => (cnum,
e.sess.cstore.get_crate_data(cnum),
e.sess.cstore.get_used_crate_source(cnum).unwrap())
}
}
// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(e: &mut Env,
root: &Option<CratePaths>,
cdata: &[u8], span : Span)
-> cstore::cnum_map {
debug!("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate
// numbers
decoder::get_crate_deps(cdata).iter().map(|dep| {
debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
let (local_cnum, _, _) = resolve_crate(e, root,
dep.name.as_slice(),
dep.name.as_slice(),
Some(&dep.hash),
span);
(dep.cnum, local_cnum)
}).collect()
}
pub struct PluginMetadataReader<'a> {
env: Env<'a>,
}
impl<'a> PluginMetadataReader<'a> {
pub fn new(sess: &'a Session) -> PluginMetadataReader<'a> {
PluginMetadataReader {
env: Env {
sess: sess,
next_crate_num: sess.cstore.next_crate_num(),
}
}
}
pub fn read_plugin_metadata(&mut self, krate: &ast::ViewItem) -> PluginMetadata {
let info = extract_crate_info(&self.env, krate).unwrap();
let target_triple = self.env.sess.targ_cfg.target_strs.target_triple.as_slice();
        let is_cross = target_triple != driver::host_triple();
        let mut should_link = info.should_link && !is_cross;
let os = config::get_os(driver::host_triple()).unwrap();
let mut load_ctxt = loader::Context {
sess: self.env.sess,
span: krate.span,
ident: info.ident.as_slice(),
crate_name: info.name.as_slice(),
hash: None,
filesearch: self.env.sess.host_filesearch(),
triple: driver::host_triple(),
os: os,
root: &None,
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
should_match_name: true,
};
let library = match load_ctxt.maybe_load_library_crate() {
Some(l) =>
|
fn register_crate<'a>(e: &mut Env,
root: &Option<CratePaths>,
ident: &str,
name: &str,
span: Span,
|
random_line_split
|
creader.rs
|
dump_crates(&sess.cstore);
warn_if_multiple_versions(sess.diagnostic(), &sess.cstore)
}
impl<'a, 'v> visit::Visitor<'v> for Env<'a> {
fn visit_view_item(&mut self, a: &ast::ViewItem) {
visit_view_item(self, a);
visit::walk_view_item(self, a);
}
fn visit_item(&mut self, a: &ast::Item) {
visit_item(self, a);
visit::walk_item(self, a);
}
}
fn dump_crates(cstore: &CStore)
|
fn warn_if_multiple_versions(diag: &SpanHandler, cstore: &CStore) {
let mut map = HashMap::new();
cstore.iter_crate_data(|cnum, data| {
match map.entry(data.name()) {
Vacant(entry) => { entry.set(vec![cnum]); },
Occupied(mut entry) => { entry.get_mut().push(cnum); },
}
});
for (name, dupes) in map.into_iter() {
if dupes.len() == 1 { continue }
diag.handler().warn(
format!("using multiple versions of crate `{}`", name).as_slice());
for dupe in dupes.into_iter() {
let data = cstore.get_crate_data(dupe);
diag.span_note(data.span, "used here");
loader::note_crate_name(diag, data.name().as_slice());
}
}
}
fn visit_crate(e: &Env, c: &ast::Crate) {
for a in c.attrs.iter().filter(|m| m.name().equiv(&("link_args"))) {
match a.value_str() {
Some(ref linkarg) => e.sess.cstore.add_used_link_args(linkarg.get()),
None => { /* fallthrough */ }
}
}
}
fn should_link(i: &ast::ViewItem) -> bool {
i.attrs.iter().all(|attr| {
        attr.name().get() != "phase" ||
attr.meta_item_list().map_or(false, |phases| {
attr::contains_name(phases.as_slice(), "link")
})
})
}
fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
    if !should_link(i) {
return;
}
match extract_crate_info(e, i) {
Some(info) => {
let (cnum, _, _) = resolve_crate(e,
&None,
info.ident.as_slice(),
info.name.as_slice(),
None,
i.span);
e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
}
None => ()
}
}
struct CrateInfo {
ident: String,
name: String,
id: ast::NodeId,
should_link: bool,
}
fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option<CrateInfo> {
match i.node {
ast::ViewItemExternCrate(ident, ref path_opt, id) => {
let ident = token::get_ident(ident);
debug!("resolving extern crate stmt. ident: {} path_opt: {}",
ident, path_opt);
let name = match *path_opt {
Some((ref path_str, _)) => {
let name = path_str.get().to_string();
validate_crate_name(Some(e.sess), name.as_slice(),
Some(i.span));
name
}
None => ident.get().to_string(),
};
Some(CrateInfo {
ident: ident.get().to_string(),
name: name,
id: id,
should_link: should_link(i),
})
}
_ => None
}
}
pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option<Span>) {
let err = |s: &str| {
match (sp, sess) {
(_, None) => fail!("{}", s),
(Some(sp), Some(sess)) => sess.span_err(sp, s),
(None, Some(sess)) => sess.err(s),
}
};
if s.len() == 0 {
err("crate name must not be empty");
}
for c in s.chars() {
if c.is_alphanumeric() { continue }
if c == '_' || c == '-' { continue }
err(format!("invalid character `{}` in crate name: `{}`", c, s).as_slice());
}
match sess {
Some(sess) => sess.abort_if_errors(),
None => {}
}
}
fn visit_item(e: &Env, i: &ast::Item) {
match i.node {
ast::ItemForeignMod(ref fm) => {
if fm.abi == abi::Rust || fm.abi == abi::RustIntrinsic {
return;
}
// First, add all of the custom link_args attributes
let link_args = i.attrs.iter()
.filter_map(|at| if at.name().equiv(&("link_args")) {
Some(at)
} else {
None
})
.collect::<Vec<&ast::Attribute>>();
for m in link_args.iter() {
match m.value_str() {
Some(linkarg) => e.sess.cstore.add_used_link_args(linkarg.get()),
None => { /* fallthrough */ }
}
}
// Next, process all of the #[link(..)]-style arguments
let link_args = i.attrs.iter()
.filter_map(|at| if at.name().equiv(&("link")) {
Some(at)
} else {
None
})
.collect::<Vec<&ast::Attribute>>();
for m in link_args.iter() {
match m.meta_item_list() {
Some(items) => {
let kind = items.iter().find(|k| {
k.name().equiv(&("kind"))
}).and_then(|a| a.value_str());
let kind = match kind {
Some(k) => {
if k.equiv(&("static")) {
cstore::NativeStatic
} else if (e.sess.targ_cfg.os == abi::OsMacos ||
e.sess.targ_cfg.os == abi::OsiOS) &&
k.equiv(&("framework")) {
cstore::NativeFramework
} else if k.equiv(&("framework")) {
e.sess.span_err(m.span,
"native frameworks are only available \
on OSX targets");
cstore::NativeUnknown
} else {
e.sess.span_err(m.span,
format!("unknown kind: `{}`",
k).as_slice());
cstore::NativeUnknown
}
}
None => cstore::NativeUnknown
};
let n = items.iter().find(|n| {
n.name().equiv(&("name"))
}).and_then(|a| a.value_str());
let n = match n {
Some(n) => n,
None => {
e.sess.span_err(m.span,
"#[link(...)] specified without \
`name = \"foo\"`");
InternedString::new("foo")
}
};
if n.get().is_empty() {
e.sess.span_err(m.span,
"#[link(name = \"\")] given with \
empty name");
} else {
e.sess
.cstore
.add_used_library(n.get().to_string(), kind);
}
}
None => {}
}
}
}
_ => { }
}
}
fn existing_match(e: &Env, name: &str,
hash: Option<&Svh>) -> Option<ast::CrateNum> {
let mut ret = None;
e.sess.cstore.iter_crate_data(|cnum, data| {
        if data.name.as_slice() != name { return }
match hash {
Some(hash) if *hash == data.hash() => { ret = Some(cnum); return }
Some(..) => return,
None => {}
}
// When the hash is None we're dealing with a top-level dependency in
// which case we may have a specification on the command line for this
// library. Even though an upstream library may have loaded something of
// the same name, we have to make sure it was loaded from the exact same
// location as well.
//
// We're also sure to compare *paths*, not actual byte slices. The
// `source` stores paths which are normalized which may be different
// from the strings on the command line.
let source = e.sess.cstore.get_used_crate_source(cnum).unwrap();
match e.sess.opts.externs.find_equiv(&name) {
Some(locs) => {
let found = locs.iter().any(|l| {
let l = fs::realpath(&Path::new(l.as_slice())).ok();
l == source.dylib || l == source.rlib
});
if found {
ret = Some(cnum);
}
}
None => ret = Some(cnum),
}
});
return ret;
}
fn register_crate<'a>(e: &mut Env,
root: &Option<CratePaths>,
ident: &str,
name: &str,
span: Span,
lib: loader::Library)
-> (ast::CrateNum, Rc<cstore::crate_metadata>,
cstore::CrateSource) {
// Claim this crate number and cache it
let cnum = e.next_crate_num;
e.next_crate_num += 1;
// Stash paths for top-most crate locally if necessary.
let crate_paths = if root.is_none() {
Some(CratePaths {
ident: ident.to_string(),
dylib: lib.dylib.clone(),
rlib: lib.rlib.clone(),
})
} else {
None
};
// Maintain a reference to the top most crate.
let root = if root.is_some() { root } else { &crate_paths };
let cnum_map = resolve_crate_deps(e, root, lib.metadata.as_slice(), span);
let loader::Library{ dylib, rlib, metadata } = lib;
let cmeta = Rc::new( cstore::crate_metadata {
name: name.to_string(),
data: metadata,
cnum_map: cnum_map,
cnum: cnum,
span: span,
});
let source = cstore::CrateSource {
dylib: dylib,
rlib: rlib,
cnum: cnum,
};
e.sess.cstore.set_crate_data(cnum, cmeta.clone());
e.sess.cstore.add_used_crate_source(source.clone());
(cnum, cmeta, source)
}
fn resolve_crate<'a>(e: &mut Env,
root: &Option<CratePaths>,
ident: &str,
name: &str,
hash: Option<&Svh>,
span: Span)
-> (ast::CrateNum, Rc<cstore::crate_metadata>,
cstore::CrateSource) {
match existing_match(e, name, hash) {
None => {
let mut load_ctxt = loader::Context {
sess: e.sess,
span: span,
ident: ident,
crate_name: name,
hash: hash.map(|a| &*a),
filesearch: e.sess.target_filesearch(),
os: e.sess.targ_cfg.os,
triple: e.sess.targ_cfg.target_strs.target_triple.as_slice(),
root: root,
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
should_match_name: true,
};
let library = load_ctxt.load_library_crate();
register_crate(e, root, ident, name, span, library)
}
Some(cnum) => (cnum,
e.sess.cstore.get_crate_data(cnum),
e.sess.cstore.get_used_crate_source(cnum).unwrap())
}
}
// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(e: &mut Env,
root: &Option<CratePaths>,
cdata: &[u8], span : Span)
-> cstore::cnum_map {
debug!("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate
// numbers
decoder::get_crate_deps(cdata).iter().map(|dep| {
debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
let (local_cnum, _, _) = resolve_crate(e, root,
dep.name.as_slice(),
dep.name.as_slice(),
Some(&dep.hash),
span);
(dep.cnum, local_cnum)
}).collect()
}
pub struct PluginMetadataReader<'a> {
env: Env<'a>,
}
impl<'a> PluginMetadataReader<'a> {
pub fn new(sess: &'a Session) -> PluginMetadataReader<'a> {
PluginMetadataReader {
env: Env {
sess: sess,
next_crate_num: sess.cstore.next_crate_num(),
}
}
}
pub fn read_plugin_metadata(&mut self, krate: &ast::ViewItem) -> PluginMetadata {
let info = extract_crate_info(&self.env, krate).unwrap();
let target_triple = self.env.sess.targ_cfg.target_strs.target_triple.as_slice();
        let is_cross = target_triple != driver::host_triple();
        let mut should_link = info.should_link && !is_cross;
let os = config::get_os(driver::host_triple()).unwrap();
let mut load_ctxt = loader::Context {
sess: self.env.sess,
span: krate.span,
ident: info.ident.as_slice(),
crate_name: info.name.as_slice(),
hash: None,
filesearch: self.env.sess.host_filesearch(),
triple: driver::host_triple(),
os: os,
root: &None,
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
should_match_name: true,
};
let library = match load_ctxt.maybe_load_library_crate() {
Some(l
|
{
debug!("resolved crates:");
cstore.iter_crate_data_origins(|_, data, opt_source| {
debug!(" name: {}", data.name());
debug!(" cnum: {}", data.cnum);
debug!(" hash: {}", data.hash());
opt_source.map(|cs| {
let CrateSource { dylib, rlib, cnum: _ } = cs;
dylib.map(|dl| debug!(" dylib: {}", dl.display()));
rlib.map(|rl| debug!(" rlib: {}", rl.display()));
});
})
}
|
identifier_body
|
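`validate_crate_name` above enforces a simple rule: a crate name must be non-empty and may contain only alphanumeric characters, `_`, or `-`. The standalone sketch below expresses the same rule as a pure function that collects error messages instead of reporting through a compiler `Session`; the function name `crate_name_errors` is invented for illustration.

// Standalone sketch of the naming rule enforced by validate_crate_name above:
// a crate name must be non-empty and contain only alphanumerics, '_' or '-'.
// `crate_name_errors` is an invented name; it collects messages instead of
// reporting through a compiler Session.
fn crate_name_errors(name: &str) -> Vec<String> {
    let mut errors = Vec::new();
    if name.is_empty() {
        errors.push("crate name must not be empty".to_string());
    }
    for c in name.chars() {
        if c.is_alphanumeric() || c == '_' || c == '-' {
            continue;
        }
        errors.push(format!("invalid character `{}` in crate name: `{}`", c, name));
    }
    errors
}

fn main() {
    assert!(crate_name_errors("serde_json").is_empty());
    assert_eq!(crate_name_errors("bad name!").len(), 2);
}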
creader.rs
|
{ continue }
diag.handler().warn(
format!("using multiple versions of crate `{}`", name).as_slice());
for dupe in dupes.into_iter() {
let data = cstore.get_crate_data(dupe);
diag.span_note(data.span, "used here");
loader::note_crate_name(diag, data.name().as_slice());
}
}
}
fn visit_crate(e: &Env, c: &ast::Crate) {
for a in c.attrs.iter().filter(|m| m.name().equiv(&("link_args"))) {
match a.value_str() {
Some(ref linkarg) => e.sess.cstore.add_used_link_args(linkarg.get()),
None => { /* fallthrough */ }
}
}
}
fn should_link(i: &ast::ViewItem) -> bool {
i.attrs.iter().all(|attr| {
        attr.name().get() != "phase" ||
attr.meta_item_list().map_or(false, |phases| {
attr::contains_name(phases.as_slice(), "link")
})
})
}
fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
    if !should_link(i) {
return;
}
match extract_crate_info(e, i) {
Some(info) => {
let (cnum, _, _) = resolve_crate(e,
&None,
info.ident.as_slice(),
info.name.as_slice(),
None,
i.span);
e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
}
None => ()
}
}
struct CrateInfo {
ident: String,
name: String,
id: ast::NodeId,
should_link: bool,
}
fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option<CrateInfo> {
match i.node {
ast::ViewItemExternCrate(ident, ref path_opt, id) => {
let ident = token::get_ident(ident);
debug!("resolving extern crate stmt. ident: {} path_opt: {}",
ident, path_opt);
let name = match *path_opt {
Some((ref path_str, _)) => {
let name = path_str.get().to_string();
validate_crate_name(Some(e.sess), name.as_slice(),
Some(i.span));
name
}
None => ident.get().to_string(),
};
Some(CrateInfo {
ident: ident.get().to_string(),
name: name,
id: id,
should_link: should_link(i),
})
}
_ => None
}
}
pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option<Span>) {
let err = |s: &str| {
match (sp, sess) {
(_, None) => fail!("{}", s),
(Some(sp), Some(sess)) => sess.span_err(sp, s),
(None, Some(sess)) => sess.err(s),
}
};
if s.len() == 0 {
err("crate name must not be empty");
}
for c in s.chars() {
if c.is_alphanumeric() { continue }
if c == '_' || c == '-' { continue }
err(format!("invalid character `{}` in crate name: `{}`", c, s).as_slice());
}
match sess {
Some(sess) => sess.abort_if_errors(),
None => {}
}
}
fn visit_item(e: &Env, i: &ast::Item) {
match i.node {
ast::ItemForeignMod(ref fm) => {
if fm.abi == abi::Rust || fm.abi == abi::RustIntrinsic {
return;
}
// First, add all of the custom link_args attributes
let link_args = i.attrs.iter()
.filter_map(|at| if at.name().equiv(&("link_args")) {
Some(at)
} else {
None
})
.collect::<Vec<&ast::Attribute>>();
for m in link_args.iter() {
match m.value_str() {
Some(linkarg) => e.sess.cstore.add_used_link_args(linkarg.get()),
None => { /* fallthrough */ }
}
}
// Next, process all of the #[link(..)]-style arguments
let link_args = i.attrs.iter()
.filter_map(|at| if at.name().equiv(&("link")) {
Some(at)
} else {
None
})
.collect::<Vec<&ast::Attribute>>();
for m in link_args.iter() {
match m.meta_item_list() {
Some(items) => {
let kind = items.iter().find(|k| {
k.name().equiv(&("kind"))
}).and_then(|a| a.value_str());
let kind = match kind {
Some(k) => {
if k.equiv(&("static")) {
cstore::NativeStatic
} else if (e.sess.targ_cfg.os == abi::OsMacos ||
e.sess.targ_cfg.os == abi::OsiOS) &&
k.equiv(&("framework")) {
cstore::NativeFramework
} else if k.equiv(&("framework")) {
e.sess.span_err(m.span,
"native frameworks are only available \
on OSX targets");
cstore::NativeUnknown
} else {
e.sess.span_err(m.span,
format!("unknown kind: `{}`",
k).as_slice());
cstore::NativeUnknown
}
}
None => cstore::NativeUnknown
};
let n = items.iter().find(|n| {
n.name().equiv(&("name"))
}).and_then(|a| a.value_str());
let n = match n {
Some(n) => n,
None => {
e.sess.span_err(m.span,
"#[link(...)] specified without \
`name = \"foo\"`");
InternedString::new("foo")
}
};
if n.get().is_empty() {
e.sess.span_err(m.span,
"#[link(name = \"\")] given with \
empty name");
} else {
e.sess
.cstore
.add_used_library(n.get().to_string(), kind);
}
}
None => {}
}
}
}
_ => { }
}
}
fn existing_match(e: &Env, name: &str,
hash: Option<&Svh>) -> Option<ast::CrateNum> {
let mut ret = None;
e.sess.cstore.iter_crate_data(|cnum, data| {
        if data.name.as_slice() != name { return }
match hash {
Some(hash) if *hash == data.hash() => { ret = Some(cnum); return }
Some(..) => return,
None => {}
}
// When the hash is None we're dealing with a top-level dependency in
// which case we may have a specification on the command line for this
// library. Even though an upstream library may have loaded something of
// the same name, we have to make sure it was loaded from the exact same
// location as well.
//
// We're also sure to compare *paths*, not actual byte slices. The
// `source` stores paths which are normalized which may be different
// from the strings on the command line.
let source = e.sess.cstore.get_used_crate_source(cnum).unwrap();
match e.sess.opts.externs.find_equiv(&name) {
Some(locs) => {
let found = locs.iter().any(|l| {
let l = fs::realpath(&Path::new(l.as_slice())).ok();
l == source.dylib || l == source.rlib
});
if found {
ret = Some(cnum);
}
}
None => ret = Some(cnum),
}
});
return ret;
}
fn register_crate<'a>(e: &mut Env,
root: &Option<CratePaths>,
ident: &str,
name: &str,
span: Span,
lib: loader::Library)
-> (ast::CrateNum, Rc<cstore::crate_metadata>,
cstore::CrateSource) {
// Claim this crate number and cache it
let cnum = e.next_crate_num;
e.next_crate_num += 1;
// Stash paths for top-most crate locally if necessary.
let crate_paths = if root.is_none() {
Some(CratePaths {
ident: ident.to_string(),
dylib: lib.dylib.clone(),
rlib: lib.rlib.clone(),
})
} else {
None
};
// Maintain a reference to the top most crate.
let root = if root.is_some() { root } else { &crate_paths };
let cnum_map = resolve_crate_deps(e, root, lib.metadata.as_slice(), span);
let loader::Library{ dylib, rlib, metadata } = lib;
let cmeta = Rc::new( cstore::crate_metadata {
name: name.to_string(),
data: metadata,
cnum_map: cnum_map,
cnum: cnum,
span: span,
});
let source = cstore::CrateSource {
dylib: dylib,
rlib: rlib,
cnum: cnum,
};
e.sess.cstore.set_crate_data(cnum, cmeta.clone());
e.sess.cstore.add_used_crate_source(source.clone());
(cnum, cmeta, source)
}
fn resolve_crate<'a>(e: &mut Env,
root: &Option<CratePaths>,
ident: &str,
name: &str,
hash: Option<&Svh>,
span: Span)
-> (ast::CrateNum, Rc<cstore::crate_metadata>,
cstore::CrateSource) {
match existing_match(e, name, hash) {
None => {
let mut load_ctxt = loader::Context {
sess: e.sess,
span: span,
ident: ident,
crate_name: name,
hash: hash.map(|a| &*a),
filesearch: e.sess.target_filesearch(),
os: e.sess.targ_cfg.os,
triple: e.sess.targ_cfg.target_strs.target_triple.as_slice(),
root: root,
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
should_match_name: true,
};
let library = load_ctxt.load_library_crate();
register_crate(e, root, ident, name, span, library)
}
Some(cnum) => (cnum,
e.sess.cstore.get_crate_data(cnum),
e.sess.cstore.get_used_crate_source(cnum).unwrap())
}
}
// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(e: &mut Env,
root: &Option<CratePaths>,
                      cdata: &[u8], span: Span)
-> cstore::cnum_map {
debug!("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate
// numbers
decoder::get_crate_deps(cdata).iter().map(|dep| {
debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
let (local_cnum, _, _) = resolve_crate(e, root,
dep.name.as_slice(),
dep.name.as_slice(),
Some(&dep.hash),
span);
(dep.cnum, local_cnum)
}).collect()
}
pub struct PluginMetadataReader<'a> {
env: Env<'a>,
}
impl<'a> PluginMetadataReader<'a> {
pub fn new(sess: &'a Session) -> PluginMetadataReader<'a> {
PluginMetadataReader {
env: Env {
sess: sess,
next_crate_num: sess.cstore.next_crate_num(),
}
}
}
pub fn read_plugin_metadata(&mut self, krate: &ast::ViewItem) -> PluginMetadata {
let info = extract_crate_info(&self.env, krate).unwrap();
let target_triple = self.env.sess.targ_cfg.target_strs.target_triple.as_slice();
        let is_cross = target_triple != driver::host_triple();
        let mut should_link = info.should_link && !is_cross;
let os = config::get_os(driver::host_triple()).unwrap();
let mut load_ctxt = loader::Context {
sess: self.env.sess,
span: krate.span,
ident: info.ident.as_slice(),
crate_name: info.name.as_slice(),
hash: None,
filesearch: self.env.sess.host_filesearch(),
triple: driver::host_triple(),
os: os,
root: &None,
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
should_match_name: true,
};
let library = match load_ctxt.maybe_load_library_crate() {
Some(l) => l,
None if is_cross =>
|
{
// try loading from target crates (only valid if there are
// no syntax extensions)
load_ctxt.triple = target_triple;
load_ctxt.os = self.env.sess.targ_cfg.os;
load_ctxt.filesearch = self.env.sess.target_filesearch();
let lib = load_ctxt.load_library_crate();
if decoder::get_plugin_registrar_fn(lib.metadata.as_slice()).is_some() {
let message = format!("crate `{}` contains a plugin_registrar fn but \
only a version for triple `{}` could be found (need {})",
info.ident, target_triple, driver::host_triple());
self.env.sess.span_err(krate.span, message.as_slice());
// need to abort now because the syntax expansion
// code will shortly attempt to load and execute
// code from the found library.
self.env.sess.abort_if_errors();
}
should_link = info.should_link;
lib
}
|
conditional_block
|
|
creader.rs
|
dump_crates(&sess.cstore);
warn_if_multiple_versions(sess.diagnostic(), &sess.cstore)
}
impl<'a, 'v> visit::Visitor<'v> for Env<'a> {
fn visit_view_item(&mut self, a: &ast::ViewItem) {
visit_view_item(self, a);
visit::walk_view_item(self, a);
}
fn visit_item(&mut self, a: &ast::Item) {
visit_item(self, a);
visit::walk_item(self, a);
}
}
fn dump_crates(cstore: &CStore) {
debug!("resolved crates:");
cstore.iter_crate_data_origins(|_, data, opt_source| {
debug!(" name: {}", data.name());
debug!(" cnum: {}", data.cnum);
debug!(" hash: {}", data.hash());
opt_source.map(|cs| {
let CrateSource { dylib, rlib, cnum: _ } = cs;
dylib.map(|dl| debug!(" dylib: {}", dl.display()));
rlib.map(|rl| debug!(" rlib: {}", rl.display()));
});
})
}
fn
|
(diag: &SpanHandler, cstore: &CStore) {
let mut map = HashMap::new();
cstore.iter_crate_data(|cnum, data| {
match map.entry(data.name()) {
Vacant(entry) => { entry.set(vec![cnum]); },
Occupied(mut entry) => { entry.get_mut().push(cnum); },
}
});
for (name, dupes) in map.into_iter() {
if dupes.len() == 1 { continue }
diag.handler().warn(
format!("using multiple versions of crate `{}`", name).as_slice());
for dupe in dupes.into_iter() {
let data = cstore.get_crate_data(dupe);
diag.span_note(data.span, "used here");
loader::note_crate_name(diag, data.name().as_slice());
}
}
}
fn visit_crate(e: &Env, c: &ast::Crate) {
for a in c.attrs.iter().filter(|m| m.name().equiv(&("link_args"))) {
match a.value_str() {
Some(ref linkarg) => e.sess.cstore.add_used_link_args(linkarg.get()),
None => { /* fallthrough */ }
}
}
}
fn should_link(i: &ast::ViewItem) -> bool {
i.attrs.iter().all(|attr| {
        attr.name().get() != "phase" ||
attr.meta_item_list().map_or(false, |phases| {
attr::contains_name(phases.as_slice(), "link")
})
})
}
fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
    if !should_link(i) {
return;
}
match extract_crate_info(e, i) {
Some(info) => {
let (cnum, _, _) = resolve_crate(e,
&None,
info.ident.as_slice(),
info.name.as_slice(),
None,
i.span);
e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
}
None => ()
}
}
struct CrateInfo {
ident: String,
name: String,
id: ast::NodeId,
should_link: bool,
}
fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option<CrateInfo> {
match i.node {
ast::ViewItemExternCrate(ident, ref path_opt, id) => {
let ident = token::get_ident(ident);
debug!("resolving extern crate stmt. ident: {} path_opt: {}",
ident, path_opt);
let name = match *path_opt {
Some((ref path_str, _)) => {
let name = path_str.get().to_string();
validate_crate_name(Some(e.sess), name.as_slice(),
Some(i.span));
name
}
None => ident.get().to_string(),
};
Some(CrateInfo {
ident: ident.get().to_string(),
name: name,
id: id,
should_link: should_link(i),
})
}
_ => None
}
}
pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option<Span>) {
let err = |s: &str| {
match (sp, sess) {
(_, None) => fail!("{}", s),
(Some(sp), Some(sess)) => sess.span_err(sp, s),
(None, Some(sess)) => sess.err(s),
}
};
if s.len() == 0 {
err("crate name must not be empty");
}
for c in s.chars() {
if c.is_alphanumeric() { continue }
if c == '_' || c == '-' { continue }
err(format!("invalid character `{}` in crate name: `{}`", c, s).as_slice());
}
match sess {
Some(sess) => sess.abort_if_errors(),
None => {}
}
}
fn visit_item(e: &Env, i: &ast::Item) {
match i.node {
ast::ItemForeignMod(ref fm) => {
if fm.abi == abi::Rust || fm.abi == abi::RustIntrinsic {
return;
}
// First, add all of the custom link_args attributes
let link_args = i.attrs.iter()
.filter_map(|at| if at.name().equiv(&("link_args")) {
Some(at)
} else {
None
})
.collect::<Vec<&ast::Attribute>>();
for m in link_args.iter() {
match m.value_str() {
Some(linkarg) => e.sess.cstore.add_used_link_args(linkarg.get()),
None => { /* fallthrough */ }
}
}
// Next, process all of the #[link(..)]-style arguments
let link_args = i.attrs.iter()
.filter_map(|at| if at.name().equiv(&("link")) {
Some(at)
} else {
None
})
.collect::<Vec<&ast::Attribute>>();
for m in link_args.iter() {
match m.meta_item_list() {
Some(items) => {
let kind = items.iter().find(|k| {
k.name().equiv(&("kind"))
}).and_then(|a| a.value_str());
let kind = match kind {
Some(k) => {
if k.equiv(&("static")) {
cstore::NativeStatic
} else if (e.sess.targ_cfg.os == abi::OsMacos ||
e.sess.targ_cfg.os == abi::OsiOS) &&
k.equiv(&("framework")) {
cstore::NativeFramework
} else if k.equiv(&("framework")) {
e.sess.span_err(m.span,
"native frameworks are only available \
on OSX targets");
cstore::NativeUnknown
} else {
e.sess.span_err(m.span,
format!("unknown kind: `{}`",
k).as_slice());
cstore::NativeUnknown
}
}
None => cstore::NativeUnknown
};
let n = items.iter().find(|n| {
n.name().equiv(&("name"))
}).and_then(|a| a.value_str());
let n = match n {
Some(n) => n,
None => {
e.sess.span_err(m.span,
"#[link(...)] specified without \
`name = \"foo\"`");
InternedString::new("foo")
}
};
if n.get().is_empty() {
e.sess.span_err(m.span,
"#[link(name = \"\")] given with \
empty name");
} else {
e.sess
.cstore
.add_used_library(n.get().to_string(), kind);
}
}
None => {}
}
}
}
_ => { }
}
}
fn existing_match(e: &Env, name: &str,
hash: Option<&Svh>) -> Option<ast::CrateNum> {
let mut ret = None;
e.sess.cstore.iter_crate_data(|cnum, data| {
        if data.name.as_slice() != name { return }
match hash {
Some(hash) if *hash == data.hash() => { ret = Some(cnum); return }
Some(..) => return,
None => {}
}
// When the hash is None we're dealing with a top-level dependency in
// which case we may have a specification on the command line for this
// library. Even though an upstream library may have loaded something of
// the same name, we have to make sure it was loaded from the exact same
// location as well.
//
// We're also sure to compare *paths*, not actual byte slices. The
// `source` stores paths which are normalized which may be different
// from the strings on the command line.
let source = e.sess.cstore.get_used_crate_source(cnum).unwrap();
match e.sess.opts.externs.find_equiv(&name) {
Some(locs) => {
let found = locs.iter().any(|l| {
let l = fs::realpath(&Path::new(l.as_slice())).ok();
l == source.dylib || l == source.rlib
});
if found {
ret = Some(cnum);
}
}
None => ret = Some(cnum),
}
});
return ret;
}
fn register_crate<'a>(e: &mut Env,
root: &Option<CratePaths>,
ident: &str,
name: &str,
span: Span,
lib: loader::Library)
-> (ast::CrateNum, Rc<cstore::crate_metadata>,
cstore::CrateSource) {
// Claim this crate number and cache it
let cnum = e.next_crate_num;
e.next_crate_num += 1;
// Stash paths for top-most crate locally if necessary.
let crate_paths = if root.is_none() {
Some(CratePaths {
ident: ident.to_string(),
dylib: lib.dylib.clone(),
rlib: lib.rlib.clone(),
})
} else {
None
};
// Maintain a reference to the top most crate.
let root = if root.is_some() { root } else { &crate_paths };
let cnum_map = resolve_crate_deps(e, root, lib.metadata.as_slice(), span);
let loader::Library{ dylib, rlib, metadata } = lib;
let cmeta = Rc::new( cstore::crate_metadata {
name: name.to_string(),
data: metadata,
cnum_map: cnum_map,
cnum: cnum,
span: span,
});
let source = cstore::CrateSource {
dylib: dylib,
rlib: rlib,
cnum: cnum,
};
e.sess.cstore.set_crate_data(cnum, cmeta.clone());
e.sess.cstore.add_used_crate_source(source.clone());
(cnum, cmeta, source)
}
fn resolve_crate<'a>(e: &mut Env,
root: &Option<CratePaths>,
ident: &str,
name: &str,
hash: Option<&Svh>,
span: Span)
-> (ast::CrateNum, Rc<cstore::crate_metadata>,
cstore::CrateSource) {
match existing_match(e, name, hash) {
None => {
let mut load_ctxt = loader::Context {
sess: e.sess,
span: span,
ident: ident,
crate_name: name,
hash: hash.map(|a| &*a),
filesearch: e.sess.target_filesearch(),
os: e.sess.targ_cfg.os,
triple: e.sess.targ_cfg.target_strs.target_triple.as_slice(),
root: root,
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
should_match_name: true,
};
let library = load_ctxt.load_library_crate();
register_crate(e, root, ident, name, span, library)
}
Some(cnum) => (cnum,
e.sess.cstore.get_crate_data(cnum),
e.sess.cstore.get_used_crate_source(cnum).unwrap())
}
}
// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(e: &mut Env,
root: &Option<CratePaths>,
                      cdata: &[u8], span: Span)
-> cstore::cnum_map {
debug!("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate
// numbers
decoder::get_crate_deps(cdata).iter().map(|dep| {
debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
let (local_cnum, _, _) = resolve_crate(e, root,
dep.name.as_slice(),
dep.name.as_slice(),
Some(&dep.hash),
span);
(dep.cnum, local_cnum)
}).collect()
}
pub struct PluginMetadataReader<'a> {
env: Env<'a>,
}
impl<'a> PluginMetadataReader<'a> {
pub fn new(sess: &'a Session) -> PluginMetadataReader<'a> {
PluginMetadataReader {
env: Env {
sess: sess,
next_crate_num: sess.cstore.next_crate_num(),
}
}
}
pub fn read_plugin_metadata(&mut self, krate: &ast::ViewItem) -> PluginMetadata {
let info = extract_crate_info(&self.env, krate).unwrap();
let target_triple = self.env.sess.targ_cfg.target_strs.target_triple.as_slice();
        let is_cross = target_triple != driver::host_triple();
        let mut should_link = info.should_link && !is_cross;
let os = config::get_os(driver::host_triple()).unwrap();
let mut load_ctxt = loader::Context {
sess: self.env.sess,
span: krate.span,
ident: info.ident.as_slice(),
crate_name: info.name.as_slice(),
hash: None,
filesearch: self.env.sess.host_filesearch(),
triple: driver::host_triple(),
os: os,
root: &None,
rejected_via_hash: vec!(),
rejected_via_triple: vec!(),
should_match_name: true,
};
let library = match load_ctxt.maybe_load_library_crate() {
Some(l
|
warn_if_multiple_versions
|
identifier_name
|
repoconfig.rs
|
use std::collections::BTreeMap;
use std::default::Default;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use toml::{Parser, Value};
use toml::Value::Table;
pub struct Config {
fuzz: bool,
fuzz_cores: usize,
fuzz_seconds: usize,
fuzz_max_len: usize,
cross: bool,
}
impl Default for Config {
fn default() -> Config {
Config {
fuzz: true,
fuzz_cores: 1,
fuzz_seconds: 60,
fuzz_max_len: 64,
cross: true,
}
}
}
impl Config {
pub fn set_fuzz_cores(&mut self, cores: usize) -> &mut Self {
self.fuzz_cores = cores;
self
}
#[allow(dead_code)]
pub fn fuzz_cores(&self) -> usize {
self.fuzz_cores
}
pub fn set_fuzz_seconds(&mut self, seconds: usize) -> &mut Self {
self.fuzz_seconds = seconds;
self
}
#[allow(dead_code)]
pub fn fuzz_seconds(&self) -> usize {
self.fuzz_seconds
}
pub fn set_fuzz_max_len(&mut self, bytes: usize) -> &mut Self {
self.fuzz_max_len = bytes;
self
}
#[allow(dead_code)]
pub fn fuzz_max_len(&self) -> usize {
self.fuzz_max_len
}
pub fn set_fuzz(&mut self, enable: bool) -> &mut Self {
self.fuzz = enable;
self
}
pub fn fuzz(&self) -> bool {
self.fuzz
}
pub fn set_cross(&mut self, enable: bool) -> &mut Self {
self.cross = enable;
self
}
pub fn cross(&self) -> bool {
self.cross
}
}
pub fn load_config(path: &Path) -> Result<Config, String> {
let cfg_txt = match File::open(path) {
Ok(mut f) => {
let mut cfg_txt = String::new();
f.read_to_string(&mut cfg_txt).unwrap();
cfg_txt
}
Err(e) => return Err(format!("Error opening config: {}", e)),
};
let mut p = Parser::new(&cfg_txt);
match p.parse() {
Some(table) => {
debug!("toml parsed successfully. creating config");
load_config_table(&table)
}
None => {
for err in &p.errors {
let (loline, locol) = p.to_linecol(err.lo);
let (hiline, hicol) = p.to_linecol(err.hi);
println!(
"{:?}:{}:{}-{}:{} error: {}",
path,
loline,
locol,
hiline,
hicol,
err.desc
);
}
Err("failed to load config".to_owned())
}
}
}
fn
|
(table: &BTreeMap<String, Value>) -> Result<Config, String> {
let mut config = Config::default();
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("enable").and_then(|k| k.as_bool()) {
config.set_fuzz(v as bool);
}
}
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("cores").and_then(|k| k.as_integer()) {
config.set_fuzz_cores(v as usize);
}
}
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("seconds").and_then(|k| k.as_integer()) {
config.set_fuzz_seconds(v as usize);
}
}
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("length").and_then(|k| k.as_integer()) {
config.set_fuzz_max_len(v as usize);
}
}
if let Some(&Table(ref general)) = table.get("cross") {
if let Some(v) = general.get("enable").and_then(|k| k.as_bool()) {
config.set_cross(v as bool);
}
}
Ok(config)
}
|
load_config_table
|
identifier_name
|
repoconfig.rs
|
use std::collections::BTreeMap;
use std::default::Default;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use toml::{Parser, Value};
use toml::Value::Table;
pub struct Config {
fuzz: bool,
fuzz_cores: usize,
fuzz_seconds: usize,
fuzz_max_len: usize,
cross: bool,
}
impl Default for Config {
fn default() -> Config {
|
fuzz_max_len: 64,
cross: true,
}
}
}
impl Config {
pub fn set_fuzz_cores(&mut self, cores: usize) -> &mut Self {
self.fuzz_cores = cores;
self
}
#[allow(dead_code)]
pub fn fuzz_cores(&self) -> usize {
self.fuzz_cores
}
pub fn set_fuzz_seconds(&mut self, seconds: usize) -> &mut Self {
self.fuzz_seconds = seconds;
self
}
#[allow(dead_code)]
pub fn fuzz_seconds(&self) -> usize {
self.fuzz_seconds
}
pub fn set_fuzz_max_len(&mut self, bytes: usize) -> &mut Self {
self.fuzz_max_len = bytes;
self
}
#[allow(dead_code)]
pub fn fuzz_max_len(&self) -> usize {
self.fuzz_max_len
}
pub fn set_fuzz(&mut self, enable: bool) -> &mut Self {
self.fuzz = enable;
self
}
pub fn fuzz(&self) -> bool {
self.fuzz
}
pub fn set_cross(&mut self, enable: bool) -> &mut Self {
self.cross = enable;
self
}
pub fn cross(&self) -> bool {
self.cross
}
}
pub fn load_config(path: &Path) -> Result<Config, String> {
let cfg_txt = match File::open(path) {
Ok(mut f) => {
let mut cfg_txt = String::new();
f.read_to_string(&mut cfg_txt).unwrap();
cfg_txt
}
Err(e) => return Err(format!("Error opening config: {}", e)),
};
let mut p = Parser::new(&cfg_txt);
match p.parse() {
Some(table) => {
debug!("toml parsed successfully. creating config");
load_config_table(&table)
}
None => {
for err in &p.errors {
let (loline, locol) = p.to_linecol(err.lo);
let (hiline, hicol) = p.to_linecol(err.hi);
println!(
"{:?}:{}:{}-{}:{} error: {}",
path,
loline,
locol,
hiline,
hicol,
err.desc
);
}
Err("failed to load config".to_owned())
}
}
}
fn load_config_table(table: &BTreeMap<String, Value>) -> Result<Config, String> {
let mut config = Config::default();
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("enable").and_then(|k| k.as_bool()) {
config.set_fuzz(v as bool);
}
}
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("cores").and_then(|k| k.as_integer()) {
config.set_fuzz_cores(v as usize);
}
}
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("seconds").and_then(|k| k.as_integer()) {
config.set_fuzz_seconds(v as usize);
}
}
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("length").and_then(|k| k.as_integer()) {
config.set_fuzz_max_len(v as usize);
}
}
if let Some(&Table(ref general)) = table.get("cross") {
if let Some(v) = general.get("enable").and_then(|k| k.as_bool()) {
config.set_cross(v as bool);
}
}
Ok(config)
}
|
Config {
fuzz: true,
fuzz_cores: 1,
fuzz_seconds: 60,
|
random_line_split
|
repoconfig.rs
|
use std::collections::BTreeMap;
use std::default::Default;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use toml::{Parser, Value};
use toml::Value::Table;
pub struct Config {
fuzz: bool,
fuzz_cores: usize,
fuzz_seconds: usize,
fuzz_max_len: usize,
cross: bool,
}
impl Default for Config {
fn default() -> Config {
Config {
fuzz: true,
fuzz_cores: 1,
fuzz_seconds: 60,
fuzz_max_len: 64,
cross: true,
}
}
}
impl Config {
pub fn set_fuzz_cores(&mut self, cores: usize) -> &mut Self {
self.fuzz_cores = cores;
self
}
#[allow(dead_code)]
pub fn fuzz_cores(&self) -> usize {
self.fuzz_cores
}
pub fn set_fuzz_seconds(&mut self, seconds: usize) -> &mut Self {
self.fuzz_seconds = seconds;
self
}
#[allow(dead_code)]
pub fn fuzz_seconds(&self) -> usize {
self.fuzz_seconds
}
pub fn set_fuzz_max_len(&mut self, bytes: usize) -> &mut Self {
self.fuzz_max_len = bytes;
self
}
#[allow(dead_code)]
pub fn fuzz_max_len(&self) -> usize {
self.fuzz_max_len
}
pub fn set_fuzz(&mut self, enable: bool) -> &mut Self {
self.fuzz = enable;
self
}
pub fn fuzz(&self) -> bool {
self.fuzz
}
pub fn set_cross(&mut self, enable: bool) -> &mut Self {
self.cross = enable;
self
}
pub fn cross(&self) -> bool {
self.cross
}
}
pub fn load_config(path: &Path) -> Result<Config, String> {
let cfg_txt = match File::open(path) {
Ok(mut f) => {
let mut cfg_txt = String::new();
f.read_to_string(&mut cfg_txt).unwrap();
cfg_txt
}
Err(e) => return Err(format!("Error opening config: {}", e)),
};
let mut p = Parser::new(&cfg_txt);
match p.parse() {
Some(table) => {
debug!("toml parsed successfully. creating config");
load_config_table(&table)
}
None => {
for err in &p.errors {
let (loline, locol) = p.to_linecol(err.lo);
let (hiline, hicol) = p.to_linecol(err.hi);
println!(
"{:?}:{}:{}-{}:{} error: {}",
path,
loline,
locol,
hiline,
hicol,
err.desc
);
}
Err("failed to load config".to_owned())
}
}
}
fn load_config_table(table: &BTreeMap<String, Value>) -> Result<Config, String> {
let mut config = Config::default();
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("enable").and_then(|k| k.as_bool()) {
config.set_fuzz(v as bool);
}
}
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("cores").and_then(|k| k.as_integer()) {
config.set_fuzz_cores(v as usize);
}
}
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("seconds").and_then(|k| k.as_integer()) {
config.set_fuzz_seconds(v as usize);
}
}
if let Some(&Table(ref general)) = table.get("fuzz")
|
if let Some(&Table(ref general)) = table.get("cross") {
if let Some(v) = general.get("enable").and_then(|k| k.as_bool()) {
config.set_cross(v as bool);
}
}
Ok(config)
}
|
{
if let Some(v) = general.get("length").and_then(|k| k.as_integer()) {
config.set_fuzz_max_len(v as usize);
}
}
|
conditional_block
|
repoconfig.rs
|
use std::collections::BTreeMap;
use std::default::Default;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use toml::{Parser, Value};
use toml::Value::Table;
pub struct Config {
fuzz: bool,
fuzz_cores: usize,
fuzz_seconds: usize,
fuzz_max_len: usize,
cross: bool,
}
impl Default for Config {
fn default() -> Config {
Config {
fuzz: true,
fuzz_cores: 1,
fuzz_seconds: 60,
fuzz_max_len: 64,
cross: true,
}
}
}
impl Config {
pub fn set_fuzz_cores(&mut self, cores: usize) -> &mut Self {
self.fuzz_cores = cores;
self
}
#[allow(dead_code)]
pub fn fuzz_cores(&self) -> usize {
self.fuzz_cores
}
pub fn set_fuzz_seconds(&mut self, seconds: usize) -> &mut Self {
self.fuzz_seconds = seconds;
self
}
#[allow(dead_code)]
pub fn fuzz_seconds(&self) -> usize {
self.fuzz_seconds
}
pub fn set_fuzz_max_len(&mut self, bytes: usize) -> &mut Self {
self.fuzz_max_len = bytes;
self
}
#[allow(dead_code)]
pub fn fuzz_max_len(&self) -> usize {
self.fuzz_max_len
}
pub fn set_fuzz(&mut self, enable: bool) -> &mut Self {
self.fuzz = enable;
self
}
pub fn fuzz(&self) -> bool {
self.fuzz
}
pub fn set_cross(&mut self, enable: bool) -> &mut Self {
self.cross = enable;
self
}
pub fn cross(&self) -> bool
|
}
pub fn load_config(path: &Path) -> Result<Config, String> {
let cfg_txt = match File::open(path) {
Ok(mut f) => {
let mut cfg_txt = String::new();
f.read_to_string(&mut cfg_txt).unwrap();
cfg_txt
}
Err(e) => return Err(format!("Error opening config: {}", e)),
};
let mut p = Parser::new(&cfg_txt);
match p.parse() {
Some(table) => {
debug!("toml parsed successfully. creating config");
load_config_table(&table)
}
None => {
for err in &p.errors {
let (loline, locol) = p.to_linecol(err.lo);
let (hiline, hicol) = p.to_linecol(err.hi);
println!(
"{:?}:{}:{}-{}:{} error: {}",
path,
loline,
locol,
hiline,
hicol,
err.desc
);
}
Err("failed to load config".to_owned())
}
}
}
fn load_config_table(table: &BTreeMap<String, Value>) -> Result<Config, String> {
let mut config = Config::default();
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("enable").and_then(|k| k.as_bool()) {
config.set_fuzz(v as bool);
}
}
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("cores").and_then(|k| k.as_integer()) {
config.set_fuzz_cores(v as usize);
}
}
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("seconds").and_then(|k| k.as_integer()) {
config.set_fuzz_seconds(v as usize);
}
}
if let Some(&Table(ref general)) = table.get("fuzz") {
if let Some(v) = general.get("length").and_then(|k| k.as_integer()) {
config.set_fuzz_max_len(v as usize);
}
}
if let Some(&Table(ref general)) = table.get("cross") {
if let Some(v) = general.get("enable").and_then(|k| k.as_bool()) {
config.set_cross(v as bool);
}
}
Ok(config)
}
|
{
self.cross
}
|
identifier_body
|