file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
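Each row below is one fill-in-the-middle (FIM) example: a Rust source file is cut into a `prefix`, an elided `middle`, and a `suffix`, and `fim_type` records how the cut was chosen (`random_line_split`, `identifier_name`, `identifier_body`, or `conditional_block`). As a reading aid, here is a minimal sketch of how such a row could be modeled and reassembled; the `FimExample` struct and `reassemble` helper are illustrative names only, not part of the dataset's tooling, and the sketch assumes that concatenating prefix, middle, and suffix reproduces the original file.

```rust
/// Hypothetical representation of one dataset row (illustrative names, not the
/// dataset's own tooling). Assumption: prefix + middle + suffix == original file.
struct FimExample {
    file_name: String,
    prefix: String,
    middle: String,
    suffix: String,
    /// One of: random_line_split, identifier_name, identifier_body, conditional_block.
    fim_type: String,
}

impl FimExample {
    /// Reassemble the original source text from the three pieces.
    fn reassemble(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }
}

fn main() {
    // Modeled on the second row below: the middle holds the elided identifier
    // `want_baz`; prefix and suffix carry the surrounding code.
    let row = FimExample {
        file_name: "hrtb-higher-ranker-supertraits-transitive.rs".to_string(),
        prefix: "fn ".to_string(),
        middle: "want_baz".to_string(),
        suffix: "<B>(b: &B)\n    where B : Baz\n{ /* ... */ }\n".to_string(),
        fim_type: "identifier_name".to_string(),
    };
    assert_eq!(
        row.reassemble().lines().next(),
        Some("fn want_baz<B>(b: &B)")
    );
}
```

Judging from the rows shown, the same pattern holds for the other classes: `identifier_body` elides a function body, `conditional_block` elides a branch body, and `random_line_split` cuts at an arbitrary line boundary.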
hrtb-higher-ranker-supertraits-transitive.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test HRTB supertraits with several levels of expansion required.
trait Foo<'tcx>
{
fn foo(&'tcx self) -> &'tcx isize;
}
trait Bar<'ccx>
: for<'tcx> Foo<'tcx>
{
fn bar(&'ccx self) -> &'ccx isize;
|
}
trait Baz
: for<'ccx> Bar<'ccx>
{
fn dummy(&self);
}
trait Qux
: Bar<'static>
{
fn dummy(&self);
}
fn want_foo_for_any_tcx<F>(f: &F)
where F : for<'tcx> Foo<'tcx>
{
}
fn want_bar_for_any_ccx<B>(b: &B)
where B : for<'ccx> Bar<'ccx>
{
}
fn want_baz<B>(b: &B)
where B : Baz
{
want_foo_for_any_tcx(b);
want_bar_for_any_ccx(b);
}
fn want_qux<B>(b: &B)
where B : Qux
{
want_foo_for_any_tcx(b);
want_bar_for_any_ccx(b); //~ ERROR E0277
}
fn main() {}
|
random_line_split
|
|
hrtb-higher-ranker-supertraits-transitive.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test HRTB supertraits with several levels of expansion required.
trait Foo<'tcx>
{
fn foo(&'tcx self) -> &'tcx isize;
}
trait Bar<'ccx>
: for<'tcx> Foo<'tcx>
{
fn bar(&'ccx self) -> &'ccx isize;
}
trait Baz
: for<'ccx> Bar<'ccx>
{
fn dummy(&self);
}
trait Qux
: Bar<'static>
{
fn dummy(&self);
}
fn want_foo_for_any_tcx<F>(f: &F)
where F : for<'tcx> Foo<'tcx>
{
}
fn want_bar_for_any_ccx<B>(b: &B)
where B : for<'ccx> Bar<'ccx>
{
}
fn
|
<B>(b: &B)
where B : Baz
{
want_foo_for_any_tcx(b);
want_bar_for_any_ccx(b);
}
fn want_qux<B>(b: &B)
where B : Qux
{
want_foo_for_any_tcx(b);
want_bar_for_any_ccx(b); //~ ERROR E0277
}
fn main() {}
|
want_baz
|
identifier_name
|
hrtb-higher-ranker-supertraits-transitive.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test HRTB supertraits with several levels of expansion required.
trait Foo<'tcx>
{
fn foo(&'tcx self) -> &'tcx isize;
}
trait Bar<'ccx>
: for<'tcx> Foo<'tcx>
{
fn bar(&'ccx self) -> &'ccx isize;
}
trait Baz
: for<'ccx> Bar<'ccx>
{
fn dummy(&self);
}
trait Qux
: Bar<'static>
{
fn dummy(&self);
}
fn want_foo_for_any_tcx<F>(f: &F)
where F : for<'tcx> Foo<'tcx>
{
}
fn want_bar_for_any_ccx<B>(b: &B)
where B : for<'ccx> Bar<'ccx>
{
}
fn want_baz<B>(b: &B)
where B : Baz
{
want_foo_for_any_tcx(b);
want_bar_for_any_ccx(b);
}
fn want_qux<B>(b: &B)
where B : Qux
|
fn main() {}
|
{
want_foo_for_any_tcx(b);
want_bar_for_any_ccx(b); //~ ERROR E0277
}
|
identifier_body
|
event_loop.rs
|
// Copyright 2018 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use super::error::{Error, Result};
use base::{
error, warn, wrap_descriptor, AsRawDescriptor, Descriptor, EpollContext, EpollEvents, Event,
RawDescriptor, WatchingEvents,
};
use std::collections::BTreeMap;
use std::mem::drop;
use std::sync::{Arc, Weak};
use std::thread;
use sync::Mutex;
/// A fail handle will do the cleanup when we cannot recover from some error.
pub trait FailHandle: Send + Sync {
/// Fail the code.
fn fail(&self);
/// Returns true if already failed.
fn failed(&self) -> bool;
}
impl FailHandle for Option<Arc<dyn FailHandle>> {
fn fail(&self) {
match self {
Some(handle) => handle.fail(),
None => error!("event loop trying to fail without a fail handle"),
}
}
fn failed(&self) -> bool {
match self {
Some(handle) => handle.failed(),
None => false,
}
}
}
/// EpollEventLoop is an event loop blocked on a set of fds. When a monitored event is triggered,
/// the event loop will invoke the mapped handler.
pub struct EventLoop {
fail_handle: Option<Arc<dyn FailHandle>>,
poll_ctx: Arc<EpollContext<Descriptor>>,
handlers: Arc<Mutex<BTreeMap<RawDescriptor, Weak<dyn EventHandler>>>>,
stop_evt: Event,
}
/// Interface for event handler.
pub trait EventHandler: Send + Sync {
fn on_event(&self) -> anyhow::Result<()>;
}
impl EventLoop {
/// Start an event loop. An optional fail handle could be passed to the event loop.
pub fn start(
name: String,
fail_handle: Option<Arc<dyn FailHandle>>,
) -> Result<(EventLoop, thread::JoinHandle<()>)> {
let (self_stop_evt, stop_evt) = Event::new()
.and_then(|e| Ok((e.try_clone()?, e)))
.map_err(Error::CreateEvent)?;
let fd_callbacks: Arc<Mutex<BTreeMap<RawDescriptor, Weak<dyn EventHandler>>>> =
Arc::new(Mutex::new(BTreeMap::new()));
let poll_ctx: EpollContext<Descriptor> = EpollContext::new()
.and_then(|pc| {
pc.add(
&wrap_descriptor(&stop_evt),
Descriptor(stop_evt.as_raw_descriptor()),
)
.and(Ok(pc))
})
.map_err(Error::CreateWaitContext)?;
let poll_ctx = Arc::new(poll_ctx);
let event_loop = EventLoop {
fail_handle: fail_handle.clone(),
poll_ctx: poll_ctx.clone(),
handlers: fd_callbacks.clone(),
stop_evt: self_stop_evt,
};
let handle = thread::Builder::new()
.name(name)
.spawn(move || {
let event_loop = EpollEvents::new();
loop {
if fail_handle.failed() {
error!("xhci controller already failed, stopping event ring");
return;
}
let events = match poll_ctx.wait(&event_loop) {
Ok(events) => events,
Err(e) => {
error!("cannot poll {:?}", e);
fail_handle.fail();
return;
}
};
for event in &events {
let fd = event.token().as_raw_descriptor();
if fd == stop_evt.as_raw_descriptor() {
return;
}
let mut locked = fd_callbacks.lock();
let weak_handler = match locked.get(&fd) {
Some(cb) => cb.clone(),
None => {
warn!("callback for fd {} already removed", fd);
continue;
}
};
// If the file descriptor is hung up, remove it after calling the handler
// one final time.
let mut remove = event.hungup();
if let Some(handler) = weak_handler.upgrade() {
// Drop lock before triggering the event.
drop(locked);
if let Err(e) = handler.on_event() {
error!("removing event handler due to error: {:#}", e);
remove = true;
}
locked = fd_callbacks.lock();
} else {
// If the handler is already gone, we remove the fd.
remove = true;
}
if remove {
let _ = poll_ctx.delete(&event.token());
let _ = locked.remove(&fd);
}
}
}
})
.map_err(Error::StartThread)?;
Ok((event_loop, handle))
}
/// Add a new event to event loop. The event handler will be invoked when `event` happens on
/// `descriptor`.
///
/// If the same `descriptor` is added multiple times, the old handler will be replaced.
/// EventLoop will not keep `handler` alive; if the handler is dropped when `event` is triggered,
/// the event will be removed.
pub fn add_event(
&self,
descriptor: &dyn AsRawDescriptor,
events: WatchingEvents,
handler: Weak<dyn EventHandler>,
) -> Result<()> {
if self.fail_handle.failed() {
return Err(Error::EventLoopAlreadyFailed);
}
self.handlers
.lock()
.insert(descriptor.as_raw_descriptor(), handler);
// This might fail due to epoll syscall. Check epoll_ctl(2).
self.poll_ctx
.add_fd_with_events(
&wrap_descriptor(descriptor),
events,
Descriptor(descriptor.as_raw_descriptor()),
)
.map_err(Error::WaitContextAddDescriptor)
}
/// Removes the event for this `descriptor`. This function returns an error if it fails.
///
/// EventLoop does not guarantee all events for `descriptor` are handled.
pub fn remove_event_for_fd(&self, descriptor: &dyn AsRawDescriptor) -> Result<()> {
if self.fail_handle.failed() {
return Err(Error::EventLoopAlreadyFailed);
}
// This might fail due to epoll syscall. Check epoll_ctl(2).
self.poll_ctx
.delete(&wrap_descriptor(descriptor))
.map_err(Error::WaitContextDeleteDescriptor)?;
self.handlers.lock().remove(&descriptor.as_raw_descriptor());
Ok(())
}
/// Stops this event loop asynchronously. Previous events might not be handled.
pub fn stop(&self) {
match self.stop_evt.write(1) {
Ok(_) => {}
Err(_) => {
error!("fail to send event loop stop event, it might already stopped");
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use base::Event;
use std::sync::{Arc, Condvar, Mutex};
struct EventLoopTestHandler {
val: Mutex<u8>,
cvar: Condvar,
evt: Event,
}
impl EventHandler for EventLoopTestHandler {
fn on_event(&self) -> anyhow::Result<()> {
self.evt.read().unwrap();
*self.val.lock().unwrap() += 1;
self.cvar.notify_one();
Ok(())
}
}
#[test]
fn
|
() {
let (l, j) = EventLoop::start("test".to_string(), None).unwrap();
let (self_evt, evt) = match Event::new().and_then(|e| Ok((e.try_clone()?, e))) {
Ok(v) => v,
Err(e) => {
error!("failed creating Event pair: {:?}", e);
return;
}
};
let h = Arc::new(EventLoopTestHandler {
val: Mutex::new(0),
cvar: Condvar::new(),
evt,
});
let t: Arc<dyn EventHandler> = h.clone();
l.add_event(
&h.evt,
WatchingEvents::empty().set_read(),
Arc::downgrade(&t),
)
.unwrap();
self_evt.write(1).unwrap();
{
let mut val = h.val.lock().unwrap();
while *val < 1 {
val = h.cvar.wait(val).unwrap();
}
}
l.stop();
j.join().unwrap();
assert_eq!(*(h.val.lock().unwrap()), 1);
}
}
|
event_loop_test
|
identifier_name
|
event_loop.rs
|
// Copyright 2018 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use super::error::{Error, Result};
use base::{
error, warn, wrap_descriptor, AsRawDescriptor, Descriptor, EpollContext, EpollEvents, Event,
RawDescriptor, WatchingEvents,
};
|
/// A fail handle will do the cleanup when we cannot recover from some error.
pub trait FailHandle: Send + Sync {
/// Fail the code.
fn fail(&self);
/// Returns true if already failed.
fn failed(&self) -> bool;
}
impl FailHandle for Option<Arc<dyn FailHandle>> {
fn fail(&self) {
match self {
Some(handle) => handle.fail(),
None => error!("event loop trying to fail without a fail handle"),
}
}
fn failed(&self) -> bool {
match self {
Some(handle) => handle.failed(),
None => false,
}
}
}
/// EpollEventLoop is an event loop blocked on a set of fds. When a monitored event is triggered,
/// the event loop will invoke the mapped handler.
pub struct EventLoop {
fail_handle: Option<Arc<dyn FailHandle>>,
poll_ctx: Arc<EpollContext<Descriptor>>,
handlers: Arc<Mutex<BTreeMap<RawDescriptor, Weak<dyn EventHandler>>>>,
stop_evt: Event,
}
/// Interface for event handler.
pub trait EventHandler: Send + Sync {
fn on_event(&self) -> anyhow::Result<()>;
}
impl EventLoop {
/// Start an event loop. An optional fail handle could be passed to the event loop.
pub fn start(
name: String,
fail_handle: Option<Arc<dyn FailHandle>>,
) -> Result<(EventLoop, thread::JoinHandle<()>)> {
let (self_stop_evt, stop_evt) = Event::new()
.and_then(|e| Ok((e.try_clone()?, e)))
.map_err(Error::CreateEvent)?;
let fd_callbacks: Arc<Mutex<BTreeMap<RawDescriptor, Weak<dyn EventHandler>>>> =
Arc::new(Mutex::new(BTreeMap::new()));
let poll_ctx: EpollContext<Descriptor> = EpollContext::new()
.and_then(|pc| {
pc.add(
&wrap_descriptor(&stop_evt),
Descriptor(stop_evt.as_raw_descriptor()),
)
.and(Ok(pc))
})
.map_err(Error::CreateWaitContext)?;
let poll_ctx = Arc::new(poll_ctx);
let event_loop = EventLoop {
fail_handle: fail_handle.clone(),
poll_ctx: poll_ctx.clone(),
handlers: fd_callbacks.clone(),
stop_evt: self_stop_evt,
};
let handle = thread::Builder::new()
.name(name)
.spawn(move || {
let event_loop = EpollEvents::new();
loop {
if fail_handle.failed() {
error!("xhci controller already failed, stopping event ring");
return;
}
let events = match poll_ctx.wait(&event_loop) {
Ok(events) => events,
Err(e) => {
error!("cannot poll {:?}", e);
fail_handle.fail();
return;
}
};
for event in &events {
let fd = event.token().as_raw_descriptor();
if fd == stop_evt.as_raw_descriptor() {
return;
}
let mut locked = fd_callbacks.lock();
let weak_handler = match locked.get(&fd) {
Some(cb) => cb.clone(),
None => {
warn!("callback for fd {} already removed", fd);
continue;
}
};
// If the file descriptor is hung up, remove it after calling the handler
// one final time.
let mut remove = event.hungup();
if let Some(handler) = weak_handler.upgrade() {
// Drop lock before triggering the event.
drop(locked);
if let Err(e) = handler.on_event() {
error!("removing event handler due to error: {:#}", e);
remove = true;
}
locked = fd_callbacks.lock();
} else {
// If the handler is already gone, we remove the fd.
remove = true;
}
if remove {
let _ = poll_ctx.delete(&event.token());
let _ = locked.remove(&fd);
}
}
}
})
.map_err(Error::StartThread)?;
Ok((event_loop, handle))
}
/// Add a new event to event loop. The event handler will be invoked when `event` happens on
/// `descriptor`.
///
/// If the same `descriptor` is added multiple times, the old handler will be replaced.
/// EventLoop will not keep `handler` alive; if the handler is dropped when `event` is triggered,
/// the event will be removed.
pub fn add_event(
&self,
descriptor: &dyn AsRawDescriptor,
events: WatchingEvents,
handler: Weak<dyn EventHandler>,
) -> Result<()> {
if self.fail_handle.failed() {
return Err(Error::EventLoopAlreadyFailed);
}
self.handlers
.lock()
.insert(descriptor.as_raw_descriptor(), handler);
// This might fail due to epoll syscall. Check epoll_ctl(2).
self.poll_ctx
.add_fd_with_events(
&wrap_descriptor(descriptor),
events,
Descriptor(descriptor.as_raw_descriptor()),
)
.map_err(Error::WaitContextAddDescriptor)
}
/// Removes the event for this `descriptor`. This function returns an error if it fails.
///
/// EventLoop does not guarantee all events for `descriptor` are handled.
pub fn remove_event_for_fd(&self, descriptor: &dyn AsRawDescriptor) -> Result<()> {
if self.fail_handle.failed() {
return Err(Error::EventLoopAlreadyFailed);
}
// This might fail due to epoll syscall. Check epoll_ctl(2).
self.poll_ctx
.delete(&wrap_descriptor(descriptor))
.map_err(Error::WaitContextDeleteDescriptor)?;
self.handlers.lock().remove(&descriptor.as_raw_descriptor());
Ok(())
}
/// Stops this event loop asynchronously. Previous events might not be handled.
pub fn stop(&self) {
match self.stop_evt.write(1) {
Ok(_) => {}
Err(_) => {
error!("fail to send event loop stop event, it might already stopped");
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use base::Event;
use std::sync::{Arc, Condvar, Mutex};
struct EventLoopTestHandler {
val: Mutex<u8>,
cvar: Condvar,
evt: Event,
}
impl EventHandler for EventLoopTestHandler {
fn on_event(&self) -> anyhow::Result<()> {
self.evt.read().unwrap();
*self.val.lock().unwrap() += 1;
self.cvar.notify_one();
Ok(())
}
}
#[test]
fn event_loop_test() {
let (l, j) = EventLoop::start("test".to_string(), None).unwrap();
let (self_evt, evt) = match Event::new().and_then(|e| Ok((e.try_clone()?, e))) {
Ok(v) => v,
Err(e) => {
error!("failed creating Event pair: {:?}", e);
return;
}
};
let h = Arc::new(EventLoopTestHandler {
val: Mutex::new(0),
cvar: Condvar::new(),
evt,
});
let t: Arc<dyn EventHandler> = h.clone();
l.add_event(
&h.evt,
WatchingEvents::empty().set_read(),
Arc::downgrade(&t),
)
.unwrap();
self_evt.write(1).unwrap();
{
let mut val = h.val.lock().unwrap();
while *val < 1 {
val = h.cvar.wait(val).unwrap();
}
}
l.stop();
j.join().unwrap();
assert_eq!(*(h.val.lock().unwrap()), 1);
}
}
|
use std::collections::BTreeMap;
use std::mem::drop;
use std::sync::{Arc, Weak};
use std::thread;
use sync::Mutex;
|
random_line_split
|
lights.rs
|
use crate::geometry::direction::Direction;
use crate::geometry::ray::Ray;
use crate::geometry::vector::Vector4;
use rand::prelude::ThreadRng;
use rand::Rng;
pub trait Light : Sync {
fn emit(&self, rng: &mut ThreadRng) -> Ray;
fn energy_total(&self) -> f64;
}
pub struct IntervalLight {
from: Vector4,
to: Vector4,
energy: f64
}
impl IntervalLight {
pub fn new(from: Vector4, to: Vector4, energy: f64) -> Box<dyn Light> {
return Box::new(IntervalLight { from, to, energy });
}
|
}
impl Light for IntervalLight {
fn emit(&self, rng: &mut ThreadRng) -> Ray {
let d: f64 = rng.gen_range(0.0, 1.0);
let start = self.from + (self.to - self.from) * d;
let direction: Direction = Direction::random(rng, 1.0);
return Ray { start, direction };
}
fn energy_total(&self) -> f64 {
return self.energy;
}
}
|
random_line_split
|
|
lights.rs
|
use crate::geometry::direction::Direction;
use crate::geometry::ray::Ray;
use crate::geometry::vector::Vector4;
use rand::prelude::ThreadRng;
use rand::Rng;
pub trait Light : Sync {
fn emit(&self, rng: &mut ThreadRng) -> Ray;
fn energy_total(&self) -> f64;
}
pub struct IntervalLight {
from: Vector4,
to: Vector4,
energy: f64
}
impl IntervalLight {
pub fn new(from: Vector4, to: Vector4, energy: f64) -> Box<dyn Light> {
return Box::new(IntervalLight { from, to, energy });
}
}
impl Light for IntervalLight {
fn emit(&self, rng: &mut ThreadRng) -> Ray {
let d: f64 = rng.gen_range(0.0, 1.0);
let start = self.from + (self.to - self.from) * d;
let direction: Direction = Direction::random(rng, 1.0);
return Ray { start, direction };
}
fn energy_total(&self) -> f64
|
}
|
{
return self.energy;
}
|
identifier_body
|
lights.rs
|
use crate::geometry::direction::Direction;
use crate::geometry::ray::Ray;
use crate::geometry::vector::Vector4;
use rand::prelude::ThreadRng;
use rand::Rng;
pub trait Light : Sync {
fn emit(&self, rng: &mut ThreadRng) -> Ray;
fn energy_total(&self) -> f64;
}
pub struct
|
{
from: Vector4,
to: Vector4,
energy: f64
}
impl IntervalLight {
pub fn new(from: Vector4, to: Vector4, energy: f64) -> Box<dyn Light> {
return Box::new(IntervalLight { from, to, energy });
}
}
impl Light for IntervalLight {
fn emit(&self, rng: &mut ThreadRng) -> Ray {
let d: f64 = rng.gen_range(0.0, 1.0);
let start = self.from + (self.to - self.from) * d;
let direction: Direction = Direction::random(rng, 1.0);
return Ray { start, direction };
}
fn energy_total(&self) -> f64 {
return self.energy;
}
}
|
IntervalLight
|
identifier_name
|
cols.rs
|
use transposed::{Cols, ColsMut};
use {Col, ColMut};
impl<'a, T> DoubleEndedIterator for Cols<'a, T> {
fn next_back(&mut self) -> Option<Col<'a, T>> {
self.0.next_back().map(|r| Col(r.0))
}
}
impl<'a, T> Iterator for Cols<'a, T> {
type Item = Col<'a, T>;
fn next(&mut self) -> Option<Col<'a, T>> {
self.0.next().map(|r| Col(r.0))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
}
impl<'a, T> DoubleEndedIterator for ColsMut<'a, T> {
fn next_back(&mut self) -> Option<ColMut<'a, T>> {
self.0.next_back().map(|r| ColMut(Col((r.0).0)))
}
}
|
type Item = ColMut<'a, T>;
fn next(&mut self) -> Option<ColMut<'a, T>> {
self.0.next().map(|r| ColMut(Col((r.0).0)))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
}
|
impl<'a, T> Iterator for ColsMut<'a, T> {
|
random_line_split
|
cols.rs
|
use transposed::{Cols, ColsMut};
use {Col, ColMut};
impl<'a, T> DoubleEndedIterator for Cols<'a, T> {
fn next_back(&mut self) -> Option<Col<'a, T>> {
self.0.next_back().map(|r| Col(r.0))
}
}
impl<'a, T> Iterator for Cols<'a, T> {
type Item = Col<'a, T>;
fn next(&mut self) -> Option<Col<'a, T>> {
self.0.next().map(|r| Col(r.0))
}
fn
|
(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
}
impl<'a, T> DoubleEndedIterator for ColsMut<'a, T> {
fn next_back(&mut self) -> Option<ColMut<'a, T>> {
self.0.next_back().map(|r| ColMut(Col((r.0).0)))
}
}
impl<'a, T> Iterator for ColsMut<'a, T> {
type Item = ColMut<'a, T>;
fn next(&mut self) -> Option<ColMut<'a, T>> {
self.0.next().map(|r| ColMut(Col((r.0).0)))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
}
|
size_hint
|
identifier_name
|
cols.rs
|
use transposed::{Cols, ColsMut};
use {Col, ColMut};
impl<'a, T> DoubleEndedIterator for Cols<'a, T> {
fn next_back(&mut self) -> Option<Col<'a, T>> {
self.0.next_back().map(|r| Col(r.0))
}
}
impl<'a, T> Iterator for Cols<'a, T> {
type Item = Col<'a, T>;
fn next(&mut self) -> Option<Col<'a, T>> {
self.0.next().map(|r| Col(r.0))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
}
impl<'a, T> DoubleEndedIterator for ColsMut<'a, T> {
fn next_back(&mut self) -> Option<ColMut<'a, T>> {
self.0.next_back().map(|r| ColMut(Col((r.0).0)))
}
}
impl<'a, T> Iterator for ColsMut<'a, T> {
type Item = ColMut<'a, T>;
fn next(&mut self) -> Option<ColMut<'a, T>> {
self.0.next().map(|r| ColMut(Col((r.0).0)))
}
fn size_hint(&self) -> (usize, Option<usize>)
|
}
|
{
self.0.size_hint()
}
|
identifier_body
|
test_stats.rs
|
// Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::Arc;
use kvproto::kvrpcpb::*;
use pd_client::PdClient;
use test_raftstore::*;
use tikv_util::config::*;
fn check_available<T: Simulator>(cluster: &mut Cluster<T>)
|
if stats.get_available() != last_available {
return;
}
}
panic!("available not changed")
}
fn test_simple_store_stats<T: Simulator>(cluster: &mut Cluster<T>) {
let pd_client = Arc::clone(&cluster.pd_client);
cluster.cfg.raft_store.pd_store_heartbeat_tick_interval = ReadableDuration::millis(20);
cluster.run();
// wait store reports stats.
for _ in 0..100 {
sleep_ms(20);
if pd_client.get_store_stats(1).is_some() {
break;
}
}
let engine = cluster.get_engine(1);
let raft_engine = cluster.get_raft_engine(1);
raft_engine.flush(true).unwrap();
engine.flush(true).unwrap();
let last_stats = pd_client.get_store_stats(1).unwrap();
assert_eq!(last_stats.get_region_count(), 1);
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
let region = pd_client.get_region(b"").unwrap();
cluster.must_split(&region, b"k2");
raft_engine.flush(true).unwrap();
engine.flush(true).unwrap();
// wait report region count after split
for _ in 0..100 {
sleep_ms(20);
let stats = pd_client.get_store_stats(1).unwrap();
if stats.get_region_count() == 2 {
break;
}
}
let stats = pd_client.get_store_stats(1).unwrap();
assert_eq!(stats.get_region_count(), 2);
check_available(cluster);
}
#[test]
fn test_node_simple_store_stats() {
let mut cluster = new_node_cluster(0, 1);
test_simple_store_stats(&mut cluster);
}
#[test]
fn test_store_heartbeat_report_hotspots() {
fail::cfg("mock_hotspot_threshold", "return(0)").unwrap();
fail::cfg("mock_tick_interval", "return(0)").unwrap();
let (cluster, client, ctx) = must_new_and_configure_cluster_and_kv_client(|cluster| {
cluster.cfg.raft_store.pd_store_heartbeat_tick_interval = ReadableDuration::millis(10);
});
let (k, v) = (b"key".to_vec(), b"v2".to_vec());
// Raw put
let mut put_req = RawPutRequest::default();
put_req.set_context(ctx.clone());
put_req.key = k.clone();
put_req.value = v.clone();
let put_resp = client.raw_put(&put_req).unwrap();
assert!(!put_resp.has_region_error());
assert!(put_resp.error.is_empty());
for _i in 0..100 {
// Raw get
let mut get_req = RawGetRequest::default();
get_req.set_context(ctx.clone());
get_req.key = k.clone();
let get_resp = client.raw_get(&get_req).unwrap();
assert_eq!(get_resp.value, v);
}
sleep_ms(50);
let region_id = cluster.get_region_id(b"");
let store_id = 1;
let hot_peers = cluster.pd_client.get_store_hotspots(store_id).unwrap();
let peer_stat = hot_peers.get(&region_id).unwrap();
assert_eq!(peer_stat.get_region_id(), region_id);
assert!(peer_stat.get_read_keys() > 0);
assert!(peer_stat.get_read_bytes() > 0);
fail::remove("mock_tick_interval");
fail::remove("mock_hotspot_threshold");
}
|
{
let pd_client = Arc::clone(&cluster.pd_client);
let engine = cluster.get_engine(1);
let raft_engine = cluster.get_raft_engine(1);
let stats = pd_client.get_store_stats(1).unwrap();
assert_eq!(stats.get_region_count(), 2);
let value = vec![0; 1024];
for i in 0..1000 {
let last_available = stats.get_available();
cluster.must_put(format!("k{}", i).as_bytes(), &value);
raft_engine.flush(true).unwrap();
engine.flush(true).unwrap();
sleep_ms(20);
let stats = pd_client.get_store_stats(1).unwrap();
// Because `available` reflects free disk space, other processes may
// reclaim space even while we add data, so we only check that the
// available size has changed.
|
identifier_body
|
test_stats.rs
|
// Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::Arc;
use kvproto::kvrpcpb::*;
use pd_client::PdClient;
use test_raftstore::*;
use tikv_util::config::*;
fn check_available<T: Simulator>(cluster: &mut Cluster<T>) {
let pd_client = Arc::clone(&cluster.pd_client);
let engine = cluster.get_engine(1);
let raft_engine = cluster.get_raft_engine(1);
let stats = pd_client.get_store_stats(1).unwrap();
assert_eq!(stats.get_region_count(), 2);
let value = vec![0; 1024];
for i in 0..1000 {
let last_available = stats.get_available();
cluster.must_put(format!("k{}", i).as_bytes(), &value);
raft_engine.flush(true).unwrap();
engine.flush(true).unwrap();
sleep_ms(20);
let stats = pd_client.get_store_stats(1).unwrap();
// Because `available` reflects free disk space, other processes may
// reclaim space even while we add data, so we only check that the
// available size has changed.
if stats.get_available() != last_available {
return;
}
}
panic!("available not changed")
}
fn test_simple_store_stats<T: Simulator>(cluster: &mut Cluster<T>) {
let pd_client = Arc::clone(&cluster.pd_client);
cluster.cfg.raft_store.pd_store_heartbeat_tick_interval = ReadableDuration::millis(20);
cluster.run();
// wait store reports stats.
for _ in 0..100 {
sleep_ms(20);
if pd_client.get_store_stats(1).is_some()
|
}
let engine = cluster.get_engine(1);
let raft_engine = cluster.get_raft_engine(1);
raft_engine.flush(true).unwrap();
engine.flush(true).unwrap();
let last_stats = pd_client.get_store_stats(1).unwrap();
assert_eq!(last_stats.get_region_count(), 1);
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
let region = pd_client.get_region(b"").unwrap();
cluster.must_split(&region, b"k2");
raft_engine.flush(true).unwrap();
engine.flush(true).unwrap();
// wait report region count after split
for _ in 0..100 {
sleep_ms(20);
let stats = pd_client.get_store_stats(1).unwrap();
if stats.get_region_count() == 2 {
break;
}
}
let stats = pd_client.get_store_stats(1).unwrap();
assert_eq!(stats.get_region_count(), 2);
check_available(cluster);
}
#[test]
fn test_node_simple_store_stats() {
let mut cluster = new_node_cluster(0, 1);
test_simple_store_stats(&mut cluster);
}
#[test]
fn test_store_heartbeat_report_hotspots() {
fail::cfg("mock_hotspot_threshold", "return(0)").unwrap();
fail::cfg("mock_tick_interval", "return(0)").unwrap();
let (cluster, client, ctx) = must_new_and_configure_cluster_and_kv_client(|cluster| {
cluster.cfg.raft_store.pd_store_heartbeat_tick_interval = ReadableDuration::millis(10);
});
let (k, v) = (b"key".to_vec(), b"v2".to_vec());
// Raw put
let mut put_req = RawPutRequest::default();
put_req.set_context(ctx.clone());
put_req.key = k.clone();
put_req.value = v.clone();
let put_resp = client.raw_put(&put_req).unwrap();
assert!(!put_resp.has_region_error());
assert!(put_resp.error.is_empty());
for _i in 0..100 {
// Raw get
let mut get_req = RawGetRequest::default();
get_req.set_context(ctx.clone());
get_req.key = k.clone();
let get_resp = client.raw_get(&get_req).unwrap();
assert_eq!(get_resp.value, v);
}
sleep_ms(50);
let region_id = cluster.get_region_id(b"");
let store_id = 1;
let hot_peers = cluster.pd_client.get_store_hotspots(store_id).unwrap();
let peer_stat = hot_peers.get(&region_id).unwrap();
assert_eq!(peer_stat.get_region_id(), region_id);
assert!(peer_stat.get_read_keys() > 0);
assert!(peer_stat.get_read_bytes() > 0);
fail::remove("mock_tick_interval");
fail::remove("mock_hotspot_threshold");
}
|
{
break;
}
|
conditional_block
|
test_stats.rs
|
// Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::Arc;
use kvproto::kvrpcpb::*;
use pd_client::PdClient;
use test_raftstore::*;
use tikv_util::config::*;
fn check_available<T: Simulator>(cluster: &mut Cluster<T>) {
let pd_client = Arc::clone(&cluster.pd_client);
let engine = cluster.get_engine(1);
let raft_engine = cluster.get_raft_engine(1);
let stats = pd_client.get_store_stats(1).unwrap();
assert_eq!(stats.get_region_count(), 2);
let value = vec![0; 1024];
for i in 0..1000 {
let last_available = stats.get_available();
cluster.must_put(format!("k{}", i).as_bytes(), &value);
raft_engine.flush(true).unwrap();
engine.flush(true).unwrap();
sleep_ms(20);
let stats = pd_client.get_store_stats(1).unwrap();
// Because `available` reflects free disk space, other processes may
// reclaim space even while we add data, so we only check that the
// available size has changed.
if stats.get_available() != last_available {
return;
}
}
panic!("available not changed")
}
fn
|
<T: Simulator>(cluster: &mut Cluster<T>) {
let pd_client = Arc::clone(&cluster.pd_client);
cluster.cfg.raft_store.pd_store_heartbeat_tick_interval = ReadableDuration::millis(20);
cluster.run();
// wait store reports stats.
for _ in 0..100 {
sleep_ms(20);
if pd_client.get_store_stats(1).is_some() {
break;
}
}
let engine = cluster.get_engine(1);
let raft_engine = cluster.get_raft_engine(1);
raft_engine.flush(true).unwrap();
engine.flush(true).unwrap();
let last_stats = pd_client.get_store_stats(1).unwrap();
assert_eq!(last_stats.get_region_count(), 1);
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
let region = pd_client.get_region(b"").unwrap();
cluster.must_split(&region, b"k2");
raft_engine.flush(true).unwrap();
engine.flush(true).unwrap();
// wait report region count after split
for _ in 0..100 {
sleep_ms(20);
let stats = pd_client.get_store_stats(1).unwrap();
if stats.get_region_count() == 2 {
break;
}
}
let stats = pd_client.get_store_stats(1).unwrap();
assert_eq!(stats.get_region_count(), 2);
check_available(cluster);
}
#[test]
fn test_node_simple_store_stats() {
let mut cluster = new_node_cluster(0, 1);
test_simple_store_stats(&mut cluster);
}
#[test]
fn test_store_heartbeat_report_hotspots() {
fail::cfg("mock_hotspot_threshold", "return(0)").unwrap();
fail::cfg("mock_tick_interval", "return(0)").unwrap();
let (cluster, client, ctx) = must_new_and_configure_cluster_and_kv_client(|cluster| {
cluster.cfg.raft_store.pd_store_heartbeat_tick_interval = ReadableDuration::millis(10);
});
let (k, v) = (b"key".to_vec(), b"v2".to_vec());
// Raw put
let mut put_req = RawPutRequest::default();
put_req.set_context(ctx.clone());
put_req.key = k.clone();
put_req.value = v.clone();
let put_resp = client.raw_put(&put_req).unwrap();
assert!(!put_resp.has_region_error());
assert!(put_resp.error.is_empty());
for _i in 0..100 {
// Raw get
let mut get_req = RawGetRequest::default();
get_req.set_context(ctx.clone());
get_req.key = k.clone();
let get_resp = client.raw_get(&get_req).unwrap();
assert_eq!(get_resp.value, v);
}
sleep_ms(50);
let region_id = cluster.get_region_id(b"");
let store_id = 1;
let hot_peers = cluster.pd_client.get_store_hotspots(store_id).unwrap();
let peer_stat = hot_peers.get(&region_id).unwrap();
assert_eq!(peer_stat.get_region_id(), region_id);
assert!(peer_stat.get_read_keys() > 0);
assert!(peer_stat.get_read_bytes() > 0);
fail::remove("mock_tick_interval");
fail::remove("mock_hotspot_threshold");
}
|
test_simple_store_stats
|
identifier_name
|
test_stats.rs
|
// Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::Arc;
use kvproto::kvrpcpb::*;
use pd_client::PdClient;
use test_raftstore::*;
use tikv_util::config::*;
fn check_available<T: Simulator>(cluster: &mut Cluster<T>) {
let pd_client = Arc::clone(&cluster.pd_client);
let engine = cluster.get_engine(1);
let raft_engine = cluster.get_raft_engine(1);
let stats = pd_client.get_store_stats(1).unwrap();
assert_eq!(stats.get_region_count(), 2);
let value = vec![0; 1024];
for i in 0..1000 {
let last_available = stats.get_available();
cluster.must_put(format!("k{}", i).as_bytes(), &value);
raft_engine.flush(true).unwrap();
engine.flush(true).unwrap();
sleep_ms(20);
let stats = pd_client.get_store_stats(1).unwrap();
// Because `available` reflects free disk space, other processes may
// reclaim space even while we add data, so we only check that the
// available size has changed.
if stats.get_available() != last_available {
return;
}
}
panic!("available not changed")
}
fn test_simple_store_stats<T: Simulator>(cluster: &mut Cluster<T>) {
let pd_client = Arc::clone(&cluster.pd_client);
cluster.cfg.raft_store.pd_store_heartbeat_tick_interval = ReadableDuration::millis(20);
cluster.run();
// wait store reports stats.
for _ in 0..100 {
sleep_ms(20);
if pd_client.get_store_stats(1).is_some() {
break;
}
}
let engine = cluster.get_engine(1);
let raft_engine = cluster.get_raft_engine(1);
raft_engine.flush(true).unwrap();
engine.flush(true).unwrap();
let last_stats = pd_client.get_store_stats(1).unwrap();
assert_eq!(last_stats.get_region_count(), 1);
cluster.must_put(b"k1", b"v1");
cluster.must_put(b"k3", b"v3");
let region = pd_client.get_region(b"").unwrap();
cluster.must_split(&region, b"k2");
raft_engine.flush(true).unwrap();
engine.flush(true).unwrap();
// wait report region count after split
for _ in 0..100 {
sleep_ms(20);
let stats = pd_client.get_store_stats(1).unwrap();
if stats.get_region_count() == 2 {
break;
}
}
let stats = pd_client.get_store_stats(1).unwrap();
assert_eq!(stats.get_region_count(), 2);
check_available(cluster);
}
#[test]
fn test_node_simple_store_stats() {
let mut cluster = new_node_cluster(0, 1);
test_simple_store_stats(&mut cluster);
}
#[test]
fn test_store_heartbeat_report_hotspots() {
fail::cfg("mock_hotspot_threshold", "return(0)").unwrap();
fail::cfg("mock_tick_interval", "return(0)").unwrap();
let (cluster, client, ctx) = must_new_and_configure_cluster_and_kv_client(|cluster| {
cluster.cfg.raft_store.pd_store_heartbeat_tick_interval = ReadableDuration::millis(10);
|
let mut put_req = RawPutRequest::default();
put_req.set_context(ctx.clone());
put_req.key = k.clone();
put_req.value = v.clone();
let put_resp = client.raw_put(&put_req).unwrap();
assert!(!put_resp.has_region_error());
assert!(put_resp.error.is_empty());
for _i in 0..100 {
// Raw get
let mut get_req = RawGetRequest::default();
get_req.set_context(ctx.clone());
get_req.key = k.clone();
let get_resp = client.raw_get(&get_req).unwrap();
assert_eq!(get_resp.value, v);
}
sleep_ms(50);
let region_id = cluster.get_region_id(b"");
let store_id = 1;
let hot_peers = cluster.pd_client.get_store_hotspots(store_id).unwrap();
let peer_stat = hot_peers.get(&region_id).unwrap();
assert_eq!(peer_stat.get_region_id(), region_id);
assert!(peer_stat.get_read_keys() > 0);
assert!(peer_stat.get_read_bytes() > 0);
fail::remove("mock_tick_interval");
fail::remove("mock_hotspot_threshold");
}
|
});
let (k, v) = (b"key".to_vec(), b"v2".to_vec());
// Raw put
|
random_line_split
|
aux2bib.rs
|
#[macro_use(crate_version, crate_authors)]
extern crate clap;
use inspirer;
use inspirer::InspirerError;
use std::error::Error;
use human_panic::setup_panic;
#[macro_use]
extern crate slog;
use slog::DrainExt;
use slog_term;
use clap::{App, Arg};
fn main()
|
fn run(root_logger: &slog::Logger) -> Result<(), InspirerError> {
info!(root_logger, "Application started");
// Initialize instance of InspirerLib
let lib = inspirer::Inspirer::init(Some(root_logger.new(o!())));
// Define CLI
let matches = App::new("aux2bib")
.version(crate_version!())
.author(crate_authors!(",\n"))
.about("gets BibTeX keys from Inspire")
.arg(
Arg::with_name("INPUT")
.help("Sets the file from which to extract BibTeX keys")
.index(1),
)
.arg(
Arg::with_name("OUTPUT")
.help("Sets the file to which results should be appended")
.index(2),
)
.get_matches();
// Get input from specified file or stdin
let input_data = lib.get_input(matches.value_of("INPUT"))?;
// Extract BibTeX tags from document
let keys = lib.aux2key(input_data);
info!(root_logger, "Extracted BibTeX keys";
"number_of_keys" => keys.len());
// Retrieve BibTeX entries from inspire.net
info!(root_logger, "Retrieving entries");
let mut bibtex_entries: Vec<String> = Vec::new();
for key in keys {
if let Some(bibtex_entry) = lib.bibtex(&key) {
bibtex_entries.push(bibtex_entry);
}
}
// Write BibTeX entries to file or stdout
lib.put_output(matches.value_of("OUTPUT"), bibtex_entries)?;
info!(root_logger, "Done");
Ok(())
}
|
{
setup_panic!();
// Initialize logging
let drain = slog_term::streamer().stderr().build().fuse();
let root_logger = slog::Logger::root(drain, o!("version" => crate_version!()));
if let Err(ref e) = run(&root_logger) {
match e {
_ => error!(root_logger, e.to_string();
"error" => format!("{}", match e.source(){
Some(e) => e.to_string(),
None => String::new(),
})
),
}
::std::process::exit(1);
}
}
|
identifier_body
|
aux2bib.rs
|
#[macro_use(crate_version, crate_authors)]
extern crate clap;
use inspirer;
use inspirer::InspirerError;
use std::error::Error;
use human_panic::setup_panic;
#[macro_use]
extern crate slog;
use slog::DrainExt;
use slog_term;
use clap::{App, Arg};
|
let drain = slog_term::streamer().stderr().build().fuse();
let root_logger = slog::Logger::root(drain, o!("version" => crate_version!()));
if let Err(ref e) = run(&root_logger) {
match e {
_ => error!(root_logger, e.to_string();
"error" => format!("{}", match e.source(){
Some(e) => e.to_string(),
None => String::new(),
})
),
}
::std::process::exit(1);
}
}
fn run(root_logger: &slog::Logger) -> Result<(), InspirerError> {
info!(root_logger, "Application started");
// Initialize instance of InspirerLib
let lib = inspirer::Inspirer::init(Some(root_logger.new(o!())));
// Define CLI
let matches = App::new("aux2bib")
.version(crate_version!())
.author(crate_authors!(",\n"))
.about("gets BibTeX keys from Inspire")
.arg(
Arg::with_name("INPUT")
.help("Sets the file from which to extract BibTeX keys")
.index(1),
)
.arg(
Arg::with_name("OUTPUT")
.help("Sets the file to which results should be appended")
.index(2),
)
.get_matches();
// Get input from specified file or stdin
let input_data = lib.get_input(matches.value_of("INPUT"))?;
// Extract BibTeX tags from document
let keys = lib.aux2key(input_data);
info!(root_logger, "Extracted BibTeX keys";
"number_of_keys" => keys.len());
// Retrieve BibTeX entries from inspire.net
info!(root_logger, "Retrieving entries");
let mut bibtex_entries: Vec<String> = Vec::new();
for key in keys {
if let Some(bibtex_entry) = lib.bibtex(&key) {
bibtex_entries.push(bibtex_entry);
}
}
// Write BibTeX entries to file or stdout
lib.put_output(matches.value_of("OUTPUT"), bibtex_entries)?;
info!(root_logger, "Done");
Ok(())
}
|
fn main() {
setup_panic!();
// Initialize logging
|
random_line_split
|
aux2bib.rs
|
#[macro_use(crate_version, crate_authors)]
extern crate clap;
use inspirer;
use inspirer::InspirerError;
use std::error::Error;
use human_panic::setup_panic;
#[macro_use]
extern crate slog;
use slog::DrainExt;
use slog_term;
use clap::{App, Arg};
fn
|
() {
setup_panic!();
// Initialize logging
let drain = slog_term::streamer().stderr().build().fuse();
let root_logger = slog::Logger::root(drain, o!("version" => crate_version!()));
if let Err(ref e) = run(&root_logger) {
match e {
_ => error!(root_logger, e.to_string();
"error" => format!("{}", match e.source(){
Some(e) => e.to_string(),
None => String::new(),
})
),
}
::std::process::exit(1);
}
}
fn run(root_logger: &slog::Logger) -> Result<(), InspirerError> {
info!(root_logger, "Application started");
// Initialize instance of InspirerLib
let lib = inspirer::Inspirer::init(Some(root_logger.new(o!())));
// Define CLI
let matches = App::new("aux2bib")
.version(crate_version!())
.author(crate_authors!(",\n"))
.about("gets BibTeX keys from Inspire")
.arg(
Arg::with_name("INPUT")
.help("Sets the file from which to extract BibTeX keys")
.index(1),
)
.arg(
Arg::with_name("OUTPUT")
.help("Sets the file to which results should be appended")
.index(2),
)
.get_matches();
// Get input from specified file or stdin
let input_data = lib.get_input(matches.value_of("INPUT"))?;
// Extract BibTeX tags from document
let keys = lib.aux2key(input_data);
info!(root_logger, "Extracted BibTeX keys";
"number_of_keys" => keys.len());
// Retrieve BibTeX entries from inspire.net
info!(root_logger, "Retrieving entries");
let mut bibtex_entries: Vec<String> = Vec::new();
for key in keys {
if let Some(bibtex_entry) = lib.bibtex(&key) {
bibtex_entries.push(bibtex_entry);
}
}
// Write BibTeX entries to file or stdout
lib.put_output(matches.value_of("OUTPUT"), bibtex_entries)?;
info!(root_logger, "Done");
Ok(())
}
|
main
|
identifier_name
|
aux2bib.rs
|
#[macro_use(crate_version, crate_authors)]
extern crate clap;
use inspirer;
use inspirer::InspirerError;
use std::error::Error;
use human_panic::setup_panic;
#[macro_use]
extern crate slog;
use slog::DrainExt;
use slog_term;
use clap::{App, Arg};
fn main() {
setup_panic!();
// Initialize logging
let drain = slog_term::streamer().stderr().build().fuse();
let root_logger = slog::Logger::root(drain, o!("version" => crate_version!()));
if let Err(ref e) = run(&root_logger)
|
}
fn run(root_logger: &slog::Logger) -> Result<(), InspirerError> {
info!(root_logger, "Application started");
// Initialize instance of InspirerLib
let lib = inspirer::Inspirer::init(Some(root_logger.new(o!())));
// Define CLI
let matches = App::new("aux2bib")
.version(crate_version!())
.author(crate_authors!(",\n"))
.about("gets BibTeX keys from Inspire")
.arg(
Arg::with_name("INPUT")
.help("Sets the file from which to extract BibTeX keys")
.index(1),
)
.arg(
Arg::with_name("OUTPUT")
.help("Sets the file to which results should be appended")
.index(2),
)
.get_matches();
// Get input from specified file or stdin
let input_data = lib.get_input(matches.value_of("INPUT"))?;
// Extract BibTeX tags from document
let keys = lib.aux2key(input_data);
info!(root_logger, "Extracted BibTeX keys";
"number_of_keys" => keys.len());
// Retrieve BibTeX entries from inspire.net
info!(root_logger, "Retrieving entries");
let mut bibtex_entries: Vec<String> = Vec::new();
for key in keys {
if let Some(bibtex_entry) = lib.bibtex(&key) {
bibtex_entries.push(bibtex_entry);
}
}
// Write BibTeX entries to file or stdout
lib.put_output(matches.value_of("OUTPUT"), bibtex_entries)?;
info!(root_logger, "Done");
Ok(())
}
|
{
match e {
_ => error!(root_logger, e.to_string();
"error" => format!("{}", match e.source(){
Some(e) => e.to_string(),
None => String::new(),
})
),
}
::std::process::exit(1);
}
|
conditional_block
|
cosmogony2mimir.rs
|
/// This module contains the definition for cosmogony2mimir configuration and command line arguments.
use mimir::domain::model::configuration::ContainerConfig;
use serde::{Deserialize, Serialize};
use snafu::{ResultExt, Snafu};
use std::env;
use std::path::PathBuf;
use mimir::adapters::secondary::elasticsearch::ElasticsearchStorageConfig;
const VERSION: &str = env!("CARGO_PKG_VERSION");
const AUTHORS: &str = env!("CARGO_PKG_AUTHORS");
#[derive(Debug, Snafu)]
pub enum Error {
#[snafu(display("Config Source Error: {}", source))]
ConfigSource { source: common::config::Error },
#[snafu(display("Config Error: {}", source))]
ConfigBuild { source: config::ConfigError },
#[snafu(display("Invalid Configuration: {}", msg))]
Invalid { msg: String },
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Settings {
pub mode: Option<String>,
pub langs: Vec<String>,
pub elasticsearch: ElasticsearchStorageConfig,
pub container: ContainerConfig,
pub nb_threads: Option<usize>,
pub french_id_retrocompatibility: bool,
}
#[derive(Debug, clap::Parser)]
#[clap(
name = "cosmogony2mimir",
about = "Parsing Cosmogony document and indexing its content in Elasticsearch",
version = VERSION,
author = AUTHORS
)]
pub struct
|
{
/// Defines the config directory
///
/// This directory must contain 'elasticsearch' and 'cosmogony2mimir' subdirectories.
#[clap(parse(from_os_str), short = 'c', long = "config-dir")]
pub config_dir: PathBuf,
/// Defines the run mode in {testing, dev, prod,...}
///
/// If no run mode is provided, a default behavior will be used.
#[clap(short = 'm', long = "run-mode")]
pub run_mode: Option<String>,
/// Override settings values using key=value
#[clap(
short = 's',
long = "setting",
multiple_values = false,
multiple_occurrences = true
)]
pub settings: Vec<String>,
/// A file produced by cosmogony
#[clap(short = 'i', long = "input", parse(from_os_str))]
pub input: PathBuf,
#[clap(subcommand)]
pub cmd: Command,
}
#[derive(Debug, clap::Parser)]
pub enum Command {
/// Execute cosmogony2mimir with the given configuration
Run,
/// Prints cosmogony2mimir's configuration
Config,
}
// TODO Parameterize the config directory
impl Settings {
// Read the configuration from <config-dir>/cosmogony2mimir and <config-dir>/elasticsearch
pub fn new(opts: &Opts) -> Result<Self, Error> {
common::config::config_from(
opts.config_dir.as_ref(),
&["cosmogony2mimir", "elasticsearch"],
opts.run_mode.as_deref(),
"MIMIR",
opts.settings.clone(),
)
.context(ConfigSourceSnafu)?
.try_into()
.context(ConfigBuildSnafu)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn should_return_ok_with_default_config_dir() {
let config_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("config");
let opts = Opts {
config_dir,
run_mode: None,
settings: vec![],
cmd: Command::Run,
input: PathBuf::from("foo.jsonl.gz"),
};
let settings = Settings::new(&opts);
assert!(
settings.is_ok(),
"Expected Ok, Got an Err: {}",
settings.unwrap_err().to_string()
);
assert_eq!(settings.unwrap().mode, None);
}
#[test]
fn should_override_elasticsearch_url_with_command_line() {
let config_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("config");
let opts = Opts {
config_dir,
run_mode: None,
settings: vec![String::from("elasticsearch.url='http://localhost:9999'")],
cmd: Command::Run,
input: PathBuf::from("foo.jsonl.gz"),
};
let settings = Settings::new(&opts);
assert!(
settings.is_ok(),
"Expected Ok, Got an Err: {}",
settings.unwrap_err().to_string()
);
assert_eq!(
settings.unwrap().elasticsearch.url.as_str(),
"http://localhost:9999/"
);
}
#[test]
fn should_override_elasticsearch_url_environment_variable() {
let config_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("config");
std::env::set_var("MIMIR_ELASTICSEARCH__URL", "http://localhost:9999");
let opts = Opts {
config_dir,
run_mode: None,
settings: vec![],
cmd: Command::Run,
input: PathBuf::from("foo.osm.pbf"),
};
let settings = Settings::new(&opts);
assert!(
settings.is_ok(),
"Expected Ok, Got an Err: {}",
settings.unwrap_err().to_string()
);
assert_eq!(
settings.unwrap().elasticsearch.url.as_str(),
"http://localhost:9999/"
);
}
}
|
Opts
|
identifier_name
|
cosmogony2mimir.rs
|
/// This module contains the definition for cosmogony2mimir configuration and command line arguments.
use mimir::domain::model::configuration::ContainerConfig;
use serde::{Deserialize, Serialize};
use snafu::{ResultExt, Snafu};
use std::env;
use std::path::PathBuf;
use mimir::adapters::secondary::elasticsearch::ElasticsearchStorageConfig;
const VERSION: &str = env!("CARGO_PKG_VERSION");
const AUTHORS: &str = env!("CARGO_PKG_AUTHORS");
#[derive(Debug, Snafu)]
pub enum Error {
#[snafu(display("Config Source Error: {}", source))]
ConfigSource { source: common::config::Error },
#[snafu(display("Config Error: {}", source))]
ConfigBuild { source: config::ConfigError },
#[snafu(display("Invalid Configuration: {}", msg))]
Invalid { msg: String },
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Settings {
pub mode: Option<String>,
pub langs: Vec<String>,
pub elasticsearch: ElasticsearchStorageConfig,
pub container: ContainerConfig,
pub nb_threads: Option<usize>,
pub french_id_retrocompatibility: bool,
}
|
about = "Parsing Cosmogony document and indexing its content in Elasticsearch",
version = VERSION,
author = AUTHORS
)]
pub struct Opts {
/// Defines the config directory
///
/// This directory must contain 'elasticsearch' and 'cosmogony2mimir' subdirectories.
#[clap(parse(from_os_str), short = 'c', long = "config-dir")]
pub config_dir: PathBuf,
/// Defines the run mode in {testing, dev, prod,...}
///
/// If no run mode is provided, a default behavior will be used.
#[clap(short = 'm', long = "run-mode")]
pub run_mode: Option<String>,
/// Override settings values using key=value
#[clap(
short = 's',
long = "setting",
multiple_values = false,
multiple_occurrences = true
)]
pub settings: Vec<String>,
/// A file produced by cosmogony
#[clap(short = 'i', long = "input", parse(from_os_str))]
pub input: PathBuf,
#[clap(subcommand)]
pub cmd: Command,
}
#[derive(Debug, clap::Parser)]
pub enum Command {
/// Execute cosmogony2mimir with the given configuration
Run,
/// Prints cosmogony2mimir's configuration
Config,
}
// TODO Parameterize the config directory
impl Settings {
// Read the configuration from <config-dir>/cosmogony2mimir and <config-dir>/elasticsearch
pub fn new(opts: &Opts) -> Result<Self, Error> {
common::config::config_from(
opts.config_dir.as_ref(),
&["cosmogony2mimir", "elasticsearch"],
opts.run_mode.as_deref(),
"MIMIR",
opts.settings.clone(),
)
.context(ConfigSourceSnafu)?
.try_into()
.context(ConfigBuildSnafu)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn should_return_ok_with_default_config_dir() {
let config_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("config");
let opts = Opts {
config_dir,
run_mode: None,
settings: vec![],
cmd: Command::Run,
input: PathBuf::from("foo.jsonl.gz"),
};
let settings = Settings::new(&opts);
assert!(
settings.is_ok(),
"Expected Ok, Got an Err: {}",
settings.unwrap_err().to_string()
);
assert_eq!(settings.unwrap().mode, None);
}
#[test]
fn should_override_elasticsearch_url_with_command_line() {
let config_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("config");
let opts = Opts {
config_dir,
run_mode: None,
settings: vec![String::from("elasticsearch.url='http://localhost:9999'")],
cmd: Command::Run,
input: PathBuf::from("foo.jsonl.gz"),
};
let settings = Settings::new(&opts);
assert!(
settings.is_ok(),
"Expected Ok, Got an Err: {}",
settings.unwrap_err().to_string()
);
assert_eq!(
settings.unwrap().elasticsearch.url.as_str(),
"http://localhost:9999/"
);
}
#[test]
fn should_override_elasticsearch_url_environment_variable() {
let config_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("config");
std::env::set_var("MIMIR_ELASTICSEARCH__URL", "http://localhost:9999");
let opts = Opts {
config_dir,
run_mode: None,
settings: vec![],
cmd: Command::Run,
input: PathBuf::from("foo.osm.pbf"),
};
let settings = Settings::new(&opts);
assert!(
settings.is_ok(),
"Expected Ok, Got an Err: {}",
settings.unwrap_err().to_string()
);
assert_eq!(
settings.unwrap().elasticsearch.url.as_str(),
"http://localhost:9999/"
);
}
}
|
#[derive(Debug, clap::Parser)]
#[clap(
name = "cosmogony2mimir",
|
random_line_split
|
python.rs
|
// Copyright (c) 2015 Daniel Grunwald
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this
// software and associated documentation files (the "Software"), to deal in the Software
// without restriction, including without limitation the rights to use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
// to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or
// substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use std;
use std::ffi::CString;
use std::marker::PhantomData;
use libc::c_int;
use ffi;
use objects::{PyObject, PyType, PyBool, PyDict, PyModule};
use err::{self, PyErr, PyResult};
use pythonrun::GILGuard;
/// Marker type that indicates that the GIL is currently held.
///
/// The 'Python' struct is a zero-size marker struct that is required for most Python operations.
/// This is used to indicate that the operation accesses/modifies the Python interpreter state,
/// and thus can only be called if the Python interpreter is initialized and the
/// Python global interpreter lock (GIL) is acquired.
/// The lifetime `'p` represents the lifetime of the Python interpreter.
///
/// You can imagine the GIL to be a giant `Mutex<PythonInterpreterState>`.
/// The type `Python<'p>` then acts like a reference `&'p PythonInterpreterState`.
#[derive(Copy, Clone)]
pub struct Python<'p>(PhantomData<&'p GILGuard>);
/// Trait implemented by all Python object types.
pub trait PythonObject : ::conversion::ToPyObject + Send + Sized + 'static {
/// Casts the Python object to PyObject.
fn as_object(&self) -> &PyObject;
/// Casts the Python object to PyObject.
fn into_object(self) -> PyObject;
/// Unchecked downcast from PyObject to Self.
/// Undefined behavior if the input object does not have the expected type.
unsafe fn unchecked_downcast_from(PyObject) -> Self;
/// Unchecked downcast from PyObject to Self.
/// Undefined behavior if the input object does not have the expected type.
unsafe fn unchecked_downcast_borrow_from(&PyObject) -> &Self;
}
// Marker type that indicates an error while downcasting
pub struct PythonObjectDowncastError<'p>(pub Python<'p>);
/// Trait implemented by Python object types that allow a checked downcast.
pub trait PythonObjectWithCheckedDowncast : PythonObject {
/// Cast from PyObject to a concrete Python object type.
fn downcast_from<'p>(Python<'p>, PyObject) -> Result<Self, PythonObjectDowncastError<'p>>;
/// Cast from PyObject to a concrete Python object type.
fn downcast_borrow_from<'a, 'p>(Python<'p>, &'a PyObject) -> Result<&'a Self, PythonObjectDowncastError<'p>>;
}
/// Trait implemented by Python object types that have a corresponding type object.
pub trait PythonObjectWithTypeObject : PythonObjectWithCheckedDowncast {
/// Retrieves the type object for this Python object type.
fn type_object(Python) -> PyType;
}
pub trait PyClone : Sized {
fn clone_ref(&self, Python) -> Self;
}
impl <T> PyClone for T where T: PythonObject {
#[inline]
fn clone_ref(&self, py: Python) -> T {
let ptr = self.as_object().as_ptr();
unsafe {
T::unchecked_downcast_from(PyObject::from_borrowed_ptr(py, ptr))
}
}
}
impl <T> PyClone for Option<T> where T: PyClone {
#[inline]
fn clone_ref(&self, py: Python) -> Option<T> {
match *self {
Some(ref v) => Some(v.clone_ref(py)),
None => None
}
}
}
pub trait PyDrop : Sized {
fn release_ref(self, Python);
}
impl <T> PyDrop for T where T: PythonObject {
#[inline]
fn release_ref(self, _py: Python) {
let ptr = self.into_object().steal_ptr();
unsafe {
ffi::Py_DECREF(ptr);
}
}
}
impl <T> PyDrop for Option<T> where T: PyDrop {
#[inline]
fn release_ref(self, py: Python) {
match self {
Some(v) => v.release_ref(py),
None => {}
}
}
}
/// This trait allows retrieving the underlying FFI pointer from Python objects.
pub trait ToPythonPointer {
/// Retrieves the underlying FFI pointer (as a borrowed pointer).
fn as_ptr(&self) -> *mut ffi::PyObject;
/// Retrieves the underlying FFI pointer as a "stolen pointer".
fn steal_ptr(self, py: Python) -> *mut ffi::PyObject;
}
/// ToPythonPointer for borrowed Python pointers.
impl ToPythonPointer for PyObject {
#[inline]
fn as_ptr(&self) -> *mut ffi::PyObject {
self.as_ptr()
}
#[inline]
fn steal_ptr(self, _py: Python) -> *mut ffi::PyObject {
self.steal_ptr()
}
}
/// ToPythonPointer for borrowed Python pointers.
impl <'a, T> ToPythonPointer for &'a T where T: PythonObject {
#[inline]
fn as_ptr(&self) -> *mut ffi::PyObject {
self.as_object().as_ptr()
}
#[inline]
fn steal_ptr(self, py: Python) -> *mut ffi::PyObject {
self.as_object().clone_ref(py).steal_ptr()
}
}
/// Convert None into a null pointer.
impl <T> ToPythonPointer for Option<T> where T: ToPythonPointer {
#[inline]
fn as_ptr(&self) -> *mut ffi::PyObject {
match *self {
Some(ref t) => t.as_ptr(),
None => std::ptr::null_mut()
}
}
#[inline]
fn steal_ptr(self, py: Python) -> *mut ffi::PyObject {
match self {
Some(t) => t.steal_ptr(py),
None => std::ptr::null_mut()
}
}
}
impl<'p> Python<'p> {
/// Retrieve Python instance under the assumption that the GIL is already acquired at this point,
/// and stays acquired for the lifetime `'p`.
///
/// Because the output lifetime `'p` is not connected to any input parameter,
/// care must be taken that the compiler infers an appropriate lifetime for `'p`
/// when calling this function.
#[inline]
pub unsafe fn assume_gil_acquired() -> Python<'p> {
Python(PhantomData)
}
/// Acquires the global interpreter lock, which allows access to the Python runtime.
///
/// If the Python runtime is not already initialized, this function will initialize it.
/// See [prepare_freethreaded_python()](fn.prepare_freethreaded_python.html) for details.
#[inline]
pub fn acquire_gil() -> GILGuard
|
/// Temporarily releases the GIL, thus allowing other Python threads to run.
pub fn allow_threads<T, F>(self, f: F) -> T where F : Send + FnOnce() -> T {
// The `Send` bound on the closure prevents the user from
// transferring the `Python` token into the closure.
unsafe {
let save = ffi::PyEval_SaveThread();
let result = f();
ffi::PyEval_RestoreThread(save);
result
}
}
/// Evaluates a Python expression in the given context and returns the result.
///
/// If `globals` is `None`, it defaults to Python module `__main__`.
/// If `locals` is `None`, it defaults to the value of `globals`.
pub fn eval(self, code: &str, globals: Option<&PyDict>,
locals: Option<&PyDict>) -> PyResult<PyObject> {
self.run_code(code, ffi::Py_eval_input, globals, locals)
}
/// Executes one or more Python statements in the given context.
///
/// If `globals` is `None`, it defaults to Python module `__main__`.
/// If `locals` is `None`, it defaults to the value of `globals`.
pub fn run(self, code: &str, globals: Option<&PyDict>,
locals: Option<&PyDict>) -> PyResult<()> {
self.run_code(code, ffi::Py_file_input, globals, locals)?;
Ok(())
}
/// Runs code in the given context.
/// `start` indicates the type of input expected:
/// one of `Py_single_input`, `Py_file_input`, or `Py_eval_input`.
///
/// If `globals` is `None`, it defaults to Python module `__main__`.
/// If `locals` is `None`, it defaults to the value of `globals`.
fn run_code(self, code: &str, start: c_int,
globals: Option<&PyDict>, locals: Option<&PyDict>)
-> PyResult<PyObject> {
let code = CString::new(code).unwrap();
unsafe {
let mptr = ffi::PyImport_AddModule("__main__\0".as_ptr() as *const _);
if mptr.is_null() {
return Err(PyErr::fetch(self));
}
let mdict = ffi::PyModule_GetDict(mptr);
let globals = match globals {
Some(g) => g.as_ptr(),
None => mdict,
};
let locals = match locals {
Some(l) => l.as_ptr(),
None => globals
};
let res_ptr = ffi::PyRun_StringFlags(code.as_ptr(),
start, globals, locals, 0 as *mut _);
err::result_from_owned_ptr(self, res_ptr)
}
}
/// Gets the Python builtin value `None`.
#[allow(non_snake_case)] // the Python keyword starts with uppercase
#[inline]
pub fn None(self) -> PyObject {
unsafe { PyObject::from_borrowed_ptr(self, ffi::Py_None()) }
}
/// Gets the Python builtin value `True`.
#[allow(non_snake_case)] // the Python keyword starts with uppercase
#[inline]
pub fn True(self) -> PyBool {
unsafe { PyObject::from_borrowed_ptr(self, ffi::Py_True()).unchecked_cast_into::<PyBool>() }
}
/// Gets the Python builtin value `False`.
#[allow(non_snake_case)] // the Python keyword starts with uppercase
#[inline]
pub fn False(self) -> PyBool {
unsafe { PyObject::from_borrowed_ptr(self, ffi::Py_False()).unchecked_cast_into::<PyBool>() }
}
/// Gets the Python builtin value `NotImplemented`.
#[allow(non_snake_case)] // the Python keyword starts with uppercase
#[inline]
pub fn NotImplemented(self) -> PyObject {
unsafe { PyObject::from_borrowed_ptr(self, ffi::Py_NotImplemented()) }
}
/// Gets the Python type object for type T.
pub fn get_type<T>(self) -> PyType where T: PythonObjectWithTypeObject {
T::type_object(self)
}
/// Import the Python module with the specified name.
pub fn import(self, name : &str) -> PyResult<PyModule> {
PyModule::import(self, name)
}
}
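// Illustrative sketch, not part of the original file: releasing the GIL around a
// pure-Rust computation with allow_threads. The Send bound keeps the Python token
// out of the closure, as the comment inside allow_threads explains.
#[allow(dead_code)]
fn sum_without_gil(py: Python) -> u64 {
    py.allow_threads(|| (1u64..=1000).sum())
}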
impl <'p> std::fmt::Debug for PythonObjectDowncastError<'p> {
fn fmt(&self, f : &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
f.write_str("PythonObjectDowncastError")
}
}
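// Illustrative sketch, not part of the original file: run() with an explicit
// locals dict, relying on the documented defaults (globals falls back to
// __main__, locals falls back to globals). Only APIs shown elsewhere in this
// file (PyDict::new, set_item, run, eval, extract) are used.
#[allow(dead_code)]
fn run_with_locals(py: Python) -> PyResult<i32> {
    let locals = PyDict::new(py);
    locals.set_item(py, "x", 3)?;
    py.run("y = x * 14", None, Some(&locals))?;
    py.eval("y", None, Some(&locals))?.extract(py)
}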
#[cfg(test)]
mod test {
use {Python, PyDict};
#[test]
fn test_eval() {
let gil = Python::acquire_gil();
let py = gil.python();
// Make sure builtin names are accessible
let v: i32 = py.eval("min(1, 2)", None, None).unwrap().extract(py).unwrap();
assert_eq!(v, 1);
let d = PyDict::new(py);
d.set_item(py, "foo", 13).unwrap();
// Inject our own local namespace
let v: i32 = py.eval("foo + 29", None, Some(&d)).unwrap().extract(py).unwrap();
assert_eq!(v, 42);
// Make sure builtin names are still accessible when using a local namespace
let v: i32 = py.eval("min(foo, 2)", None, Some(&d)).unwrap().extract(py).unwrap();
assert_eq!(v, 2);
}
}
|
{
GILGuard::acquire()
}
|
identifier_body
|
python.rs
|
// Copyright (c) 2015 Daniel Grunwald
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this
// software and associated documentation files (the "Software"), to deal in the Software
// without restriction, including without limitation the rights to use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
// to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or
// substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use std;
use std::ffi::CString;
use std::marker::PhantomData;
use libc::c_int;
use ffi;
use objects::{PyObject, PyType, PyBool, PyDict, PyModule};
use err::{self, PyErr, PyResult};
use pythonrun::GILGuard;
/// Marker type that indicates that the GIL is currently held.
///
/// The 'Python' struct is a zero-size marker struct that is required for most Python operations.
/// This is used to indicate that the operation accesses/modifies the Python interpreter state,
/// and thus can only be called if the Python interpreter is initialized and the
/// Python global interpreter lock (GIL) is acquired.
/// The lifetime `'p` represents the lifetime of the Python interpreter.
///
/// You can imagine the GIL to be a giant `Mutex<PythonInterpreterState>`.
/// The type `Python<'p>` then acts like a reference `&'p PythonInterpreterState`.
#[derive(Copy, Clone)]
pub struct Python<'p>(PhantomData<&'p GILGuard>);
/// Trait implemented by all Python object types.
pub trait PythonObject : ::conversion::ToPyObject + Send + Sized + 'static {
/// Casts the Python object to PyObject.
fn as_object(&self) -> &PyObject;
/// Casts the Python object to PyObject.
fn into_object(self) -> PyObject;
/// Unchecked downcast from PyObject to Self.
/// Undefined behavior if the input object does not have the expected type.
unsafe fn unchecked_downcast_from(PyObject) -> Self;
/// Unchecked downcast from PyObject to Self.
/// Undefined behavior if the input object does not have the expected type.
unsafe fn unchecked_downcast_borrow_from(&PyObject) -> &Self;
}
// Marker type that indicates an error while downcasting
pub struct PythonObjectDowncastError<'p>(pub Python<'p>);
/// Trait implemented by Python object types that allow a checked downcast.
pub trait PythonObjectWithCheckedDowncast : PythonObject {
/// Cast from PyObject to a concrete Python object type.
fn downcast_from<'p>(Python<'p>, PyObject) -> Result<Self, PythonObjectDowncastError<'p>>;
/// Cast from PyObject to a concrete Python object type.
fn downcast_borrow_from<'a, 'p>(Python<'p>, &'a PyObject) -> Result<&'a Self, PythonObjectDowncastError<'p>>;
}
/// Trait implemented by Python object types that have a corresponding type object.
pub trait PythonObjectWithTypeObject : PythonObjectWithCheckedDowncast {
/// Retrieves the type object for this Python object type.
fn type_object(Python) -> PyType;
}
pub trait PyClone : Sized {
fn clone_ref(&self, Python) -> Self;
}
impl <T> PyClone for T where T: PythonObject {
#[inline]
fn clone_ref(&self, py: Python) -> T {
let ptr = self.as_object().as_ptr();
unsafe {
T::unchecked_downcast_from(PyObject::from_borrowed_ptr(py, ptr))
}
}
}
impl <T> PyClone for Option<T> where T: PyClone {
#[inline]
fn clone_ref(&self, py: Python) -> Option<T> {
match *self {
Some(ref v) => Some(v.clone_ref(py)),
None => None
}
}
}
pub trait PyDrop : Sized {
fn release_ref(self, Python);
}
impl <T> PyDrop for T where T: PythonObject {
#[inline]
fn release_ref(self, _py: Python) {
let ptr = self.into_object().steal_ptr();
unsafe {
ffi::Py_DECREF(ptr);
}
}
}
impl <T> PyDrop for Option<T> where T: PyDrop {
#[inline]
fn release_ref(self, py: Python) {
match self {
Some(v) => v.release_ref(py),
None => {}
}
}
}
/// This trait allows retrieving the underlying FFI pointer from Python objects.
pub trait ToPythonPointer {
/// Retrieves the underlying FFI pointer (as a borrowed pointer).
fn as_ptr(&self) -> *mut ffi::PyObject;
/// Retrieves the underlying FFI pointer as a "stolen pointer".
fn steal_ptr(self, py: Python) -> *mut ffi::PyObject;
}
/// ToPythonPointer for owned Python pointers.
impl ToPythonPointer for PyObject {
#[inline]
fn as_ptr(&self) -> *mut ffi::PyObject {
self.as_ptr()
}
|
}
/// ToPythonPointer for borrowed Python pointers.
impl <'a, T> ToPythonPointer for &'a T where T: PythonObject {
#[inline]
fn as_ptr(&self) -> *mut ffi::PyObject {
self.as_object().as_ptr()
}
#[inline]
fn steal_ptr(self, py: Python) -> *mut ffi::PyObject {
self.as_object().clone_ref(py).steal_ptr()
}
}
/// Convert None into a null pointer.
impl <T> ToPythonPointer for Option<T> where T: ToPythonPointer {
#[inline]
fn as_ptr(&self) -> *mut ffi::PyObject {
match *self {
Some(ref t) => t.as_ptr(),
None => std::ptr::null_mut()
}
}
#[inline]
fn steal_ptr(self, py: Python) -> *mut ffi::PyObject {
match self {
Some(t) => t.steal_ptr(py),
None => std::ptr::null_mut()
}
}
}
impl<'p> Python<'p> {
    /// Retrieves a Python instance under the assumption that the GIL is already acquired at this point,
/// and stays acquired for the lifetime `'p`.
///
/// Because the output lifetime `'p` is not connected to any input parameter,
/// care must be taken that the compiler infers an appropriate lifetime for `'p`
/// when calling this function.
#[inline]
pub unsafe fn assume_gil_acquired() -> Python<'p> {
Python(PhantomData)
}
/// Acquires the global interpreter lock, which allows access to the Python runtime.
///
/// If the Python runtime is not already initialized, this function will initialize it.
/// See [prepare_freethreaded_python()](fn.prepare_freethreaded_python.html) for details.
#[inline]
pub fn acquire_gil() -> GILGuard {
GILGuard::acquire()
}
/// Temporarily releases the GIL, thus allowing other Python threads to run.
pub fn allow_threads<T, F>(self, f: F) -> T where F : Send + FnOnce() -> T {
// The `Send` bound on the closure prevents the user from
// transferring the `Python` token into the closure.
unsafe {
let save = ffi::PyEval_SaveThread();
let result = f();
ffi::PyEval_RestoreThread(save);
result
}
}
/// Evaluates a Python expression in the given context and returns the result.
///
/// If `globals` is `None`, it defaults to Python module `__main__`.
/// If `locals` is `None`, it defaults to the value of `globals`.
pub fn eval(self, code: &str, globals: Option<&PyDict>,
locals: Option<&PyDict>) -> PyResult<PyObject> {
self.run_code(code, ffi::Py_eval_input, globals, locals)
}
/// Executes one or more Python statements in the given context.
///
/// If `globals` is `None`, it defaults to Python module `__main__`.
/// If `locals` is `None`, it defaults to the value of `globals`.
pub fn run(self, code: &str, globals: Option<&PyDict>,
locals: Option<&PyDict>) -> PyResult<()> {
self.run_code(code, ffi::Py_file_input, globals, locals)?;
Ok(())
}
/// Runs code in the given context.
/// `start` indicates the type of input expected:
/// one of `Py_single_input`, `Py_file_input`, or `Py_eval_input`.
///
/// If `globals` is `None`, it defaults to Python module `__main__`.
/// If `locals` is `None`, it defaults to the value of `globals`.
fn run_code(self, code: &str, start: c_int,
globals: Option<&PyDict>, locals: Option<&PyDict>)
-> PyResult<PyObject> {
let code = CString::new(code).unwrap();
unsafe {
let mptr = ffi::PyImport_AddModule("__main__\0".as_ptr() as *const _);
if mptr.is_null() {
return Err(PyErr::fetch(self));
}
let mdict = ffi::PyModule_GetDict(mptr);
let globals = match globals {
Some(g) => g.as_ptr(),
None => mdict,
};
let locals = match locals {
Some(l) => l.as_ptr(),
None => globals
};
let res_ptr = ffi::PyRun_StringFlags(code.as_ptr(),
start, globals, locals, 0 as *mut _);
err::result_from_owned_ptr(self, res_ptr)
}
}
/// Gets the Python builtin value `None`.
#[allow(non_snake_case)] // the Python keyword starts with uppercase
#[inline]
pub fn None(self) -> PyObject {
unsafe { PyObject::from_borrowed_ptr(self, ffi::Py_None()) }
}
/// Gets the Python builtin value `True`.
#[allow(non_snake_case)] // the Python keyword starts with uppercase
#[inline]
pub fn True(self) -> PyBool {
unsafe { PyObject::from_borrowed_ptr(self, ffi::Py_True()).unchecked_cast_into::<PyBool>() }
}
/// Gets the Python builtin value `False`.
#[allow(non_snake_case)] // the Python keyword starts with uppercase
#[inline]
pub fn False(self) -> PyBool {
unsafe { PyObject::from_borrowed_ptr(self, ffi::Py_False()).unchecked_cast_into::<PyBool>() }
}
/// Gets the Python builtin value `NotImplemented`.
#[allow(non_snake_case)] // the Python keyword starts with uppercase
#[inline]
pub fn NotImplemented(self) -> PyObject {
unsafe { PyObject::from_borrowed_ptr(self, ffi::Py_NotImplemented()) }
}
/// Gets the Python type object for type T.
pub fn get_type<T>(self) -> PyType where T: PythonObjectWithTypeObject {
T::type_object(self)
}
/// Import the Python module with the specified name.
pub fn import(self, name : &str) -> PyResult<PyModule> {
PyModule::import(self, name)
}
}
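// Illustrative sketch, not part of the original file: looking up a type object and
// importing a module with the helpers above. It assumes PyBool implements
// PythonObjectWithTypeObject (not shown in this file); "sys" is just an arbitrary
// module name for the example.
#[allow(dead_code)]
fn lookup_sketch(py: Python) -> PyResult<()> {
    let _bool_type: PyType = py.get_type::<PyBool>();
    let _sys: PyModule = py.import("sys")?;
    Ok(())
}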
impl <'p> std::fmt::Debug for PythonObjectDowncastError<'p> {
fn fmt(&self, f : &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
f.write_str("PythonObjectDowncastError")
}
}
#[cfg(test)]
mod test {
use {Python, PyDict};
#[test]
fn test_eval() {
let gil = Python::acquire_gil();
let py = gil.python();
// Make sure builtin names are accessible
let v: i32 = py.eval("min(1, 2)", None, None).unwrap().extract(py).unwrap();
assert_eq!(v, 1);
let d = PyDict::new(py);
d.set_item(py, "foo", 13).unwrap();
// Inject our own local namespace
let v: i32 = py.eval("foo + 29", None, Some(&d)).unwrap().extract(py).unwrap();
assert_eq!(v, 42);
// Make sure builtin names are still accessible when using a local namespace
let v: i32 = py.eval("min(foo, 2)", None, Some(&d)).unwrap().extract(py).unwrap();
assert_eq!(v, 2);
}
}
|
#[inline]
fn steal_ptr(self, _py: Python) -> *mut ffi::PyObject {
self.steal_ptr()
}
|
random_line_split
|
python.rs
|
// Copyright (c) 2015 Daniel Grunwald
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this
// software and associated documentation files (the "Software"), to deal in the Software
// without restriction, including without limitation the rights to use, copy, modify, merge,
// publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
// to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or
// substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
// INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
// PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
// FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use std;
use std::ffi::CString;
use std::marker::PhantomData;
use libc::c_int;
use ffi;
use objects::{PyObject, PyType, PyBool, PyDict, PyModule};
use err::{self, PyErr, PyResult};
use pythonrun::GILGuard;
/// Marker type that indicates that the GIL is currently held.
///
/// The 'Python' struct is a zero-size marker struct that is required for most Python operations.
/// This is used to indicate that the operation accesses/modifies the Python interpreter state,
/// and thus can only be called if the Python interpreter is initialized and the
/// Python global interpreter lock (GIL) is acquired.
/// The lifetime `'p` represents the lifetime of the Python interpreter.
///
/// You can imagine the GIL to be a giant `Mutex<PythonInterpreterState>`.
/// The type `Python<'p>` then acts like a reference `&'p PythonInterpreterState`.
#[derive(Copy, Clone)]
pub struct Python<'p>(PhantomData<&'p GILGuard>);
/// Trait implemented by all Python object types.
pub trait PythonObject : ::conversion::ToPyObject + Send + Sized + 'static {
/// Casts the Python object to PyObject.
fn as_object(&self) -> &PyObject;
/// Casts the Python object to PyObject.
fn into_object(self) -> PyObject;
/// Unchecked downcast from PyObject to Self.
/// Undefined behavior if the input object does not have the expected type.
unsafe fn unchecked_downcast_from(PyObject) -> Self;
/// Unchecked downcast from PyObject to Self.
/// Undefined behavior if the input object does not have the expected type.
unsafe fn unchecked_downcast_borrow_from(&PyObject) -> &Self;
}
// Marker type that indicates an error while downcasting
pub struct PythonObjectDowncastError<'p>(pub Python<'p>);
/// Trait implemented by Python object types that allow a checked downcast.
pub trait PythonObjectWithCheckedDowncast : PythonObject {
/// Cast from PyObject to a concrete Python object type.
fn downcast_from<'p>(Python<'p>, PyObject) -> Result<Self, PythonObjectDowncastError<'p>>;
/// Cast from PyObject to a concrete Python object type.
fn downcast_borrow_from<'a, 'p>(Python<'p>, &'a PyObject) -> Result<&'a Self, PythonObjectDowncastError<'p>>;
}
/// Trait implemented by Python object types that have a corresponding type object.
pub trait PythonObjectWithTypeObject : PythonObjectWithCheckedDowncast {
/// Retrieves the type object for this Python object type.
fn type_object(Python) -> PyType;
}
pub trait PyClone : Sized {
fn clone_ref(&self, Python) -> Self;
}
impl <T> PyClone for T where T: PythonObject {
#[inline]
fn clone_ref(&self, py: Python) -> T {
let ptr = self.as_object().as_ptr();
unsafe {
T::unchecked_downcast_from(PyObject::from_borrowed_ptr(py, ptr))
}
}
}
impl <T> PyClone for Option<T> where T: PyClone {
#[inline]
fn clone_ref(&self, py: Python) -> Option<T> {
match *self {
Some(ref v) => Some(v.clone_ref(py)),
None => None
}
}
}
pub trait PyDrop : Sized {
fn release_ref(self, Python);
}
impl <T> PyDrop for T where T: PythonObject {
#[inline]
fn release_ref(self, _py: Python) {
let ptr = self.into_object().steal_ptr();
unsafe {
ffi::Py_DECREF(ptr);
}
}
}
impl <T> PyDrop for Option<T> where T: PyDrop {
#[inline]
fn release_ref(self, py: Python) {
match self {
Some(v) => v.release_ref(py),
None => {}
}
}
}
/// This trait allows retrieving the underlying FFI pointer from Python objects.
pub trait ToPythonPointer {
/// Retrieves the underlying FFI pointer (as a borrowed pointer).
fn as_ptr(&self) -> *mut ffi::PyObject;
/// Retrieves the underlying FFI pointer as a "stolen pointer".
fn steal_ptr(self, py: Python) -> *mut ffi::PyObject;
}
/// ToPythonPointer for owned Python pointers.
impl ToPythonPointer for PyObject {
#[inline]
fn as_ptr(&self) -> *mut ffi::PyObject {
self.as_ptr()
}
#[inline]
fn steal_ptr(self, _py: Python) -> *mut ffi::PyObject {
self.steal_ptr()
}
}
/// ToPythonPointer for borrowed Python pointers.
impl <'a, T> ToPythonPointer for &'a T where T: PythonObject {
#[inline]
fn as_ptr(&self) -> *mut ffi::PyObject {
self.as_object().as_ptr()
}
#[inline]
fn steal_ptr(self, py: Python) -> *mut ffi::PyObject {
self.as_object().clone_ref(py).steal_ptr()
}
}
/// Convert None into a null pointer.
impl <T> ToPythonPointer for Option<T> where T: ToPythonPointer {
#[inline]
fn as_ptr(&self) -> *mut ffi::PyObject {
match *self {
Some(ref t) => t.as_ptr(),
None => std::ptr::null_mut()
}
}
#[inline]
fn steal_ptr(self, py: Python) -> *mut ffi::PyObject {
match self {
Some(t) => t.steal_ptr(py),
None => std::ptr::null_mut()
}
}
}
impl<'p> Python<'p> {
    /// Retrieves a Python instance under the assumption that the GIL is already acquired at this point,
/// and stays acquired for the lifetime `'p`.
///
/// Because the output lifetime `'p` is not connected to any input parameter,
/// care must be taken that the compiler infers an appropriate lifetime for `'p`
/// when calling this function.
#[inline]
pub unsafe fn assume_gil_acquired() -> Python<'p> {
Python(PhantomData)
}
/// Acquires the global interpreter lock, which allows access to the Python runtime.
///
/// If the Python runtime is not already initialized, this function will initialize it.
/// See [prepare_freethreaded_python()](fn.prepare_freethreaded_python.html) for details.
#[inline]
pub fn acquire_gil() -> GILGuard {
GILGuard::acquire()
}
/// Temporarily releases the GIL, thus allowing other Python threads to run.
pub fn allow_threads<T, F>(self, f: F) -> T where F : Send + FnOnce() -> T {
// The `Send` bound on the closure prevents the user from
// transferring the `Python` token into the closure.
unsafe {
let save = ffi::PyEval_SaveThread();
let result = f();
ffi::PyEval_RestoreThread(save);
result
}
}
/// Evaluates a Python expression in the given context and returns the result.
///
/// If `globals` is `None`, it defaults to Python module `__main__`.
/// If `locals` is `None`, it defaults to the value of `globals`.
pub fn eval(self, code: &str, globals: Option<&PyDict>,
locals: Option<&PyDict>) -> PyResult<PyObject> {
self.run_code(code, ffi::Py_eval_input, globals, locals)
}
/// Executes one or more Python statements in the given context.
///
/// If `globals` is `None`, it defaults to Python module `__main__`.
/// If `locals` is `None`, it defaults to the value of `globals`.
pub fn run(self, code: &str, globals: Option<&PyDict>,
locals: Option<&PyDict>) -> PyResult<()> {
self.run_code(code, ffi::Py_file_input, globals, locals)?;
Ok(())
}
/// Runs code in the given context.
/// `start` indicates the type of input expected:
/// one of `Py_single_input`, `Py_file_input`, or `Py_eval_input`.
///
/// If `globals` is `None`, it defaults to Python module `__main__`.
/// If `locals` is `None`, it defaults to the value of `globals`.
fn run_code(self, code: &str, start: c_int,
globals: Option<&PyDict>, locals: Option<&PyDict>)
-> PyResult<PyObject> {
let code = CString::new(code).unwrap();
unsafe {
let mptr = ffi::PyImport_AddModule("__main__\0".as_ptr() as *const _);
if mptr.is_null() {
return Err(PyErr::fetch(self));
}
let mdict = ffi::PyModule_GetDict(mptr);
let globals = match globals {
Some(g) => g.as_ptr(),
None => mdict,
};
let locals = match locals {
Some(l) => l.as_ptr(),
None => globals
};
let res_ptr = ffi::PyRun_StringFlags(code.as_ptr(),
start, globals, locals, 0 as *mut _);
err::result_from_owned_ptr(self, res_ptr)
}
}
/// Gets the Python builtin value `None`.
#[allow(non_snake_case)] // the Python keyword starts with uppercase
#[inline]
pub fn
|
(self) -> PyObject {
unsafe { PyObject::from_borrowed_ptr(self, ffi::Py_None()) }
}
/// Gets the Python builtin value `True`.
#[allow(non_snake_case)] // the Python keyword starts with uppercase
#[inline]
pub fn True(self) -> PyBool {
unsafe { PyObject::from_borrowed_ptr(self, ffi::Py_True()).unchecked_cast_into::<PyBool>() }
}
/// Gets the Python builtin value `False`.
#[allow(non_snake_case)] // the Python keyword starts with uppercase
#[inline]
pub fn False(self) -> PyBool {
unsafe { PyObject::from_borrowed_ptr(self, ffi::Py_False()).unchecked_cast_into::<PyBool>() }
}
/// Gets the Python builtin value `NotImplemented`.
#[allow(non_snake_case)] // the Python keyword starts with uppercase
#[inline]
pub fn NotImplemented(self) -> PyObject {
unsafe { PyObject::from_borrowed_ptr(self, ffi::Py_NotImplemented()) }
}
/// Gets the Python type object for type T.
pub fn get_type<T>(self) -> PyType where T: PythonObjectWithTypeObject {
T::type_object(self)
}
/// Import the Python module with the specified name.
pub fn import(self, name : &str) -> PyResult<PyModule> {
PyModule::import(self, name)
}
}
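// Illustrative sketch, not part of the original file: a checked downcast through
// PythonObjectWithCheckedDowncast. It assumes PyBool implements that trait and
// PythonObject (only the traits themselves are shown in this file), so treat this
// as a sketch rather than a guaranteed API.
#[allow(dead_code)]
fn checked_downcast_sketch<'p>(py: Python<'p>) -> Result<PyBool, PythonObjectDowncastError<'p>> {
    let obj: PyObject = py.True().into_object();
    PyBool::downcast_from(py, obj)
}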
impl <'p> std::fmt::Debug for PythonObjectDowncastError<'p> {
fn fmt(&self, f : &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
f.write_str("PythonObjectDowncastError")
}
}
#[cfg(test)]
mod test {
use {Python, PyDict};
#[test]
fn test_eval() {
let gil = Python::acquire_gil();
let py = gil.python();
// Make sure builtin names are accessible
let v: i32 = py.eval("min(1, 2)", None, None).unwrap().extract(py).unwrap();
assert_eq!(v, 1);
let d = PyDict::new(py);
d.set_item(py, "foo", 13).unwrap();
// Inject our own local namespace
let v: i32 = py.eval("foo + 29", None, Some(&d)).unwrap().extract(py).unwrap();
assert_eq!(v, 42);
// Make sure builtin names are still accessible when using a local namespace
let v: i32 = py.eval("min(foo, 2)", None, Some(&d)).unwrap().extract(py).unwrap();
assert_eq!(v, 2);
}
}
|
None
|
identifier_name
|
bob.rs
|
fn process_response_case(phrase: &str, expected_response: &str) {
assert_eq!(bob::reply(phrase), expected_response);
}
#[test]
/// stating something
fn test_stating_something() {
process_response_case("Tom-ay-to, tom-aaaah-to.", "Whatever.");
}
#[test]
#[ignore]
/// ending with whitespace
fn test_ending_with_whitespace() {
process_response_case("Okay if like my spacebar quite a bit? ", "Sure.");
}
#[test]
#[ignore]
/// shouting numbers
fn test_shouting_numbers() {
process_response_case("1, 2, 3 GO!", "Whoa, chill out!");
}
#[test]
#[ignore]
/// other whitespace
fn test_other_whitespace() {
process_response_case("\r\r ", "Fine. Be that way!");
}
#[test]
#[ignore]
/// shouting with special characters
fn test_shouting_with_special_characters() {
process_response_case(
"ZOMG THE %^*@#$(*^ ZOMBIES ARE COMING!!11!!1!",
"Whoa, chill out!",
);
}
#[test]
#[ignore]
/// talking forcefully
fn test_talking_forcefully() {
process_response_case("Hi there!", "Whatever.");
}
#[test]
#[ignore]
/// prattling on
fn test_prattling_on() {
process_response_case("Wait! Hang on. Are you going to be OK?", "Sure.");
}
#[test]
#[ignore]
/// forceful question
fn test_forceful_question() {
process_response_case("WHAT'S GOING ON?", "Calm down, I know what I'm doing!");
}
#[test]
#[ignore]
/// shouting with no exclamation mark
fn test_shouting_with_no_exclamation_mark() {
process_response_case("I HATE THE DENTIST", "Whoa, chill out!");
}
#[test]
#[ignore]
/// asking gibberish
fn test_asking_gibberish() {
process_response_case("fffbbcbeab?", "Sure.");
}
#[test]
|
#[ignore]
/// question with no letters
fn test_question_with_no_letters() {
process_response_case("4?", "Sure.");
}
#[test]
#[ignore]
/// no letters
fn test_no_letters() {
process_response_case("1, 2, 3", "Whatever.");
}
#[test]
#[ignore]
/// statement containing question mark
fn test_statement_containing_question_mark() {
process_response_case("Ending with? means a question.", "Whatever.");
}
//NEW
#[test]
#[ignore]
/// multiple line question
fn test_multiple_line_question() {
process_response_case(
"\rDoes this cryogenic chamber make me look fat?\rNo.",
"Whatever.",
);
}
#[test]
#[ignore]
/// non-question ending with whitespace
fn test_nonquestion_ending_with_whitespace() {
process_response_case(
"This is a statement ending with whitespace ",
"Whatever.",
);
}
#[test]
#[ignore]
/// shouting
fn test_shouting() {
process_response_case("WATCH OUT!", "Whoa, chill out!");
}
#[test]
#[ignore]
/// non-letters with question
fn test_nonletters_with_question() {
process_response_case(":)?", "Sure.");
}
#[test]
#[ignore]
/// shouting gibberish
fn test_shouting_gibberish() {
process_response_case("FCECDFCAAB", "Whoa, chill out!");
}
#[test]
#[ignore]
/// asking a question
fn test_asking_a_question() {
process_response_case("Does this cryogenic chamber make me look fat?", "Sure.");
}
#[test]
#[ignore]
/// asking a numeric question
fn test_asking_a_numeric_question() {
process_response_case("You are, what, like 15?", "Sure.");
}
#[test]
#[ignore]
/// silence
fn test_silence() {
process_response_case("", "Fine. Be that way!");
}
#[test]
#[ignore]
/// starting with whitespace
fn test_starting_with_whitespace() {
process_response_case(" hmmmmmmm...", "Whatever.");
}
#[test]
#[ignore]
/// using acronyms in regular speech
fn test_using_acronyms_in_regular_speech() {
process_response_case(
"It's OK if you don't want to go work for NASA.",
"Whatever.",
);
}
#[test]
#[ignore]
/// alternate silence
fn test_alternate_silence() {
process_response_case(" ", "Fine. Be that way!");
}
#[test]
#[ignore]
/// prolonged silence
fn test_prolonged_silence() {
process_response_case(" ", "Fine. Be that way!");
}
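// Illustrative sketch, not part of the original test file: one possible bob::reply
// implementation consistent with the cases above. The crate under test may be
// implemented differently; this only makes the expected behavior concrete.
#[allow(dead_code)]
pub fn reply(message: &str) -> &str {
    let trimmed = message.trim();
    let has_letters = trimmed.chars().any(|c| c.is_alphabetic());
    let shouting = has_letters && trimmed.to_uppercase() == trimmed;
    let question = trimmed.ends_with('?');
    if trimmed.is_empty() {
        "Fine. Be that way!"
    } else if shouting && question {
        "Calm down, I know what I'm doing!"
    } else if shouting {
        "Whoa, chill out!"
    } else if question {
        "Sure."
    } else {
        "Whatever."
    }
}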
|
random_line_split
|
|
bob.rs
|
fn process_response_case(phrase: &str, expected_response: &str) {
assert_eq!(bob::reply(phrase), expected_response);
}
#[test]
/// stating something
fn test_stating_something() {
process_response_case("Tom-ay-to, tom-aaaah-to.", "Whatever.");
}
#[test]
#[ignore]
/// ending with whitespace
fn test_ending_with_whitespace() {
process_response_case("Okay if like my spacebar quite a bit? ", "Sure.");
}
#[test]
#[ignore]
/// shouting numbers
fn test_shouting_numbers() {
process_response_case("1, 2, 3 GO!", "Whoa, chill out!");
}
#[test]
#[ignore]
/// other whitespace
fn test_other_whitespace() {
process_response_case("\r\r ", "Fine. Be that way!");
}
#[test]
#[ignore]
/// shouting with special characters
fn test_shouting_with_special_characters() {
process_response_case(
"ZOMG THE %^*@#$(*^ ZOMBIES ARE COMING!!11!!1!",
"Whoa, chill out!",
);
}
#[test]
#[ignore]
/// talking forcefully
fn test_talking_forcefully() {
process_response_case("Hi there!", "Whatever.");
}
#[test]
#[ignore]
/// prattling on
fn test_prattling_on() {
process_response_case("Wait! Hang on. Are you going to be OK?", "Sure.");
}
#[test]
#[ignore]
/// forceful question
fn test_forceful_question() {
process_response_case("WHAT'S GOING ON?", "Calm down, I know what I'm doing!");
}
#[test]
#[ignore]
/// shouting with no exclamation mark
fn test_shouting_with_no_exclamation_mark() {
process_response_case("I HATE THE DENTIST", "Whoa, chill out!");
}
#[test]
#[ignore]
/// asking gibberish
fn test_asking_gibberish() {
process_response_case("fffbbcbeab?", "Sure.");
}
#[test]
#[ignore]
/// question with no letters
fn test_question_with_no_letters() {
process_response_case("4?", "Sure.");
}
#[test]
#[ignore]
/// no letters
fn test_no_letters()
|
#[test]
#[ignore]
/// statement containing question mark
fn test_statement_containing_question_mark() {
process_response_case("Ending with? means a question.", "Whatever.");
}
//NEW
#[test]
#[ignore]
/// multiple line question
fn test_multiple_line_question() {
process_response_case(
"\rDoes this cryogenic chamber make me look fat?\rNo.",
"Whatever.",
);
}
#[test]
#[ignore]
/// non-question ending with whitespace
fn test_nonquestion_ending_with_whitespace() {
process_response_case(
"This is a statement ending with whitespace ",
"Whatever.",
);
}
#[test]
#[ignore]
/// shouting
fn test_shouting() {
process_response_case("WATCH OUT!", "Whoa, chill out!");
}
#[test]
#[ignore]
/// non-letters with question
fn test_nonletters_with_question() {
process_response_case(":)?", "Sure.");
}
#[test]
#[ignore]
/// shouting gibberish
fn test_shouting_gibberish() {
process_response_case("FCECDFCAAB", "Whoa, chill out!");
}
#[test]
#[ignore]
/// asking a question
fn test_asking_a_question() {
process_response_case("Does this cryogenic chamber make me look fat?", "Sure.");
}
#[test]
#[ignore]
/// asking a numeric question
fn test_asking_a_numeric_question() {
process_response_case("You are, what, like 15?", "Sure.");
}
#[test]
#[ignore]
/// silence
fn test_silence() {
process_response_case("", "Fine. Be that way!");
}
#[test]
#[ignore]
/// starting with whitespace
fn test_starting_with_whitespace() {
process_response_case(" hmmmmmmm...", "Whatever.");
}
#[test]
#[ignore]
/// using acronyms in regular speech
fn test_using_acronyms_in_regular_speech() {
process_response_case(
"It's OK if you don't want to go work for NASA.",
"Whatever.",
);
}
#[test]
#[ignore]
/// alternate silence
fn test_alternate_silence() {
process_response_case(" ", "Fine. Be that way!");
}
#[test]
#[ignore]
/// prolonged silence
fn test_prolonged_silence() {
process_response_case(" ", "Fine. Be that way!");
}
|
{
process_response_case("1, 2, 3", "Whatever.");
}
|
identifier_body
|
bob.rs
|
fn process_response_case(phrase: &str, expected_response: &str) {
assert_eq!(bob::reply(phrase), expected_response);
}
#[test]
/// stating something
fn test_stating_something() {
process_response_case("Tom-ay-to, tom-aaaah-to.", "Whatever.");
}
#[test]
#[ignore]
/// ending with whitespace
fn test_ending_with_whitespace() {
process_response_case("Okay if like my spacebar quite a bit? ", "Sure.");
}
#[test]
#[ignore]
/// shouting numbers
fn test_shouting_numbers() {
process_response_case("1, 2, 3 GO!", "Whoa, chill out!");
}
#[test]
#[ignore]
/// other whitespace
fn test_other_whitespace() {
process_response_case("\r\r ", "Fine. Be that way!");
}
#[test]
#[ignore]
/// shouting with special characters
fn test_shouting_with_special_characters() {
process_response_case(
"ZOMG THE %^*@#$(*^ ZOMBIES ARE COMING!!11!!1!",
"Whoa, chill out!",
);
}
#[test]
#[ignore]
/// talking forcefully
fn test_talking_forcefully() {
process_response_case("Hi there!", "Whatever.");
}
#[test]
#[ignore]
/// prattling on
fn test_prattling_on() {
process_response_case("Wait! Hang on. Are you going to be OK?", "Sure.");
}
#[test]
#[ignore]
/// forceful question
fn test_forceful_question() {
process_response_case("WHAT'S GOING ON?", "Calm down, I know what I'm doing!");
}
#[test]
#[ignore]
/// shouting with no exclamation mark
fn test_shouting_with_no_exclamation_mark() {
process_response_case("I HATE THE DENTIST", "Whoa, chill out!");
}
#[test]
#[ignore]
/// asking gibberish
fn test_asking_gibberish() {
process_response_case("fffbbcbeab?", "Sure.");
}
#[test]
#[ignore]
/// question with no letters
fn test_question_with_no_letters() {
process_response_case("4?", "Sure.");
}
#[test]
#[ignore]
/// no letters
fn test_no_letters() {
process_response_case("1, 2, 3", "Whatever.");
}
#[test]
#[ignore]
/// statement containing question mark
fn test_statement_containing_question_mark() {
process_response_case("Ending with? means a question.", "Whatever.");
}
//NEW
#[test]
#[ignore]
/// multiple line question
fn test_multiple_line_question() {
process_response_case(
"\rDoes this cryogenic chamber make me look fat?\rNo.",
"Whatever.",
);
}
#[test]
#[ignore]
/// non-question ending with whitespace
fn test_nonquestion_ending_with_whitespace() {
process_response_case(
"This is a statement ending with whitespace ",
"Whatever.",
);
}
#[test]
#[ignore]
/// shouting
fn
|
() {
process_response_case("WATCH OUT!", "Whoa, chill out!");
}
#[test]
#[ignore]
/// non-letters with question
fn test_nonletters_with_question() {
process_response_case(":)?", "Sure.");
}
#[test]
#[ignore]
/// shouting gibberish
fn test_shouting_gibberish() {
process_response_case("FCECDFCAAB", "Whoa, chill out!");
}
#[test]
#[ignore]
/// asking a question
fn test_asking_a_question() {
process_response_case("Does this cryogenic chamber make me look fat?", "Sure.");
}
#[test]
#[ignore]
/// asking a numeric question
fn test_asking_a_numeric_question() {
process_response_case("You are, what, like 15?", "Sure.");
}
#[test]
#[ignore]
/// silence
fn test_silence() {
process_response_case("", "Fine. Be that way!");
}
#[test]
#[ignore]
/// starting with whitespace
fn test_starting_with_whitespace() {
process_response_case(" hmmmmmmm...", "Whatever.");
}
#[test]
#[ignore]
/// using acronyms in regular speech
fn test_using_acronyms_in_regular_speech() {
process_response_case(
"It's OK if you don't want to go work for NASA.",
"Whatever.",
);
}
#[test]
#[ignore]
/// alternate silence
fn test_alternate_silence() {
process_response_case(" ", "Fine. Be that way!");
}
#[test]
#[ignore]
/// prolonged silence
fn test_prolonged_silence() {
process_response_case(" ", "Fine. Be that way!");
}
|
test_shouting
|
identifier_name
|
random_negative_integers.rs
|
use itertools::Itertools;
use malachite_base::num::basic::floats::PrimitiveFloat;
use malachite_base::num::float::NiceFloat;
use malachite_base::random::EXAMPLE_SEED;
use malachite_base_test_util::stats::common_values_map::common_values_map;
use malachite_base_test_util::stats::median;
use malachite_base_test_util::stats::moments::{moment_stats, MomentStats};
use malachite_nz::integer::random::random_negative_integers;
use malachite_nz::integer::Integer;
use malachite_nz_test_util::integer::IntegerCheckedToF64Wrapper;
fn random_negative_integers_helper(
mean_bits_numerator: u64,
mean_bits_denominator: u64,
expected_values: &[&str],
expected_common_values: &[(&str, usize)],
expected_sample_median: (&str, Option<&str>),
expected_sample_moment_stats: MomentStats,
) {
let xs = random_negative_integers(EXAMPLE_SEED, mean_bits_numerator, mean_bits_denominator);
let actual_values = xs
.clone()
.map(|x| Integer::to_string(&x))
.take(20)
.collect_vec();
let actual_values = actual_values.iter().map(String::as_str).collect_vec();
let actual_common_values = common_values_map(1000000, 10, xs.clone())
.into_iter()
.map(|(x, freq)| (x.to_string(), freq))
.collect_vec();
let actual_common_values = actual_common_values
.iter()
.map(|(x, freq)| (x.as_str(), *freq))
.collect_vec();
let (median_lo, median_hi) = median(xs.clone().take(1000000));
let (median_lo, median_hi) = (
median_lo.to_string(),
median_hi.map(|x| Integer::to_string(&x)),
);
let actual_sample_median = (median_lo.as_str(), median_hi.as_deref());
// Note that the population moments do not exist.
let actual_sample_moment_stats = moment_stats(xs.take(1000000).map(IntegerCheckedToF64Wrapper));
assert_eq!(
(
actual_values.as_slice(),
actual_common_values.as_slice(),
actual_sample_median,
actual_sample_moment_stats
),
(
expected_values,
expected_common_values,
expected_sample_median,
expected_sample_moment_stats
)
);
}
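// Illustrative sketch, not part of the original test: sampling a few values
// directly from the generator under test. EXAMPLE_SEED and
// random_negative_integers are the same items imported above; the exact values
// are whatever the seeded stream produces.
#[allow(dead_code)]
fn print_a_few_negative_integers() {
    for x in random_negative_integers(EXAMPLE_SEED, 32, 1).take(5) {
        println!("{}", x);
    }
}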
#[test]
fn test_random_negative_integers() {
// mean bits = 65/64
let values = &["-1"; 20];
let common_values = &[
("-1", 984681),
("-3", 7622),
("-2", 7455),
("-5", 73),
("-6", 66),
("-7", 54),
("-4", 44),
("-8", 2),
("-10", 2),
("-14", 1),
];
let sample_median = ("-1", None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(-1.023822000000005),
standard_deviation: NiceFloat(0.20727410662829246),
skewness: NiceFloat(-10.72004433095801),
excess_kurtosis: NiceFloat(159.60627558337237),
};
random_negative_integers_helper(
65,
64,
values,
common_values,
sample_median,
sample_moment_stats,
);
// mean bits = 2
let values = &[
"-1", "-24", "-1", "-30", "-6", "-12", "-2", "-1", "-1", "-1", "-1", "-1", "-2", "-2",
"-1", "-1", "-3", "-3", "-1", "-1",
];
let common_values = &[
("-1", 500248),
("-3", 124972),
("-2", 124519),
("-7", 31554),
("-5", 31346),
("-6", 31198),
("-4", 31043),
("-12", 8033),
("-11", 7959),
("-10", 7935),
];
let sample_median = ("-1", None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(-15.68562000000007),
standard_deviation: NiceFloat(2088.3045530403606),
skewness: NiceFloat(-877.2889258611025),
excess_kurtosis: NiceFloat(832799.3689336807),
};
random_negative_integers_helper(
2,
1,
values,
common_values,
sample_median,
sample_moment_stats,
);
// mean bits = 32
let values = &[
"-22",
"-4",
"-178",
"-55845661150",
"-93254818",
"-7577967529619388",
"-8",
"-11316951483471",
"-11",
"-1005760138411689342464923704482",
"-948931",
"-42716754",
"-81013760999253680590984897748479904878392",
"-23",
"-5",
"-488225822927510",
"-1558028859598",
"-29",
"-200127331174844881647",
"-4058622214797175252",
];
let common_values = &[
("-1", 31094),
("-2", 15260),
("-3", 15185),
("-4", 7586),
("-5", 7376),
("-7", 7346),
("-6", 7258),
("-10", 3631),
("-14", 3607),
("-11", 3605),
];
let sample_median = ("-3799067", Some("-3799061"));
let sample_moment_stats = MomentStats {
mean: NiceFloat(-2.312362311300544e130),
standard_deviation: NiceFloat(2.3122865276852406e133),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_negative_integers_helper(
32,
1,
values,
common_values,
sample_median,
sample_moment_stats,
);
// mean bits = 64
let values = &[
"-1030304779202860497815440824491190",
"-886085025458",
"-207326",
"-83590267817164982586207812646050",
"-142592182196136038718074156629812683467448",
"-486577913627642327503939268330036386",
"-5557920650918595",
"-82",
"-3896",
"-259694111319673990840",
"-38511521798151392412656616617957654586378660839",
"-637134",
"-2330568192653124764618470467652346596061",
"-2516",
"-512663303",
"-39317568409",
"-18536901993439",
"-4959577657266999117207",
"-628",
"-42485719907732979",
];
let common_values = &[
("-1", 15720),
("-2", 7718),
("-3", 7584),
("-6", 3790),
("-4", 3739),
("-7", 3704),
("-5", 3673),
("-9", 1918),
("-11", 1916),
("-10", 1904),
];
let sample_median = ("-18438360920148", Some("-18436851140261"));
let sample_moment_stats = MomentStats {
mean: NiceFloat(-5.519478531998525e283),
standard_deviation: NiceFloat(f64::POSITIVE_INFINITY),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_negative_integers_helper(
64,
1,
values,
common_values,
sample_median,
sample_moment_stats,
);
}
#[test]
#[should_panic]
fn random_negative_integers_fail_1()
|
#[test]
#[should_panic]
fn random_negative_integers_fail_2() {
random_negative_integers(EXAMPLE_SEED, 2, 3);
}
|
{
random_negative_integers(EXAMPLE_SEED, 1, 0);
}
|
identifier_body
|
random_negative_integers.rs
|
use itertools::Itertools;
use malachite_base::num::basic::floats::PrimitiveFloat;
use malachite_base::num::float::NiceFloat;
use malachite_base::random::EXAMPLE_SEED;
use malachite_base_test_util::stats::common_values_map::common_values_map;
use malachite_base_test_util::stats::median;
use malachite_base_test_util::stats::moments::{moment_stats, MomentStats};
use malachite_nz::integer::random::random_negative_integers;
use malachite_nz::integer::Integer;
use malachite_nz_test_util::integer::IntegerCheckedToF64Wrapper;
fn random_negative_integers_helper(
mean_bits_numerator: u64,
mean_bits_denominator: u64,
expected_values: &[&str],
expected_common_values: &[(&str, usize)],
expected_sample_median: (&str, Option<&str>),
expected_sample_moment_stats: MomentStats,
) {
let xs = random_negative_integers(EXAMPLE_SEED, mean_bits_numerator, mean_bits_denominator);
let actual_values = xs
.clone()
.map(|x| Integer::to_string(&x))
.take(20)
.collect_vec();
let actual_values = actual_values.iter().map(String::as_str).collect_vec();
let actual_common_values = common_values_map(1000000, 10, xs.clone())
.into_iter()
.map(|(x, freq)| (x.to_string(), freq))
.collect_vec();
let actual_common_values = actual_common_values
.iter()
.map(|(x, freq)| (x.as_str(), *freq))
.collect_vec();
let (median_lo, median_hi) = median(xs.clone().take(1000000));
let (median_lo, median_hi) = (
median_lo.to_string(),
median_hi.map(|x| Integer::to_string(&x)),
);
let actual_sample_median = (median_lo.as_str(), median_hi.as_deref());
// Note that the population moments do not exist.
let actual_sample_moment_stats = moment_stats(xs.take(1000000).map(IntegerCheckedToF64Wrapper));
assert_eq!(
(
actual_values.as_slice(),
actual_common_values.as_slice(),
actual_sample_median,
actual_sample_moment_stats
),
(
expected_values,
expected_common_values,
expected_sample_median,
expected_sample_moment_stats
)
);
}
#[test]
fn
|
() {
// mean bits = 65/64
let values = &["-1"; 20];
let common_values = &[
("-1", 984681),
("-3", 7622),
("-2", 7455),
("-5", 73),
("-6", 66),
("-7", 54),
("-4", 44),
("-8", 2),
("-10", 2),
("-14", 1),
];
let sample_median = ("-1", None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(-1.023822000000005),
standard_deviation: NiceFloat(0.20727410662829246),
skewness: NiceFloat(-10.72004433095801),
excess_kurtosis: NiceFloat(159.60627558337237),
};
random_negative_integers_helper(
65,
64,
values,
common_values,
sample_median,
sample_moment_stats,
);
// mean bits = 2
let values = &[
"-1", "-24", "-1", "-30", "-6", "-12", "-2", "-1", "-1", "-1", "-1", "-1", "-2", "-2",
"-1", "-1", "-3", "-3", "-1", "-1",
];
let common_values = &[
("-1", 500248),
("-3", 124972),
("-2", 124519),
("-7", 31554),
("-5", 31346),
("-6", 31198),
("-4", 31043),
("-12", 8033),
("-11", 7959),
("-10", 7935),
];
let sample_median = ("-1", None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(-15.68562000000007),
standard_deviation: NiceFloat(2088.3045530403606),
skewness: NiceFloat(-877.2889258611025),
excess_kurtosis: NiceFloat(832799.3689336807),
};
random_negative_integers_helper(
2,
1,
values,
common_values,
sample_median,
sample_moment_stats,
);
// mean bits = 32
let values = &[
"-22",
"-4",
"-178",
"-55845661150",
"-93254818",
"-7577967529619388",
"-8",
"-11316951483471",
"-11",
"-1005760138411689342464923704482",
"-948931",
"-42716754",
"-81013760999253680590984897748479904878392",
"-23",
"-5",
"-488225822927510",
"-1558028859598",
"-29",
"-200127331174844881647",
"-4058622214797175252",
];
let common_values = &[
("-1", 31094),
("-2", 15260),
("-3", 15185),
("-4", 7586),
("-5", 7376),
("-7", 7346),
("-6", 7258),
("-10", 3631),
("-14", 3607),
("-11", 3605),
];
let sample_median = ("-3799067", Some("-3799061"));
let sample_moment_stats = MomentStats {
mean: NiceFloat(-2.312362311300544e130),
standard_deviation: NiceFloat(2.3122865276852406e133),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_negative_integers_helper(
32,
1,
values,
common_values,
sample_median,
sample_moment_stats,
);
// mean bits = 64
let values = &[
"-1030304779202860497815440824491190",
"-886085025458",
"-207326",
"-83590267817164982586207812646050",
"-142592182196136038718074156629812683467448",
"-486577913627642327503939268330036386",
"-5557920650918595",
"-82",
"-3896",
"-259694111319673990840",
"-38511521798151392412656616617957654586378660839",
"-637134",
"-2330568192653124764618470467652346596061",
"-2516",
"-512663303",
"-39317568409",
"-18536901993439",
"-4959577657266999117207",
"-628",
"-42485719907732979",
];
let common_values = &[
("-1", 15720),
("-2", 7718),
("-3", 7584),
("-6", 3790),
("-4", 3739),
("-7", 3704),
("-5", 3673),
("-9", 1918),
("-11", 1916),
("-10", 1904),
];
let sample_median = ("-18438360920148", Some("-18436851140261"));
let sample_moment_stats = MomentStats {
mean: NiceFloat(-5.519478531998525e283),
standard_deviation: NiceFloat(f64::POSITIVE_INFINITY),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_negative_integers_helper(
64,
1,
values,
common_values,
sample_median,
sample_moment_stats,
);
}
#[test]
#[should_panic]
fn random_negative_integers_fail_1() {
random_negative_integers(EXAMPLE_SEED, 1, 0);
}
#[test]
#[should_panic]
fn random_negative_integers_fail_2() {
random_negative_integers(EXAMPLE_SEED, 2, 3);
}
|
test_random_negative_integers
|
identifier_name
|
random_negative_integers.rs
|
use itertools::Itertools;
use malachite_base::num::basic::floats::PrimitiveFloat;
use malachite_base::num::float::NiceFloat;
use malachite_base::random::EXAMPLE_SEED;
use malachite_base_test_util::stats::common_values_map::common_values_map;
use malachite_base_test_util::stats::median;
use malachite_base_test_util::stats::moments::{moment_stats, MomentStats};
use malachite_nz::integer::random::random_negative_integers;
use malachite_nz::integer::Integer;
|
fn random_negative_integers_helper(
mean_bits_numerator: u64,
mean_bits_denominator: u64,
expected_values: &[&str],
expected_common_values: &[(&str, usize)],
expected_sample_median: (&str, Option<&str>),
expected_sample_moment_stats: MomentStats,
) {
let xs = random_negative_integers(EXAMPLE_SEED, mean_bits_numerator, mean_bits_denominator);
let actual_values = xs
.clone()
.map(|x| Integer::to_string(&x))
.take(20)
.collect_vec();
let actual_values = actual_values.iter().map(String::as_str).collect_vec();
let actual_common_values = common_values_map(1000000, 10, xs.clone())
.into_iter()
.map(|(x, freq)| (x.to_string(), freq))
.collect_vec();
let actual_common_values = actual_common_values
.iter()
.map(|(x, freq)| (x.as_str(), *freq))
.collect_vec();
let (median_lo, median_hi) = median(xs.clone().take(1000000));
let (median_lo, median_hi) = (
median_lo.to_string(),
median_hi.map(|x| Integer::to_string(&x)),
);
let actual_sample_median = (median_lo.as_str(), median_hi.as_deref());
// Note that the population moments do not exist.
let actual_sample_moment_stats = moment_stats(xs.take(1000000).map(IntegerCheckedToF64Wrapper));
assert_eq!(
(
actual_values.as_slice(),
actual_common_values.as_slice(),
actual_sample_median,
actual_sample_moment_stats
),
(
expected_values,
expected_common_values,
expected_sample_median,
expected_sample_moment_stats
)
);
}
#[test]
fn test_random_negative_integers() {
// mean bits = 65/64
let values = &["-1"; 20];
let common_values = &[
("-1", 984681),
("-3", 7622),
("-2", 7455),
("-5", 73),
("-6", 66),
("-7", 54),
("-4", 44),
("-8", 2),
("-10", 2),
("-14", 1),
];
let sample_median = ("-1", None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(-1.023822000000005),
standard_deviation: NiceFloat(0.20727410662829246),
skewness: NiceFloat(-10.72004433095801),
excess_kurtosis: NiceFloat(159.60627558337237),
};
random_negative_integers_helper(
65,
64,
values,
common_values,
sample_median,
sample_moment_stats,
);
// mean bits = 2
let values = &[
"-1", "-24", "-1", "-30", "-6", "-12", "-2", "-1", "-1", "-1", "-1", "-1", "-2", "-2",
"-1", "-1", "-3", "-3", "-1", "-1",
];
let common_values = &[
("-1", 500248),
("-3", 124972),
("-2", 124519),
("-7", 31554),
("-5", 31346),
("-6", 31198),
("-4", 31043),
("-12", 8033),
("-11", 7959),
("-10", 7935),
];
let sample_median = ("-1", None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(-15.68562000000007),
standard_deviation: NiceFloat(2088.3045530403606),
skewness: NiceFloat(-877.2889258611025),
excess_kurtosis: NiceFloat(832799.3689336807),
};
random_negative_integers_helper(
2,
1,
values,
common_values,
sample_median,
sample_moment_stats,
);
// mean bits = 32
let values = &[
"-22",
"-4",
"-178",
"-55845661150",
"-93254818",
"-7577967529619388",
"-8",
"-11316951483471",
"-11",
"-1005760138411689342464923704482",
"-948931",
"-42716754",
"-81013760999253680590984897748479904878392",
"-23",
"-5",
"-488225822927510",
"-1558028859598",
"-29",
"-200127331174844881647",
"-4058622214797175252",
];
let common_values = &[
("-1", 31094),
("-2", 15260),
("-3", 15185),
("-4", 7586),
("-5", 7376),
("-7", 7346),
("-6", 7258),
("-10", 3631),
("-14", 3607),
("-11", 3605),
];
let sample_median = ("-3799067", Some("-3799061"));
let sample_moment_stats = MomentStats {
mean: NiceFloat(-2.312362311300544e130),
standard_deviation: NiceFloat(2.3122865276852406e133),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_negative_integers_helper(
32,
1,
values,
common_values,
sample_median,
sample_moment_stats,
);
// mean bits = 64
let values = &[
"-1030304779202860497815440824491190",
"-886085025458",
"-207326",
"-83590267817164982586207812646050",
"-142592182196136038718074156629812683467448",
"-486577913627642327503939268330036386",
"-5557920650918595",
"-82",
"-3896",
"-259694111319673990840",
"-38511521798151392412656616617957654586378660839",
"-637134",
"-2330568192653124764618470467652346596061",
"-2516",
"-512663303",
"-39317568409",
"-18536901993439",
"-4959577657266999117207",
"-628",
"-42485719907732979",
];
let common_values = &[
("-1", 15720),
("-2", 7718),
("-3", 7584),
("-6", 3790),
("-4", 3739),
("-7", 3704),
("-5", 3673),
("-9", 1918),
("-11", 1916),
("-10", 1904),
];
let sample_median = ("-18438360920148", Some("-18436851140261"));
let sample_moment_stats = MomentStats {
mean: NiceFloat(-5.519478531998525e283),
standard_deviation: NiceFloat(f64::POSITIVE_INFINITY),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_negative_integers_helper(
64,
1,
values,
common_values,
sample_median,
sample_moment_stats,
);
}
#[test]
#[should_panic]
fn random_negative_integers_fail_1() {
random_negative_integers(EXAMPLE_SEED, 1, 0);
}
#[test]
#[should_panic]
fn random_negative_integers_fail_2() {
random_negative_integers(EXAMPLE_SEED, 2, 3);
}
|
use malachite_nz_test_util::integer::IntegerCheckedToF64Wrapper;
|
random_line_split
|
style.rs
|
//! Code for applying CSS styles to the DOM.
//!
//! This is not very interesting at the moment. It will get much more
//! complicated if I add support for compound selectors.
use dom::{Node, NodeType, ElementData};
use css::{Stylesheet, Rule, Selector, SimpleSelector, Value, Specificity};
use std::collections::HashMap;
/// Map from CSS property names to values.
pub type PropertyMap = HashMap<String, Value>;
/// A node with associated style data.
pub struct StyledNode<'a> {
pub node: &'a Node,
pub specified_values: PropertyMap,
pub children: Vec<StyledNode<'a>>,
}
#[derive(PartialEq)]
pub enum Display {
Inline,
Block,
None,
}
impl<'a> StyledNode<'a> {
/// Return the specified value of a property if it exists, otherwise `None`.
pub fn value(&self, name: &str) -> Option<Value> {
self.specified_values.get(name).map(|v| v.clone())
}
/// Return the specified value of property `name`, or property `fallback_name` if that doesn't
    /// exist, or value `default` if neither does.
pub fn lookup(&self, name: &str, fallback_name: &str, default: &Value) -> Value {
self.value(name).unwrap_or_else(|| self.value(fallback_name)
.unwrap_or_else(|| default.clone()))
}
/// The value of the `display` property (defaults to inline).
pub fn display(&self) -> Display {
match self.value("display") {
Some(Value::Keyword(s)) => match &*s {
"block" => Display::Block,
"none" => Display::None,
_ => Display::Inline
},
_ => Display::Inline
}
}
}
/// Apply a stylesheet to an entire DOM tree, returning a StyledNode tree.
///
/// This finds only the specified values at the moment. Eventually it should be extended to find the
/// computed values too, including inherited values.
pub fn style_tree<'a>(root: &'a Node, stylesheet: &'a Stylesheet) -> StyledNode<'a> {
StyledNode {
node: root,
specified_values: match root.node_type {
NodeType::Element(ref elem) => specified_values(elem, stylesheet),
NodeType::Text(_) => HashMap::new()
},
children: root.children.iter().map(|child| style_tree(child, stylesheet)).collect(),
}
}
/// Apply styles to a single element, returning the specified styles.
///
/// To do: Allow multiple UA/author/user stylesheets, and implement the cascade.
fn specified_values(elem: &ElementData, stylesheet: &Stylesheet) -> PropertyMap
|
/// A single CSS rule and the specificity of its most specific matching selector.
type MatchedRule<'a> = (Specificity, &'a Rule);
/// Find all CSS rules that match the given element.
fn matching_rules<'a>(elem: &ElementData, stylesheet: &'a Stylesheet) -> Vec<MatchedRule<'a>> {
// For now, we just do a linear scan of all the rules. For large
// documents, it would be more efficient to store the rules in hash tables
// based on tag name, id, class, etc.
stylesheet.rules.iter().filter_map(|rule| match_rule(elem, rule)).collect()
}
/// If `rule` matches `elem`, return a `MatchedRule`. Otherwise return `None`.
fn match_rule<'a>(elem: &ElementData, rule: &'a Rule) -> Option<MatchedRule<'a>> {
// Find the first (most specific) matching selector.
rule.selectors.iter().find(|selector| matches(elem, *selector))
.map(|selector| (selector.specificity(), rule))
}
/// Selector matching:
fn matches(elem: &ElementData, selector: &Selector) -> bool {
match *selector {
Selector::Simple(ref simple_selector) => matches_simple_selector(elem, simple_selector)
}
}
fn matches_simple_selector(elem: &ElementData, selector: &SimpleSelector) -> bool {
// Check type selector
    if selector.tag_name.iter().any(|name| elem.tag_name != *name) {
return false;
}
// Check ID selector
    if selector.id.iter().any(|id| elem.id() != Some(id)) {
return false;
}
// Check class selectors
let elem_classes = elem.classes();
    if selector.class.iter().any(|class| !elem_classes.contains(&**class)) {
return false;
}
// We didn't find any non-matching selector components.
return true;
}
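// Standalone illustration, not part of the original file: why specified_values
// sorts matched rules by ascending specificity before inserting declarations.
// Later inserts overwrite earlier ones, so the most specific rule wins.
#[allow(dead_code)]
fn cascade_order_sketch() {
    // (specificity, property, value) triples standing in for MatchedRule data.
    let mut matched = vec![((0, 0, 1), "color", "black"), ((0, 1, 0), "color", "red")];
    matched.sort_by(|a, b| a.0.cmp(&b.0));
    let mut values = HashMap::new();
    for (_, name, value) in matched {
        values.insert(name, value);
    }
    assert_eq!(values["color"], "red"); // the higher-specificity rule wins
}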
|
{
let mut values = HashMap::new();
let mut rules = matching_rules(elem, stylesheet);
// Go through the rules from lowest to highest specificity.
rules.sort_by(|&(a, _), &(b, _)| a.cmp(&b));
for &(_, rule) in rules.iter() {
for declaration in rule.declarations.iter() {
values.insert(declaration.name.clone(), declaration.value.clone());
}
}
return values;
}
|
identifier_body
|
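The `specified_values` body shown above relies on an ordering trick for the cascade: matched rules are sorted from lowest to highest specificity, so later `HashMap::insert` calls overwrite earlier ones and the most specific declaration wins. A minimal, self-contained Rust sketch of that idea, using hypothetical tuple specificities rather than the crate's `Specificity` type:

use std::collections::HashMap;

fn main() {
    // (specificity, property, value): simplified stand-ins for (Specificity, Declaration).
    let mut matched = vec![
        ((0, 1, 0), "color", "red"),   // class selector
        ((1, 0, 0), "color", "blue"),  // id selector, most specific
        ((0, 0, 1), "color", "green"), // tag selector, least specific
    ];
    // Lowest to highest specificity, mirroring rules.sort_by in specified_values.
    matched.sort_by(|a, b| a.0.cmp(&b.0));
    let mut values = HashMap::new();
    for (_, name, value) in matched {
        values.insert(name, value); // a more specific rule overwrites a less specific one
    }
    assert_eq!(values["color"], "blue"); // the id selector's declaration wins
}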
style.rs
|
//! Code for applying CSS styles to the DOM.
//!
//! This is not very interesting at the moment. It will get much more
//! complicated if I add support for compound selectors.
use dom::{Node, NodeType, ElementData};
use css::{Stylesheet, Rule, Selector, SimpleSelector, Value, Specificity};
use std::collections::HashMap;
/// Map from CSS property names to values.
pub type PropertyMap = HashMap<String, Value>;
/// A node with associated style data.
pub struct
|
<'a> {
pub node: &'a Node,
pub specified_values: PropertyMap,
pub children: Vec<StyledNode<'a>>,
}
#[derive(PartialEq)]
pub enum Display {
Inline,
Block,
None,
}
impl<'a> StyledNode<'a> {
/// Return the specified value of a property if it exists, otherwise `None`.
pub fn value(&self, name: &str) -> Option<Value> {
self.specified_values.get(name).map(|v| v.clone())
}
/// Return the specified value of property `name`, or property `fallback_name` if that doesn't
/// exist, or value `default` if neither does.
pub fn lookup(&self, name: &str, fallback_name: &str, default: &Value) -> Value {
self.value(name).unwrap_or_else(|| self.value(fallback_name)
.unwrap_or_else(|| default.clone()))
}
/// The value of the `display` property (defaults to inline).
pub fn display(&self) -> Display {
match self.value("display") {
Some(Value::Keyword(s)) => match &*s {
"block" => Display::Block,
"none" => Display::None,
_ => Display::Inline
},
_ => Display::Inline
}
}
}
/// Apply a stylesheet to an entire DOM tree, returning a StyledNode tree.
///
/// This finds only the specified values at the moment. Eventually it should be extended to find the
/// computed values too, including inherited values.
pub fn style_tree<'a>(root: &'a Node, stylesheet: &'a Stylesheet) -> StyledNode<'a> {
StyledNode {
node: root,
specified_values: match root.node_type {
NodeType::Element(ref elem) => specified_values(elem, stylesheet),
NodeType::Text(_) => HashMap::new()
},
children: root.children.iter().map(|child| style_tree(child, stylesheet)).collect(),
}
}
/// Apply styles to a single element, returning the specified styles.
///
/// To do: Allow multiple UA/author/user stylesheets, and implement the cascade.
fn specified_values(elem: &ElementData, stylesheet: &Stylesheet) -> PropertyMap {
let mut values = HashMap::new();
let mut rules = matching_rules(elem, stylesheet);
// Go through the rules from lowest to highest specificity.
rules.sort_by(|&(a, _), &(b, _)| a.cmp(&b));
for &(_, rule) in rules.iter() {
for declaration in rule.declarations.iter() {
values.insert(declaration.name.clone(), declaration.value.clone());
}
}
return values;
}
/// A single CSS rule and the specificity of its most specific matching selector.
type MatchedRule<'a> = (Specificity, &'a Rule);
/// Find all CSS rules that match the given element.
fn matching_rules<'a>(elem: &ElementData, stylesheet: &'a Stylesheet) -> Vec<MatchedRule<'a>> {
// For now, we just do a linear scan of all the rules. For large
// documents, it would be more efficient to store the rules in hash tables
// based on tag name, id, class, etc.
stylesheet.rules.iter().filter_map(|rule| match_rule(elem, rule)).collect()
}
/// If `rule` matches `elem`, return a `MatchedRule`. Otherwise return `None`.
fn match_rule<'a>(elem: &ElementData, rule: &'a Rule) -> Option<MatchedRule<'a>> {
// Find the first (most specific) matching selector.
rule.selectors.iter().find(|selector| matches(elem, *selector))
.map(|selector| (selector.specificity(), rule))
}
/// Selector matching:
fn matches(elem: &ElementData, selector: &Selector) -> bool {
match *selector {
Selector::Simple(ref simple_selector) => matches_simple_selector(elem, simple_selector)
}
}
fn matches_simple_selector(elem: &ElementData, selector: &SimpleSelector) -> bool {
// Check type selector
if selector.tag_name.iter().any(|name| elem.tag_name != *name) {
return false;
}
// Check ID selector
if selector.id.iter().any(|id| elem.id() != Some(id)) {
return false;
}
// Check class selectors
let elem_classes = elem.classes();
if selector.class.iter().any(|class| !elem_classes.contains(&**class)) {
return false;
}
// We didn't find any non-matching selector components.
return true;
}
|
StyledNode
|
identifier_name
|
style.rs
|
//! Code for applying CSS styles to the DOM.
//!
//! This is not very interesting at the moment. It will get much more
//! complicated if I add support for compound selectors.
use dom::{Node, NodeType, ElementData};
use css::{Stylesheet, Rule, Selector, SimpleSelector, Value, Specificity};
use std::collections::HashMap;
/// Map from CSS property names to values.
pub type PropertyMap = HashMap<String, Value>;
/// A node with associated style data.
pub struct StyledNode<'a> {
pub node: &'a Node,
pub specified_values: PropertyMap,
pub children: Vec<StyledNode<'a>>,
}
#[derive(PartialEq)]
pub enum Display {
Inline,
Block,
None,
}
impl<'a> StyledNode<'a> {
/// Return the specified value of a property if it exists, otherwise `None`.
pub fn value(&self, name: &str) -> Option<Value> {
self.specified_values.get(name).map(|v| v.clone())
}
/// Return the specified value of property `name`, or property `fallback_name` if that doesn't
/// exist, or value `default` if neither does.
pub fn lookup(&self, name: &str, fallback_name: &str, default: &Value) -> Value {
self.value(name).unwrap_or_else(|| self.value(fallback_name)
.unwrap_or_else(|| default.clone()))
}
/// The value of the `display` property (defaults to inline).
pub fn display(&self) -> Display {
match self.value("display") {
Some(Value::Keyword(s)) => match &*s {
"block" => Display::Block,
"none" => Display::None,
_ => Display::Inline
},
_ => Display::Inline
}
}
}
/// Apply a stylesheet to an entire DOM tree, returning a StyledNode tree.
///
/// This finds only the specified values at the moment. Eventually it should be extended to find the
/// computed values too, including inherited values.
pub fn style_tree<'a>(root: &'a Node, stylesheet: &'a Stylesheet) -> StyledNode<'a> {
StyledNode {
node: root,
specified_values: match root.node_type {
NodeType::Element(ref elem) => specified_values(elem, stylesheet),
NodeType::Text(_) => HashMap::new()
},
children: root.children.iter().map(|child| style_tree(child, stylesheet)).collect(),
}
}
/// Apply styles to a single element, returning the specified styles.
///
/// To do: Allow multiple UA/author/user stylesheets, and implement the cascade.
fn specified_values(elem: &ElementData, stylesheet: &Stylesheet) -> PropertyMap {
let mut values = HashMap::new();
let mut rules = matching_rules(elem, stylesheet);
// Go through the rules from lowest to highest specificity.
rules.sort_by(|&(a, _), &(b, _)| a.cmp(&b));
for &(_, rule) in rules.iter() {
for declaration in rule.declarations.iter() {
values.insert(declaration.name.clone(), declaration.value.clone());
}
}
return values;
}
|
/// Find all CSS rules that match the given element.
fn matching_rules<'a>(elem: &ElementData, stylesheet: &'a Stylesheet) -> Vec<MatchedRule<'a>> {
// For now, we just do a linear scan of all the rules. For large
// documents, it would be more efficient to store the rules in hash tables
// based on tag name, id, class, etc.
stylesheet.rules.iter().filter_map(|rule| match_rule(elem, rule)).collect()
}
/// If `rule` matches `elem`, return a `MatchedRule`. Otherwise return `None`.
fn match_rule<'a>(elem: &ElementData, rule: &'a Rule) -> Option<MatchedRule<'a>> {
// Find the first (most specific) matching selector.
rule.selectors.iter().find(|selector| matches(elem, *selector))
.map(|selector| (selector.specificity(), rule))
}
/// Selector matching:
fn matches(elem: &ElementData, selector: &Selector) -> bool {
match *selector {
Selector::Simple(ref simple_selector) => matches_simple_selector(elem, simple_selector)
}
}
fn matches_simple_selector(elem: &ElementData, selector: &SimpleSelector) -> bool {
// Check type selector
if selector.tag_name.iter().any(|name| elem.tag_name != *name) {
return false;
}
// Check ID selector
if selector.id.iter().any(|id| elem.id() != Some(id)) {
return false;
}
// Check class selectors
let elem_classes = elem.classes();
if selector.class.iter().any(|class| !elem_classes.contains(&**class)) {
return false;
}
// We didn't find any non-matching selector components.
return true;
}
|
/// A single CSS rule and the specificity of its most specific matching selector.
type MatchedRule<'a> = (Specificity, &'a Rule);
|
random_line_split
|
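matches_simple_selector in the record above leans on the fact that Option::iter() yields zero or one items, so .any(...) only applies the mismatch test when a selector component is actually present. A tiny standalone illustration (required_tag, elem_tag, and no_requirement are made-up names, not part of the crate):

fn main() {
    // A component that is present constrains the element: "div" does not match "p".
    let required_tag: Option<&str> = Some("p");
    let elem_tag = "div";
    assert!(required_tag.iter().any(|name| elem_tag != *name));

    // An absent component never causes a mismatch: iterating over None yields
    // nothing, so any() returns false and the check falls through.
    let no_requirement: Option<&str> = None;
    assert!(!no_requirement.iter().any(|name| elem_tag != *name));
}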
lib.rs
|
use serde;
use serde::{Serialize, Deserialize};
fn str_from_size_buffer(name_size: usize, name: *const u8) -> Option<&'static str>
{
let name = unsafe { std::slice::from_raw_parts(name, name_size)};
std::str::from_utf8(name).map_or(None,Some)
}
#[derive(Serialize, Deserialize, Debug)]
struct PrepareInstallModule { }
#[repr(u64)]
enum ContentType
{
Json = 1,
Bson = 2
|
}
fn parse_message(content_type: u64, msg_type: u64, len: usize, data: *const u8) -> Messages
{
let msg_content_type = unsafe{std::mem::transmute::<u64, ContentType>(content_type)};
let buffer = unsafe { std::slice::from_raw_parts(data, len)};
match msg_type
{
1 =>
{
let buffer = unsafe { std::str::from_utf8_unchecked(buffer) };
Messages::PrepareInstallModule(serde_json::from_str::<PrepareInstallModule>(buffer).unwrap())
},
_ => Messages::Unknown (buffer)
}
}
#[derive(Debug)]
enum Messages
{
Unknown (&'static [u8]),
PrepareInstallModule(PrepareInstallModule)
}
#[no_mangle]
pub extern "C" fn print_hello_from_rust(msg_content_type: u64,
msg_type: u64,
buffer_size: usize,
buffer: *const u8,
) {
println!("Message: {:?} {:?} {:?} {:?}", msg_content_type, msg_type, buffer_size, buffer);
let msg = parse_message(msg_content_type, msg_type, buffer_size, buffer);
println!("Message: {:?}", msg);
}
|
random_line_split
|
|
lib.rs
|
use serde;
use serde::{Serialize, Deserialize};
fn str_from_size_buffer(name_size: usize, name: *const u8) -> Option<&'static str>
{
let name = unsafe { std::slice::from_raw_parts(name, name_size)};
std::str::from_utf8(name).map_or(None,Some)
}
#[derive(Serialize, Deserialize, Debug)]
struct PrepareInstallModule { }
#[repr(u64)]
enum ContentType
{
Json = 1,
Bson = 2
}
fn parse_message(content_type: u64, msg_type: u64, len: usize, data: *const u8) -> Messages
|
#[derive(Debug)]
enum Messages
{
Unknown (&'static [u8]),
PrepareInstallModule(PrepareInstallModule)
}
#[no_mangle]
pub extern "C" fn print_hello_from_rust(msg_content_type: u64,
msg_type: u64,
buffer_size: usize,
buffer: *const u8,
) {
println!("Message: {:?} {:?} {:?} {:?}", msg_content_type, msg_type, buffer_size, buffer);
let msg = parse_message(msg_content_type, msg_type, buffer_size, buffer);
println!("Message: {:?}", msg);
}
|
{
let msg_content_type = unsafe{std::mem::transmute::<u64, ContentType>(content_type)};
let buffer = unsafe { std::slice::from_raw_parts(data, len)};
match msg_type
{
1 =>
{
let buffer = unsafe { std::str::from_utf8_unchecked(buffer) };
Messages::PrepareInstallModule(serde_json::from_str::<PrepareInstallModule>(buffer).unwrap())
},
_ => Messages::Unknown (buffer)
}
}
|
identifier_body
|
lib.rs
|
use serde;
use serde::{Serialize, Deserialize};
fn str_from_size_buffer(name_size: usize, name: *const u8) -> Option<&'static str>
{
let name = unsafe { std::slice::from_raw_parts(name, name_size)};
std::str::from_utf8(name).map_or(None,Some)
}
#[derive(Serialize, Deserialize, Debug)]
struct PrepareInstallModule { }
#[repr(u64)]
enum ContentType
{
Json = 1,
Bson = 2
}
fn parse_message(content_type: u64, msg_type: u64, len: usize, data: *const u8) -> Messages
{
let msg_content_type = unsafe{std::mem::transmute::<u64, ContentType>(content_type)};
let buffer = unsafe { std::slice::from_raw_parts(data, len)};
match msg_type
{
1 =>
{
let buffer = unsafe { std::str::from_utf8_unchecked(buffer) };
Messages::PrepareInstallModule(serde_json::from_str::<PrepareInstallModule>(buffer).unwrap())
},
_ => Messages::Unknown (buffer)
}
}
#[derive(Debug)]
enum Messages
{
Unknown (&'static [u8]),
PrepareInstallModule(PrepareInstallModule)
}
#[no_mangle]
pub extern "C" fn
|
(msg_content_type: u64,
msg_type: u64,
buffer_size: usize,
buffer: *const u8,
) {
println!("Message: {:?} {:?} {:?} {:?}", msg_content_type, msg_type, buffer_size, buffer);
let msg = parse_message(msg_content_type, msg_type, buffer_size, buffer);
println!("Message: {:?}", msg);
}
|
print_hello_from_rust
|
identifier_name
|
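parse_message in the lib.rs record above converts the raw content_type with std::mem::transmute, which is undefined behavior if the caller ever passes a value other than 1 or 2. A hedged alternative sketch (not the crate's actual API) that maps the discriminant with a match instead:

#[repr(u64)]
#[derive(Debug)]
enum ContentType {
    Json = 1,
    Bson = 2,
}

// Unknown discriminants become None instead of an invalid enum value.
fn content_type_from_u64(raw: u64) -> Option<ContentType> {
    match raw {
        1 => Some(ContentType::Json),
        2 => Some(ContentType::Bson),
        _ => None,
    }
}

fn main() {
    assert!(matches!(content_type_from_u64(1), Some(ContentType::Json)));
    assert!(matches!(content_type_from_u64(2), Some(ContentType::Bson)));
    assert!(content_type_from_u64(42).is_none());
}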
check-static-values-constraints.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verifies all possible restrictions for static values.
use std::kinds::marker;
struct WithDtor;
impl Drop for WithDtor {
fn drop(&mut self) {}
}
// This enum will be used to test the following rules:
// 1. Variants are safe for static
// 2. Expr calls are allowed as long as their arguments are safe
// 3. Expr calls with unsafe arguments for statics are rejected
enum SafeEnum {
Variant1,
Variant2(int),
Variant3(WithDtor),
Variant4(String)
}
// These should be ok
static STATIC1: SafeEnum = SafeEnum::Variant1;
static STATIC2: SafeEnum = SafeEnum::Variant2(0);
// This one should fail
static STATIC3: SafeEnum = SafeEnum::Variant3(WithDtor);
//~^ ERROR statics are not allowed to have destructors
// This enum will be used to test that variants
// are considered unsafe if their enum type implements
// a destructor.
enum UnsafeEnum {
Variant5,
Variant6(int)
}
impl Drop for UnsafeEnum {
fn drop(&mut self) {}
}
static STATIC4: UnsafeEnum = UnsafeEnum::Variant5;
//~^ ERROR statics are not allowed to have destructors
static STATIC5: UnsafeEnum = UnsafeEnum::Variant6(0);
//~^ ERROR statics are not allowed to have destructors
struct SafeStruct {
field1: SafeEnum,
field2: SafeEnum,
}
// Struct fields are safe, hence this static should be safe
static STATIC6: SafeStruct = SafeStruct{field1: SafeEnum::Variant1, field2: SafeEnum::Variant2(0)};
// field2 has an unsafe value, hence this should fail
|
// Test variadic constructor for structs. The base struct should be examined
// as well as every field present in the constructor.
// This example shouldn't fail because all the fields are safe.
static STATIC8: SafeStruct = SafeStruct{field1: SafeEnum::Variant1,
..SafeStruct{field1: SafeEnum::Variant1,
field2: SafeEnum::Variant1}};
// This example should fail because field1 in the base struct is not safe
static STATIC9: SafeStruct = SafeStruct{field1: SafeEnum::Variant1,
..SafeStruct{field1: SafeEnum::Variant3(WithDtor),
field2: SafeEnum::Variant1}};
//~^^ ERROR statics are not allowed to have destructors
struct UnsafeStruct;
impl Drop for UnsafeStruct {
fn drop(&mut self) {}
}
// Types with destructors are not allowed for statics
static STATIC10: UnsafeStruct = UnsafeStruct;
//~^ ERROR statics are not allowed to have destructor
struct MyOwned;
static STATIC11: Box<MyOwned> = box MyOwned;
//~^ ERROR statics are not allowed to have custom pointers
// The following examples test that mutable structs are just forbidden
// to have types with destructors
// These should fail
static mut STATIC12: UnsafeStruct = UnsafeStruct;
//~^ ERROR mutable statics are not allowed to have destructors
//~^^ ERROR statics are not allowed to have destructors
static mut STATIC13: SafeStruct = SafeStruct{field1: SafeEnum::Variant1,
//~^ ERROR mutable statics are not allowed to have destructors
field2: SafeEnum::Variant3(WithDtor)};
//~^ ERROR: statics are not allowed to have destructors
static mut STATIC14: SafeStruct = SafeStruct {
//~^ ERROR mutable statics are not allowed to have destructors
field1: SafeEnum::Variant1,
field2: SafeEnum::Variant4("str".to_string())
};
static STATIC15: &'static [Box<MyOwned>] = &[
box MyOwned, //~ ERROR statics are not allowed to have custom pointers
box MyOwned, //~ ERROR statics are not allowed to have custom pointers
];
static STATIC16: (&'static Box<MyOwned>, &'static Box<MyOwned>) = (
&box MyOwned, //~ ERROR statics are not allowed to have custom pointers
&box MyOwned, //~ ERROR statics are not allowed to have custom pointers
);
static mut STATIC17: SafeEnum = SafeEnum::Variant1;
//~^ ERROR mutable statics are not allowed to have destructors
static STATIC19: Box<int> =
box 3;
//~^ ERROR statics are not allowed to have custom pointers
pub fn main() {
let y = { static x: Box<int> = box 3; x };
//~^ ERROR statics are not allowed to have custom pointers
}
|
static STATIC7: SafeStruct = SafeStruct{field1: SafeEnum::Variant1,
field2: SafeEnum::Variant3(WithDtor)};
//~^ ERROR statics are not allowed to have destructors
|
random_line_split
|
check-static-values-constraints.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verifies all possible restrictions for static values.
use std::kinds::marker;
struct WithDtor;
impl Drop for WithDtor {
fn drop(&mut self) {}
}
// This enum will be used to test the following rules:
// 1. Variants are safe for static
// 2. Expr calls are allowed as long as their arguments are safe
// 3. Expr calls with unsafe arguments for statics are rejected
enum SafeEnum {
Variant1,
Variant2(int),
Variant3(WithDtor),
Variant4(String)
}
// These should be ok
static STATIC1: SafeEnum = SafeEnum::Variant1;
static STATIC2: SafeEnum = SafeEnum::Variant2(0);
// This one should fail
static STATIC3: SafeEnum = SafeEnum::Variant3(WithDtor);
//~^ ERROR statics are not allowed to have destructors
// This enum will be used to test that variants
// are considered unsafe if their enum type implements
// a destructor.
enum UnsafeEnum {
Variant5,
Variant6(int)
}
impl Drop for UnsafeEnum {
fn drop(&mut self) {}
}
static STATIC4: UnsafeEnum = UnsafeEnum::Variant5;
//~^ ERROR statics are not allowed to have destructors
static STATIC5: UnsafeEnum = UnsafeEnum::Variant6(0);
//~^ ERROR statics are not allowed to have destructors
struct SafeStruct {
field1: SafeEnum,
field2: SafeEnum,
}
// Struct fields are safe, hence this static should be safe
static STATIC6: SafeStruct = SafeStruct{field1: SafeEnum::Variant1, field2: SafeEnum::Variant2(0)};
// field2 has an unsafe value, hence this should fail
static STATIC7: SafeStruct = SafeStruct{field1: SafeEnum::Variant1,
field2: SafeEnum::Variant3(WithDtor)};
//~^ ERROR statics are not allowed to have destructors
// Test variadic constructor for structs. The base struct should be examined
// as well as every field present in the constructor.
// This example shouldn't fail because all the fields are safe.
static STATIC8: SafeStruct = SafeStruct{field1: SafeEnum::Variant1,
..SafeStruct{field1: SafeEnum::Variant1,
field2: SafeEnum::Variant1}};
// This example should fail because field1 in the base struct is not safe
static STATIC9: SafeStruct = SafeStruct{field1: SafeEnum::Variant1,
..SafeStruct{field1: SafeEnum::Variant3(WithDtor),
field2: SafeEnum::Variant1}};
//~^^ ERROR statics are not allowed to have destructors
struct UnsafeStruct;
impl Drop for UnsafeStruct {
fn drop(&mut self) {}
}
// Types with destructors are not allowed for statics
static STATIC10: UnsafeStruct = UnsafeStruct;
//~^ ERROR statics are not allowed to have destructor
struct MyOwned;
static STATIC11: Box<MyOwned> = box MyOwned;
//~^ ERROR statics are not allowed to have custom pointers
// The following examples test that mutable structs are just forbidden
// to have types with destructors
// These should fail
static mut STATIC12: UnsafeStruct = UnsafeStruct;
//~^ ERROR mutable statics are not allowed to have destructors
//~^^ ERROR statics are not allowed to have destructors
static mut STATIC13: SafeStruct = SafeStruct{field1: SafeEnum::Variant1,
//~^ ERROR mutable statics are not allowed to have destructors
field2: SafeEnum::Variant3(WithDtor)};
//~^ ERROR: statics are not allowed to have destructors
static mut STATIC14: SafeStruct = SafeStruct {
//~^ ERROR mutable statics are not allowed to have destructors
field1: SafeEnum::Variant1,
field2: SafeEnum::Variant4("str".to_string())
};
static STATIC15: &'static [Box<MyOwned>] = &[
box MyOwned, //~ ERROR statics are not allowed to have custom pointers
box MyOwned, //~ ERROR statics are not allowed to have custom pointers
];
static STATIC16: (&'static Box<MyOwned>, &'static Box<MyOwned>) = (
&box MyOwned, //~ ERROR statics are not allowed to have custom pointers
&box MyOwned, //~ ERROR statics are not allowed to have custom pointers
);
static mut STATIC17: SafeEnum = SafeEnum::Variant1;
//~^ ERROR mutable statics are not allowed to have destructors
static STATIC19: Box<int> =
box 3;
//~^ ERROR statics are not allowed to have custom pointers
pub fn main()
|
{
let y = { static x: Box<int> = box 3; x };
//~^ ERROR statics are not allowed to have custom pointers
}
|
identifier_body
|
|
check-static-values-constraints.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verifies all possible restrictions for static values.
use std::kinds::marker;
struct WithDtor;
impl Drop for WithDtor {
fn drop(&mut self) {}
}
// This enum will be used to test the following rules:
// 1. Variants are safe for static
// 2. Expr calls are allowed as long as their arguments are safe
// 3. Expr calls with unsafe arguments for statics are rejected
enum
|
{
Variant1,
Variant2(int),
Variant3(WithDtor),
Variant4(String)
}
// These should be ok
static STATIC1: SafeEnum = SafeEnum::Variant1;
static STATIC2: SafeEnum = SafeEnum::Variant2(0);
// This one should fail
static STATIC3: SafeEnum = SafeEnum::Variant3(WithDtor);
//~^ ERROR statics are not allowed to have destructors
// This enum will be used to test that variants
// are considered unsafe if their enum type implements
// a destructor.
enum UnsafeEnum {
Variant5,
Variant6(int)
}
impl Drop for UnsafeEnum {
fn drop(&mut self) {}
}
static STATIC4: UnsafeEnum = UnsafeEnum::Variant5;
//~^ ERROR statics are not allowed to have destructors
static STATIC5: UnsafeEnum = UnsafeEnum::Variant6(0);
//~^ ERROR statics are not allowed to have destructors
struct SafeStruct {
field1: SafeEnum,
field2: SafeEnum,
}
// Struct fields are safe, hence this static should be safe
static STATIC6: SafeStruct = SafeStruct{field1: SafeEnum::Variant1, field2: SafeEnum::Variant2(0)};
// field2 has an unsafe value, hence this should fail
static STATIC7: SafeStruct = SafeStruct{field1: SafeEnum::Variant1,
field2: SafeEnum::Variant3(WithDtor)};
//~^ ERROR statics are not allowed to have destructors
// Test variadic constructor for structs. The base struct should be examined
// as well as every field present in the constructor.
// This example shouldn't fail because all the fields are safe.
static STATIC8: SafeStruct = SafeStruct{field1: SafeEnum::Variant1,
..SafeStruct{field1: SafeEnum::Variant1,
field2: SafeEnum::Variant1}};
// This example should fail because field1 in the base struct is not safe
static STATIC9: SafeStruct = SafeStruct{field1: SafeEnum::Variant1,
..SafeStruct{field1: SafeEnum::Variant3(WithDtor),
field2: SafeEnum::Variant1}};
//~^^ ERROR statics are not allowed to have destructors
struct UnsafeStruct;
impl Drop for UnsafeStruct {
fn drop(&mut self) {}
}
// Types with destructors are not allowed for statics
static STATIC10: UnsafeStruct = UnsafeStruct;
//~^ ERROR statics are not allowed to have destructor
struct MyOwned;
static STATIC11: Box<MyOwned> = box MyOwned;
//~^ ERROR statics are not allowed to have custom pointers
// The following examples test that mutable structs are just forbidden
// to have types with destructors
// These should fail
static mut STATIC12: UnsafeStruct = UnsafeStruct;
//~^ ERROR mutable statics are not allowed to have destructors
//~^^ ERROR statics are not allowed to have destructors
static mut STATIC13: SafeStruct = SafeStruct{field1: SafeEnum::Variant1,
//~^ ERROR mutable statics are not allowed to have destructors
field2: SafeEnum::Variant3(WithDtor)};
//~^ ERROR: statics are not allowed to have destructors
static mut STATIC14: SafeStruct = SafeStruct {
//~^ ERROR mutable statics are not allowed to have destructors
field1: SafeEnum::Variant1,
field2: SafeEnum::Variant4("str".to_string())
};
static STATIC15: &'static [Box<MyOwned>] = &[
box MyOwned, //~ ERROR statics are not allowed to have custom pointers
box MyOwned, //~ ERROR statics are not allowed to have custom pointers
];
static STATIC16: (&'static Box<MyOwned>, &'static Box<MyOwned>) = (
&box MyOwned, //~ ERROR statics are not allowed to have custom pointers
&box MyOwned, //~ ERROR statics are not allowed to have custom pointers
);
static mut STATIC17: SafeEnum = SafeEnum::Variant1;
//~^ ERROR mutable statics are not allowed to have destructors
static STATIC19: Box<int> =
box 3;
//~^ ERROR statics are not allowed to have custom pointers
pub fn main() {
let y = { static x: Box<int> = box 3; x };
//~^ ERROR statics are not allowed to have custom pointers
}
|
SafeEnum
|
identifier_name
|
mod.rs
|
//! This is a simple "Flat" rendering pipeline.
//! It doesn't support blended objects and uses front-to-back ordering.
//! The pipeline is meant for simple applications and fall-back paths.
use std::marker::PhantomData;
use gfx;
use gfx_phase;
use gfx_scene;
/// A short typedef for the phase.
pub type Phase<R> = gfx_phase::CachedPhase<R,
::Material<R>,
::view::Info<f32>,
Technique<R>,
>;
mod param {
#![allow(missing_docs)]
use gfx::shade::TextureParam;
gfx_parameters!( Struct {
u_Transform@ mvp: [[f32; 4]; 4],
u_Color@ color: [f32; 4],
t_Diffuse@ texture: TextureParam<R>,
u_AlphaTest@ alpha_test: f32,
});
}
const FLAT_VS : &'static [u8] = include_bytes!("../../gpu/flat.glslv");
const FLAT_FS : &'static [u8] = include_bytes!("../../gpu/flat.glslf");
const FLAT_TEX_VS: &'static [u8] = include_bytes!("../../gpu/flat_tex.glslv");
const FLAT_TEX_FS: &'static [u8] = include_bytes!("../../gpu/flat_tex.glslf");
/// Pipeline creation error.
#[derive(Clone, Debug, PartialEq)]
pub enum
|
{
/// Failed to create a texture.
Texture(gfx::tex::TextureError),
/// Failed to link a program.
Program(gfx::ProgramError),
}
impl From<gfx::tex::TextureError> for Error {
fn from(e: gfx::tex::TextureError) -> Error {
Error::Texture(e)
}
}
impl From<gfx::ProgramError> for Error {
fn from(e: gfx::ProgramError) -> Error {
Error::Program(e)
}
}
/// The core technique of the pipeline.
pub struct Technique<R: gfx::Resources> {
program: gfx::handle::Program<R>,
program_textured: gfx::handle::Program<R>,
state: gfx::DrawState,
/// The default texture used for materials that don't have it.
pub default_texture: gfx::handle::Texture<R>,
}
impl<R: gfx::Resources> Technique<R> {
/// Create a new technique.
pub fn new<F: gfx::Factory<R>>(factory: &mut F)
-> Result<Technique<R>, Error> {
use gfx::traits::FactoryExt;
Ok(Technique {
program: try!(factory.link_program(FLAT_VS, FLAT_FS)),
program_textured: try!(factory.link_program(FLAT_TEX_VS, FLAT_TEX_FS)),
state: gfx::DrawState::new().depth(gfx::state::Comparison::LessEqual, true),
default_texture: try!(factory.create_texture_rgba8_static(1, 1, &[0xFFFFFFFF])),
})
}
}
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum Kernel {
Flat,
Textured,
AlphaCut(::AlphaThreshold),
}
impl<R: gfx::Resources> gfx_phase::Technique<R, ::Material<R>, ::view::Info<f32>> for Technique<R> {
type Kernel = Kernel;
type Params = param::Struct<R>;
fn test(&self, mesh: &gfx::Mesh<R>, mat: &::Material<R>) -> Option<Kernel> {
let textured = mat.texture.is_some() &&
mesh.attributes.iter().find(|a| a.name == "a_Tex0").is_some();
match mat.transparency {
::Transparency::Opaque if textured => Some(Kernel::Textured),
::Transparency::Opaque => Some(Kernel::Flat),
::Transparency::Cutout(v) if textured => Some(Kernel::AlphaCut(v)),
_ => None
}
}
fn compile<'a>(&'a self, kernel: Kernel)
-> gfx_phase::TechResult<'a, R, param::Struct<R>> {
( if kernel != Kernel::Flat {
&self.program_textured
} else {
&self.program
},
param::Struct {
mvp: [[0.0; 4]; 4],
color: [0.0; 4],
texture: (self.default_texture.clone(), None),
alpha_test: if let Kernel::AlphaCut(v) = kernel {
v as f32 / 255 as f32
} else { 0.0 },
_r: PhantomData,
},
&self.state,
None,
)
}
fn fix_params(&self, mat: &::Material<R>, space: &::view::Info<f32>,
params: &mut param::Struct<R>) {
use cgmath::FixedArray;
params.mvp = *space.mx_vertex.as_fixed();
params.color = mat.color;
if let Some(ref tex) = mat.texture {
params.texture = tex.clone();
}
}
}
/// The flat pipeline.
pub struct Pipeline<R: gfx::Resources> {
/// The only rendering phase.
pub phase: Phase<R>,
/// Background color. Set to none if you don't want the screen to be cleared.
pub background: Option<gfx::ColorValue>,
}
impl<R: gfx::Resources> Pipeline<R> {
/// Create a new pipeline.
pub fn new<F: gfx::Factory<R>>(factory: &mut F)
-> Result<Pipeline<R>, Error> {
Technique::new(factory).map(|tech| Pipeline {
phase: gfx_phase::Phase::new("Main", tech)
.with_sort(gfx_phase::sort::front_to_back)
.with_cache(),
background: Some([0.0; 4]),
})
}
}
impl<R: gfx::Resources> ::Pipeline<f32, R> for Pipeline<R> {
fn render<A, T>(&mut self, scene: &A, camera: &A::Camera, stream: &mut T)
-> Result<A::Status, gfx_scene::Error> where
A: gfx_scene::AbstractScene<R, ViewInfo = ::view::Info<f32>, Material = ::Material<R>>,
T: gfx::Stream<R>,
{
// clear
if let Some(color) = self.background {
stream.clear(gfx::ClearData {
color: color,
depth: 1.0,
stencil: 0,
});
}
// draw
scene.draw(&mut self.phase, camera, stream)
}
}
|
Error
|
identifier_name
|
mod.rs
|
//! This is a simple "Flat" rendering pipeline.
//! It doesn't support blended objects and uses front-to-back ordering.
//! The pipeline is meant for simple applications and fall-back paths.
use std::marker::PhantomData;
use gfx;
use gfx_phase;
use gfx_scene;
/// A short typedef for the phase.
pub type Phase<R> = gfx_phase::CachedPhase<R,
::Material<R>,
::view::Info<f32>,
Technique<R>,
>;
mod param {
#![allow(missing_docs)]
use gfx::shade::TextureParam;
gfx_parameters!( Struct {
u_Transform@ mvp: [[f32; 4]; 4],
u_Color@ color: [f32; 4],
t_Diffuse@ texture: TextureParam<R>,
u_AlphaTest@ alpha_test: f32,
});
}
const FLAT_VS : &'static [u8] = include_bytes!("../../gpu/flat.glslv");
const FLAT_FS : &'static [u8] = include_bytes!("../../gpu/flat.glslf");
const FLAT_TEX_VS: &'static [u8] = include_bytes!("../../gpu/flat_tex.glslv");
const FLAT_TEX_FS: &'static [u8] = include_bytes!("../../gpu/flat_tex.glslf");
/// Pipeline creation error.
#[derive(Clone, Debug, PartialEq)]
pub enum Error {
/// Failed to create a texture.
Texture(gfx::tex::TextureError),
/// Failed to link a program.
Program(gfx::ProgramError),
}
impl From<gfx::tex::TextureError> for Error {
fn from(e: gfx::tex::TextureError) -> Error
|
}
impl From<gfx::ProgramError> for Error {
fn from(e: gfx::ProgramError) -> Error {
Error::Program(e)
}
}
/// The core technique of the pipeline.
pub struct Technique<R: gfx::Resources> {
program: gfx::handle::Program<R>,
program_textured: gfx::handle::Program<R>,
state: gfx::DrawState,
/// The default texture used for materials that don't have it.
pub default_texture: gfx::handle::Texture<R>,
}
impl<R: gfx::Resources> Technique<R> {
/// Create a new technique.
pub fn new<F: gfx::Factory<R>>(factory: &mut F)
-> Result<Technique<R>, Error> {
use gfx::traits::FactoryExt;
Ok(Technique {
program: try!(factory.link_program(FLAT_VS, FLAT_FS)),
program_textured: try!(factory.link_program(FLAT_TEX_VS, FLAT_TEX_FS)),
state: gfx::DrawState::new().depth(gfx::state::Comparison::LessEqual, true),
default_texture: try!(factory.create_texture_rgba8_static(1, 1, &[0xFFFFFFFF])),
})
}
}
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum Kernel {
Flat,
Textured,
AlphaCut(::AlphaThreshold),
}
impl<R: gfx::Resources> gfx_phase::Technique<R, ::Material<R>, ::view::Info<f32>> for Technique<R> {
type Kernel = Kernel;
type Params = param::Struct<R>;
fn test(&self, mesh: &gfx::Mesh<R>, mat: &::Material<R>) -> Option<Kernel> {
let textured = mat.texture.is_some() &&
mesh.attributes.iter().find(|a| a.name == "a_Tex0").is_some();
match mat.transparency {
::Transparency::Opaque if textured => Some(Kernel::Textured),
::Transparency::Opaque => Some(Kernel::Flat),
::Transparency::Cutout(v) if textured => Some(Kernel::AlphaCut(v)),
_ => None
}
}
fn compile<'a>(&'a self, kernel: Kernel)
-> gfx_phase::TechResult<'a, R, param::Struct<R>> {
( if kernel != Kernel::Flat {
&self.program_textured
} else {
&self.program
},
param::Struct {
mvp: [[0.0; 4]; 4],
color: [0.0; 4],
texture: (self.default_texture.clone(), None),
alpha_test: if let Kernel::AlphaCut(v) = kernel {
v as f32 / 255 as f32
} else { 0.0 },
_r: PhantomData,
},
&self.state,
None,
)
}
fn fix_params(&self, mat: &::Material<R>, space: &::view::Info<f32>,
params: &mut param::Struct<R>) {
use cgmath::FixedArray;
params.mvp = *space.mx_vertex.as_fixed();
params.color = mat.color;
if let Some(ref tex) = mat.texture {
params.texture = tex.clone();
}
}
}
/// The flat pipeline.
pub struct Pipeline<R: gfx::Resources> {
/// The only rendering phase.
pub phase: Phase<R>,
/// Background color. Set to none if you don't want the screen to be cleared.
pub background: Option<gfx::ColorValue>,
}
impl<R: gfx::Resources> Pipeline<R> {
/// Create a new pipeline.
pub fn new<F: gfx::Factory<R>>(factory: &mut F)
-> Result<Pipeline<R>, Error> {
Technique::new(factory).map(|tech| Pipeline {
phase: gfx_phase::Phase::new("Main", tech)
.with_sort(gfx_phase::sort::front_to_back)
.with_cache(),
background: Some([0.0; 4]),
})
}
}
impl<R: gfx::Resources> ::Pipeline<f32, R> for Pipeline<R> {
fn render<A, T>(&mut self, scene: &A, camera: &A::Camera, stream: &mut T)
-> Result<A::Status, gfx_scene::Error> where
A: gfx_scene::AbstractScene<R, ViewInfo = ::view::Info<f32>, Material = ::Material<R>>,
T: gfx::Stream<R>,
{
// clear
if let Some(color) = self.background {
stream.clear(gfx::ClearData {
color: color,
depth: 1.0,
stencil: 0,
});
}
// draw
scene.draw(&mut self.phase, camera, stream)
}
}
|
{
Error::Texture(e)
}
|
identifier_body
|
mod.rs
|
//! This is a simple "Flat" rendering pipeline.
//! It doesn't support blended objects and uses front-to-back ordering.
//! The pipeline is meant for simple applications and fall-back paths.
use std::marker::PhantomData;
use gfx;
use gfx_phase;
use gfx_scene;
/// A short typedef for the phase.
pub type Phase<R> = gfx_phase::CachedPhase<R,
::Material<R>,
::view::Info<f32>,
Technique<R>,
>;
mod param {
#![allow(missing_docs)]
use gfx::shade::TextureParam;
gfx_parameters!( Struct {
u_Transform@ mvp: [[f32; 4]; 4],
u_Color@ color: [f32; 4],
t_Diffuse@ texture: TextureParam<R>,
u_AlphaTest@ alpha_test: f32,
});
}
const FLAT_VS : &'static [u8] = include_bytes!("../../gpu/flat.glslv");
const FLAT_FS : &'static [u8] = include_bytes!("../../gpu/flat.glslf");
const FLAT_TEX_VS: &'static [u8] = include_bytes!("../../gpu/flat_tex.glslv");
const FLAT_TEX_FS: &'static [u8] = include_bytes!("../../gpu/flat_tex.glslf");
/// Pipeline creation error.
#[derive(Clone, Debug, PartialEq)]
pub enum Error {
/// Failed to create a texture.
Texture(gfx::tex::TextureError),
/// Failed to link a program.
Program(gfx::ProgramError),
}
impl From<gfx::tex::TextureError> for Error {
fn from(e: gfx::tex::TextureError) -> Error {
Error::Texture(e)
}
}
impl From<gfx::ProgramError> for Error {
fn from(e: gfx::ProgramError) -> Error {
Error::Program(e)
}
}
|
pub struct Technique<R: gfx::Resources> {
program: gfx::handle::Program<R>,
program_textured: gfx::handle::Program<R>,
state: gfx::DrawState,
/// The default texture used for materials that don't have it.
pub default_texture: gfx::handle::Texture<R>,
}
impl<R: gfx::Resources> Technique<R> {
/// Create a new technique.
pub fn new<F: gfx::Factory<R>>(factory: &mut F)
-> Result<Technique<R>, Error> {
use gfx::traits::FactoryExt;
Ok(Technique {
program: try!(factory.link_program(FLAT_VS, FLAT_FS)),
program_textured: try!(factory.link_program(FLAT_TEX_VS, FLAT_TEX_FS)),
state: gfx::DrawState::new().depth(gfx::state::Comparison::LessEqual, true),
default_texture: try!(factory.create_texture_rgba8_static(1, 1, &[0xFFFFFFFF])),
})
}
}
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum Kernel {
Flat,
Textured,
AlphaCut(::AlphaThreshold),
}
impl<R: gfx::Resources> gfx_phase::Technique<R, ::Material<R>, ::view::Info<f32>> for Technique<R> {
type Kernel = Kernel;
type Params = param::Struct<R>;
fn test(&self, mesh: &gfx::Mesh<R>, mat: &::Material<R>) -> Option<Kernel> {
let textured = mat.texture.is_some() &&
mesh.attributes.iter().find(|a| a.name == "a_Tex0").is_some();
match mat.transparency {
::Transparency::Opaque if textured => Some(Kernel::Textured),
::Transparency::Opaque => Some(Kernel::Flat),
::Transparency::Cutout(v) if textured => Some(Kernel::AlphaCut(v)),
_ => None
}
}
fn compile<'a>(&'a self, kernel: Kernel)
-> gfx_phase::TechResult<'a, R, param::Struct<R>> {
( if kernel != Kernel::Flat {
&self.program_textured
} else {
&self.program
},
param::Struct {
mvp: [[0.0; 4]; 4],
color: [0.0; 4],
texture: (self.default_texture.clone(), None),
alpha_test: if let Kernel::AlphaCut(v) = kernel {
v as f32 / 255 as f32
} else { 0.0 },
_r: PhantomData,
},
&self.state,
None,
)
}
fn fix_params(&self, mat: &::Material<R>, space: &::view::Info<f32>,
params: &mut param::Struct<R>) {
use cgmath::FixedArray;
params.mvp = *space.mx_vertex.as_fixed();
params.color = mat.color;
if let Some(ref tex) = mat.texture {
params.texture = tex.clone();
}
}
}
/// The flat pipeline.
pub struct Pipeline<R: gfx::Resources> {
/// The only rendering phase.
pub phase: Phase<R>,
/// Background color. Set to none if you don't want the screen to be cleared.
pub background: Option<gfx::ColorValue>,
}
impl<R: gfx::Resources> Pipeline<R> {
/// Create a new pipeline.
pub fn new<F: gfx::Factory<R>>(factory: &mut F)
-> Result<Pipeline<R>, Error> {
Technique::new(factory).map(|tech| Pipeline {
phase: gfx_phase::Phase::new("Main", tech)
.with_sort(gfx_phase::sort::front_to_back)
.with_cache(),
background: Some([0.0; 4]),
})
}
}
impl<R: gfx::Resources> ::Pipeline<f32, R> for Pipeline<R> {
fn render<A, T>(&mut self, scene: &A, camera: &A::Camera, stream: &mut T)
-> Result<A::Status, gfx_scene::Error> where
A: gfx_scene::AbstractScene<R, ViewInfo = ::view::Info<f32>, Material = ::Material<R>>,
T: gfx::Stream<R>,
{
// clear
if let Some(color) = self.background {
stream.clear(gfx::ClearData {
color: color,
depth: 1.0,
stencil: 0,
});
}
// draw
scene.draw(&mut self.phase, camera, stream)
}
}
|
/// The core technique of the pipeline.
|
random_line_split
|
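The Error enum in the mod.rs record above implements From for each wrapped gfx error so that try! (today, the ? operator) can convert failures automatically. A self-contained sketch of the same pattern with hypothetical stand-in error types, not the real gfx ones:

#[derive(Debug)]
struct TextureError;
#[derive(Debug)]
struct ProgramError;

#[derive(Debug)]
enum Error {
    Texture(TextureError),
    Program(ProgramError),
}

impl From<TextureError> for Error {
    fn from(e: TextureError) -> Error { Error::Texture(e) }
}
impl From<ProgramError> for Error {
    fn from(e: ProgramError) -> Error { Error::Program(e) }
}

// Stand-in for a fallible factory call such as link_program.
fn link_program() -> Result<(), ProgramError> { Err(ProgramError) }

fn build() -> Result<(), Error> {
    link_program()?; // ProgramError is converted into Error::Program via From
    Ok(())
}

fn main() {
    assert!(matches!(build(), Err(Error::Program(_))));
    // The other conversion works the same way through Into/From.
    let tex_err: Error = TextureError.into();
    assert!(matches!(tex_err, Error::Texture(_)));
}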
arc.rs
|
in range(0u, 10) {
//! let five = five.clone();
//!
//! Thread::spawn(move || {
//! println!("{:?}", five);
//! });
//! }
//! ```
//!
//! Sharing mutable data safely between tasks with a `Mutex`:
//!
//! ```
//! use std::sync::{Arc, Mutex};
//! use std::thread::Thread;
//!
//! let five = Arc::new(Mutex::new(5i));
//!
//! for _ in range(0u, 10) {
//! let five = five.clone();
//!
//! Thread::spawn(move || {
//! let mut number = five.lock().unwrap();
//!
//! *number += 1;
//!
//! println!("{}", *number); // prints 6
//! });
//! }
//! ```
use core::prelude::*;
use core::atomic;
use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
use core::borrow::BorrowFrom;
use core::fmt::{self, Show};
use core::cmp::{Ordering};
use core::default::Default;
use core::mem::{min_align_of, size_of};
use core::mem;
use core::nonzero::NonZero;
use core::ops::Deref;
use core::ptr;
use core::hash::{Hash, Hasher};
use heap::deallocate;
/// An atomically reference counted wrapper for shared state.
///
/// # Example
///
/// In this example, a large vector of floats is shared between several tasks. With simple pipes,
/// without `Arc`, a copy would have to be made for each task.
///
/// ```rust
/// use std::sync::Arc;
/// use std::thread::Thread;
///
/// fn main() {
/// let numbers: Vec<_> = range(0, 100u32).map(|i| i as f32).collect();
/// let shared_numbers = Arc::new(numbers);
///
/// for _ in range(0u, 10) {
/// let child_numbers = shared_numbers.clone();
///
/// Thread::spawn(move || {
/// let local_numbers = child_numbers.as_slice();
///
/// // Work with the local numbers
/// });
/// }
/// }
/// ```
#[unsafe_no_drop_flag]
#[stable]
pub struct Arc<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: NonZero<*mut ArcInner<T>>,
}
unsafe impl<T: Sync + Send> Send for Arc<T> { }
unsafe impl<T: Sync + Send> Sync for Arc<T> { }
/// A weak pointer to an `Arc`.
///
/// Weak pointers will not keep the data inside of the `Arc` alive, and can be used to break cycles
/// between `Arc` pointers.
#[unsafe_no_drop_flag]
#[experimental = "Weak pointers may not belong in this module."]
pub struct Weak<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: NonZero<*mut ArcInner<T>>,
}
unsafe impl<T: Sync + Send> Send for Weak<T> { }
unsafe impl<T: Sync + Send> Sync for Weak<T> { }
struct ArcInner<T> {
strong: atomic::AtomicUint,
weak: atomic::AtomicUint,
data: T,
}
unsafe impl<T: Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: Sync + Send> Sync for ArcInner<T> {}
impl<T> Arc<T> {
/// Constructs a new `Arc<T>`.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
/// ```
#[inline]
#[stable]
pub fn new(data: T) -> Arc<T> {
// Start the weak pointer count as 1 which is the weak pointer that's
// held by all the strong pointers (kinda), see std/rc.rs for more info
let x = box ArcInner {
strong: atomic::AtomicUint::new(1),
weak: atomic::AtomicUint::new(1),
data: data,
};
Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
}
/// Downgrades the `Arc<T>` to a `Weak<T>` reference.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// let weak_five = five.downgrade();
/// ```
#[experimental = "Weak pointers may not belong in this module."]
pub fn downgrade(&self) -> Weak<T> {
// See the clone() impl for why this is relaxed
self.inner().weak.fetch_add(1, Relaxed);
Weak { _ptr: self._ptr }
}
}
impl<T> Arc<T> {
#[inline]
fn inner(&self) -> &ArcInner<T> {
// This unsafety is ok because while this arc is alive we're guaranteed that the inner
// pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync`
// because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer
// to these contents.
unsafe { &**self._ptr }
}
}
/// Get the number of weak references to this value.
#[inline]
#[experimental]
pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(SeqCst) - 1 }
/// Get the number of strong references to this value.
#[inline]
#[experimental]
pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(SeqCst) }
#[stable]
impl<T> Clone for Arc<T> {
/// Makes a clone of the `Arc<T>`.
///
/// This increases the strong reference count.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five.clone();
/// ```
#[inline]
fn clone(&self) -> Arc<T> {
// Using a relaxed ordering is alright here, as knowledge of the original reference
// prevents other threads from erroneously deleting the object.
//
// As explained in the [Boost documentation][1], Increasing the reference counter can
// always be done with memory_order_relaxed: New references to an object can only be formed
// from an existing reference, and passing an existing reference from one thread to another
// must already provide any required synchronization.
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
self.inner().strong.fetch_add(1, Relaxed);
Arc { _ptr: self._ptr }
}
}
impl<T> BorrowFrom<Arc<T>> for T {
fn borrow_from(owned: &Arc<T>) -> &T {
&**owned
}
}
#[stable]
impl<T> Deref for Arc<T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
&self.inner().data
}
}
impl<T: Send + Sync + Clone> Arc<T> {
/// Make a mutable reference from the given `Arc<T>`.
///
/// This is also referred to as a copy-on-write operation because the inner data is cloned if
/// the reference count is greater than one.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let mut five = Arc::new(5i);
///
/// let mut_five = five.make_unique();
/// ```
#[inline]
#[experimental]
pub fn make_unique(&mut self) -> &mut T {
// Note that we hold a strong reference, which also counts as a weak reference, so we only
// clone if there is an additional reference of either kind.
if self.inner().strong.load(SeqCst) != 1 ||
   self.inner().weak.load(SeqCst) != 1 {
*self = Arc::new((**self).clone())
}
// This unsafety is ok because we're guaranteed that the pointer returned is the *only*
// pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
// this point, and we required the Arc itself to be `mut`, so we're returning the only
// possible reference to the inner data.
let inner = unsafe { &mut **self._ptr };
&mut inner.data
}
}
#[unsafe_destructor]
#[stable]
impl<T: Sync + Send> Drop for Arc<T> {
/// Drops the `Arc<T>`.
///
/// This will decrement the strong reference count. If the strong reference count becomes zero
/// and the only other references are `Weak<T>` ones, `drop`s the inner value.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// {
/// let five = Arc::new(5i);
///
/// // stuff
///
/// drop(five); // explicit drop
/// }
/// {
/// let five = Arc::new(5i);
///
/// // stuff
///
/// } // implicit drop
/// ```
fn drop(&mut self) {
// This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
// it is guaranteed to be zeroed after the first if it's run more than once)
let ptr = *self._ptr;
if ptr.is_null() { return }
// Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
// unless we are going to delete the object. This same logic applies to the below
// `fetch_sub` to the `weak` count.
if self.inner().strong.fetch_sub(1, Release) != 1 { return }
// This fence is needed to prevent reordering of use of the data and deletion of the data.
// Because it is marked `Release`, the decreasing of the reference count synchronizes with
// this `Acquire` fence. This means that use of the data happens before decreasing the
// reference count, which happens before this fence, which happens before the deletion of
// the data.
//
// As explained in the [Boost documentation][1],
//
// > It is important to enforce any possible access to the object in one thread (through an
// > existing reference) to *happen before* deleting the object in a different thread. This
// > is achieved by a "release" operation after dropping a reference (any access to the
// > object through this reference must obviously happened before), and an "acquire"
// > operation before deleting the object.
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
atomic::fence(Acquire);
// Destroy the data at this time, even though we may not free the box allocation itself
// (there may still be weak pointers lying around).
unsafe { drop(ptr::read(&self.inner().data)); }
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
}
#[experimental = "Weak pointers may not belong in this module."]
impl<T: Sync + Send> Weak<T> {
/// Upgrades a weak reference to a strong reference.
///
/// Upgrades the `Weak<T>` reference to an `Arc<T>`, if possible.
///
/// Returns `None` if there were no strong references and the data was destroyed.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// let weak_five = five.downgrade();
///
/// let strong_five: Option<Arc<_>> = weak_five.upgrade();
/// ```
pub fn upgrade(&self) -> Option<Arc<T>> {
// We use a CAS loop to increment the strong count instead of a fetch_add because once the
// count hits 0 it must never be above 0.
let inner = self.inner();
loop {
let n = inner.strong.load(SeqCst);
if n == 0 { return None }
let old = inner.strong.compare_and_swap(n, n + 1, SeqCst);
if old == n { return Some(Arc { _ptr: self._ptr }) }
}
}
#[inline]
fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe"
unsafe { &**self._ptr }
}
}
#[experimental = "Weak pointers may not belong in this module."]
impl<T: Sync + Send> Clone for Weak<T> {
/// Makes a clone of the `Weak<T>`.
///
/// This increases the weak reference count.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let weak_five = Arc::new(5i).downgrade();
///
/// weak_five.clone();
/// ```
#[inline]
fn clone(&self) -> Weak<T> {
// See comments in Arc::clone() for why this is relaxed
self.inner().weak.fetch_add(1, Relaxed);
Weak { _ptr: self._ptr }
}
}
#[unsafe_destructor]
#[stable]
impl<T: Sync + Send> Drop for Weak<T> {
/// Drops the `Weak<T>`.
///
/// This will decrement the weak reference count.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// {
/// let five = Arc::new(5i);
/// let weak_five = five.downgrade();
///
/// // stuff
///
/// drop(weak_five); // explicit drop
/// }
/// {
/// let five = Arc::new(5i);
/// let weak_five = five.downgrade();
///
/// // stuff
///
/// } // implicit drop
/// ```
fn drop(&mut self) {
let ptr = *self._ptr;
// see comments above for why this check is here
if ptr.is_null() { return }
// If we find out that we were the last weak pointer, then its time to deallocate the data
// entirely. See the discussion in Arc::drop() about the memory orderings
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
}
#[stable]
impl<T: PartialEq> PartialEq for Arc<T> {
/// Equality for two `Arc<T>`s.
///
/// Two `Arc<T>`s are equal if their inner value are equal.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five == Arc::new(5i);
/// ```
fn eq(&self, other: &Arc<T>) -> bool { *(*self) == *(*other) }
/// Inequality for two `Arc<T>`s.
///
/// Two `Arc<T>`s are unequal if their inner value are unequal.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five != Arc::new(5i);
/// ```
fn ne(&self, other: &Arc<T>) -> bool { *(*self) != *(*other) }
}
#[stable]
impl<T: PartialOrd> PartialOrd for Arc<T> {
/// Partial comparison for two `Arc<T>`s.
///
/// The two are compared by calling `partial_cmp()` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five.partial_cmp(&Arc::new(5i));
/// ```
fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
/// Less-than comparison for two `Arc<T>`s.
///
/// The two are compared by calling `<` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five < Arc::new(5i);
/// ```
fn lt(&self, other: &Arc<T>) -> bool { *(*self) < *(*other) }
/// 'Less-than or equal to' comparison for two `Arc<T>`s.
///
/// The two are compared by calling `<=` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five <= Arc::new(5i);
/// ```
fn le(&self, other: &Arc<T>) -> bool { *(*self) <= *(*other) }
/// Greater-than comparison for two `Arc<T>`s.
///
/// The two are compared by calling `>` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five > Arc::new(5i);
/// ```
fn gt(&self, other: &Arc<T>) -> bool { *(*self) > *(*other) }
/// 'Greater-than or equal to' comparison for two `Arc<T>`s.
///
/// The two are compared by calling `>=` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five >= Arc::new(5i);
/// ```
fn ge(&self, other: &Arc<T>) -> bool { *(*self) >= *(*other) }
}
#[stable]
impl<T: Ord> Ord for Arc<T> {
fn cmp(&self, other: &Arc<T>) -> Ordering { (**self).cmp(&**other) }
}
#[stable]
impl<T: Eq> Eq for Arc<T> {}
impl<T: fmt::Show> fmt::Show for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Arc({:?})", (**self))
}
}
#[stable]
impl<T: fmt::String> fmt::String for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::String::fmt(&**self, f)
}
}
#[stable]
impl<T: Default + Sync + Send> Default for Arc<T> {
#[stable]
fn default() -> Arc<T> { Arc::new(Default::default()) }
}
impl<H: Hasher, T: Hash<H>> Hash<H> for Arc<T> {
fn hash(&self, state: &mut H) {
(**self).hash(state)
}
}
#[cfg(test)]
#[allow(experimental)]
mod tests {
use std::clone::Clone;
use std::sync::mpsc::channel;
use std::mem::drop;
use std::ops::Drop;
use std::option::Option;
use std::option::Option::{Some, None};
use std::sync::atomic;
use std::sync::atomic::Ordering::{Acquire, SeqCst};
use std::thread::Thread;
use std::vec::Vec;
use super::{Arc, Weak, weak_count, strong_count};
use std::sync::Mutex;
struct Canary(*mut atomic::AtomicUint);
impl Drop for Canary
{
fn drop(&mut self) {
unsafe {
match *self {
Canary(c) =>
|
}
}
}
}
#[test]
fn manually_share_arc() {
let v = vec!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
let arc_v = Arc::new(v);
let (tx, rx) = channel();
let _t = Thread::spawn(move || {
let arc_v: Arc<Vec<int>> = rx.recv().unwrap();
assert_eq!((*arc_v)[3], 4);
});
tx.send(arc_v.clone()).unwrap();
assert_eq!((*arc_v)[2], 3);
assert_eq!((*arc_v)[4], 5);
}
#[test]
fn test_cowarc_clone_make_unique() {
let mut cow0 = Arc::new(75u);
let mut cow1 = cow0.clone();
let mut cow2 = cow1.clone();
assert!(75 == *cow0.make_unique());
assert!(75 == *cow1.make_unique());
assert!(75 == *cow2.make_unique());
*cow0.make_unique() += 1;
*cow1.make_unique() += 2;
*cow2.make_unique() += 3;
assert!(76 == *cow0);
assert!(77 == *cow1);
assert!(78 == *cow2);
// none should point to the same backing memory
assert!(*cow0 != *cow1);
assert!(*cow0 != *cow2);
assert!(*cow1 != *cow2);
}
#[test]
fn test_cowarc_clone_unique2() {
let mut cow0 = Arc::new(75u);
let cow1 = cow0.clone();
let cow2 = cow1.clone();
assert!(75 == *cow0);
assert!(75 == *cow1);
assert!(75 == *cow2);
*cow0.make_unique() += 1;
assert!(76 == *cow0);
assert!(75 == *cow1);
assert!(75 == *cow2);
// cow1 and cow2 should share the same contents
// cow0 should have a unique reference
assert!(*cow0 != *cow1);
assert!(*cow0 != *cow2);
assert!(*cow1 == *cow2);
}
#[test]
fn test_cowarc_clone_weak() {
let mut cow0 = Arc::new(75u);
let cow1_weak = cow0.downgrade();
assert!(75 == *cow0);
assert!(75 == *cow1_weak.upgrade().unwrap());
*cow0.make_unique() += 1;
assert!(76 == *cow0);
assert!(cow1_weak.upgrade().is_none());
}
#[test]
fn test_live() {
let x = Arc::new(5i);
let y = x.downgrade();
assert!(y.upgrade().is_some());
}
#[test]
fn test_dead() {
let x = Arc::new(5i);
let y = x.downgrade();
drop(x);
assert!(y.upgrade().is_none());
}
#[test]
fn weak_self_cyclic() {
struct Cycle {
x: Mutex<Option<Weak<Cycle>>>
}
let a = Arc::new(Cycle { x: Mutex::new(None) });
let b = a.clone().downgrade();
*a.x.lock().unwrap() = Some(b);
// hopefully we don't double-free (or leak
|
{
(*c).fetch_add(1, SeqCst);
}
|
conditional_block
|
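The weak_self_cyclic test near the end of the record above stores a Weak reference back into the Arc it came from so the cycle doesn't leak. A minimal sketch of the same cycle-breaking idea with current std APIs (Arc::downgrade rather than the pre-1.0 downgrade() method shown above), using a made-up parent/child Node type:

use std::sync::{Arc, Mutex, Weak};

struct Node {
    // The back-link is Weak so parent and child don't keep each other alive.
    parent: Mutex<Option<Weak<Node>>>,
}

fn main() {
    let parent = Arc::new(Node { parent: Mutex::new(None) });
    let child = Arc::new(Node {
        parent: Mutex::new(Some(Arc::downgrade(&parent))),
    });

    // While a strong reference to the parent exists, upgrade() succeeds.
    assert!(child.parent.lock().unwrap().as_ref().unwrap().upgrade().is_some());

    drop(parent);
    // Once the last strong reference is gone, upgrade() returns None.
    assert!(child.parent.lock().unwrap().as_ref().unwrap().upgrade().is_none());
}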
arc.rs
|
for _ in range(0u, 10) {
//! let five = five.clone();
//!
//! Thread::spawn(move || {
//! println!("{:?}", five);
//! });
//! }
//! ```
//!
//! Sharing mutable data safely between tasks with a `Mutex`:
//!
//! ```
//! use std::sync::{Arc, Mutex};
//! use std::thread::Thread;
//!
//! let five = Arc::new(Mutex::new(5i));
//!
//! for _ in range(0u, 10) {
//! let five = five.clone();
//!
//! Thread::spawn(move || {
//! let mut number = five.lock().unwrap();
//!
//! *number += 1;
//!
//! println!("{}", *number); // prints 6
//! });
//! }
//! ```
use core::prelude::*;
use core::atomic;
use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
use core::borrow::BorrowFrom;
use core::fmt::{self, Show};
use core::cmp::{Ordering};
use core::default::Default;
use core::mem::{min_align_of, size_of};
use core::mem;
use core::nonzero::NonZero;
use core::ops::Deref;
use core::ptr;
use core::hash::{Hash, Hasher};
use heap::deallocate;
/// An atomically reference counted wrapper for shared state.
///
/// # Example
///
/// In this example, a large vector of floats is shared between several tasks. With simple pipes,
/// without `Arc`, a copy would have to be made for each task.
///
/// ```rust
/// use std::sync::Arc;
/// use std::thread::Thread;
///
/// fn main() {
/// let numbers: Vec<_> = range(0, 100u32).map(|i| i as f32).collect();
/// let shared_numbers = Arc::new(numbers);
///
/// for _ in range(0u, 10) {
/// let child_numbers = shared_numbers.clone();
///
/// Thread::spawn(move || {
/// let local_numbers = child_numbers.as_slice();
///
/// // Work with the local numbers
/// });
/// }
/// }
/// ```
#[unsafe_no_drop_flag]
#[stable]
pub struct Arc<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: NonZero<*mut ArcInner<T>>,
}
unsafe impl<T: Sync + Send> Send for Arc<T> { }
unsafe impl<T: Sync + Send> Sync for Arc<T> { }
/// A weak pointer to an `Arc`.
///
/// Weak pointers will not keep the data inside of the `Arc` alive, and can be used to break cycles
/// between `Arc` pointers.
#[unsafe_no_drop_flag]
#[experimental = "Weak pointers may not belong in this module."]
pub struct Weak<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: NonZero<*mut ArcInner<T>>,
}
unsafe impl<T: Sync + Send> Send for Weak<T> { }
unsafe impl<T: Sync + Send> Sync for Weak<T> { }
struct ArcInner<T> {
strong: atomic::AtomicUint,
weak: atomic::AtomicUint,
data: T,
}
unsafe impl<T: Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: Sync + Send> Sync for ArcInner<T> {}
impl<T> Arc<T> {
/// Constructs a new `Arc<T>`.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
/// ```
#[inline]
#[stable]
pub fn new(data: T) -> Arc<T> {
|
data: data,
};
Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
}
/// Downgrades the `Arc<T>` to a `Weak<T>` reference.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// let weak_five = five.downgrade();
/// ```
#[experimental = "Weak pointers may not belong in this module."]
pub fn downgrade(&self) -> Weak<T> {
// See the clone() impl for why this is relaxed
self.inner().weak.fetch_add(1, Relaxed);
Weak { _ptr: self._ptr }
}
}
impl<T> Arc<T> {
#[inline]
fn inner(&self) -> &ArcInner<T> {
// This unsafety is ok because while this arc is alive we're guaranteed that the inner
// pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync`
// because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer
// to these contents.
unsafe { &**self._ptr }
}
}
/// Get the number of weak references to this value.
#[inline]
#[experimental]
pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(SeqCst) - 1 }
/// Get the number of strong references to this value.
#[inline]
#[experimental]
pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(SeqCst) }
#[stable]
impl<T> Clone for Arc<T> {
/// Makes a clone of the `Arc<T>`.
///
/// This increases the strong reference count.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five.clone();
/// ```
#[inline]
fn clone(&self) -> Arc<T> {
// Using a relaxed ordering is alright here, as knowledge of the original reference
// prevents other threads from erroneously deleting the object.
//
// As explained in the [Boost documentation][1], Increasing the reference counter can
// always be done with memory_order_relaxed: New references to an object can only be formed
// from an existing reference, and passing an existing reference from one thread to another
// must already provide any required synchronization.
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
self.inner().strong.fetch_add(1, Relaxed);
Arc { _ptr: self._ptr }
}
}
impl<T> BorrowFrom<Arc<T>> for T {
fn borrow_from(owned: &Arc<T>) -> &T {
&**owned
}
}
#[stable]
impl<T> Deref for Arc<T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
&self.inner().data
}
}
impl<T: Send + Sync + Clone> Arc<T> {
/// Make a mutable reference from the given `Arc<T>`.
///
/// This is also referred to as a copy-on-write operation because the inner data is cloned if
/// the reference count is greater than one.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let mut five = Arc::new(5i);
///
/// let mut_five = five.make_unique();
/// ```
#[inline]
#[experimental]
pub fn make_unique(&mut self) -> &mut T {
// Note that we hold a strong reference, which also counts as a weak reference, so we only
// clone if there is an additional reference of either kind.
        if self.inner().strong.load(SeqCst) != 1 ||
           self.inner().weak.load(SeqCst) != 1 {
*self = Arc::new((**self).clone())
}
// This unsafety is ok because we're guaranteed that the pointer returned is the *only*
// pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
// this point, and we required the Arc itself to be `mut`, so we're returning the only
// possible reference to the inner data.
let inner = unsafe { &mut **self._ptr };
&mut inner.data
}
}
#[unsafe_destructor]
#[stable]
impl<T: Sync + Send> Drop for Arc<T> {
/// Drops the `Arc<T>`.
///
/// This will decrement the strong reference count. If the strong reference count becomes zero
/// and the only other references are `Weak<T>` ones, `drop`s the inner value.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// {
/// let five = Arc::new(5i);
///
/// // stuff
///
    /// drop(five); // explicit drop
/// }
/// {
/// let five = Arc::new(5i);
///
/// // stuff
///
/// } // implicit drop
/// ```
fn drop(&mut self) {
// This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
// it is guaranteed to be zeroed after the first if it's run more than once)
let ptr = *self._ptr;
if ptr.is_null() { return }
// Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
// unless we are going to delete the object. This same logic applies to the below
// `fetch_sub` to the `weak` count.
        if self.inner().strong.fetch_sub(1, Release) != 1 { return }
// This fence is needed to prevent reordering of use of the data and deletion of the data.
// Because it is marked `Release`, the decreasing of the reference count synchronizes with
// this `Acquire` fence. This means that use of the data happens before decreasing the
// reference count, which happens before this fence, which happens before the deletion of
// the data.
//
// As explained in the [Boost documentation][1],
//
// > It is important to enforce any possible access to the object in one thread (through an
// > existing reference) to *happen before* deleting the object in a different thread. This
// > is achieved by a "release" operation after dropping a reference (any access to the
        // > object through this reference must obviously have happened before), and an "acquire"
// > operation before deleting the object.
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
atomic::fence(Acquire);
// Destroy the data at this time, even though we may not free the box allocation itself
// (there may still be weak pointers lying around).
unsafe { drop(ptr::read(&self.inner().data)); }
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
}
#[experimental = "Weak pointers may not belong in this module."]
impl<T: Sync + Send> Weak<T> {
/// Upgrades a weak reference to a strong reference.
///
/// Upgrades the `Weak<T>` reference to an `Arc<T>`, if possible.
///
/// Returns `None` if there were no strong references and the data was destroyed.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// let weak_five = five.downgrade();
///
/// let strong_five: Option<Arc<_>> = weak_five.upgrade();
/// ```
pub fn upgrade(&self) -> Option<Arc<T>> {
// We use a CAS loop to increment the strong count instead of a fetch_add because once the
        // count hits 0 it must never be above 0.
let inner = self.inner();
loop {
let n = inner.strong.load(SeqCst);
if n == 0 { return None }
let old = inner.strong.compare_and_swap(n, n + 1, SeqCst);
if old == n { return Some(Arc { _ptr: self._ptr }) }
}
}
#[inline]
fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe"
unsafe { &**self._ptr }
}
}
#[experimental = "Weak pointers may not belong in this module."]
impl<T: Sync + Send> Clone for Weak<T> {
/// Makes a clone of the `Weak<T>`.
///
/// This increases the weak reference count.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let weak_five = Arc::new(5i).downgrade();
///
/// weak_five.clone();
/// ```
#[inline]
fn clone(&self) -> Weak<T> {
// See comments in Arc::clone() for why this is relaxed
self.inner().weak.fetch_add(1, Relaxed);
Weak { _ptr: self._ptr }
}
}
#[unsafe_destructor]
#[stable]
impl<T: Sync + Send> Drop for Weak<T> {
/// Drops the `Weak<T>`.
///
/// This will decrement the weak reference count.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// {
/// let five = Arc::new(5i);
/// let weak_five = five.downgrade();
///
/// // stuff
///
    /// drop(weak_five); // explicit drop
/// }
/// {
/// let five = Arc::new(5i);
/// let weak_five = five.downgrade();
///
/// // stuff
///
/// } // implicit drop
/// ```
fn drop(&mut self) {
let ptr = *self._ptr;
// see comments above for why this check is here
if ptr.is_null() { return }
        // If we find out that we were the last weak pointer, then it's time to deallocate the data
// entirely. See the discussion in Arc::drop() about the memory orderings
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
}
#[stable]
impl<T: PartialEq> PartialEq for Arc<T> {
/// Equality for two `Arc<T>`s.
///
    /// Two `Arc<T>`s are equal if their inner values are equal.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five == Arc::new(5i);
/// ```
fn eq(&self, other: &Arc<T>) -> bool { *(*self) == *(*other) }
/// Inequality for two `Arc<T>`s.
///
    /// Two `Arc<T>`s are unequal if their inner values are unequal.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
    /// five != Arc::new(5i);
/// ```
    fn ne(&self, other: &Arc<T>) -> bool { *(*self) != *(*other) }
}
#[stable]
impl<T: PartialOrd> PartialOrd for Arc<T> {
/// Partial comparison for two `Arc<T>`s.
///
/// The two are compared by calling `partial_cmp()` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five.partial_cmp(&Arc::new(5i));
/// ```
fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
/// Less-than comparison for two `Arc<T>`s.
///
/// The two are compared by calling `<` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five < Arc::new(5i);
/// ```
fn lt(&self, other: &Arc<T>) -> bool { *(*self) < *(*other) }
/// 'Less-than or equal to' comparison for two `Arc<T>`s.
///
/// The two are compared by calling `<=` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five <= Arc::new(5i);
/// ```
fn le(&self, other: &Arc<T>) -> bool { *(*self) <= *(*other) }
/// Greater-than comparison for two `Arc<T>`s.
///
/// The two are compared by calling `>` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five > Arc::new(5i);
/// ```
fn gt(&self, other: &Arc<T>) -> bool { *(*self) > *(*other) }
/// 'Greater-than or equal to' comparison for two `Arc<T>`s.
///
/// The two are compared by calling `>=` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five >= Arc::new(5i);
/// ```
fn ge(&self, other: &Arc<T>) -> bool { *(*self) >= *(*other) }
}
#[stable]
impl<T: Ord> Ord for Arc<T> {
fn cmp(&self, other: &Arc<T>) -> Ordering { (**self).cmp(&**other) }
}
#[stable]
impl<T: Eq> Eq for Arc<T> {}
impl<T: fmt::Show> fmt::Show for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Arc({:?})", (**self))
}
}
#[stable]
impl<T: fmt::String> fmt::String for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::String::fmt(&**self, f)
}
}
#[stable]
impl<T: Default + Sync + Send> Default for Arc<T> {
#[stable]
fn default() -> Arc<T> { Arc::new(Default::default()) }
}
impl<H: Hasher, T: Hash<H>> Hash<H> for Arc<T> {
fn hash(&self, state: &mut H) {
(**self).hash(state)
}
}
#[cfg(test)]
#[allow(experimental)]
mod tests {
use std::clone::Clone;
use std::sync::mpsc::channel;
use std::mem::drop;
use std::ops::Drop;
use std::option::Option;
use std::option::Option::{Some, None};
use std::sync::atomic;
use std::sync::atomic::Ordering::{Acquire, SeqCst};
use std::thread::Thread;
use std::vec::Vec;
use super::{Arc, Weak, weak_count, strong_count};
use std::sync::Mutex;
struct Canary(*mut atomic::AtomicUint);
impl Drop for Canary
{
fn drop(&mut self) {
unsafe {
match *self {
Canary(c) => {
(*c).fetch_add(1, SeqCst);
}
}
}
}
}
#[test]
fn manually_share_arc() {
let v = vec!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
let arc_v = Arc::new(v);
let (tx, rx) = channel();
let _t = Thread::spawn(move || {
let arc_v: Arc<Vec<int>> = rx.recv().unwrap();
assert_eq!((*arc_v)[3], 4);
});
tx.send(arc_v.clone()).unwrap();
assert_eq!((*arc_v)[2], 3);
assert_eq!((*arc_v)[4], 5);
}
#[test]
fn test_cowarc_clone_make_unique() {
let mut cow0 = Arc::new(75u);
let mut cow1 = cow0.clone();
let mut cow2 = cow1.clone();
assert!(75 == *cow0.make_unique());
assert!(75 == *cow1.make_unique());
assert!(75 == *cow2.make_unique());
*cow0.make_unique() += 1;
*cow1.make_unique() += 2;
*cow2.make_unique() += 3;
assert!(76 == *cow0);
assert!(77 == *cow1);
assert!(78 == *cow2);
// none should point to the same backing memory
        assert!(*cow0 != *cow1);
        assert!(*cow0 != *cow2);
        assert!(*cow1 != *cow2);
}
#[test]
fn test_cowarc_clone_unique2() {
let mut cow0 = Arc::new(75u);
let cow1 = cow0.clone();
let cow2 = cow1.clone();
assert!(75 == *cow0);
assert!(75 == *cow1);
assert!(75 == *cow2);
*cow0.make_unique() += 1;
assert!(76 == *cow0);
assert!(75 == *cow1);
assert!(75 == *cow2);
// cow1 and cow2 should share the same contents
// cow0 should have a unique reference
        assert!(*cow0 != *cow1);
        assert!(*cow0 != *cow2);
assert!(*cow1 == *cow2);
}
#[test]
fn test_cowarc_clone_weak() {
let mut cow0 = Arc::new(75u);
let cow1_weak = cow0.downgrade();
assert!(75 == *cow0);
assert!(75 == *cow1_weak.upgrade().unwrap());
*cow0.make_unique() += 1;
assert!(76 == *cow0);
assert!(cow1_weak.upgrade().is_none());
}
#[test]
fn test_live() {
let x = Arc::new(5i);
let y = x.downgrade();
assert!(y.upgrade().is_some());
}
#[test]
fn test_dead() {
let x = Arc::new(5i);
let y = x.downgrade();
drop(x);
assert!(y.upgrade().is_none());
}
#[test]
fn weak_self_cyclic() {
struct Cycle {
x: Mutex<Option<Weak<Cycle>>>
}
let a = Arc::new(Cycle { x: Mutex::new(None) });
let b = a.clone().downgrade();
*a.x.lock().unwrap() = Some(b);
// hopefully we don't double-free (or leak)...
|
// Start the weak pointer count as 1 which is the weak pointer that's
// held by all the strong pointers (kinda), see std/rc.rs for more info
let x = box ArcInner {
strong: atomic::AtomicUint::new(1),
weak: atomic::AtomicUint::new(1),
|
random_line_split
|
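The `clone`/`drop` comments in the row above spell out the ordering recipe: incrementing the strong count only needs `Relaxed`, while the final decrement uses `Release` paired with an `Acquire` fence before the data is destroyed. A stripped-down sketch of just that counter logic with today's `AtomicUsize` (illustrative only; it ignores weak counts and deallocation):

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering};

struct RefCount {
    strong: AtomicUsize,
}

impl RefCount {
    fn new() -> Self {
        RefCount { strong: AtomicUsize::new(1) }
    }

    // Cloning only needs Relaxed: the existing reference already provides
    // any synchronization the new holder requires.
    fn retain(&self) {
        self.strong.fetch_add(1, Ordering::Relaxed);
    }

    // Returns true when the caller is the last holder and must destroy the data.
    fn release(&self) -> bool {
        if self.strong.fetch_sub(1, Ordering::Release) != 1 {
            return false;
        }
        // Pair the Release decrement with an Acquire fence so every prior use
        // of the data happens-before its destruction.
        fence(Ordering::Acquire);
        true
    }
}

fn main() {
    let rc = RefCount::new();
    rc.retain();
    assert!(!rc.release());
    assert!(rc.release()); // last reference: safe to destroy
}
```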
arc.rs
|
in range(0u, 10) {
//! let five = five.clone();
//!
//! Thread::spawn(move || {
//! println!("{:?}", five);
//! });
//! }
//! ```
//!
//! Sharing mutable data safely between tasks with a `Mutex`:
//!
//! ```
//! use std::sync::{Arc, Mutex};
//! use std::thread::Thread;
//!
//! let five = Arc::new(Mutex::new(5i));
//!
//! for _ in range(0u, 10) {
//! let five = five.clone();
//!
//! Thread::spawn(move || {
//! let mut number = five.lock().unwrap();
//!
//! *number += 1;
//!
//! println!("{}", *number); // prints 6
//! });
//! }
//! ```
use core::prelude::*;
use core::atomic;
use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
use core::borrow::BorrowFrom;
use core::fmt::{self, Show};
use core::cmp::{Ordering};
use core::default::Default;
use core::mem::{min_align_of, size_of};
use core::mem;
use core::nonzero::NonZero;
use core::ops::Deref;
use core::ptr;
use core::hash::{Hash, Hasher};
use heap::deallocate;
/// An atomically reference counted wrapper for shared state.
///
/// # Example
///
/// In this example, a large vector of floats is shared between several tasks. With simple pipes,
/// without `Arc`, a copy would have to be made for each task.
///
/// ```rust
/// use std::sync::Arc;
/// use std::thread::Thread;
///
/// fn main() {
/// let numbers: Vec<_> = range(0, 100u32).map(|i| i as f32).collect();
/// let shared_numbers = Arc::new(numbers);
///
/// for _ in range(0u, 10) {
/// let child_numbers = shared_numbers.clone();
///
/// Thread::spawn(move || {
/// let local_numbers = child_numbers.as_slice();
///
/// // Work with the local numbers
/// });
/// }
/// }
/// ```
#[unsafe_no_drop_flag]
#[stable]
pub struct Arc<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: NonZero<*mut ArcInner<T>>,
}
unsafe impl<T: Sync + Send> Send for Arc<T> { }
unsafe impl<T: Sync + Send> Sync for Arc<T> { }
/// A weak pointer to an `Arc`.
///
/// Weak pointers will not keep the data inside of the `Arc` alive, and can be used to break cycles
/// between `Arc` pointers.
#[unsafe_no_drop_flag]
#[experimental = "Weak pointers may not belong in this module."]
pub struct Weak<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: NonZero<*mut ArcInner<T>>,
}
unsafe impl<T: Sync + Send> Send for Weak<T> { }
unsafe impl<T: Sync + Send> Sync for Weak<T> { }
struct ArcInner<T> {
strong: atomic::AtomicUint,
weak: atomic::AtomicUint,
data: T,
}
unsafe impl<T: Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: Sync + Send> Sync for ArcInner<T> {}
impl<T> Arc<T> {
/// Constructs a new `Arc<T>`.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
/// ```
#[inline]
#[stable]
pub fn new(data: T) -> Arc<T> {
// Start the weak pointer count as 1 which is the weak pointer that's
// held by all the strong pointers (kinda), see std/rc.rs for more info
let x = box ArcInner {
strong: atomic::AtomicUint::new(1),
weak: atomic::AtomicUint::new(1),
data: data,
};
Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
}
/// Downgrades the `Arc<T>` to a `Weak<T>` reference.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// let weak_five = five.downgrade();
/// ```
#[experimental = "Weak pointers may not belong in this module."]
pub fn downgrade(&self) -> Weak<T> {
// See the clone() impl for why this is relaxed
self.inner().weak.fetch_add(1, Relaxed);
Weak { _ptr: self._ptr }
}
}
impl<T> Arc<T> {
#[inline]
fn inner(&self) -> &ArcInner<T> {
// This unsafety is ok because while this arc is alive we're guaranteed that the inner
// pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync`
// because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer
// to these contents.
unsafe { &**self._ptr }
}
}
/// Get the number of weak references to this value.
#[inline]
#[experimental]
pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(SeqCst) - 1 }
/// Get the number of strong references to this value.
#[inline]
#[experimental]
pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(SeqCst) }
#[stable]
impl<T> Clone for Arc<T> {
/// Makes a clone of the `Arc<T>`.
///
/// This increases the strong reference count.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five.clone();
/// ```
#[inline]
fn clone(&self) -> Arc<T> {
// Using a relaxed ordering is alright here, as knowledge of the original reference
// prevents other threads from erroneously deleting the object.
//
// As explained in the [Boost documentation][1], Increasing the reference counter can
// always be done with memory_order_relaxed: New references to an object can only be formed
// from an existing reference, and passing an existing reference from one thread to another
// must already provide any required synchronization.
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
self.inner().strong.fetch_add(1, Relaxed);
Arc { _ptr: self._ptr }
}
}
impl<T> BorrowFrom<Arc<T>> for T {
fn borrow_from(owned: &Arc<T>) -> &T {
&**owned
}
}
#[stable]
impl<T> Deref for Arc<T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
&self.inner().data
}
}
impl<T: Send + Sync + Clone> Arc<T> {
/// Make a mutable reference from the given `Arc<T>`.
///
/// This is also referred to as a copy-on-write operation because the inner data is cloned if
/// the reference count is greater than one.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let mut five = Arc::new(5i);
///
/// let mut_five = five.make_unique();
/// ```
#[inline]
#[experimental]
pub fn make_unique(&mut self) -> &mut T {
// Note that we hold a strong reference, which also counts as a weak reference, so we only
// clone if there is an additional reference of either kind.
        if self.inner().strong.load(SeqCst) != 1 ||
           self.inner().weak.load(SeqCst) != 1 {
*self = Arc::new((**self).clone())
}
// This unsafety is ok because we're guaranteed that the pointer returned is the *only*
// pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
// this point, and we required the Arc itself to be `mut`, so we're returning the only
// possible reference to the inner data.
let inner = unsafe { &mut **self._ptr };
&mut inner.data
}
}
#[unsafe_destructor]
#[stable]
impl<T: Sync + Send> Drop for Arc<T> {
/// Drops the `Arc<T>`.
///
/// This will decrement the strong reference count. If the strong reference count becomes zero
/// and the only other references are `Weak<T>` ones, `drop`s the inner value.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// {
/// let five = Arc::new(5i);
///
/// // stuff
///
    /// drop(five); // explicit drop
/// }
/// {
/// let five = Arc::new(5i);
///
/// // stuff
///
/// } // implicit drop
/// ```
fn drop(&mut self) {
// This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
// it is guaranteed to be zeroed after the first if it's run more than once)
let ptr = *self._ptr;
if ptr.is_null() { return }
// Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
// unless we are going to delete the object. This same logic applies to the below
// `fetch_sub` to the `weak` count.
        if self.inner().strong.fetch_sub(1, Release) != 1 { return }
// This fence is needed to prevent reordering of use of the data and deletion of the data.
// Because it is marked `Release`, the decreasing of the reference count synchronizes with
// this `Acquire` fence. This means that use of the data happens before decreasing the
// reference count, which happens before this fence, which happens before the deletion of
// the data.
//
// As explained in the [Boost documentation][1],
//
// > It is important to enforce any possible access to the object in one thread (through an
// > existing reference) to *happen before* deleting the object in a different thread. This
// > is achieved by a "release" operation after dropping a reference (any access to the
        // > object through this reference must obviously have happened before), and an "acquire"
// > operation before deleting the object.
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
atomic::fence(Acquire);
// Destroy the data at this time, even though we may not free the box allocation itself
// (there may still be weak pointers lying around).
unsafe { drop(ptr::read(&self.inner().data)); }
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
}
#[experimental = "Weak pointers may not belong in this module."]
impl<T: Sync + Send> Weak<T> {
/// Upgrades a weak reference to a strong reference.
///
/// Upgrades the `Weak<T>` reference to an `Arc<T>`, if possible.
///
/// Returns `None` if there were no strong references and the data was destroyed.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// let weak_five = five.downgrade();
///
/// let strong_five: Option<Arc<_>> = weak_five.upgrade();
/// ```
pub fn upgrade(&self) -> Option<Arc<T>> {
// We use a CAS loop to increment the strong count instead of a fetch_add because once the
        // count hits 0 it must never be above 0.
let inner = self.inner();
loop {
let n = inner.strong.load(SeqCst);
if n == 0 { return None }
let old = inner.strong.compare_and_swap(n, n + 1, SeqCst);
if old == n { return Some(Arc { _ptr: self._ptr }) }
}
}
#[inline]
fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe"
unsafe { &**self._ptr }
}
}
#[experimental = "Weak pointers may not belong in this module."]
impl<T: Sync + Send> Clone for Weak<T> {
/// Makes a clone of the `Weak<T>`.
///
/// This increases the weak reference count.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let weak_five = Arc::new(5i).downgrade();
///
/// weak_five.clone();
/// ```
#[inline]
fn clone(&self) -> Weak<T> {
// See comments in Arc::clone() for why this is relaxed
self.inner().weak.fetch_add(1, Relaxed);
Weak { _ptr: self._ptr }
}
}
#[unsafe_destructor]
#[stable]
impl<T: Sync + Send> Drop for Weak<T> {
/// Drops the `Weak<T>`.
///
/// This will decrement the weak reference count.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// {
/// let five = Arc::new(5i);
/// let weak_five = five.downgrade();
///
/// // stuff
///
    /// drop(weak_five); // explicit drop
/// }
/// {
/// let five = Arc::new(5i);
/// let weak_five = five.downgrade();
///
/// // stuff
///
/// } // implicit drop
/// ```
fn drop(&mut self)
|
}
#[stable]
impl<T: PartialEq> PartialEq for Arc<T> {
/// Equality for two `Arc<T>`s.
///
    /// Two `Arc<T>`s are equal if their inner values are equal.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five == Arc::new(5i);
/// ```
fn eq(&self, other: &Arc<T>) -> bool { *(*self) == *(*other) }
/// Inequality for two `Arc<T>`s.
///
    /// Two `Arc<T>`s are unequal if their inner values are unequal.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
    /// five != Arc::new(5i);
/// ```
    fn ne(&self, other: &Arc<T>) -> bool { *(*self) != *(*other) }
}
#[stable]
impl<T: PartialOrd> PartialOrd for Arc<T> {
/// Partial comparison for two `Arc<T>`s.
///
/// The two are compared by calling `partial_cmp()` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five.partial_cmp(&Arc::new(5i));
/// ```
fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
/// Less-than comparison for two `Arc<T>`s.
///
/// The two are compared by calling `<` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five < Arc::new(5i);
/// ```
fn lt(&self, other: &Arc<T>) -> bool { *(*self) < *(*other) }
/// 'Less-than or equal to' comparison for two `Arc<T>`s.
///
/// The two are compared by calling `<=` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five <= Arc::new(5i);
/// ```
fn le(&self, other: &Arc<T>) -> bool { *(*self) <= *(*other) }
/// Greater-than comparison for two `Arc<T>`s.
///
/// The two are compared by calling `>` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five > Arc::new(5i);
/// ```
fn gt(&self, other: &Arc<T>) -> bool { *(*self) > *(*other) }
/// 'Greater-than or equal to' comparison for two `Arc<T>`s.
///
/// The two are compared by calling `>=` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five >= Arc::new(5i);
/// ```
fn ge(&self, other: &Arc<T>) -> bool { *(*self) >= *(*other) }
}
#[stable]
impl<T: Ord> Ord for Arc<T> {
fn cmp(&self, other: &Arc<T>) -> Ordering { (**self).cmp(&**other) }
}
#[stable]
impl<T: Eq> Eq for Arc<T> {}
impl<T: fmt::Show> fmt::Show for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Arc({:?})", (**self))
}
}
#[stable]
impl<T: fmt::String> fmt::String for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::String::fmt(&**self, f)
}
}
#[stable]
impl<T: Default + Sync + Send> Default for Arc<T> {
#[stable]
fn default() -> Arc<T> { Arc::new(Default::default()) }
}
impl<H: Hasher, T: Hash<H>> Hash<H> for Arc<T> {
fn hash(&self, state: &mut H) {
(**self).hash(state)
}
}
#[cfg(test)]
#[allow(experimental)]
mod tests {
use std::clone::Clone;
use std::sync::mpsc::channel;
use std::mem::drop;
use std::ops::Drop;
use std::option::Option;
use std::option::Option::{Some, None};
use std::sync::atomic;
use std::sync::atomic::Ordering::{Acquire, SeqCst};
use std::thread::Thread;
use std::vec::Vec;
use super::{Arc, Weak, weak_count, strong_count};
use std::sync::Mutex;
struct Canary(*mut atomic::AtomicUint);
impl Drop for Canary
{
fn drop(&mut self) {
unsafe {
match *self {
Canary(c) => {
(*c).fetch_add(1, SeqCst);
}
}
}
}
}
#[test]
fn manually_share_arc() {
let v = vec!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
let arc_v = Arc::new(v);
let (tx, rx) = channel();
let _t = Thread::spawn(move || {
let arc_v: Arc<Vec<int>> = rx.recv().unwrap();
assert_eq!((*arc_v)[3], 4);
});
tx.send(arc_v.clone()).unwrap();
assert_eq!((*arc_v)[2], 3);
assert_eq!((*arc_v)[4], 5);
}
#[test]
fn test_cowarc_clone_make_unique() {
let mut cow0 = Arc::new(75u);
let mut cow1 = cow0.clone();
let mut cow2 = cow1.clone();
assert!(75 == *cow0.make_unique());
assert!(75 == *cow1.make_unique());
assert!(75 == *cow2.make_unique());
*cow0.make_unique() += 1;
*cow1.make_unique() += 2;
*cow2.make_unique() += 3;
assert!(76 == *cow0);
assert!(77 == *cow1);
assert!(78 == *cow2);
// none should point to the same backing memory
        assert!(*cow0 != *cow1);
        assert!(*cow0 != *cow2);
        assert!(*cow1 != *cow2);
}
#[test]
fn test_cowarc_clone_unique2() {
let mut cow0 = Arc::new(75u);
let cow1 = cow0.clone();
let cow2 = cow1.clone();
assert!(75 == *cow0);
assert!(75 == *cow1);
assert!(75 == *cow2);
*cow0.make_unique() += 1;
assert!(76 == *cow0);
assert!(75 == *cow1);
assert!(75 == *cow2);
// cow1 and cow2 should share the same contents
// cow0 should have a unique reference
        assert!(*cow0 != *cow1);
        assert!(*cow0 != *cow2);
assert!(*cow1 == *cow2);
}
#[test]
fn test_cowarc_clone_weak() {
let mut cow0 = Arc::new(75u);
let cow1_weak = cow0.downgrade();
assert!(75 == *cow0);
assert!(75 == *cow1_weak.upgrade().unwrap());
*cow0.make_unique() += 1;
assert!(76 == *cow0);
assert!(cow1_weak.upgrade().is_none());
}
#[test]
fn test_live() {
let x = Arc::new(5i);
let y = x.downgrade();
assert!(y.upgrade().is_some());
}
#[test]
fn test_dead() {
let x = Arc::new(5i);
let y = x.downgrade();
drop(x);
assert!(y.upgrade().is_none());
}
#[test]
fn weak_self_cyclic() {
struct Cycle {
x: Mutex<Option<Weak<Cycle>>>
}
let a = Arc::new(Cycle { x: Mutex::new(None) });
let b = a.clone().downgrade();
*a.x.lock().unwrap() = Some(b);
// hopefully we don't double-free (or leak
|
{
let ptr = *self._ptr;
// see comments above for why this check is here
if ptr.is_null() { return }
        // If we find out that we were the last weak pointer, then it's time to deallocate the data
// entirely. See the discussion in Arc::drop() about the memory orderings
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
|
identifier_body
|
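`Weak::upgrade` above loops on a compare-and-swap so a strong count that has reached zero can never be revived. The same loop against a bare `AtomicUsize`, using the current `compare_exchange_weak` API rather than the old `compare_and_swap` (a sketch, not the real `std::sync::Weak` internals):

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

/// Try to bump `strong` from n to n + 1, but never from 0.
/// Returns true if a new strong reference was acquired.
fn try_upgrade(strong: &AtomicUsize) -> bool {
    let mut n = strong.load(Ordering::SeqCst);
    loop {
        if n == 0 {
            return false; // the value was already destroyed
        }
        match strong.compare_exchange_weak(n, n + 1, Ordering::SeqCst, Ordering::SeqCst) {
            Ok(_) => return true,
            Err(current) => n = current, // raced with another thread; retry
        }
    }
}

fn main() {
    let strong = AtomicUsize::new(1);
    assert!(try_upgrade(&strong));
    assert_eq!(strong.load(Ordering::SeqCst), 2);

    let dead = AtomicUsize::new(0);
    assert!(!try_upgrade(&dead));
}
```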
arc.rs
|
in range(0u, 10) {
//! let five = five.clone();
//!
//! Thread::spawn(move || {
//! println!("{:?}", five);
//! });
//! }
//! ```
//!
//! Sharing mutable data safely between tasks with a `Mutex`:
//!
//! ```
//! use std::sync::{Arc, Mutex};
//! use std::thread::Thread;
//!
//! let five = Arc::new(Mutex::new(5i));
//!
//! for _ in range(0u, 10) {
//! let five = five.clone();
//!
//! Thread::spawn(move || {
//! let mut number = five.lock().unwrap();
//!
//! *number += 1;
//!
//! println!("{}", *number); // prints 6
//! });
//! }
//! ```
use core::prelude::*;
use core::atomic;
use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
use core::borrow::BorrowFrom;
use core::fmt::{self, Show};
use core::cmp::{Ordering};
use core::default::Default;
use core::mem::{min_align_of, size_of};
use core::mem;
use core::nonzero::NonZero;
use core::ops::Deref;
use core::ptr;
use core::hash::{Hash, Hasher};
use heap::deallocate;
/// An atomically reference counted wrapper for shared state.
///
/// # Example
///
/// In this example, a large vector of floats is shared between several tasks. With simple pipes,
/// without `Arc`, a copy would have to be made for each task.
///
/// ```rust
/// use std::sync::Arc;
/// use std::thread::Thread;
///
/// fn main() {
/// let numbers: Vec<_> = range(0, 100u32).map(|i| i as f32).collect();
/// let shared_numbers = Arc::new(numbers);
///
/// for _ in range(0u, 10) {
/// let child_numbers = shared_numbers.clone();
///
/// Thread::spawn(move || {
/// let local_numbers = child_numbers.as_slice();
///
/// // Work with the local numbers
/// });
/// }
/// }
/// ```
#[unsafe_no_drop_flag]
#[stable]
pub struct Arc<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: NonZero<*mut ArcInner<T>>,
}
unsafe impl<T: Sync + Send> Send for Arc<T> { }
unsafe impl<T: Sync + Send> Sync for Arc<T> { }
/// A weak pointer to an `Arc`.
///
/// Weak pointers will not keep the data inside of the `Arc` alive, and can be used to break cycles
/// between `Arc` pointers.
#[unsafe_no_drop_flag]
#[experimental = "Weak pointers may not belong in this module."]
pub struct Weak<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: NonZero<*mut ArcInner<T>>,
}
unsafe impl<T: Sync + Send> Send for Weak<T> { }
unsafe impl<T: Sync + Send> Sync for Weak<T> { }
struct ArcInner<T> {
strong: atomic::AtomicUint,
weak: atomic::AtomicUint,
data: T,
}
unsafe impl<T: Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: Sync + Send> Sync for ArcInner<T> {}
impl<T> Arc<T> {
/// Constructs a new `Arc<T>`.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
/// ```
#[inline]
#[stable]
pub fn new(data: T) -> Arc<T> {
// Start the weak pointer count as 1 which is the weak pointer that's
// held by all the strong pointers (kinda), see std/rc.rs for more info
let x = box ArcInner {
strong: atomic::AtomicUint::new(1),
weak: atomic::AtomicUint::new(1),
data: data,
};
Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
}
/// Downgrades the `Arc<T>` to a `Weak<T>` reference.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// let weak_five = five.downgrade();
/// ```
#[experimental = "Weak pointers may not belong in this module."]
pub fn downgrade(&self) -> Weak<T> {
// See the clone() impl for why this is relaxed
self.inner().weak.fetch_add(1, Relaxed);
Weak { _ptr: self._ptr }
}
}
impl<T> Arc<T> {
#[inline]
fn
|
(&self) -> &ArcInner<T> {
// This unsafety is ok because while this arc is alive we're guaranteed that the inner
// pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync`
// because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer
// to these contents.
unsafe { &**self._ptr }
}
}
/// Get the number of weak references to this value.
#[inline]
#[experimental]
pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(SeqCst) - 1 }
/// Get the number of strong references to this value.
#[inline]
#[experimental]
pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(SeqCst) }
#[stable]
impl<T> Clone for Arc<T> {
/// Makes a clone of the `Arc<T>`.
///
/// This increases the strong reference count.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five.clone();
/// ```
#[inline]
fn clone(&self) -> Arc<T> {
// Using a relaxed ordering is alright here, as knowledge of the original reference
// prevents other threads from erroneously deleting the object.
//
// As explained in the [Boost documentation][1], Increasing the reference counter can
// always be done with memory_order_relaxed: New references to an object can only be formed
// from an existing reference, and passing an existing reference from one thread to another
// must already provide any required synchronization.
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
self.inner().strong.fetch_add(1, Relaxed);
Arc { _ptr: self._ptr }
}
}
impl<T> BorrowFrom<Arc<T>> for T {
fn borrow_from(owned: &Arc<T>) -> &T {
&**owned
}
}
#[stable]
impl<T> Deref for Arc<T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
&self.inner().data
}
}
impl<T: Send + Sync + Clone> Arc<T> {
/// Make a mutable reference from the given `Arc<T>`.
///
/// This is also referred to as a copy-on-write operation because the inner data is cloned if
/// the reference count is greater than one.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let mut five = Arc::new(5i);
///
/// let mut_five = five.make_unique();
/// ```
#[inline]
#[experimental]
pub fn make_unique(&mut self) -> &mut T {
// Note that we hold a strong reference, which also counts as a weak reference, so we only
// clone if there is an additional reference of either kind.
        if self.inner().strong.load(SeqCst) != 1 ||
           self.inner().weak.load(SeqCst) != 1 {
*self = Arc::new((**self).clone())
}
// This unsafety is ok because we're guaranteed that the pointer returned is the *only*
// pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
// this point, and we required the Arc itself to be `mut`, so we're returning the only
// possible reference to the inner data.
let inner = unsafe { &mut **self._ptr };
&mut inner.data
}
}
#[unsafe_destructor]
#[stable]
impl<T: Sync + Send> Drop for Arc<T> {
/// Drops the `Arc<T>`.
///
/// This will decrement the strong reference count. If the strong reference count becomes zero
/// and the only other references are `Weak<T>` ones, `drop`s the inner value.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// {
/// let five = Arc::new(5i);
///
/// // stuff
///
    /// drop(five); // explicit drop
/// }
/// {
/// let five = Arc::new(5i);
///
/// // stuff
///
/// } // implicit drop
/// ```
fn drop(&mut self) {
// This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
// it is guaranteed to be zeroed after the first if it's run more than once)
let ptr = *self._ptr;
if ptr.is_null() { return }
// Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
// unless we are going to delete the object. This same logic applies to the below
// `fetch_sub` to the `weak` count.
        if self.inner().strong.fetch_sub(1, Release) != 1 { return }
// This fence is needed to prevent reordering of use of the data and deletion of the data.
// Because it is marked `Release`, the decreasing of the reference count synchronizes with
// this `Acquire` fence. This means that use of the data happens before decreasing the
// reference count, which happens before this fence, which happens before the deletion of
// the data.
//
// As explained in the [Boost documentation][1],
//
// > It is important to enforce any possible access to the object in one thread (through an
// > existing reference) to *happen before* deleting the object in a different thread. This
// > is achieved by a "release" operation after dropping a reference (any access to the
        // > object through this reference must obviously have happened before), and an "acquire"
// > operation before deleting the object.
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
atomic::fence(Acquire);
// Destroy the data at this time, even though we may not free the box allocation itself
// (there may still be weak pointers lying around).
unsafe { drop(ptr::read(&self.inner().data)); }
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
}
#[experimental = "Weak pointers may not belong in this module."]
impl<T: Sync + Send> Weak<T> {
/// Upgrades a weak reference to a strong reference.
///
/// Upgrades the `Weak<T>` reference to an `Arc<T>`, if possible.
///
/// Returns `None` if there were no strong references and the data was destroyed.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// let weak_five = five.downgrade();
///
/// let strong_five: Option<Arc<_>> = weak_five.upgrade();
/// ```
pub fn upgrade(&self) -> Option<Arc<T>> {
// We use a CAS loop to increment the strong count instead of a fetch_add because once the
        // count hits 0 it must never be above 0.
let inner = self.inner();
loop {
let n = inner.strong.load(SeqCst);
if n == 0 { return None }
let old = inner.strong.compare_and_swap(n, n + 1, SeqCst);
if old == n { return Some(Arc { _ptr: self._ptr }) }
}
}
#[inline]
fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe"
unsafe { &**self._ptr }
}
}
#[experimental = "Weak pointers may not belong in this module."]
impl<T: Sync + Send> Clone for Weak<T> {
/// Makes a clone of the `Weak<T>`.
///
/// This increases the weak reference count.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let weak_five = Arc::new(5i).downgrade();
///
/// weak_five.clone();
/// ```
#[inline]
fn clone(&self) -> Weak<T> {
// See comments in Arc::clone() for why this is relaxed
self.inner().weak.fetch_add(1, Relaxed);
Weak { _ptr: self._ptr }
}
}
#[unsafe_destructor]
#[stable]
impl<T: Sync + Send> Drop for Weak<T> {
/// Drops the `Weak<T>`.
///
/// This will decrement the weak reference count.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// {
/// let five = Arc::new(5i);
/// let weak_five = five.downgrade();
///
/// // stuff
///
    /// drop(weak_five); // explicit drop
/// }
/// {
/// let five = Arc::new(5i);
/// let weak_five = five.downgrade();
///
/// // stuff
///
/// } // implicit drop
/// ```
fn drop(&mut self) {
let ptr = *self._ptr;
// see comments above for why this check is here
if ptr.is_null() { return }
        // If we find out that we were the last weak pointer, then it's time to deallocate the data
// entirely. See the discussion in Arc::drop() about the memory orderings
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
}
#[stable]
impl<T: PartialEq> PartialEq for Arc<T> {
/// Equality for two `Arc<T>`s.
///
    /// Two `Arc<T>`s are equal if their inner values are equal.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five == Arc::new(5i);
/// ```
fn eq(&self, other: &Arc<T>) -> bool { *(*self) == *(*other) }
/// Inequality for two `Arc<T>`s.
///
    /// Two `Arc<T>`s are unequal if their inner values are unequal.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
    /// five != Arc::new(5i);
/// ```
    fn ne(&self, other: &Arc<T>) -> bool { *(*self) != *(*other) }
}
#[stable]
impl<T: PartialOrd> PartialOrd for Arc<T> {
/// Partial comparison for two `Arc<T>`s.
///
/// The two are compared by calling `partial_cmp()` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five.partial_cmp(&Arc::new(5i));
/// ```
fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
/// Less-than comparison for two `Arc<T>`s.
///
/// The two are compared by calling `<` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five < Arc::new(5i);
/// ```
fn lt(&self, other: &Arc<T>) -> bool { *(*self) < *(*other) }
/// 'Less-than or equal to' comparison for two `Arc<T>`s.
///
/// The two are compared by calling `<=` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five <= Arc::new(5i);
/// ```
fn le(&self, other: &Arc<T>) -> bool { *(*self) <= *(*other) }
/// Greater-than comparison for two `Arc<T>`s.
///
/// The two are compared by calling `>` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five > Arc::new(5i);
/// ```
fn gt(&self, other: &Arc<T>) -> bool { *(*self) > *(*other) }
/// 'Greater-than or equal to' comparison for two `Arc<T>`s.
///
/// The two are compared by calling `>=` on their inner values.
///
/// # Examples
///
/// ```
/// use std::sync::Arc;
///
/// let five = Arc::new(5i);
///
/// five >= Arc::new(5i);
/// ```
fn ge(&self, other: &Arc<T>) -> bool { *(*self) >= *(*other) }
}
#[stable]
impl<T: Ord> Ord for Arc<T> {
fn cmp(&self, other: &Arc<T>) -> Ordering { (**self).cmp(&**other) }
}
#[stable]
impl<T: Eq> Eq for Arc<T> {}
impl<T: fmt::Show> fmt::Show for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Arc({:?})", (**self))
}
}
#[stable]
impl<T: fmt::String> fmt::String for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::String::fmt(&**self, f)
}
}
#[stable]
impl<T: Default + Sync + Send> Default for Arc<T> {
#[stable]
fn default() -> Arc<T> { Arc::new(Default::default()) }
}
impl<H: Hasher, T: Hash<H>> Hash<H> for Arc<T> {
fn hash(&self, state: &mut H) {
(**self).hash(state)
}
}
#[cfg(test)]
#[allow(experimental)]
mod tests {
use std::clone::Clone;
use std::sync::mpsc::channel;
use std::mem::drop;
use std::ops::Drop;
use std::option::Option;
use std::option::Option::{Some, None};
use std::sync::atomic;
use std::sync::atomic::Ordering::{Acquire, SeqCst};
use std::thread::Thread;
use std::vec::Vec;
use super::{Arc, Weak, weak_count, strong_count};
use std::sync::Mutex;
struct Canary(*mut atomic::AtomicUint);
impl Drop for Canary
{
fn drop(&mut self) {
unsafe {
match *self {
Canary(c) => {
(*c).fetch_add(1, SeqCst);
}
}
}
}
}
#[test]
fn manually_share_arc() {
let v = vec!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
let arc_v = Arc::new(v);
let (tx, rx) = channel();
let _t = Thread::spawn(move || {
let arc_v: Arc<Vec<int>> = rx.recv().unwrap();
assert_eq!((*arc_v)[3], 4);
});
tx.send(arc_v.clone()).unwrap();
assert_eq!((*arc_v)[2], 3);
assert_eq!((*arc_v)[4], 5);
}
#[test]
fn test_cowarc_clone_make_unique() {
let mut cow0 = Arc::new(75u);
let mut cow1 = cow0.clone();
let mut cow2 = cow1.clone();
assert!(75 == *cow0.make_unique());
assert!(75 == *cow1.make_unique());
assert!(75 == *cow2.make_unique());
*cow0.make_unique() += 1;
*cow1.make_unique() += 2;
*cow2.make_unique() += 3;
assert!(76 == *cow0);
assert!(77 == *cow1);
assert!(78 == *cow2);
// none should point to the same backing memory
        assert!(*cow0 != *cow1);
        assert!(*cow0 != *cow2);
        assert!(*cow1 != *cow2);
}
#[test]
fn test_cowarc_clone_unique2() {
let mut cow0 = Arc::new(75u);
let cow1 = cow0.clone();
let cow2 = cow1.clone();
assert!(75 == *cow0);
assert!(75 == *cow1);
assert!(75 == *cow2);
*cow0.make_unique() += 1;
assert!(76 == *cow0);
assert!(75 == *cow1);
assert!(75 == *cow2);
// cow1 and cow2 should share the same contents
// cow0 should have a unique reference
        assert!(*cow0 != *cow1);
        assert!(*cow0 != *cow2);
assert!(*cow1 == *cow2);
}
#[test]
fn test_cowarc_clone_weak() {
let mut cow0 = Arc::new(75u);
let cow1_weak = cow0.downgrade();
assert!(75 == *cow0);
assert!(75 == *cow1_weak.upgrade().unwrap());
*cow0.make_unique() += 1;
assert!(76 == *cow0);
assert!(cow1_weak.upgrade().is_none());
}
#[test]
fn test_live() {
let x = Arc::new(5i);
let y = x.downgrade();
assert!(y.upgrade().is_some());
}
#[test]
fn test_dead() {
let x = Arc::new(5i);
let y = x.downgrade();
drop(x);
assert!(y.upgrade().is_none());
}
#[test]
fn weak_self_cyclic() {
struct Cycle {
x: Mutex<Option<Weak<Cycle>>>
}
let a = Arc::new(Cycle { x: Mutex::new(None) });
let b = a.clone().downgrade();
*a.x.lock().unwrap() = Some(b);
// hopefully we don't double-free (or leak
|
inner
|
identifier_name
|
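`make_unique` above is the copy-on-write entry point: it clones the inner value only when other strong or weak references exist. Current Rust exposes the same behaviour as `Arc::make_mut`; a short usage sketch:

```rust
use std::sync::Arc;

fn main() {
    let mut a = Arc::new(vec![1, 2, 3]);
    let b = Arc::clone(&a);

    // `a` is shared, so make_mut clones the vector before handing out &mut.
    Arc::make_mut(&mut a).push(4);

    assert_eq!(*a, vec![1, 2, 3, 4]);
    assert_eq!(*b, vec![1, 2, 3]); // the other handle still sees the old data
}
```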
misc.rs
|
//! Misc tests
|
use reversi::turn::*;
/// Checks `turn::check_move` method on starting turn.
#[test]
fn test_first_turn() {
let first_turn = Turn::first_turn();
for row in 0..BOARD_SIZE {
for col in 0..BOARD_SIZE {
let coord = Coord::new(row, col);
assert_eq!(first_turn.check_move(coord).is_ok(), match coord.get_row_col() {
(2, 3) | (3, 2) | (4, 5) | (5, 4) => true,
_ => false,
}, "fails at {:?} because {:?}", coord, first_turn.check_move(coord))
}
}
}
#[test]
fn test_second_turn() {
let mut second_turn = Turn::first_turn();
second_turn.make_move(Coord::new(2, 3)).expect("Is this move illegal?");
for row in 0..BOARD_SIZE {
for col in 0..BOARD_SIZE {
let coord = Coord::new(row, col);
assert_eq!(second_turn.check_move(coord).is_ok(), match coord.get_row_col() {
(2, 2) | (2, 4) | (4, 2) => true,
_ => false,
}, "fails at {:?} because {:?}", coord, second_turn.check_move(coord))
}
}
}
|
extern crate reversi;
use reversi::board::*;
|
random_line_split
|
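The two tests above sweep every square and compare `check_move` against a hard-coded set of legal openings. A small helper in the same spirit, written against the `reversi` API exactly as it appears in these tests (the crate's wider API is assumed, not verified), collects the legal moves instead of asserting them square by square:

```rust
extern crate reversi;

use reversi::board::*;
use reversi::turn::*;

/// Collect every coordinate the given turn accepts as a legal move.
fn legal_moves(turn: &Turn) -> Vec<Coord> {
    let mut moves = Vec::new();
    for row in 0..BOARD_SIZE {
        for col in 0..BOARD_SIZE {
            let coord = Coord::new(row, col);
            if turn.check_move(coord).is_ok() {
                moves.push(coord);
            }
        }
    }
    moves
}

#[test]
fn first_turn_has_four_legal_moves() {
    // The opening position of reversi always offers exactly four moves.
    assert_eq!(legal_moves(&Turn::first_turn()).len(), 4);
}
```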
misc.rs
|
//! Misc tests
extern crate reversi;
use reversi::board::*;
use reversi::turn::*;
/// Checks `turn::check_move` method on starting turn.
#[test]
fn test_first_turn()
|
#[test]
fn test_second_turn() {
let mut second_turn = Turn::first_turn();
second_turn.make_move(Coord::new(2, 3)).expect("Is this move illegal?");
for row in 0..BOARD_SIZE {
for col in 0..BOARD_SIZE {
let coord = Coord::new(row, col);
assert_eq!(second_turn.check_move(coord).is_ok(), match coord.get_row_col() {
(2, 2) | (2, 4) | (4, 2) => true,
_ => false,
}, "fails at {:?} because {:?}", coord, second_turn.check_move(coord))
}
}
}
|
{
let first_turn = Turn::first_turn();
for row in 0..BOARD_SIZE {
for col in 0..BOARD_SIZE {
let coord = Coord::new(row, col);
assert_eq!(first_turn.check_move(coord).is_ok(), match coord.get_row_col() {
(2, 3) | (3, 2) | (4, 5) | (5, 4) => true,
_ => false,
}, "fails at {:?} because {:?}", coord, first_turn.check_move(coord))
}
}
}
|
identifier_body
|
misc.rs
|
//! Misc tests
extern crate reversi;
use reversi::board::*;
use reversi::turn::*;
/// Checks `turn::check_move` method on starting turn.
#[test]
fn test_first_turn() {
let first_turn = Turn::first_turn();
for row in 0..BOARD_SIZE {
for col in 0..BOARD_SIZE {
let coord = Coord::new(row, col);
assert_eq!(first_turn.check_move(coord).is_ok(), match coord.get_row_col() {
(2, 3) | (3, 2) | (4, 5) | (5, 4) => true,
_ => false,
}, "fails at {:?} because {:?}", coord, first_turn.check_move(coord))
}
}
}
#[test]
fn
|
() {
let mut second_turn = Turn::first_turn();
second_turn.make_move(Coord::new(2, 3)).expect("Is this move illegal?");
for row in 0..BOARD_SIZE {
for col in 0..BOARD_SIZE {
let coord = Coord::new(row, col);
assert_eq!(second_turn.check_move(coord).is_ok(), match coord.get_row_col() {
(2, 2) | (2, 4) | (4, 2) => true,
_ => false,
}, "fails at {:?} because {:?}", coord, second_turn.check_move(coord))
}
}
}
|
test_second_turn
|
identifier_name
|
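Each row of this dump is one fill-in-the-middle example: `middle` is the span cut out of the file and `fim_type` records what kind of span it was (`random_line_split`, `identifier_name`, `identifier_body`, or `conditional_block`). Reconstructing the source is plain concatenation of `prefix + middle + suffix`; a small sketch (struct and sample values are illustrative, not part of any published loader):

```rust
/// One fill-in-the-middle example as laid out in this dump.
struct FimExample {
    file_name: String,
    prefix: String,
    middle: String,
    suffix: String,
    fim_type: String,
}

impl FimExample {
    /// The original source is prefix + middle + suffix, in that order.
    fn reconstruct(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }
}

fn main() {
    let ex = FimExample {
        file_name: "misc.rs".to_string(),
        prefix: "fn test_second".to_string(),
        middle: "_turn".to_string(),
        suffix: "() {}".to_string(),
        fim_type: "identifier_name".to_string(),
    };
    println!("{} ({})", ex.file_name, ex.fim_type);
    assert_eq!(ex.reconstruct(), "fn test_second_turn() {}");
}
```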
pubcomp.rs
|
use std::io::{Read, Write};
use control::{FixedHeader, PacketType, ControlType};
use control::variable_header::PacketIdentifier;
use packet::{Packet, PacketError};
use {Encodable, Decodable};
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct PubcompPacket {
fixed_header: FixedHeader,
packet_identifier: PacketIdentifier,
payload: (),
}
impl PubcompPacket {
pub fn new(pkid: u16) -> PubcompPacket {
PubcompPacket {
fixed_header: FixedHeader::new(PacketType::with_default(ControlType::PublishComplete), 2),
packet_identifier: PacketIdentifier(pkid),
payload: (),
}
}
|
self.packet_identifier.0
}
pub fn set_packet_identifier(&mut self, pkid: u16) {
self.packet_identifier.0 = pkid;
}
}
impl<'a> Packet<'a> for PubcompPacket {
type Payload = ();
fn fixed_header(&self) -> &FixedHeader {
&self.fixed_header
}
fn payload(&self) -> &Self::Payload {
&self.payload
}
fn encode_variable_headers<W: Write>(&self, writer: &mut W) -> Result<(), PacketError<'a, Self>> {
try!(self.packet_identifier.encode(writer));
Ok(())
}
fn encoded_variable_headers_length(&self) -> u32 {
self.packet_identifier.encoded_length()
}
fn decode_packet<R: Read>(reader: &mut R, fixed_header: FixedHeader) -> Result<Self, PacketError<'a, Self>> {
let packet_identifier: PacketIdentifier = try!(PacketIdentifier::decode(reader));
Ok(PubcompPacket {
fixed_header: fixed_header,
packet_identifier: packet_identifier,
payload: (),
})
}
}
|
pub fn packet_identifier(&self) -> u16 {
|
random_line_split
|
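A PUBCOMP packet is tiny on the wire: a fixed-header byte with control type 7 in the high nibble, a remaining length of 2, and the two-byte packet identifier that the variable header above encodes. A dependency-free sketch of that layout, writing the four bytes by hand rather than through the crate's `Encodable` machinery:

```rust
/// Encode an MQTT 3.1.1 PUBCOMP packet for the given packet identifier.
fn encode_pubcomp(pkid: u16) -> [u8; 4] {
    [
        0x70,                // control packet type 7 (PUBCOMP), flags 0
        0x02,                // remaining length: just the packet identifier
        (pkid >> 8) as u8,   // packet identifier MSB
        (pkid & 0xFF) as u8, // packet identifier LSB
    ]
}

fn main() {
    assert_eq!(encode_pubcomp(10), [0x70, 0x02, 0x00, 0x0A]);
}
```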
pubcomp.rs
|
use std::io::{Read, Write};
use control::{FixedHeader, PacketType, ControlType};
use control::variable_header::PacketIdentifier;
use packet::{Packet, PacketError};
use {Encodable, Decodable};
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct PubcompPacket {
fixed_header: FixedHeader,
packet_identifier: PacketIdentifier,
payload: (),
}
impl PubcompPacket {
pub fn new(pkid: u16) -> PubcompPacket {
PubcompPacket {
fixed_header: FixedHeader::new(PacketType::with_default(ControlType::PublishComplete), 2),
packet_identifier: PacketIdentifier(pkid),
payload: (),
}
}
pub fn packet_identifier(&self) -> u16 {
self.packet_identifier.0
}
pub fn set_packet_identifier(&mut self, pkid: u16) {
self.packet_identifier.0 = pkid;
}
}
impl<'a> Packet<'a> for PubcompPacket {
type Payload = ();
fn fixed_header(&self) -> &FixedHeader
|
fn payload(&self) -> &Self::Payload {
&self.payload
}
fn encode_variable_headers<W: Write>(&self, writer: &mut W) -> Result<(), PacketError<'a, Self>> {
try!(self.packet_identifier.encode(writer));
Ok(())
}
fn encoded_variable_headers_length(&self) -> u32 {
self.packet_identifier.encoded_length()
}
fn decode_packet<R: Read>(reader: &mut R, fixed_header: FixedHeader) -> Result<Self, PacketError<'a, Self>> {
let packet_identifier: PacketIdentifier = try!(PacketIdentifier::decode(reader));
Ok(PubcompPacket {
fixed_header: fixed_header,
packet_identifier: packet_identifier,
payload: (),
})
}
}
|
{
&self.fixed_header
}
|
identifier_body
|
pubcomp.rs
|
use std::io::{Read, Write};
use control::{FixedHeader, PacketType, ControlType};
use control::variable_header::PacketIdentifier;
use packet::{Packet, PacketError};
use {Encodable, Decodable};
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct PubcompPacket {
fixed_header: FixedHeader,
packet_identifier: PacketIdentifier,
payload: (),
}
impl PubcompPacket {
pub fn new(pkid: u16) -> PubcompPacket {
PubcompPacket {
fixed_header: FixedHeader::new(PacketType::with_default(ControlType::PublishComplete), 2),
packet_identifier: PacketIdentifier(pkid),
payload: (),
}
}
pub fn packet_identifier(&self) -> u16 {
self.packet_identifier.0
}
pub fn set_packet_identifier(&mut self, pkid: u16) {
self.packet_identifier.0 = pkid;
}
}
impl<'a> Packet<'a> for PubcompPacket {
type Payload = ();
fn fixed_header(&self) -> &FixedHeader {
&self.fixed_header
}
fn payload(&self) -> &Self::Payload {
&self.payload
}
fn encode_variable_headers<W: Write>(&self, writer: &mut W) -> Result<(), PacketError<'a, Self>> {
try!(self.packet_identifier.encode(writer));
Ok(())
}
fn
|
(&self) -> u32 {
self.packet_identifier.encoded_length()
}
fn decode_packet<R: Read>(reader: &mut R, fixed_header: FixedHeader) -> Result<Self, PacketError<'a, Self>> {
let packet_identifier: PacketIdentifier = try!(PacketIdentifier::decode(reader));
Ok(PubcompPacket {
fixed_header: fixed_header,
packet_identifier: packet_identifier,
payload: (),
})
}
}
|
encoded_variable_headers_length
|
identifier_name
|
accept_encoding.rs
|
use header::{Encoding, QualityItem};
header! {
#[doc="`Accept-Encoding` header, defined in"]
#[doc="[RFC7231](http://tools.ietf.org/html/rfc7231#section-5.3.4)"]
#[doc=""]
#[doc="The `Accept-Encoding` header field can be used by user agents to"]
#[doc="indicate what response content-codings are"]
#[doc="acceptable in the response. An `identity` token is used as a synonym"]
#[doc="for \"no encoding\" in order to communicate when no encoding is"]
#[doc="preferred."]
#[doc=""]
#[doc="# ABNF"]
#[doc="```plain"]
#[doc="Accept-Encoding = #( codings [ weight ] )"]
#[doc="codings = content-coding / \"identity\" / \"*\""]
#[doc="```"]
(AcceptEncoding, "Accept-Encoding") => (QualityItem<Encoding>)*
}
#[cfg(test)]
mod tests {
use header::{Encoding, Header, qitem, Quality, QualityItem};
use super::*;
#[test]
fn test_parse_header()
|
}
|
{
let a: AcceptEncoding = Header::parse_header([b"gzip;q=1.0, identity; q=0.5".to_vec()].as_ref()).unwrap();
let b = AcceptEncoding(vec![
qitem(Encoding::Gzip),
QualityItem::new(Encoding::Identity, Quality(500)),
]);
assert_eq!(a, b);
}
|
identifier_body
|
accept_encoding.rs
|
use header::{Encoding, QualityItem};
header! {
#[doc="`Accept-Encoding` header, defined in"]
#[doc="[RFC7231](http://tools.ietf.org/html/rfc7231#section-5.3.4)"]
#[doc=""]
#[doc="The `Accept-Encoding` header field can be used by user agents to"]
#[doc="indicate what response content-codings are"]
#[doc="acceptable in the response. An `identity` token is used as a synonym"]
#[doc="for \"no encoding\" in order to communicate when no encoding is"]
#[doc="preferred."]
#[doc=""]
#[doc="# ABNF"]
#[doc="```plain"]
#[doc="Accept-Encoding = #( codings [ weight ] )"]
#[doc="codings = content-coding / \"identity\" / \"*\""]
#[doc="```"]
(AcceptEncoding, "Accept-Encoding") => (QualityItem<Encoding>)*
}
#[cfg(test)]
mod tests {
use header::{Encoding, Header, qitem, Quality, QualityItem};
use super::*;
#[test]
fn
|
() {
let a: AcceptEncoding = Header::parse_header([b"gzip;q=1.0, identity; q=0.5".to_vec()].as_ref()).unwrap();
let b = AcceptEncoding(vec![
qitem(Encoding::Gzip),
QualityItem::new(Encoding::Identity, Quality(500)),
]);
assert_eq!(a, b);
}
}
|
test_parse_header
|
identifier_name
|
accept_encoding.rs
|
use header::{Encoding, QualityItem};
|
#[doc="The `Accept-Encoding` header field can be used by user agents to"]
#[doc="indicate what response content-codings are"]
#[doc="acceptable in the response. An `identity` token is used as a synonym"]
#[doc="for \"no encoding\" in order to communicate when no encoding is"]
#[doc="preferred."]
#[doc=""]
#[doc="# ABNF"]
#[doc="```plain"]
#[doc="Accept-Encoding = #( codings [ weight ] )"]
#[doc="codings = content-coding / \"identity\" / \"*\""]
#[doc="```"]
(AcceptEncoding, "Accept-Encoding") => (QualityItem<Encoding>)*
}
#[cfg(test)]
mod tests {
use header::{Encoding, Header, qitem, Quality, QualityItem};
use super::*;
#[test]
fn test_parse_header() {
let a: AcceptEncoding = Header::parse_header([b"gzip;q=1.0, identity; q=0.5".to_vec()].as_ref()).unwrap();
let b = AcceptEncoding(vec![
qitem(Encoding::Gzip),
QualityItem::new(Encoding::Identity, Quality(500)),
]);
assert_eq!(a, b);
}
}
|
header! {
#[doc="`Accept-Encoding` header, defined in"]
#[doc="[RFC7231](http://tools.ietf.org/html/rfc7231#section-5.3.4)"]
#[doc=""]
|
random_line_split
|
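The unit test in the rows above already shows how the generated AcceptEncoding header is parsed; the sketch below restates that test as standalone usage, assuming the same header module and generated type are in scope. The raw bytes and quality values mirror the test rather than introducing anything new.

// Parse an Accept-Encoding value into quality-weighted encodings,
// mirroring the test shown in the rows above.
fn accept_encoding_sketch() {
    use header::{Encoding, Header, Quality, QualityItem, qitem};

    let raw = [b"gzip;q=1.0, identity; q=0.5".to_vec()];
    let parsed: AcceptEncoding = Header::parse_header(raw.as_ref()).unwrap();

    let expected = AcceptEncoding(vec![
        qitem(Encoding::Gzip),                              // q defaults to 1.0
        QualityItem::new(Encoding::Identity, Quality(500)), // q=0.5 is stored as 500/1000
    ]);
    assert_eq!(parsed, expected);
}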
texture.rs
|
// Copyright 2017 The gltf Library Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
enum_number! {
Filter {
Nearest = 9728,
Linear = 9729,
NearestMipmapNearest = 9984,
LinearMipmapNearest = 9985,
NearestMipmapLinear = 9986,
LinearMipmapLinear = 9987,
}
}
impl Default for Filter {
fn default() -> Filter {
Filter::Nearest
}
}
enum_number! {
Wrap {
Repeat = 10497,
ClampToEdge = 33071,
MirroredRepeat = 33648,
}
}
impl Default for Wrap {
fn default() -> Wrap {
Wrap::Repeat
}
}
enum_number! {
Format {
Alpha = 6406,
Rgb = 6407,
Rgba = 6408,
Luminance = 6409,
LuminanceAlpha = 6410,
}
}
impl Default for Format {
fn default() -> Format {
Format::Rgba
}
}
enum_number! {
Target {
Texture2d = 3553,
}
}
impl Default for Target {
fn default() -> Target {
Target::Texture2d
}
}
enum_number! {
TexelType {
U8 = 5121,
U16R5G6B5 = 33635,
U16R4G4B4A4 = 32819,
U16R5G5B5A1 = 32820,
}
}
impl Default for TexelType {
fn default() -> TexelType
|
}
#[derive(Debug, Default, Deserialize, Serialize)]
pub struct Texture {
/// The texture's format.
#[serde(default)]
pub format: Format,
/// The texture's internal format.
#[serde(rename = "internalFormat")]
#[serde(default)]
pub internal_format: Format,
/// The ID of the sampler used by this texture.
pub sampler: String,
/// The ID of the image used by this texture.
pub source: String,
/// The target that the WebGL texture should be bound to.
#[serde(default)]
pub target: Target,
/// Texel datatype.
#[serde(rename = "type")]
#[serde(default)]
pub kind: TexelType,
/// The user-defined name of this object.
///
/// This is not necessarily unique, e.g., a texture and a buffer could have
/// the same name, or two textures could even have the same name.
pub name: Option<String>,
}
|
{
TexelType::U8
}
|
identifier_body
|
texture.rs
|
// Copyright 2017 The gltf Library Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
enum_number! {
Filter {
Nearest = 9728,
Linear = 9729,
NearestMipmapNearest = 9984,
LinearMipmapNearest = 9985,
NearestMipmapLinear = 9986,
LinearMipmapLinear = 9987,
}
}
impl Default for Filter {
fn default() -> Filter {
Filter::Nearest
}
}
enum_number! {
Wrap {
Repeat = 10497,
ClampToEdge = 33071,
MirroredRepeat = 33648,
}
}
impl Default for Wrap {
fn
|
() -> Wrap {
Wrap::Repeat
}
}
enum_number! {
Format {
Alpha = 6406,
Rgb = 6407,
Rgba = 6408,
Luminance = 6409,
LuminanceAlpha = 6410,
}
}
impl Default for Format {
fn default() -> Format {
Format::Rgba
}
}
enum_number! {
Target {
Texture2d = 3553,
}
}
impl Default for Target {
fn default() -> Target {
Target::Texture2d
}
}
enum_number! {
TexelType {
U8 = 5121,
U16R5G6B5 = 33635,
U16R4G4B4A4 = 32819,
U16R5G5B5A1 = 32820,
}
}
impl Default for TexelType {
fn default() -> TexelType {
TexelType::U8
}
}
#[derive(Debug, Default, Deserialize, Serialize)]
pub struct Texture {
/// The texture's format.
#[serde(default)]
pub format: Format,
/// The texture's internal format.
#[serde(rename = "internalFormat")]
#[serde(default)]
pub internal_format: Format,
/// The ID of the sampler used by this texture.
pub sampler: String,
/// The ID of the image used by this texture.
pub source: String,
/// The target that the WebGL texture should be bound to.
#[serde(default)]
pub target: Target,
/// Texel datatype.
#[serde(rename = "type")]
#[serde(default)]
pub kind: TexelType,
/// The user-defined name of this object.
///
/// This is not necessarily unique, e.g., a texture and a buffer could have
/// the same name, or two textures could even have the same name.
pub name: Option<String>,
}
|
default
|
identifier_name
|
texture.rs
|
// Copyright 2017 The gltf Library Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
enum_number! {
Filter {
Nearest = 9728,
Linear = 9729,
NearestMipmapNearest = 9984,
LinearMipmapNearest = 9985,
NearestMipmapLinear = 9986,
LinearMipmapLinear = 9987,
}
}
impl Default for Filter {
fn default() -> Filter {
Filter::Nearest
}
}
enum_number! {
Wrap {
Repeat = 10497,
ClampToEdge = 33071,
MirroredRepeat = 33648,
}
}
impl Default for Wrap {
fn default() -> Wrap {
Wrap::Repeat
}
}
enum_number! {
Format {
Alpha = 6406,
Rgb = 6407,
Rgba = 6408,
Luminance = 6409,
LuminanceAlpha = 6410,
}
}
impl Default for Format {
fn default() -> Format {
Format::Rgba
}
}
enum_number! {
Target {
Texture2d = 3553,
}
}
impl Default for Target {
fn default() -> Target {
Target::Texture2d
}
}
enum_number! {
TexelType {
U8 = 5121,
U16R5G6B5 = 33635,
U16R4G4B4A4 = 32819,
U16R5G5B5A1 = 32820,
}
}
impl Default for TexelType {
fn default() -> TexelType {
TexelType::U8
}
}
#[derive(Debug, Default, Deserialize, Serialize)]
pub struct Texture {
/// The texture's format.
#[serde(default)]
pub format: Format,
/// The texture's internal format.
#[serde(rename = "internalFormat")]
#[serde(default)]
pub internal_format: Format,
/// The ID of the sampler used by this texture.
pub sampler: String,
/// The ID of the image used by this texture.
pub source: String,
/// The target that the WebGL texture should be bound to.
#[serde(default)]
pub target: Target,
/// Texel datatype.
#[serde(rename = "type")]
#[serde(default)]
pub kind: TexelType,
/// The user-defined name of this object.
|
}
|
///
/// This is not necessarily unique, e.g., a texture and a buffer could have
/// the same name, or two textures could even have the same name.
pub name: Option<String>,
|
random_line_split
|
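Every field of the Texture struct above either derives Default or carries #[serde(default)], so the idiomatic way to build one is functional-update syntax; the sketch below assumes the type is in scope, and the sampler/image IDs are made-up placeholders.

// Construct a Texture, relying on the Default impls shown above
// (Format::Rgba, Target::Texture2d, TexelType::U8, name = None, ...).
fn texture_sketch() -> Texture {
    Texture {
        sampler: "sampler_0".to_string(), // hypothetical sampler ID
        source: "image_0".to_string(),    // hypothetical image ID
        ..Default::default()              // format, target, kind, name, ...
    }
}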
error_codes_check.rs
|
//! Checks that all error codes have at least one test to prevent having error
//! codes that are silently not thrown by the compiler anymore.
use std::collections::HashMap;
use std::ffi::OsStr;
use std::fs::read_to_string;
use std::path::Path;
use regex::Regex;
// A few of those error codes can't be tested but all the others can and *should* be tested!
const EXEMPTED_FROM_TEST: &[&str] = &[
"E0227", "E0279", "E0280", "E0313", "E0377", "E0461", "E0462", "E0464", "E0465", "E0476",
"E0482", "E0514", "E0519", "E0523", "E0554", "E0640", "E0717", "E0729",
];
// Some error codes don't have any tests apparently...
const IGNORE_EXPLANATION_CHECK: &[&str] = &["E0570", "E0601", "E0602", "E0729"];
// If the file path contains any of these, we don't want to try to extract error codes from it.
//
// We need to declare each path in the windows version (with backslash).
const PATHS_TO_IGNORE_FOR_EXTRACTION: &[&str] =
&["src/test/", "src\\test\\", "src/doc/", "src\\doc\\", "src/tools/", "src\\tools\\"];
#[derive(Default, Debug)]
struct ErrorCodeStatus {
has_test: bool,
has_explanation: bool,
is_used: bool,
}
fn check_error_code_explanation(
f: &str,
error_codes: &mut HashMap<String, ErrorCodeStatus>,
err_code: String,
) -> bool {
let mut invalid_compile_fail_format = false;
let mut found_error_code = false;
for line in f.lines() {
let s = line.trim();
if s.starts_with("```") {
if s.contains("compile_fail") && s.contains('E') {
                if !found_error_code {
error_codes.get_mut(&err_code).map(|x| x.has_test = true);
found_error_code = true;
}
} else if s.contains("compile-fail")
|
} else if s.starts_with("#### Note: this error code is no longer emitted by the compiler") {
            if !found_error_code {
error_codes.get_mut(&err_code).map(|x| x.has_test = true);
found_error_code = true;
}
}
}
invalid_compile_fail_format
}
fn check_if_error_code_is_test_in_explanation(f: &str, err_code: &str) -> bool {
let mut ignore_found = false;
for line in f.lines() {
let s = line.trim();
if s.starts_with("#### Note: this error code is no longer emitted by the compiler") {
return true;
}
if s.starts_with("```") {
if s.contains("compile_fail") && s.contains(err_code) {
return true;
} else if s.contains("ignore") {
// It's very likely that we can't actually make it fail compilation...
ignore_found = true;
}
}
}
ignore_found
}
macro_rules! some_or_continue {
($e:expr) => {
match $e {
Some(e) => e,
None => continue,
}
};
}
fn extract_error_codes(
f: &str,
error_codes: &mut HashMap<String, ErrorCodeStatus>,
path: &Path,
errors: &mut Vec<String>,
) {
let mut reached_no_explanation = false;
for line in f.lines() {
let s = line.trim();
        if !reached_no_explanation && s.starts_with('E') && s.contains("include_str!(\"") {
let err_code = s
.split_once(':')
.expect(
format!(
"Expected a line with the format `E0xxx: include_str!(\"..\")`, but got {} \
without a `:` delimiter",
s,
)
.as_str(),
)
.0
.to_owned();
error_codes.entry(err_code.clone()).or_default().has_explanation = true;
// Now we extract the tests from the markdown file!
let md_file_name = match s.split_once("include_str!(\"") {
None => continue,
Some((_, md)) => match md.split_once("\")") {
None => continue,
Some((file_name, _)) => file_name,
},
};
let path = some_or_continue!(path.parent())
.join(md_file_name)
.canonicalize()
.expect("failed to canonicalize error explanation file path");
match read_to_string(&path) {
Ok(content) => {
let has_test = check_if_error_code_is_test_in_explanation(&content, &err_code);
                    if !has_test && !IGNORE_EXPLANATION_CHECK.contains(&err_code.as_str()) {
errors.push(format!(
"`{}` doesn't use its own error code in compile_fail example",
path.display(),
));
} else if has_test && IGNORE_EXPLANATION_CHECK.contains(&err_code.as_str()) {
errors.push(format!(
"`{}` has a compile_fail example with its own error code, it shouldn't \
be listed in IGNORE_EXPLANATION_CHECK!",
path.display(),
));
}
if check_error_code_explanation(&content, error_codes, err_code) {
errors.push(format!(
"`{}` uses invalid tag `compile-fail` instead of `compile_fail`",
path.display(),
));
}
}
Err(e) => {
eprintln!("Couldn't read `{}`: {}", path.display(), e);
}
}
} else if reached_no_explanation && s.starts_with('E') {
let err_code = match s.split_once(',') {
None => s,
Some((err_code, _)) => err_code,
}
.to_string();
            if !error_codes.contains_key(&err_code) {
// this check should *never* fail!
error_codes.insert(err_code, ErrorCodeStatus::default());
}
} else if s == ";" {
reached_no_explanation = true;
}
}
}
fn extract_error_codes_from_tests(f: &str, error_codes: &mut HashMap<String, ErrorCodeStatus>) {
for line in f.lines() {
let s = line.trim();
if s.starts_with("error[E") || s.starts_with("warning[E") {
let err_code = match s.split_once(']') {
None => continue,
Some((err_code, _)) => match err_code.split_once('[') {
None => continue,
Some((_, err_code)) => err_code,
},
};
error_codes.entry(err_code.to_owned()).or_default().has_test = true;
}
}
}
fn extract_error_codes_from_source(
f: &str,
error_codes: &mut HashMap<String, ErrorCodeStatus>,
regex: &Regex,
) {
for line in f.lines() {
if line.trim_start().starts_with("//") {
continue;
}
for cap in regex.captures_iter(line) {
if let Some(error_code) = cap.get(1) {
error_codes.entry(error_code.as_str().to_owned()).or_default().is_used = true;
}
}
}
}
pub fn check(paths: &[&Path], bad: &mut bool) {
let mut errors = Vec::new();
let mut found_explanations = 0;
let mut found_tests = 0;
let mut error_codes: HashMap<String, ErrorCodeStatus> = HashMap::new();
// We want error codes which match the following cases:
//
// * foo(a, E0111, a)
// * foo(a, E0111)
// * foo(E0111, a)
// * #[error = "E0111"]
let regex = Regex::new(r#"[(,"\s](E\d{4})[,)"]"#).unwrap();
println!("Checking which error codes lack tests...");
for path in paths {
super::walk(path, &mut |path| super::filter_dirs(path), &mut |entry, contents| {
let file_name = entry.file_name();
if file_name == "error_codes.rs" {
extract_error_codes(contents, &mut error_codes, entry.path(), &mut errors);
found_explanations += 1;
} else if entry.path().extension() == Some(OsStr::new("stderr")) {
extract_error_codes_from_tests(contents, &mut error_codes);
found_tests += 1;
} else if entry.path().extension() == Some(OsStr::new("rs")) {
let path = entry.path().to_string_lossy();
                if PATHS_TO_IGNORE_FOR_EXTRACTION.iter().all(|c| !path.contains(c)) {
extract_error_codes_from_source(contents, &mut error_codes, ®ex);
}
}
});
}
if found_explanations == 0 {
eprintln!("No error code explanation was tested!");
*bad = true;
}
if found_tests == 0 {
eprintln!("No error code was found in compilation errors!");
*bad = true;
}
if errors.is_empty() {
println!("Found {} error codes", error_codes.len());
for (err_code, error_status) in &error_codes {
            if !error_status.has_test && !EXEMPTED_FROM_TEST.contains(&err_code.as_str()) {
errors.push(format!("Error code {} needs to have at least one UI test!", err_code));
} else if error_status.has_test && EXEMPTED_FROM_TEST.contains(&err_code.as_str()) {
errors.push(format!(
"Error code {} has a UI test, it shouldn't be listed into EXEMPTED_FROM_TEST!",
err_code
));
}
            if !error_status.is_used && !error_status.has_explanation {
errors.push(format!(
"Error code {} isn't used and doesn't have an error explanation, it should be \
commented in error_codes.rs file",
err_code
));
}
}
}
if errors.is_empty() {
// Checking if local constants need to be cleaned.
for err_code in EXEMPTED_FROM_TEST {
match error_codes.get(err_code.to_owned()) {
Some(status) => {
if status.has_test {
errors.push(format!(
"{} error code has a test and therefore should be \
removed from the `EXEMPTED_FROM_TEST` constant",
err_code
));
}
}
None => errors.push(format!(
"{} error code isn't used anymore and therefore should be removed \
from `EXEMPTED_FROM_TEST` constant",
err_code
)),
}
}
}
errors.sort();
for err in &errors {
eprintln!("{}", err);
}
println!("Found {} error codes with no tests", errors.len());
    if !errors.is_empty() {
*bad = true;
}
println!("Done!");
}
|
{
invalid_compile_fail_format = true;
}
|
conditional_block
|
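The some_or_continue! macro in the rows above is only meaningful inside a loop: it unwraps an Option and skips the iteration when the value is None. A self-contained sketch of that pattern, with invented data, is below (the macro body is copied from the rows; the helper function is an assumption).

// Unwrap an Option inside a loop, skipping the iteration on None,
// exactly as extract_error_codes does with path.parent().
macro_rules! some_or_continue {
    ($e:expr) => {
        match $e {
            Some(e) => e,
            None => continue,
        }
    };
}

fn first_chars(lines: &[&str]) -> Vec<char> {
    let mut out = Vec::new();
    for line in lines {
        // Empty lines have no first character and are simply skipped.
        let c = some_or_continue!(line.chars().next());
        out.push(c);
    }
    out
}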
error_codes_check.rs
|
//! Checks that all error codes have at least one test to prevent having error
//! codes that are silently not thrown by the compiler anymore.
use std::collections::HashMap;
use std::ffi::OsStr;
use std::fs::read_to_string;
use std::path::Path;
use regex::Regex;
// A few of those error codes can't be tested but all the others can and *should* be tested!
const EXEMPTED_FROM_TEST: &[&str] = &[
"E0227", "E0279", "E0280", "E0313", "E0377", "E0461", "E0462", "E0464", "E0465", "E0476",
"E0482", "E0514", "E0519", "E0523", "E0554", "E0640", "E0717", "E0729",
];
// Some error codes don't have any tests apparently...
const IGNORE_EXPLANATION_CHECK: &[&str] = &["E0570", "E0601", "E0602", "E0729"];
// If the file path contains any of these, we don't want to try to extract error codes from it.
//
// We need to declare each path in the windows version (with backslash).
const PATHS_TO_IGNORE_FOR_EXTRACTION: &[&str] =
&["src/test/", "src\\test\\", "src/doc/", "src\\doc\\", "src/tools/", "src\\tools\\"];
#[derive(Default, Debug)]
struct ErrorCodeStatus {
has_test: bool,
has_explanation: bool,
is_used: bool,
}
fn check_error_code_explanation(
f: &str,
error_codes: &mut HashMap<String, ErrorCodeStatus>,
err_code: String,
) -> bool {
let mut invalid_compile_fail_format = false;
let mut found_error_code = false;
for line in f.lines() {
let s = line.trim();
if s.starts_with("```") {
if s.contains("compile_fail") && s.contains('E') {
                if !found_error_code {
error_codes.get_mut(&err_code).map(|x| x.has_test = true);
found_error_code = true;
}
} else if s.contains("compile-fail") {
invalid_compile_fail_format = true;
}
} else if s.starts_with("#### Note: this error code is no longer emitted by the compiler") {
            if !found_error_code {
error_codes.get_mut(&err_code).map(|x| x.has_test = true);
found_error_code = true;
}
}
}
invalid_compile_fail_format
}
fn check_if_error_code_is_test_in_explanation(f: &str, err_code: &str) -> bool {
let mut ignore_found = false;
for line in f.lines() {
let s = line.trim();
if s.starts_with("#### Note: this error code is no longer emitted by the compiler") {
return true;
}
if s.starts_with("```") {
if s.contains("compile_fail") && s.contains(err_code) {
return true;
} else if s.contains("ignore") {
// It's very likely that we can't actually make it fail compilation...
ignore_found = true;
}
}
}
ignore_found
}
macro_rules! some_or_continue {
($e:expr) => {
match $e {
Some(e) => e,
None => continue,
}
};
}
fn extract_error_codes(
f: &str,
error_codes: &mut HashMap<String, ErrorCodeStatus>,
path: &Path,
errors: &mut Vec<String>,
) {
let mut reached_no_explanation = false;
for line in f.lines() {
let s = line.trim();
        if !reached_no_explanation && s.starts_with('E') && s.contains("include_str!(\"") {
let err_code = s
.split_once(':')
.expect(
format!(
"Expected a line with the format `E0xxx: include_str!(\"..\")`, but got {} \
without a `:` delimiter",
s,
)
.as_str(),
)
.0
.to_owned();
error_codes.entry(err_code.clone()).or_default().has_explanation = true;
// Now we extract the tests from the markdown file!
let md_file_name = match s.split_once("include_str!(\"") {
None => continue,
Some((_, md)) => match md.split_once("\")") {
None => continue,
Some((file_name, _)) => file_name,
},
};
let path = some_or_continue!(path.parent())
.join(md_file_name)
.canonicalize()
.expect("failed to canonicalize error explanation file path");
match read_to_string(&path) {
Ok(content) => {
let has_test = check_if_error_code_is_test_in_explanation(&content, &err_code);
                    if !has_test && !IGNORE_EXPLANATION_CHECK.contains(&err_code.as_str()) {
errors.push(format!(
"`{}` doesn't use its own error code in compile_fail example",
path.display(),
));
} else if has_test && IGNORE_EXPLANATION_CHECK.contains(&err_code.as_str()) {
errors.push(format!(
"`{}` has a compile_fail example with its own error code, it shouldn't \
be listed in IGNORE_EXPLANATION_CHECK!",
path.display(),
));
}
if check_error_code_explanation(&content, error_codes, err_code) {
errors.push(format!(
"`{}` uses invalid tag `compile-fail` instead of `compile_fail`",
path.display(),
));
}
}
Err(e) => {
eprintln!("Couldn't read `{}`: {}", path.display(), e);
}
}
} else if reached_no_explanation && s.starts_with('E') {
let err_code = match s.split_once(',') {
None => s,
Some((err_code, _)) => err_code,
}
.to_string();
            if !error_codes.contains_key(&err_code) {
|
reached_no_explanation = true;
}
}
}
fn extract_error_codes_from_tests(f: &str, error_codes: &mut HashMap<String, ErrorCodeStatus>) {
for line in f.lines() {
let s = line.trim();
if s.starts_with("error[E") || s.starts_with("warning[E") {
let err_code = match s.split_once(']') {
None => continue,
Some((err_code, _)) => match err_code.split_once('[') {
None => continue,
Some((_, err_code)) => err_code,
},
};
error_codes.entry(err_code.to_owned()).or_default().has_test = true;
}
}
}
fn extract_error_codes_from_source(
f: &str,
error_codes: &mut HashMap<String, ErrorCodeStatus>,
regex: &Regex,
) {
for line in f.lines() {
if line.trim_start().starts_with("//") {
continue;
}
for cap in regex.captures_iter(line) {
if let Some(error_code) = cap.get(1) {
error_codes.entry(error_code.as_str().to_owned()).or_default().is_used = true;
}
}
}
}
pub fn check(paths: &[&Path], bad: &mut bool) {
let mut errors = Vec::new();
let mut found_explanations = 0;
let mut found_tests = 0;
let mut error_codes: HashMap<String, ErrorCodeStatus> = HashMap::new();
// We want error codes which match the following cases:
//
// * foo(a, E0111, a)
// * foo(a, E0111)
// * foo(E0111, a)
// * #[error = "E0111"]
let regex = Regex::new(r#"[(,"\s](E\d{4})[,)"]"#).unwrap();
println!("Checking which error codes lack tests...");
for path in paths {
super::walk(path, &mut |path| super::filter_dirs(path), &mut |entry, contents| {
let file_name = entry.file_name();
if file_name == "error_codes.rs" {
extract_error_codes(contents, &mut error_codes, entry.path(), &mut errors);
found_explanations += 1;
} else if entry.path().extension() == Some(OsStr::new("stderr")) {
extract_error_codes_from_tests(contents, &mut error_codes);
found_tests += 1;
} else if entry.path().extension() == Some(OsStr::new("rs")) {
let path = entry.path().to_string_lossy();
                if PATHS_TO_IGNORE_FOR_EXTRACTION.iter().all(|c| !path.contains(c)) {
extract_error_codes_from_source(contents, &mut error_codes, ®ex);
}
}
});
}
if found_explanations == 0 {
eprintln!("No error code explanation was tested!");
*bad = true;
}
if found_tests == 0 {
eprintln!("No error code was found in compilation errors!");
*bad = true;
}
if errors.is_empty() {
println!("Found {} error codes", error_codes.len());
for (err_code, error_status) in &error_codes {
            if !error_status.has_test && !EXEMPTED_FROM_TEST.contains(&err_code.as_str()) {
errors.push(format!("Error code {} needs to have at least one UI test!", err_code));
} else if error_status.has_test && EXEMPTED_FROM_TEST.contains(&err_code.as_str()) {
errors.push(format!(
"Error code {} has a UI test, it shouldn't be listed into EXEMPTED_FROM_TEST!",
err_code
));
}
            if !error_status.is_used && !error_status.has_explanation {
errors.push(format!(
"Error code {} isn't used and doesn't have an error explanation, it should be \
commented in error_codes.rs file",
err_code
));
}
}
}
if errors.is_empty() {
// Checking if local constants need to be cleaned.
for err_code in EXEMPTED_FROM_TEST {
match error_codes.get(err_code.to_owned()) {
Some(status) => {
if status.has_test {
errors.push(format!(
"{} error code has a test and therefore should be \
removed from the `EXEMPTED_FROM_TEST` constant",
err_code
));
}
}
None => errors.push(format!(
"{} error code isn't used anymore and therefore should be removed \
from `EXEMPTED_FROM_TEST` constant",
err_code
)),
}
}
}
errors.sort();
for err in &errors {
eprintln!("{}", err);
}
println!("Found {} error codes with no tests", errors.len());
    if !errors.is_empty() {
*bad = true;
}
println!("Done!");
}
|
// this check should *never* fail!
error_codes.insert(err_code, ErrorCodeStatus::default());
}
} else if s == ";" {
|
random_line_split
|
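extract_error_codes_from_tests above pulls the code out of diagnostic lines such as error[E0308]: ... with str::split_once; the standalone sketch below isolates that parsing step on an invented diagnostic line.

// Extract "E0308" from a compiler diagnostic the same way
// extract_error_codes_from_tests does, via str::split_once.
fn error_code_from_diagnostic(line: &str) -> Option<&str> {
    let s = line.trim();
    if !(s.starts_with("error[E") || s.starts_with("warning[E")) {
        return None;
    }
    let (before_bracket, _) = s.split_once(']')?;
    let (_, code) = before_bracket.split_once('[')?;
    Some(code)
}

// error_code_from_diagnostic("error[E0308]: mismatched types") == Some("E0308")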
error_codes_check.rs
|
//! Checks that all error codes have at least one test to prevent having error
//! codes that are silently not thrown by the compiler anymore.
use std::collections::HashMap;
use std::ffi::OsStr;
use std::fs::read_to_string;
use std::path::Path;
use regex::Regex;
// A few of those error codes can't be tested but all the others can and *should* be tested!
const EXEMPTED_FROM_TEST: &[&str] = &[
"E0227", "E0279", "E0280", "E0313", "E0377", "E0461", "E0462", "E0464", "E0465", "E0476",
"E0482", "E0514", "E0519", "E0523", "E0554", "E0640", "E0717", "E0729",
];
// Some error codes don't have any tests apparently...
const IGNORE_EXPLANATION_CHECK: &[&str] = &["E0570", "E0601", "E0602", "E0729"];
// If the file path contains any of these, we don't want to try to extract error codes from it.
//
// We need to declare each path in the windows version (with backslash).
const PATHS_TO_IGNORE_FOR_EXTRACTION: &[&str] =
&["src/test/", "src\\test\\", "src/doc/", "src\\doc\\", "src/tools/", "src\\tools\\"];
#[derive(Default, Debug)]
struct ErrorCodeStatus {
has_test: bool,
has_explanation: bool,
is_used: bool,
}
fn check_error_code_explanation(
f: &str,
error_codes: &mut HashMap<String, ErrorCodeStatus>,
err_code: String,
) -> bool {
let mut invalid_compile_fail_format = false;
let mut found_error_code = false;
for line in f.lines() {
let s = line.trim();
if s.starts_with("```") {
if s.contains("compile_fail") && s.contains('E') {
                if !found_error_code {
error_codes.get_mut(&err_code).map(|x| x.has_test = true);
found_error_code = true;
}
} else if s.contains("compile-fail") {
invalid_compile_fail_format = true;
}
} else if s.starts_with("#### Note: this error code is no longer emitted by the compiler") {
            if !found_error_code {
error_codes.get_mut(&err_code).map(|x| x.has_test = true);
found_error_code = true;
}
}
}
invalid_compile_fail_format
}
fn check_if_error_code_is_test_in_explanation(f: &str, err_code: &str) -> bool {
let mut ignore_found = false;
for line in f.lines() {
let s = line.trim();
if s.starts_with("#### Note: this error code is no longer emitted by the compiler") {
return true;
}
if s.starts_with("```") {
if s.contains("compile_fail") && s.contains(err_code) {
return true;
} else if s.contains("ignore") {
// It's very likely that we can't actually make it fail compilation...
ignore_found = true;
}
}
}
ignore_found
}
macro_rules! some_or_continue {
($e:expr) => {
match $e {
Some(e) => e,
None => continue,
}
};
}
fn extract_error_codes(
f: &str,
error_codes: &mut HashMap<String, ErrorCodeStatus>,
path: &Path,
errors: &mut Vec<String>,
) {
let mut reached_no_explanation = false;
for line in f.lines() {
let s = line.trim();
        if !reached_no_explanation && s.starts_with('E') && s.contains("include_str!(\"") {
let err_code = s
.split_once(':')
.expect(
format!(
"Expected a line with the format `E0xxx: include_str!(\"..\")`, but got {} \
without a `:` delimiter",
s,
)
.as_str(),
)
.0
.to_owned();
error_codes.entry(err_code.clone()).or_default().has_explanation = true;
// Now we extract the tests from the markdown file!
let md_file_name = match s.split_once("include_str!(\"") {
None => continue,
Some((_, md)) => match md.split_once("\")") {
None => continue,
Some((file_name, _)) => file_name,
},
};
let path = some_or_continue!(path.parent())
.join(md_file_name)
.canonicalize()
.expect("failed to canonicalize error explanation file path");
match read_to_string(&path) {
Ok(content) => {
let has_test = check_if_error_code_is_test_in_explanation(&content, &err_code);
                    if !has_test && !IGNORE_EXPLANATION_CHECK.contains(&err_code.as_str()) {
errors.push(format!(
"`{}` doesn't use its own error code in compile_fail example",
path.display(),
));
} else if has_test && IGNORE_EXPLANATION_CHECK.contains(&err_code.as_str()) {
errors.push(format!(
"`{}` has a compile_fail example with its own error code, it shouldn't \
be listed in IGNORE_EXPLANATION_CHECK!",
path.display(),
));
}
if check_error_code_explanation(&content, error_codes, err_code) {
errors.push(format!(
"`{}` uses invalid tag `compile-fail` instead of `compile_fail`",
path.display(),
));
}
}
Err(e) => {
eprintln!("Couldn't read `{}`: {}", path.display(), e);
}
}
} else if reached_no_explanation && s.starts_with('E') {
let err_code = match s.split_once(',') {
None => s,
Some((err_code, _)) => err_code,
}
.to_string();
            if !error_codes.contains_key(&err_code) {
// this check should *never* fail!
error_codes.insert(err_code, ErrorCodeStatus::default());
}
} else if s == ";" {
reached_no_explanation = true;
}
}
}
fn extract_error_codes_from_tests(f: &str, error_codes: &mut HashMap<String, ErrorCodeStatus>) {
for line in f.lines() {
let s = line.trim();
if s.starts_with("error[E") || s.starts_with("warning[E") {
let err_code = match s.split_once(']') {
None => continue,
Some((err_code, _)) => match err_code.split_once('[') {
None => continue,
Some((_, err_code)) => err_code,
},
};
error_codes.entry(err_code.to_owned()).or_default().has_test = true;
}
}
}
fn
|
(
f: &str,
error_codes: &mut HashMap<String, ErrorCodeStatus>,
regex: &Regex,
) {
for line in f.lines() {
if line.trim_start().starts_with("//") {
continue;
}
for cap in regex.captures_iter(line) {
if let Some(error_code) = cap.get(1) {
error_codes.entry(error_code.as_str().to_owned()).or_default().is_used = true;
}
}
}
}
pub fn check(paths: &[&Path], bad: &mut bool) {
let mut errors = Vec::new();
let mut found_explanations = 0;
let mut found_tests = 0;
let mut error_codes: HashMap<String, ErrorCodeStatus> = HashMap::new();
// We want error codes which match the following cases:
//
// * foo(a, E0111, a)
// * foo(a, E0111)
// * foo(E0111, a)
// * #[error = "E0111"]
let regex = Regex::new(r#"[(,"\s](E\d{4})[,)"]"#).unwrap();
println!("Checking which error codes lack tests...");
for path in paths {
super::walk(path, &mut |path| super::filter_dirs(path), &mut |entry, contents| {
let file_name = entry.file_name();
if file_name == "error_codes.rs" {
extract_error_codes(contents, &mut error_codes, entry.path(), &mut errors);
found_explanations += 1;
} else if entry.path().extension() == Some(OsStr::new("stderr")) {
extract_error_codes_from_tests(contents, &mut error_codes);
found_tests += 1;
} else if entry.path().extension() == Some(OsStr::new("rs")) {
let path = entry.path().to_string_lossy();
                if PATHS_TO_IGNORE_FOR_EXTRACTION.iter().all(|c| !path.contains(c)) {
extract_error_codes_from_source(contents, &mut error_codes, ®ex);
}
}
});
}
if found_explanations == 0 {
eprintln!("No error code explanation was tested!");
*bad = true;
}
if found_tests == 0 {
eprintln!("No error code was found in compilation errors!");
*bad = true;
}
if errors.is_empty() {
println!("Found {} error codes", error_codes.len());
for (err_code, error_status) in &error_codes {
            if !error_status.has_test && !EXEMPTED_FROM_TEST.contains(&err_code.as_str()) {
errors.push(format!("Error code {} needs to have at least one UI test!", err_code));
} else if error_status.has_test && EXEMPTED_FROM_TEST.contains(&err_code.as_str()) {
errors.push(format!(
"Error code {} has a UI test, it shouldn't be listed into EXEMPTED_FROM_TEST!",
err_code
));
}
            if !error_status.is_used && !error_status.has_explanation {
errors.push(format!(
"Error code {} isn't used and doesn't have an error explanation, it should be \
commented in error_codes.rs file",
err_code
));
}
}
}
if errors.is_empty() {
// Checking if local constants need to be cleaned.
for err_code in EXEMPTED_FROM_TEST {
match error_codes.get(err_code.to_owned()) {
Some(status) => {
if status.has_test {
errors.push(format!(
"{} error code has a test and therefore should be \
removed from the `EXEMPTED_FROM_TEST` constant",
err_code
));
}
}
None => errors.push(format!(
"{} error code isn't used anymore and therefore should be removed \
from `EXEMPTED_FROM_TEST` constant",
err_code
)),
}
}
}
errors.sort();
for err in &errors {
eprintln!("{}", err);
}
println!("Found {} error codes with no tests", errors.len());
    if !errors.is_empty() {
*bad = true;
}
println!("Done!");
}
|
extract_error_codes_from_source
|
identifier_name
|
error_codes_check.rs
|
//! Checks that all error codes have at least one test to prevent having error
//! codes that are silently not thrown by the compiler anymore.
use std::collections::HashMap;
use std::ffi::OsStr;
use std::fs::read_to_string;
use std::path::Path;
use regex::Regex;
// A few of those error codes can't be tested but all the others can and *should* be tested!
const EXEMPTED_FROM_TEST: &[&str] = &[
"E0227", "E0279", "E0280", "E0313", "E0377", "E0461", "E0462", "E0464", "E0465", "E0476",
"E0482", "E0514", "E0519", "E0523", "E0554", "E0640", "E0717", "E0729",
];
// Some error codes don't have any tests apparently...
const IGNORE_EXPLANATION_CHECK: &[&str] = &["E0570", "E0601", "E0602", "E0729"];
// If the file path contains any of these, we don't want to try to extract error codes from it.
//
// We need to declare each path in the windows version (with backslash).
const PATHS_TO_IGNORE_FOR_EXTRACTION: &[&str] =
&["src/test/", "src\\test\\", "src/doc/", "src\\doc\\", "src/tools/", "src\\tools\\"];
#[derive(Default, Debug)]
struct ErrorCodeStatus {
has_test: bool,
has_explanation: bool,
is_used: bool,
}
fn check_error_code_explanation(
f: &str,
error_codes: &mut HashMap<String, ErrorCodeStatus>,
err_code: String,
) -> bool {
let mut invalid_compile_fail_format = false;
let mut found_error_code = false;
for line in f.lines() {
let s = line.trim();
if s.starts_with("```") {
if s.contains("compile_fail") && s.contains('E') {
                if !found_error_code {
error_codes.get_mut(&err_code).map(|x| x.has_test = true);
found_error_code = true;
}
} else if s.contains("compile-fail") {
invalid_compile_fail_format = true;
}
} else if s.starts_with("#### Note: this error code is no longer emitted by the compiler") {
            if !found_error_code {
error_codes.get_mut(&err_code).map(|x| x.has_test = true);
found_error_code = true;
}
}
}
invalid_compile_fail_format
}
fn check_if_error_code_is_test_in_explanation(f: &str, err_code: &str) -> bool {
let mut ignore_found = false;
for line in f.lines() {
let s = line.trim();
if s.starts_with("#### Note: this error code is no longer emitted by the compiler") {
return true;
}
if s.starts_with("```") {
if s.contains("compile_fail") && s.contains(err_code) {
return true;
} else if s.contains("ignore") {
// It's very likely that we can't actually make it fail compilation...
ignore_found = true;
}
}
}
ignore_found
}
macro_rules! some_or_continue {
($e:expr) => {
match $e {
Some(e) => e,
None => continue,
}
};
}
fn extract_error_codes(
f: &str,
error_codes: &mut HashMap<String, ErrorCodeStatus>,
path: &Path,
errors: &mut Vec<String>,
) {
let mut reached_no_explanation = false;
for line in f.lines() {
let s = line.trim();
        if !reached_no_explanation && s.starts_with('E') && s.contains("include_str!(\"") {
let err_code = s
.split_once(':')
.expect(
format!(
"Expected a line with the format `E0xxx: include_str!(\"..\")`, but got {} \
without a `:` delimiter",
s,
)
.as_str(),
)
.0
.to_owned();
error_codes.entry(err_code.clone()).or_default().has_explanation = true;
// Now we extract the tests from the markdown file!
let md_file_name = match s.split_once("include_str!(\"") {
None => continue,
Some((_, md)) => match md.split_once("\")") {
None => continue,
Some((file_name, _)) => file_name,
},
};
let path = some_or_continue!(path.parent())
.join(md_file_name)
.canonicalize()
.expect("failed to canonicalize error explanation file path");
match read_to_string(&path) {
Ok(content) => {
let has_test = check_if_error_code_is_test_in_explanation(&content, &err_code);
                    if !has_test && !IGNORE_EXPLANATION_CHECK.contains(&err_code.as_str()) {
errors.push(format!(
"`{}` doesn't use its own error code in compile_fail example",
path.display(),
));
} else if has_test && IGNORE_EXPLANATION_CHECK.contains(&err_code.as_str()) {
errors.push(format!(
"`{}` has a compile_fail example with its own error code, it shouldn't \
be listed in IGNORE_EXPLANATION_CHECK!",
path.display(),
));
}
if check_error_code_explanation(&content, error_codes, err_code) {
errors.push(format!(
"`{}` uses invalid tag `compile-fail` instead of `compile_fail`",
path.display(),
));
}
}
Err(e) => {
eprintln!("Couldn't read `{}`: {}", path.display(), e);
}
}
} else if reached_no_explanation && s.starts_with('E') {
let err_code = match s.split_once(',') {
None => s,
Some((err_code, _)) => err_code,
}
.to_string();
            if !error_codes.contains_key(&err_code) {
// this check should *never* fail!
error_codes.insert(err_code, ErrorCodeStatus::default());
}
} else if s == ";" {
reached_no_explanation = true;
}
}
}
fn extract_error_codes_from_tests(f: &str, error_codes: &mut HashMap<String, ErrorCodeStatus>) {
for line in f.lines() {
let s = line.trim();
if s.starts_with("error[E") || s.starts_with("warning[E") {
let err_code = match s.split_once(']') {
None => continue,
Some((err_code, _)) => match err_code.split_once('[') {
None => continue,
Some((_, err_code)) => err_code,
},
};
error_codes.entry(err_code.to_owned()).or_default().has_test = true;
}
}
}
fn extract_error_codes_from_source(
f: &str,
error_codes: &mut HashMap<String, ErrorCodeStatus>,
regex: &Regex,
) {
for line in f.lines() {
if line.trim_start().starts_with("//") {
continue;
}
for cap in regex.captures_iter(line) {
if let Some(error_code) = cap.get(1) {
error_codes.entry(error_code.as_str().to_owned()).or_default().is_used = true;
}
}
}
}
pub fn check(paths: &[&Path], bad: &mut bool)
|
found_explanations += 1;
} else if entry.path().extension() == Some(OsStr::new("stderr")) {
extract_error_codes_from_tests(contents, &mut error_codes);
found_tests += 1;
} else if entry.path().extension() == Some(OsStr::new("rs")) {
let path = entry.path().to_string_lossy();
                if PATHS_TO_IGNORE_FOR_EXTRACTION.iter().all(|c| !path.contains(c)) {
extract_error_codes_from_source(contents, &mut error_codes, ®ex);
}
}
});
}
if found_explanations == 0 {
eprintln!("No error code explanation was tested!");
*bad = true;
}
if found_tests == 0 {
eprintln!("No error code was found in compilation errors!");
*bad = true;
}
if errors.is_empty() {
println!("Found {} error codes", error_codes.len());
for (err_code, error_status) in &error_codes {
            if !error_status.has_test && !EXEMPTED_FROM_TEST.contains(&err_code.as_str()) {
errors.push(format!("Error code {} needs to have at least one UI test!", err_code));
} else if error_status.has_test && EXEMPTED_FROM_TEST.contains(&err_code.as_str()) {
errors.push(format!(
"Error code {} has a UI test, it shouldn't be listed into EXEMPTED_FROM_TEST!",
err_code
));
}
            if !error_status.is_used && !error_status.has_explanation {
errors.push(format!(
"Error code {} isn't used and doesn't have an error explanation, it should be \
commented in error_codes.rs file",
err_code
));
}
}
}
if errors.is_empty() {
// Checking if local constants need to be cleaned.
for err_code in EXEMPTED_FROM_TEST {
match error_codes.get(err_code.to_owned()) {
Some(status) => {
if status.has_test {
errors.push(format!(
"{} error code has a test and therefore should be \
removed from the `EXEMPTED_FROM_TEST` constant",
err_code
));
}
}
None => errors.push(format!(
"{} error code isn't used anymore and therefore should be removed \
from `EXEMPTED_FROM_TEST` constant",
err_code
)),
}
}
}
errors.sort();
for err in &errors {
eprintln!("{}", err);
}
println!("Found {} error codes with no tests", errors.len());
    if !errors.is_empty() {
*bad = true;
}
println!("Done!");
}
|
{
let mut errors = Vec::new();
let mut found_explanations = 0;
let mut found_tests = 0;
let mut error_codes: HashMap<String, ErrorCodeStatus> = HashMap::new();
// We want error codes which match the following cases:
//
// * foo(a, E0111, a)
// * foo(a, E0111)
// * foo(E0111, a)
// * #[error = "E0111"]
let regex = Regex::new(r#"[(,"\s](E\d{4})[,)"]"#).unwrap();
println!("Checking which error codes lack tests...");
for path in paths {
super::walk(path, &mut |path| super::filter_dirs(path), &mut |entry, contents| {
let file_name = entry.file_name();
if file_name == "error_codes.rs" {
extract_error_codes(contents, &mut error_codes, entry.path(), &mut errors);
|
identifier_body
|
to_url.rs
|
use url::Url;
use std::path::Path;
pub trait ToUrl {
fn to_url(self) -> Result<Url, String>;
}
impl ToUrl for Url {
fn to_url(self) -> Result<Url, String> {
Ok(self)
}
}
impl<'a> ToUrl for &'a Url {
fn to_url(self) -> Result<Url, String>
|
}
impl<'a> ToUrl for &'a str {
fn to_url(self) -> Result<Url, String> {
Url::parse(self).map_err(|s| {
format!("invalid url `{}`: {}", self, s)
})
}
}
impl<'a> ToUrl for &'a Path {
fn to_url(self) -> Result<Url, String> {
Url::from_file_path(self).map_err(|()| {
format!("invalid path url `{}`", self.display())
})
}
}
|
{
Ok(self.clone())
}
|
identifier_body
|
to_url.rs
|
use url::Url;
use std::path::Path;
pub trait ToUrl {
fn to_url(self) -> Result<Url, String>;
}
impl ToUrl for Url {
fn to_url(self) -> Result<Url, String> {
Ok(self)
}
}
impl<'a> ToUrl for &'a Url {
fn to_url(self) -> Result<Url, String> {
Ok(self.clone())
}
}
impl<'a> ToUrl for &'a str {
fn to_url(self) -> Result<Url, String> {
Url::parse(self).map_err(|s| {
format!("invalid url `{}`: {}", self, s)
})
}
}
impl<'a> ToUrl for &'a Path {
|
})
}
}
|
fn to_url(self) -> Result<Url, String> {
Url::from_file_path(self).map_err(|()| {
format!("invalid path url `{}`", self.display())
|
random_line_split
|
to_url.rs
|
use url::Url;
use std::path::Path;
pub trait ToUrl {
fn to_url(self) -> Result<Url, String>;
}
impl ToUrl for Url {
fn to_url(self) -> Result<Url, String> {
Ok(self)
}
}
impl<'a> ToUrl for &'a Url {
fn to_url(self) -> Result<Url, String> {
Ok(self.clone())
}
}
impl<'a> ToUrl for &'a str {
fn
|
(self) -> Result<Url, String> {
Url::parse(self).map_err(|s| {
format!("invalid url `{}`: {}", self, s)
})
}
}
impl<'a> ToUrl for &'a Path {
fn to_url(self) -> Result<Url, String> {
Url::from_file_path(self).map_err(|()| {
format!("invalid path url `{}`", self.display())
})
}
}
|
to_url
|
identifier_name
|
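A short sketch of the ToUrl trait from the rows above applied to both a string and a filesystem path; the concrete URL and path are illustrative, and the trait is assumed to be in scope. Note that Url::from_file_path, and therefore the &Path impl, only accepts absolute paths.

use std::path::Path;
use url::Url;

// Convert heterogeneous inputs into Url values through the ToUrl trait,
// surfacing failures as the String errors the impls above produce.
fn to_url_sketch() -> Result<(Url, Url), String> {
    let from_str = "https://example.com/index.html".to_url()?;
    let from_path = Path::new("/tmp/index.html").to_url()?; // must be absolute
    Ok((from_str, from_path))
}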
appsrc.rs
|
use ffi::*;
use ::Transfer;
use ::Element;
use ::Caps;
use std::mem;
use reference::Reference;
use std::ops::{Deref, DerefMut};
pub struct AppSrc{
appsrc: ::Element
}
unsafe impl Sync for AppSrc {}
unsafe impl Send for AppSrc {}
impl AppSrc{
pub fn new(name: &str) -> Option<AppSrc>{
let appsrc = ::Element::new("appsrc",name);
match appsrc{
Some(appsrc) => Some(AppSrc{appsrc: appsrc}),
None => None
}
}
pub fn new_from_element(element: ::Element) -> AppSrc{
AppSrc{appsrc: element}
}
/// Set the capabilities on the `AppSrc`. After calling this method, the source will only
/// produce caps that match `caps`. Once caps is set, the caps on the buffers MUST either
|
/// unknown caps, in which case no caps should be set. This is typically true of file-like
/// sources that push raw byte buffers.
pub fn set_caps(&mut self, caps: &Caps){
unsafe{
gst_app_src_set_caps(self.gst_appsrc_mut(), caps.gst_caps());
}
}
pub fn caps(&self) -> Option<Caps>{
unsafe{
let gst_caps = gst_app_src_get_caps(mem::transmute(self.gst_appsrc()));
Caps::new(gst_caps)
}
}
pub fn latency(&self) -> (u64,u64){
unsafe{
let mut min: u64 = 0;
let mut max: u64 = 0;
gst_app_src_get_latency(mem::transmute(self.gst_appsrc()), &mut min, &mut max);
(min,max)
}
}
pub fn push_buffer(&mut self, buffer: ::Buffer) -> GstFlowReturn{
unsafe{
gst_app_src_push_buffer(self.gst_appsrc_mut(), buffer.transfer())
}
}
pub fn end_of_stream(&mut self) -> GstFlowReturn{
unsafe{
gst_app_src_end_of_stream(self.gst_appsrc_mut())
}
}
pub unsafe fn gst_appsrc(&self) -> *const GstAppSrc{
self.appsrc.gst_element() as *const GstAppSrc
}
pub unsafe fn gst_appsrc_mut(&mut self) -> *mut GstAppSrc{
self.appsrc.gst_element_mut() as *mut GstAppSrc
}
}
impl AsRef<::Element> for AppSrc{
fn as_ref(&self) -> &Element{
&self.appsrc
}
}
impl AsMut<::Element> for AppSrc{
fn as_mut(&mut self) -> &mut Element{
&mut self.appsrc
}
}
impl From<AppSrc> for Element{
fn from(b: AppSrc) -> Element{
b.appsrc
}
}
impl Deref for AppSrc{
type Target = Element;
fn deref(&self) -> &Element{
&self.appsrc
}
}
impl DerefMut for AppSrc{
fn deref_mut(&mut self) -> &mut Element{
&mut self.appsrc
}
}
impl ::Transfer for AppSrc{
unsafe fn transfer(self) -> *mut GstElement{
self.appsrc.transfer()
}
}
impl Reference for AppSrc{
fn reference(&self) -> AppSrc{
AppSrc{ appsrc: self.appsrc.reference() }
}
}
|
/// match the caps OR be left unspecified.
///
/// Before operating an `AppSrc`, the `caps` property MUST be set to fixed caps describing
/// the format of the data that will be pushed with appsrc EXCEPT when pushing buffers with
|
random_line_split
|
appsrc.rs
|
use ffi::*;
use ::Transfer;
use ::Element;
use ::Caps;
use std::mem;
use reference::Reference;
use std::ops::{Deref, DerefMut};
pub struct AppSrc{
appsrc: ::Element
}
unsafe impl Sync for AppSrc {}
unsafe impl Send for AppSrc {}
impl AppSrc{
pub fn new(name: &str) -> Option<AppSrc>{
let appsrc = ::Element::new("appsrc",name);
match appsrc{
Some(appsrc) => Some(AppSrc{appsrc: appsrc}),
None => None
}
}
pub fn new_from_element(element: ::Element) -> AppSrc{
AppSrc{appsrc: element}
}
/// Set the capabilities on the `AppSrc`. After calling this method, the source will only
/// produce caps that match `caps`. Once caps is set, the caps on the buffers MUST either
/// match the caps OR be left unspecified.
///
/// Before operating an `AppSrc`, the `caps` property MUST be set to fixed caps describing
/// the format of the data that will be pushed with appsrc EXCEPT when pushing buffers with
/// unknown caps, in which case no caps should be set. This is typically true of file-like
/// sources that push raw byte buffers.
pub fn set_caps(&mut self, caps: &Caps){
unsafe{
gst_app_src_set_caps(self.gst_appsrc_mut(), caps.gst_caps());
}
}
pub fn caps(&self) -> Option<Caps>{
unsafe{
let gst_caps = gst_app_src_get_caps(mem::transmute(self.gst_appsrc()));
Caps::new(gst_caps)
}
}
pub fn latency(&self) -> (u64,u64){
unsafe{
let mut min: u64 = 0;
let mut max: u64 = 0;
gst_app_src_get_latency(mem::transmute(self.gst_appsrc()), &mut min, &mut max);
(min,max)
}
}
pub fn push_buffer(&mut self, buffer: ::Buffer) -> GstFlowReturn{
unsafe{
gst_app_src_push_buffer(self.gst_appsrc_mut(), buffer.transfer())
}
}
pub fn end_of_stream(&mut self) -> GstFlowReturn{
unsafe{
gst_app_src_end_of_stream(self.gst_appsrc_mut())
}
}
pub unsafe fn gst_appsrc(&self) -> *const GstAppSrc{
self.appsrc.gst_element() as *const GstAppSrc
}
pub unsafe fn gst_appsrc_mut(&mut self) -> *mut GstAppSrc{
self.appsrc.gst_element_mut() as *mut GstAppSrc
}
}
impl AsRef<::Element> for AppSrc{
fn as_ref(&self) -> &Element{
&self.appsrc
}
}
impl AsMut<::Element> for AppSrc{
fn as_mut(&mut self) -> &mut Element{
&mut self.appsrc
}
}
impl From<AppSrc> for Element{
fn from(b: AppSrc) -> Element{
b.appsrc
}
}
impl Deref for AppSrc{
type Target = Element;
fn deref(&self) -> &Element{
&self.appsrc
}
}
impl DerefMut for AppSrc{
fn deref_mut(&mut self) -> &mut Element
|
}
impl ::Transfer for AppSrc{
unsafe fn transfer(self) -> *mut GstElement{
self.appsrc.transfer()
}
}
impl Reference for AppSrc{
fn reference(&self) -> AppSrc{
AppSrc{ appsrc: self.appsrc.reference() }
}
}
|
{
&mut self.appsrc
}
|
identifier_body
|
appsrc.rs
|
use ffi::*;
use ::Transfer;
use ::Element;
use ::Caps;
use std::mem;
use reference::Reference;
use std::ops::{Deref, DerefMut};
pub struct
|
{
appsrc: ::Element
}
unsafe impl Sync for AppSrc {}
unsafe impl Send for AppSrc {}
impl AppSrc{
pub fn new(name: &str) -> Option<AppSrc>{
let appsrc = ::Element::new("appsrc",name);
match appsrc{
Some(appsrc) => Some(AppSrc{appsrc: appsrc}),
None => None
}
}
pub fn new_from_element(element: ::Element) -> AppSrc{
AppSrc{appsrc: element}
}
/// Set the capabilities on the `AppSrc`. After calling this method, the source will only
/// produce caps that match `caps`. Once caps is set, the caps on the buffers MUST either
/// match the caps OR be left unspecified.
///
/// Before operating an `AppSrc`, the `caps` property MUST be set to fixed caps describing
/// the format of the data that will be pushed with appsrc EXCEPT when pushing buffers with
/// unknown caps, in which case no caps should be set. This is typically true of file-like
/// sources that push raw byte buffers.
pub fn set_caps(&mut self, caps: &Caps){
unsafe{
gst_app_src_set_caps(self.gst_appsrc_mut(), caps.gst_caps());
}
}
pub fn caps(&self) -> Option<Caps>{
unsafe{
let gst_caps = gst_app_src_get_caps(mem::transmute(self.gst_appsrc()));
Caps::new(gst_caps)
}
}
pub fn latency(&self) -> (u64,u64){
unsafe{
let mut min: u64 = 0;
let mut max: u64 = 0;
gst_app_src_get_latency(mem::transmute(self.gst_appsrc()), &mut min, &mut max);
(min,max)
}
}
pub fn push_buffer(&mut self, buffer: ::Buffer) -> GstFlowReturn{
unsafe{
gst_app_src_push_buffer(self.gst_appsrc_mut(), buffer.transfer())
}
}
pub fn end_of_stream(&mut self) -> GstFlowReturn{
unsafe{
gst_app_src_end_of_stream(self.gst_appsrc_mut())
}
}
pub unsafe fn gst_appsrc(&self) -> *const GstAppSrc{
self.appsrc.gst_element() as *const GstAppSrc
}
pub unsafe fn gst_appsrc_mut(&mut self) -> *mut GstAppSrc{
self.appsrc.gst_element_mut() as *mut GstAppSrc
}
}
impl AsRef<::Element> for AppSrc{
fn as_ref(&self) -> &Element{
&self.appsrc
}
}
impl AsMut<::Element> for AppSrc{
fn as_mut(&mut self) -> &mut Element{
&mut self.appsrc
}
}
impl From<AppSrc> for Element{
fn from(b: AppSrc) -> Element{
b.appsrc
}
}
impl Deref for AppSrc{
type Target = Element;
fn deref(&self) -> &Element{
&self.appsrc
}
}
impl DerefMut for AppSrc{
fn deref_mut(&mut self) -> &mut Element{
&mut self.appsrc
}
}
impl ::Transfer for AppSrc{
unsafe fn transfer(self) -> *mut GstElement{
self.appsrc.transfer()
}
}
impl Reference for AppSrc{
fn reference(&self) -> AppSrc{
AppSrc{ appsrc: self.appsrc.reference() }
}
}
|
AppSrc
|
identifier_name
|
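A heavily hedged usage sketch for the AppSrc wrapper above, built only from the methods those rows show; it assumes GStreamer has already been initialized elsewhere in the crate and omits Caps and Buffer construction, since their constructors are not part of these rows.

// Create an appsrc element, inspect its latency, and signal end-of-stream.
// Buffer pushing is indicated only as a comment because ::Buffer
// construction is not shown in the rows above.
fn appsrc_sketch() {
    if let Some(mut src) = AppSrc::new("my-appsrc") {
        let (min, max) = src.latency();
        println!("latency: min={} max={}", min, max);

        // ... feed data with src.push_buffer(buffer) here ...

        src.end_of_stream();
    }
}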
lib.rs
|
// @generated by Thrift for src/module.thrift
// This file is probably not the place you want to edit!
#![recursion_limit = "100000000"]
#![allow(non_camel_case_types, non_snake_case, non_upper_case_globals, unused_crate_dependencies)]
include!("impl_my_things.rs");
include!("my/other/thing.rs");
pub use self::errors::*;
pub use self::types::*;
/// Thrift type definitions for `module`.
pub mod types {
#![allow(clippy::redundant_closure)]
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct
|
{
pub foo: ::std::primitive::i32,
pub bar: ::std::string::String,
// This field forces `..Default::default()` when instantiating this
// struct, to make code future-proof against new fields added later to
// the definition in Thrift. If you don't want this, add the annotation
// `(rust.exhaustive)` to the Thrift struct to eliminate this field.
#[doc(hidden)]
pub _dot_dot_Default_default: self::dot_dot::OtherFields,
}
impl ::std::default::Default for self::MyStruct {
fn default() -> Self {
Self {
foo: ::std::default::Default::default(),
bar: ::std::default::Default::default(),
_dot_dot_Default_default: self::dot_dot::OtherFields(()),
}
}
}
impl ::std::fmt::Debug for self::MyStruct {
fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
formatter
.debug_struct("MyStruct")
.field("foo", &self.foo)
.field("bar", &self.bar)
.finish()
}
}
unsafe impl ::std::marker::Send for self::MyStruct {}
unsafe impl ::std::marker::Sync for self::MyStruct {}
impl ::fbthrift::GetTType for self::MyStruct {
const TTYPE: ::fbthrift::TType = ::fbthrift::TType::Struct;
}
impl<P> ::fbthrift::Serialize<P> for self::MyStruct
where
P: ::fbthrift::ProtocolWriter,
{
fn write(&self, p: &mut P) {
p.write_struct_begin("MyStruct");
p.write_field_begin("foo", ::fbthrift::TType::I32, 1);
::fbthrift::Serialize::write(&self.foo, p);
p.write_field_end();
p.write_field_begin("bar", ::fbthrift::TType::String, 2);
::fbthrift::Serialize::write(&self.bar, p);
p.write_field_end();
p.write_field_stop();
p.write_struct_end();
}
}
impl<P> ::fbthrift::Deserialize<P> for self::MyStruct
where
P: ::fbthrift::ProtocolReader,
{
fn read(p: &mut P) -> ::anyhow::Result<Self> {
static FIELDS: &[::fbthrift::Field] = &[
::fbthrift::Field::new("bar", ::fbthrift::TType::String, 2),
::fbthrift::Field::new("foo", ::fbthrift::TType::I32, 1),
];
let mut field_foo = ::std::option::Option::None;
let mut field_bar = ::std::option::Option::None;
let _ = p.read_struct_begin(|_| ())?;
loop {
let (_, fty, fid) = p.read_field_begin(|_| (), FIELDS)?;
match (fty, fid as ::std::primitive::i32) {
(::fbthrift::TType::Stop, _) => break,
(::fbthrift::TType::I32, 1) => field_foo = ::std::option::Option::Some(::fbthrift::Deserialize::read(p)?),
(::fbthrift::TType::String, 2) => field_bar = ::std::option::Option::Some(::fbthrift::Deserialize::read(p)?),
(fty, _) => p.skip(fty)?,
}
p.read_field_end()?;
}
p.read_struct_end()?;
::std::result::Result::Ok(Self {
foo: field_foo.unwrap_or_default(),
bar: field_bar.unwrap_or_default(),
_dot_dot_Default_default: self::dot_dot::OtherFields(()),
})
}
}
mod dot_dot {
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct OtherFields(pub(crate) ());
#[allow(dead_code)] // if serde isn't being used
pub(super) fn default_for_serde_deserialize() -> OtherFields {
OtherFields(())
}
}
}
/// Error return types.
pub mod errors {
}
|
MyStruct
|
identifier_name
|
lib.rs
|
// @generated by Thrift for src/module.thrift
// This file is probably not the place you want to edit!
#![recursion_limit = "100000000"]
#![allow(non_camel_case_types, non_snake_case, non_upper_case_globals, unused_crate_dependencies)]
include!("impl_my_things.rs");
include!("my/other/thing.rs");
pub use self::errors::*;
pub use self::types::*;
/// Thrift type definitions for `module`.
pub mod types {
#![allow(clippy::redundant_closure)]
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MyStruct {
pub foo: ::std::primitive::i32,
pub bar: ::std::string::String,
// This field forces `..Default::default()` when instantiating this
// struct, to make code future-proof against new fields added later to
// the definition in Thrift. If you don't want this, add the annotation
// `(rust.exhaustive)` to the Thrift struct to eliminate this field.
#[doc(hidden)]
pub _dot_dot_Default_default: self::dot_dot::OtherFields,
}
impl ::std::default::Default for self::MyStruct {
fn default() -> Self {
Self {
foo: ::std::default::Default::default(),
bar: ::std::default::Default::default(),
_dot_dot_Default_default: self::dot_dot::OtherFields(()),
}
}
}
impl ::std::fmt::Debug for self::MyStruct {
fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
formatter
.debug_struct("MyStruct")
.field("foo", &self.foo)
.field("bar", &self.bar)
.finish()
}
}
unsafe impl ::std::marker::Send for self::MyStruct {}
unsafe impl ::std::marker::Sync for self::MyStruct {}
impl ::fbthrift::GetTType for self::MyStruct {
const TTYPE: ::fbthrift::TType = ::fbthrift::TType::Struct;
}
impl<P> ::fbthrift::Serialize<P> for self::MyStruct
where
P: ::fbthrift::ProtocolWriter,
{
fn write(&self, p: &mut P)
|
}
impl<P> ::fbthrift::Deserialize<P> for self::MyStruct
where
P: ::fbthrift::ProtocolReader,
{
fn read(p: &mut P) -> ::anyhow::Result<Self> {
static FIELDS: &[::fbthrift::Field] = &[
::fbthrift::Field::new("bar", ::fbthrift::TType::String, 2),
::fbthrift::Field::new("foo", ::fbthrift::TType::I32, 1),
];
let mut field_foo = ::std::option::Option::None;
let mut field_bar = ::std::option::Option::None;
let _ = p.read_struct_begin(|_| ())?;
loop {
let (_, fty, fid) = p.read_field_begin(|_| (), FIELDS)?;
match (fty, fid as ::std::primitive::i32) {
(::fbthrift::TType::Stop, _) => break,
(::fbthrift::TType::I32, 1) => field_foo = ::std::option::Option::Some(::fbthrift::Deserialize::read(p)?),
(::fbthrift::TType::String, 2) => field_bar = ::std::option::Option::Some(::fbthrift::Deserialize::read(p)?),
(fty, _) => p.skip(fty)?,
}
p.read_field_end()?;
}
p.read_struct_end()?;
::std::result::Result::Ok(Self {
foo: field_foo.unwrap_or_default(),
bar: field_bar.unwrap_or_default(),
_dot_dot_Default_default: self::dot_dot::OtherFields(()),
})
}
}
mod dot_dot {
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct OtherFields(pub(crate) ());
#[allow(dead_code)] // if serde isn't being used
pub(super) fn default_for_serde_deserialize() -> OtherFields {
OtherFields(())
}
}
}
/// Error return types.
pub mod errors {
}
|
{
p.write_struct_begin("MyStruct");
p.write_field_begin("foo", ::fbthrift::TType::I32, 1);
::fbthrift::Serialize::write(&self.foo, p);
p.write_field_end();
p.write_field_begin("bar", ::fbthrift::TType::String, 2);
::fbthrift::Serialize::write(&self.bar, p);
p.write_field_end();
p.write_field_stop();
p.write_struct_end();
}
|
identifier_body
|
lib.rs
|
// @generated by Thrift for src/module.thrift
// This file is probably not the place you want to edit!
#![recursion_limit = "100000000"]
#![allow(non_camel_case_types, non_snake_case, non_upper_case_globals, unused_crate_dependencies)]
include!("impl_my_things.rs");
include!("my/other/thing.rs");
pub use self::errors::*;
pub use self::types::*;
/// Thrift type definitions for `module`.
pub mod types {
#![allow(clippy::redundant_closure)]
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MyStruct {
pub foo: ::std::primitive::i32,
pub bar: ::std::string::String,
// This field forces `..Default::default()` when instantiating this
// struct, to make code future-proof against new fields added later to
// the definition in Thrift. If you don't want this, add the annotation
// `(rust.exhaustive)` to the Thrift struct to eliminate this field.
#[doc(hidden)]
pub _dot_dot_Default_default: self::dot_dot::OtherFields,
}
|
impl ::std::default::Default for self::MyStruct {
fn default() -> Self {
Self {
foo: ::std::default::Default::default(),
bar: ::std::default::Default::default(),
_dot_dot_Default_default: self::dot_dot::OtherFields(()),
}
}
}
impl ::std::fmt::Debug for self::MyStruct {
fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
formatter
.debug_struct("MyStruct")
.field("foo", &self.foo)
.field("bar", &self.bar)
.finish()
}
}
unsafe impl ::std::marker::Send for self::MyStruct {}
unsafe impl ::std::marker::Sync for self::MyStruct {}
impl ::fbthrift::GetTType for self::MyStruct {
const TTYPE: ::fbthrift::TType = ::fbthrift::TType::Struct;
}
impl<P> ::fbthrift::Serialize<P> for self::MyStruct
where
P: ::fbthrift::ProtocolWriter,
{
fn write(&self, p: &mut P) {
p.write_struct_begin("MyStruct");
p.write_field_begin("foo", ::fbthrift::TType::I32, 1);
::fbthrift::Serialize::write(&self.foo, p);
p.write_field_end();
p.write_field_begin("bar", ::fbthrift::TType::String, 2);
::fbthrift::Serialize::write(&self.bar, p);
p.write_field_end();
p.write_field_stop();
p.write_struct_end();
}
}
impl<P> ::fbthrift::Deserialize<P> for self::MyStruct
where
P: ::fbthrift::ProtocolReader,
{
fn read(p: &mut P) -> ::anyhow::Result<Self> {
static FIELDS: &[::fbthrift::Field] = &[
::fbthrift::Field::new("bar", ::fbthrift::TType::String, 2),
::fbthrift::Field::new("foo", ::fbthrift::TType::I32, 1),
];
let mut field_foo = ::std::option::Option::None;
let mut field_bar = ::std::option::Option::None;
let _ = p.read_struct_begin(|_| ())?;
loop {
let (_, fty, fid) = p.read_field_begin(|_| (), FIELDS)?;
match (fty, fid as ::std::primitive::i32) {
(::fbthrift::TType::Stop, _) => break,
(::fbthrift::TType::I32, 1) => field_foo = ::std::option::Option::Some(::fbthrift::Deserialize::read(p)?),
(::fbthrift::TType::String, 2) => field_bar = ::std::option::Option::Some(::fbthrift::Deserialize::read(p)?),
(fty, _) => p.skip(fty)?,
}
p.read_field_end()?;
}
p.read_struct_end()?;
::std::result::Result::Ok(Self {
foo: field_foo.unwrap_or_default(),
bar: field_bar.unwrap_or_default(),
_dot_dot_Default_default: self::dot_dot::OtherFields(()),
})
}
}
mod dot_dot {
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct OtherFields(pub(crate) ());
#[allow(dead_code)] // if serde isn't being used
pub(super) fn default_for_serde_deserialize() -> OtherFields {
OtherFields(())
}
}
}
/// Error return types.
pub mod errors {
}
|
random_line_split
|
|
mod.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The code to expose the DOM to JavaScript through IDL bindings.
//!
//! Exposing a DOM object to JavaScript
//! ===================================
//!
//! As [explained earlier](../index.html#a-dom-object-and-its-reflector), the
//! implementation of an interface `Foo` involves two objects: the DOM object
//! (implemented in Rust) and the reflector (a `JSObject`).
//!
//! In order to expose the interface's members to the web, properties
//! corresponding to the operations and attributes are defined on an object in
//! the reflector's prototype chain or on the reflector itself.
//!
//! Typically, these properties are either value properties whose value is a
//! function (for operations) or accessor properties that have a getter and
//! optionally a setter function (for attributes, depending on whether they are
//! marked `readonly`).
//!
//! All these JavaScript functions are set up such that, when they're called,
//! they call a Rust function in the generated glue code. This glue code does
//! some sanity checks and [argument conversions](conversions/index.html), and
//! calls into API implementation for the DOM object.
//!
//! Rust reflections of WebIDL constructs
//! =====================================
//!
//! WebIDL members are turned into methods on the DOM object (static methods
//! for static members and instance methods for regular members).
//!
//! The instance methods for an interface `Foo` are defined on a
//! `dom::bindings::codegen::Bindings::FooBindings::FooMethods` trait. This
//! trait is then implemented for `JSRef<'a, Foo>`.
//!
//! The return type and argument types are determined [as described below]
//! (#rust-reflections-of-webidl-types).
//! In addition to those, all methods that are [allowed to throw]
//! (#throwing-exceptions) will have the return value wrapped in
//! [`Fallible<T>`](error/type.Fallible.html).
//! Methods that use certain WebIDL types like `any` or `object` will get a
//! `*mut JSContext` argument prepended to the argument list. Static methods
//! will be passed a [`GlobalRef`](global/enum.GlobalRef.html) for the relevant
//! global. This argument comes before the `*mut JSContext` argument, if any.
//!
//! Rust reflections of WebIDL operations (methods)
//! -----------------------------------------------
//!
//! A WebIDL operation is turned into one method for every overload.
//! The first overload gets the base name, and consecutive overloads have an
//! underscore appended to the name.
//!
//! The base name of the Rust method is simply the name of the WebIDL operation
//! with the first letter converted to uppercase.
//!
//! Rust reflections of WebIDL attributes
|
//! -------------------------------------
//!
//! A WebIDL attribute is turned into a pair of methods: one for the getter and
//! one for the setter. A readonly attribute only has a getter and no setter.
//!
//! The getter's name is the name of the attribute with the first letter
//! converted to uppercase. It has `Get` prepended to it if the type of the
//! attribute is nullable or if the getter can throw.
//!
//! The method signature for the getter looks just like an operation with no
//! arguments and the attribute's type as the return type.
//!
//! The setter's name is `Set` followed by the name of the attribute with the
//! first letter converted to uppercase. The method signature looks just like
//! an operation with a void return value and a single argument whose type is
//! the attribute's type.
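//!
//! As an illustrative sketch (the attribute name `checked` is hypothetical, not
//! taken from any real interface), a WebIDL `attribute boolean checked` would be
//! reflected roughly as:
//!
//! ```ignore
//! fn Checked(&self) -> bool;
//! fn SetChecked(&self, value: bool);
//! ```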
//!
//! Rust reflections of WebIDL constructors
//! ---------------------------------------
//!
//! A WebIDL constructor is turned into a static class method named
//! `Constructor`. The arguments of this method will be the arguments of the
//! WebIDL constructor, with a `GlobalRef` for the relevant global prepended.
//! The return value of the constructor for MyInterface is exactly the same as
//! that of a method returning an instance of MyInterface. Constructors are
//! always [allowed to throw](#throwing-exceptions).
//!
//! Rust reflections of WebIDL types
//! --------------------------------
//!
//! The exact Rust representation for WebIDL types can depend on the precise
//! way that they're being used (e.g., return values and arguments might have
//! different representations).
//!
//! Optional arguments which do not have a default value are represented by
//! wrapping `Option<T>` around the representation of the argument type.
//! Optional arguments which do have a default value are represented by the
//! argument type itself, set to the default value if the argument was not in
//! fact passed in.
//!
//! Variadic WebIDL arguments are represented by wrapping a `Vec<T>` around the
//! representation of the argument type.
//!
//! See [the type mapping for particular types](conversions/index.html).
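//!
//! As a rough illustration (hypothetical operation name, and assuming the usual
//! `long` to `i32` mapping), `void f(optional long a, optional long b = 3,
//! long... rest)` would be reflected roughly as:
//!
//! ```ignore
//! fn F(&self, a: Option<i32>, b: i32, rest: Vec<i32>);
//! ```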
//!
//! Rust reflections of stringifiers
//! --------------------------------
//!
//! *To be written.*
//!
//! Rust reflections of legacy callers
//! ---------------------------------
//!
//! Legacy callers are not yet implemented.
//!
//! Rust reflections of getters and setters
//! ---------------------------------------
//!
//! *To be written.*
//!
//! Throwing exceptions
//! ===================
//!
//! WebIDL methods, getters, and setters that need to throw exceptions need to
//! be explicitly marked as such with the `[Throws]`, `[GetterThrows]` and
//! `[SetterThrows]` custom attributes.
//!
//! `[Throws]` applies to both methods and attributes; for attributes it means
//! both the getter and the setter (if any) can throw. `[GetterThrows]` applies
//! only to attributes. `[SetterThrows]` applies only to writable attributes.
//!
//! The corresponding Rust methods will have the return value wrapped in
//! [`Fallible<T>`](error/type.Fallible.html). To throw an exception, simply
//! return `Err()` from the method with the appropriate [error value]
//! (error/enum.Error.html).
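//!
//! As a rough sketch (the method name is hypothetical), an operation marked
//! `[Throws]` that would otherwise return `DOMString` ends up with a signature
//! along the lines of:
//!
//! ```ignore
//! fn ComputeLabel(&self) -> Fallible<DOMString>;
//! ```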
#![allow(unsafe_code)]
#![deny(missing_docs, non_snake_case)]
pub mod cell;
pub mod global;
pub mod js;
pub mod refcounted;
pub mod utils;
pub mod callback;
pub mod error;
pub mod conversions;
pub mod proxyhandler;
pub mod num;
pub mod str;
pub mod structuredclone;
pub mod trace;
/// Generated JS-Rust bindings.
#[allow(missing_docs, non_snake_case)]
pub mod codegen {
// FIXME(#5853) we shouldn't need to
// allow moved_no_move here
#[allow(unrooted_must_root, moved_no_move)]
pub mod Bindings {
include!(concat!(env!("OUT_DIR"), "/Bindings/mod.rs"));
}
pub mod InterfaceTypes {
include!(concat!(env!("OUT_DIR"), "/InterfaceTypes.rs"));
}
#[allow(unused_imports)]
pub mod InheritTypes {
include!(concat!(env!("OUT_DIR"), "/InheritTypes.rs"));
}
pub mod PrototypeList {
include!(concat!(env!("OUT_DIR"), "/PrototypeList.rs"));
}
#[allow(unreachable_code, non_camel_case_types, non_upper_case_globals, unused_parens,
unused_imports, unused_variables, unused_unsafe, unused_mut, unused_assignments,
dead_code)]
pub mod RegisterBindings {
include!(concat!(env!("OUT_DIR"), "/RegisterBindings.rs"));
}
#[allow(unreachable_code, non_camel_case_types, non_upper_case_globals, unused_parens,
unused_imports, unused_variables, unused_unsafe, unused_mut, unused_assignments,
dead_code)]
pub mod UnionTypes {
include!(concat!(env!("OUT_DIR"), "/UnionTypes.rs"));
}
}
|
random_line_split
|
|
main.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `servo` test application.
//!
//! Creates a `Servo` instance with a simple implementation of
//! the compositor's `WindowMethods` to create a working web browser.
//!
//! This browser's implementation of `WindowMethods` is built on top
//! of [glutin], the cross-platform OpenGL utility and windowing
//! library.
//!
//! For the engine itself look next door in `components/servo/lib.rs`.
//!
//! [glutin]: https://github.com/tomaka/glutin
#![feature(start, core_intrinsics)]
#[cfg(target_os = "android")]
extern crate android_injected_glue;
extern crate backtrace;
// The window backed by glutin
extern crate glutin_app as app;
#[macro_use]
extern crate log;
// The Servo engine
extern crate servo;
#[cfg(not(target_os = "android"))]
#[macro_use]
extern crate sig;
use backtrace::Backtrace;
use servo::Servo;
use servo::compositing::windowing::WindowEvent;
#[cfg(target_os = "android")]
use servo::config;
use servo::config::opts::{self, ArgumentParsingResult, parse_url_or_filename};
use servo::config::servo_version;
use servo::ipc_channel::ipc;
use servo::servo_config::prefs::PREFS;
use servo::servo_url::ServoUrl;
use std::env;
|
pub mod platform {
#[cfg(target_os = "macos")]
pub use platform::macos::deinit;
#[cfg(target_os = "macos")]
pub mod macos;
#[cfg(not(target_os = "macos"))]
pub fn deinit() {}
}
#[cfg(not(target_os = "android"))]
fn install_crash_handler() {
use backtrace::Backtrace;
use sig::ffi::Sig;
use std::intrinsics::abort;
use std::thread;
fn handler(_sig: i32) {
let name = thread::current()
.name()
.map(|n| format!(" for thread \"{}\"", n))
.unwrap_or("".to_owned());
println!("Stack trace{}\n{:?}", name, Backtrace::new());
unsafe {
// N.B. Using process::abort() here causes the crash handler to be
// triggered recursively.
abort();
}
}
signal!(Sig::SEGV, handler); // handle segfaults
signal!(Sig::ILL, handler); // handle stack overflow and unsupported CPUs
signal!(Sig::IOT, handler); // handle double panics
signal!(Sig::BUS, handler); // handle invalid memory access
}
#[cfg(target_os = "android")]
fn install_crash_handler() {}
fn main() {
install_crash_handler();
// Parse the command line options and store them globally
let opts_result = opts::from_cmdline_args(&*args());
let content_process_token = if let ArgumentParsingResult::ContentProcess(token) = opts_result {
Some(token)
} else {
if opts::get().is_running_problem_test && ::std::env::var("RUST_LOG").is_err() {
::std::env::set_var("RUST_LOG", "compositing::constellation");
}
None
};
// TODO: once log-panics is released, can this be replaced by
// log_panics::init()?
panic::set_hook(Box::new(|info| {
warn!("Panic hook called.");
let msg = match info.payload().downcast_ref::<&'static str>() {
Some(s) => *s,
None => {
match info.payload().downcast_ref::<String>() {
Some(s) => &**s,
None => "Box<Any>",
}
},
};
let current_thread = thread::current();
let name = current_thread.name().unwrap_or("<unnamed>");
if let Some(location) = info.location() {
println!("{} (thread {}, at {}:{})",
msg,
name,
location.file(),
location.line());
} else {
println!("{} (thread {})", msg, name);
}
if env::var("RUST_BACKTRACE").is_ok() {
println!("{:?}", Backtrace::new());
}
error!("{}", msg);
}));
setup_logging();
if let Some(token) = content_process_token {
return servo::run_content_process(token);
}
if opts::get().is_printing_version {
println!("{}", servo_version());
process::exit(0);
}
let window = app::create_window(None);
    // If the url is not provided, we fall back to the homepage in PREFS,
    // or to a blank page if the homepage is not set either.
let cwd = env::current_dir().unwrap();
let cmdline_url = opts::get().url.clone();
let pref_url = PREFS.get("shell.homepage").as_string()
.and_then(|str| parse_url_or_filename(&cwd, str).ok());
let blank_url = ServoUrl::parse("about:blank").ok();
let target_url = cmdline_url.or(pref_url).or(blank_url).unwrap();
    // Our wrapper around `Servo` that also implements some
    // callbacks required by the glutin window implementation.
let mut servo_wrapper = ServoWrapper {
servo: Servo::new(window.clone())
};
let (sender, receiver) = ipc::channel().unwrap();
servo_wrapper.servo.handle_events(vec![WindowEvent::NewBrowser(target_url, sender)]);
let browser_id = receiver.recv().unwrap();
window.set_browser_id(browser_id);
servo_wrapper.servo.handle_events(vec![WindowEvent::SelectBrowser(browser_id)]);
servo_wrapper.servo.setup_logging();
register_glutin_resize_handler(&window, &mut servo_wrapper);
// Feed events from the window to the browser until the browser
// says to stop.
loop {
let should_continue = servo_wrapper.servo.handle_events(window.wait_events());
        if !should_continue {
break;
}
}
unregister_glutin_resize_handler(&window);
platform::deinit()
}
fn register_glutin_resize_handler(window: &Rc<app::window::Window>, browser: &mut ServoWrapper) {
unsafe {
window.set_nested_event_loop_listener(browser);
}
}
fn unregister_glutin_resize_handler(window: &Rc<app::window::Window>) {
unsafe {
window.remove_nested_event_loop_listener();
}
}
struct ServoWrapper {
servo: Servo<app::window::Window>,
}
impl app::NestedEventLoopListener for ServoWrapper {
fn handle_event_from_nested_event_loop(&mut self, event: WindowEvent) -> bool {
let is_resize = match event {
WindowEvent::Resize(..) => true,
_ => false,
};
        if !self.servo.handle_events(vec![event]) {
return false;
}
if is_resize {
self.servo.repaint_synchronously()
}
true
}
}
#[cfg(target_os = "android")]
fn setup_logging() {
// Piping logs from stdout/stderr to logcat happens in android_injected_glue.
::std::env::set_var("RUST_LOG", "error");
unsafe { android_injected_glue::ffi::app_dummy() };
}
#[cfg(not(target_os = "android"))]
fn setup_logging() {}
#[cfg(target_os = "android")]
/// Attempt to read parameters from a file since they are not passed to us in Android environments.
/// The first line should be the "servo" argument and the last should be the URL to load.
/// Blank lines and those beginning with a '#' are ignored.
/// Each line should be a separate parameter as would be parsed by the shell.
/// For example, "servo -p 10 http://en.wikipedia.org/wiki/Rust" would take 4 lines.
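/// In that example the file would contain the following four lines:
///
/// ```text
/// servo
/// -p
/// 10
/// http://en.wikipedia.org/wiki/Rust
/// ```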
fn args() -> Vec<String> {
use std::error::Error;
use std::fs::File;
use std::io::{BufRead, BufReader};
let mut params_file = config::basedir::default_config_dir().unwrap();
params_file.push("android_params");
match File::open(params_file.to_str().unwrap()) {
Ok(f) => {
let mut vec = Vec::new();
let file = BufReader::new(&f);
for line in file.lines() {
let l = line.unwrap().trim().to_owned();
// ignore blank lines and those that start with a '#'
match l.is_empty() || l.as_bytes()[0] == b'#' {
true => (),
false => vec.push(l),
}
}
vec
},
Err(e) => {
debug!("Failed to open params file '{}': {}",
params_file.to_str().unwrap(),
Error::description(&e));
vec!["servo".to_owned(), "http://en.wikipedia.org/wiki/Rust".to_owned()]
},
}
}
#[cfg(not(target_os = "android"))]
fn args() -> Vec<String> {
use std::env;
env::args().collect()
}
#[cfg(target_os = "android")]
#[no_mangle]
#[inline(never)]
#[allow(non_snake_case)]
pub extern "C" fn android_main(app: *mut ()) {
android_injected_glue::android_main2(app as *mut _, move |_, _| main());
}
|
use std::panic;
use std::process;
use std::rc::Rc;
use std::thread;
|
random_line_split
|
main.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `servo` test application.
//!
//! Creates a `Servo` instance with a simple implementation of
//! the compositor's `WindowMethods` to create a working web browser.
//!
//! This browser's implementation of `WindowMethods` is built on top
//! of [glutin], the cross-platform OpenGL utility and windowing
//! library.
//!
//! For the engine itself look next door in `components/servo/lib.rs`.
//!
//! [glutin]: https://github.com/tomaka/glutin
#![feature(start, core_intrinsics)]
#[cfg(target_os = "android")]
extern crate android_injected_glue;
extern crate backtrace;
// The window backed by glutin
extern crate glutin_app as app;
#[macro_use]
extern crate log;
// The Servo engine
extern crate servo;
#[cfg(not(target_os = "android"))]
#[macro_use]
extern crate sig;
use backtrace::Backtrace;
use servo::Servo;
use servo::compositing::windowing::WindowEvent;
#[cfg(target_os = "android")]
use servo::config;
use servo::config::opts::{self, ArgumentParsingResult, parse_url_or_filename};
use servo::config::servo_version;
use servo::ipc_channel::ipc;
use servo::servo_config::prefs::PREFS;
use servo::servo_url::ServoUrl;
use std::env;
use std::panic;
use std::process;
use std::rc::Rc;
use std::thread;
pub mod platform {
#[cfg(target_os = "macos")]
pub use platform::macos::deinit;
#[cfg(target_os = "macos")]
pub mod macos;
#[cfg(not(target_os = "macos"))]
pub fn deinit() {}
}
#[cfg(not(target_os = "android"))]
fn install_crash_handler() {
use backtrace::Backtrace;
use sig::ffi::Sig;
use std::intrinsics::abort;
use std::thread;
fn handler(_sig: i32) {
let name = thread::current()
.name()
.map(|n| format!(" for thread \"{}\"", n))
.unwrap_or("".to_owned());
println!("Stack trace{}\n{:?}", name, Backtrace::new());
unsafe {
// N.B. Using process::abort() here causes the crash handler to be
// triggered recursively.
abort();
}
}
signal!(Sig::SEGV, handler); // handle segfaults
signal!(Sig::ILL, handler); // handle stack overflow and unsupported CPUs
signal!(Sig::IOT, handler); // handle double panics
signal!(Sig::BUS, handler); // handle invalid memory access
}
#[cfg(target_os = "android")]
fn install_crash_handler() {}
fn main() {
install_crash_handler();
// Parse the command line options and store them globally
let opts_result = opts::from_cmdline_args(&*args());
let content_process_token = if let ArgumentParsingResult::ContentProcess(token) = opts_result
|
else {
if opts::get().is_running_problem_test && ::std::env::var("RUST_LOG").is_err() {
::std::env::set_var("RUST_LOG", "compositing::constellation");
}
None
};
// TODO: once log-panics is released, can this be replaced by
// log_panics::init()?
panic::set_hook(Box::new(|info| {
warn!("Panic hook called.");
let msg = match info.payload().downcast_ref::<&'static str>() {
Some(s) => *s,
None => {
match info.payload().downcast_ref::<String>() {
Some(s) => &**s,
None => "Box<Any>",
}
},
};
let current_thread = thread::current();
let name = current_thread.name().unwrap_or("<unnamed>");
if let Some(location) = info.location() {
println!("{} (thread {}, at {}:{})",
msg,
name,
location.file(),
location.line());
} else {
println!("{} (thread {})", msg, name);
}
if env::var("RUST_BACKTRACE").is_ok() {
println!("{:?}", Backtrace::new());
}
error!("{}", msg);
}));
setup_logging();
if let Some(token) = content_process_token {
return servo::run_content_process(token);
}
if opts::get().is_printing_version {
println!("{}", servo_version());
process::exit(0);
}
let window = app::create_window(None);
    // If the url is not provided, we fall back to the homepage in PREFS,
    // or to a blank page if the homepage is not set either.
let cwd = env::current_dir().unwrap();
let cmdline_url = opts::get().url.clone();
let pref_url = PREFS.get("shell.homepage").as_string()
.and_then(|str| parse_url_or_filename(&cwd, str).ok());
let blank_url = ServoUrl::parse("about:blank").ok();
let target_url = cmdline_url.or(pref_url).or(blank_url).unwrap();
    // Our wrapper around `Servo` that also implements some
    // callbacks required by the glutin window implementation.
let mut servo_wrapper = ServoWrapper {
servo: Servo::new(window.clone())
};
let (sender, receiver) = ipc::channel().unwrap();
servo_wrapper.servo.handle_events(vec![WindowEvent::NewBrowser(target_url, sender)]);
let browser_id = receiver.recv().unwrap();
window.set_browser_id(browser_id);
servo_wrapper.servo.handle_events(vec![WindowEvent::SelectBrowser(browser_id)]);
servo_wrapper.servo.setup_logging();
register_glutin_resize_handler(&window, &mut servo_wrapper);
// Feed events from the window to the browser until the browser
// says to stop.
loop {
let should_continue = servo_wrapper.servo.handle_events(window.wait_events());
        if !should_continue {
break;
}
}
unregister_glutin_resize_handler(&window);
platform::deinit()
}
fn register_glutin_resize_handler(window: &Rc<app::window::Window>, browser: &mut ServoWrapper) {
unsafe {
window.set_nested_event_loop_listener(browser);
}
}
fn unregister_glutin_resize_handler(window: &Rc<app::window::Window>) {
unsafe {
window.remove_nested_event_loop_listener();
}
}
struct ServoWrapper {
servo: Servo<app::window::Window>,
}
impl app::NestedEventLoopListener for ServoWrapper {
fn handle_event_from_nested_event_loop(&mut self, event: WindowEvent) -> bool {
let is_resize = match event {
WindowEvent::Resize(..) => true,
_ => false,
};
        if !self.servo.handle_events(vec![event]) {
return false;
}
if is_resize {
self.servo.repaint_synchronously()
}
true
}
}
#[cfg(target_os = "android")]
fn setup_logging() {
// Piping logs from stdout/stderr to logcat happens in android_injected_glue.
::std::env::set_var("RUST_LOG", "error");
unsafe { android_injected_glue::ffi::app_dummy() };
}
#[cfg(not(target_os = "android"))]
fn setup_logging() {}
#[cfg(target_os = "android")]
/// Attempt to read parameters from a file since they are not passed to us in Android environments.
/// The first line should be the "servo" argument and the last should be the URL to load.
/// Blank lines and those beginning with a '#' are ignored.
/// Each line should be a separate parameter as would be parsed by the shell.
/// For example, "servo -p 10 http://en.wikipedia.org/wiki/Rust" would take 4 lines.
fn args() -> Vec<String> {
use std::error::Error;
use std::fs::File;
use std::io::{BufRead, BufReader};
let mut params_file = config::basedir::default_config_dir().unwrap();
params_file.push("android_params");
match File::open(params_file.to_str().unwrap()) {
Ok(f) => {
let mut vec = Vec::new();
let file = BufReader::new(&f);
for line in file.lines() {
let l = line.unwrap().trim().to_owned();
// ignore blank lines and those that start with a '#'
match l.is_empty() || l.as_bytes()[0] == b'#' {
true => (),
false => vec.push(l),
}
}
vec
},
Err(e) => {
debug!("Failed to open params file '{}': {}",
params_file.to_str().unwrap(),
Error::description(&e));
vec!["servo".to_owned(), "http://en.wikipedia.org/wiki/Rust".to_owned()]
},
}
}
#[cfg(not(target_os = "android"))]
fn args() -> Vec<String> {
use std::env;
env::args().collect()
}
#[cfg(target_os = "android")]
#[no_mangle]
#[inline(never)]
#[allow(non_snake_case)]
pub extern "C" fn android_main(app: *mut ()) {
android_injected_glue::android_main2(app as *mut _, move |_, _| main());
}
|
{
Some(token)
}
|
conditional_block
|
main.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `servo` test application.
//!
//! Creates a `Servo` instance with a simple implementation of
//! the compositor's `WindowMethods` to create a working web browser.
//!
//! This browser's implementation of `WindowMethods` is built on top
//! of [glutin], the cross-platform OpenGL utility and windowing
//! library.
//!
//! For the engine itself look next door in `components/servo/lib.rs`.
//!
//! [glutin]: https://github.com/tomaka/glutin
#![feature(start, core_intrinsics)]
#[cfg(target_os = "android")]
extern crate android_injected_glue;
extern crate backtrace;
// The window backed by glutin
extern crate glutin_app as app;
#[macro_use]
extern crate log;
// The Servo engine
extern crate servo;
#[cfg(not(target_os = "android"))]
#[macro_use]
extern crate sig;
use backtrace::Backtrace;
use servo::Servo;
use servo::compositing::windowing::WindowEvent;
#[cfg(target_os = "android")]
use servo::config;
use servo::config::opts::{self, ArgumentParsingResult, parse_url_or_filename};
use servo::config::servo_version;
use servo::ipc_channel::ipc;
use servo::servo_config::prefs::PREFS;
use servo::servo_url::ServoUrl;
use std::env;
use std::panic;
use std::process;
use std::rc::Rc;
use std::thread;
pub mod platform {
#[cfg(target_os = "macos")]
pub use platform::macos::deinit;
#[cfg(target_os = "macos")]
pub mod macos;
#[cfg(not(target_os = "macos"))]
pub fn deinit() {}
}
#[cfg(not(target_os = "android"))]
fn install_crash_handler() {
use backtrace::Backtrace;
use sig::ffi::Sig;
use std::intrinsics::abort;
use std::thread;
fn handler(_sig: i32) {
let name = thread::current()
.name()
.map(|n| format!(" for thread \"{}\"", n))
.unwrap_or("".to_owned());
println!("Stack trace{}\n{:?}", name, Backtrace::new());
unsafe {
// N.B. Using process::abort() here causes the crash handler to be
// triggered recursively.
abort();
}
}
signal!(Sig::SEGV, handler); // handle segfaults
signal!(Sig::ILL, handler); // handle stack overflow and unsupported CPUs
signal!(Sig::IOT, handler); // handle double panics
signal!(Sig::BUS, handler); // handle invalid memory access
}
#[cfg(target_os = "android")]
fn install_crash_handler() {}
fn main() {
install_crash_handler();
// Parse the command line options and store them globally
let opts_result = opts::from_cmdline_args(&*args());
let content_process_token = if let ArgumentParsingResult::ContentProcess(token) = opts_result {
Some(token)
} else {
if opts::get().is_running_problem_test && ::std::env::var("RUST_LOG").is_err() {
::std::env::set_var("RUST_LOG", "compositing::constellation");
}
None
};
// TODO: once log-panics is released, can this be replaced by
// log_panics::init()?
panic::set_hook(Box::new(|info| {
warn!("Panic hook called.");
let msg = match info.payload().downcast_ref::<&'static str>() {
Some(s) => *s,
None => {
match info.payload().downcast_ref::<String>() {
Some(s) => &**s,
None => "Box<Any>",
}
},
};
let current_thread = thread::current();
let name = current_thread.name().unwrap_or("<unnamed>");
if let Some(location) = info.location() {
println!("{} (thread {}, at {}:{})",
msg,
name,
location.file(),
location.line());
} else {
println!("{} (thread {})", msg, name);
}
if env::var("RUST_BACKTRACE").is_ok() {
println!("{:?}", Backtrace::new());
}
error!("{}", msg);
}));
setup_logging();
if let Some(token) = content_process_token {
return servo::run_content_process(token);
}
if opts::get().is_printing_version {
println!("{}", servo_version());
process::exit(0);
}
let window = app::create_window(None);
    // If the url is not provided, we fall back to the homepage in PREFS,
    // or to a blank page if the homepage is not set either.
let cwd = env::current_dir().unwrap();
let cmdline_url = opts::get().url.clone();
let pref_url = PREFS.get("shell.homepage").as_string()
.and_then(|str| parse_url_or_filename(&cwd, str).ok());
let blank_url = ServoUrl::parse("about:blank").ok();
let target_url = cmdline_url.or(pref_url).or(blank_url).unwrap();
    // Our wrapper around `Servo` that also implements some
    // callbacks required by the glutin window implementation.
let mut servo_wrapper = ServoWrapper {
servo: Servo::new(window.clone())
};
let (sender, receiver) = ipc::channel().unwrap();
servo_wrapper.servo.handle_events(vec![WindowEvent::NewBrowser(target_url, sender)]);
let browser_id = receiver.recv().unwrap();
window.set_browser_id(browser_id);
servo_wrapper.servo.handle_events(vec![WindowEvent::SelectBrowser(browser_id)]);
servo_wrapper.servo.setup_logging();
register_glutin_resize_handler(&window, &mut servo_wrapper);
// Feed events from the window to the browser until the browser
// says to stop.
loop {
let should_continue = servo_wrapper.servo.handle_events(window.wait_events());
        if !should_continue {
break;
}
}
unregister_glutin_resize_handler(&window);
platform::deinit()
}
fn register_glutin_resize_handler(window: &Rc<app::window::Window>, browser: &mut ServoWrapper) {
unsafe {
window.set_nested_event_loop_listener(browser);
}
}
fn unregister_glutin_resize_handler(window: &Rc<app::window::Window>) {
unsafe {
window.remove_nested_event_loop_listener();
}
}
struct ServoWrapper {
servo: Servo<app::window::Window>,
}
impl app::NestedEventLoopListener for ServoWrapper {
fn handle_event_from_nested_event_loop(&mut self, event: WindowEvent) -> bool {
let is_resize = match event {
WindowEvent::Resize(..) => true,
_ => false,
};
        if !self.servo.handle_events(vec![event]) {
return false;
}
if is_resize {
self.servo.repaint_synchronously()
}
true
}
}
#[cfg(target_os = "android")]
fn setup_logging() {
// Piping logs from stdout/stderr to logcat happens in android_injected_glue.
::std::env::set_var("RUST_LOG", "error");
unsafe { android_injected_glue::ffi::app_dummy() };
}
#[cfg(not(target_os = "android"))]
fn setup_logging() {}
#[cfg(target_os = "android")]
/// Attempt to read parameters from a file since they are not passed to us in Android environments.
/// The first line should be the "servo" argument and the last should be the URL to load.
/// Blank lines and those beginning with a '#' are ignored.
/// Each line should be a separate parameter as would be parsed by the shell.
/// For example, "servo -p 10 http://en.wikipedia.org/wiki/Rust" would take 4 lines.
fn args() -> Vec<String>
|
},
Err(e) => {
debug!("Failed to open params file '{}': {}",
params_file.to_str().unwrap(),
Error::description(&e));
vec!["servo".to_owned(), "http://en.wikipedia.org/wiki/Rust".to_owned()]
},
}
}
#[cfg(not(target_os = "android"))]
fn args() -> Vec<String> {
use std::env;
env::args().collect()
}
#[cfg(target_os = "android")]
#[no_mangle]
#[inline(never)]
#[allow(non_snake_case)]
pub extern "C" fn android_main(app: *mut ()) {
android_injected_glue::android_main2(app as *mut _, move |_, _| main());
}
|
{
use std::error::Error;
use std::fs::File;
use std::io::{BufRead, BufReader};
let mut params_file = config::basedir::default_config_dir().unwrap();
params_file.push("android_params");
match File::open(params_file.to_str().unwrap()) {
Ok(f) => {
let mut vec = Vec::new();
let file = BufReader::new(&f);
for line in file.lines() {
let l = line.unwrap().trim().to_owned();
// ignore blank lines and those that start with a '#'
match l.is_empty() || l.as_bytes()[0] == b'#' {
true => (),
false => vec.push(l),
}
}
vec
|
identifier_body
|
main.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `servo` test application.
//!
//! Creates a `Servo` instance with a simple implementation of
//! the compositor's `WindowMethods` to create a working web browser.
//!
//! This browser's implementation of `WindowMethods` is built on top
//! of [glutin], the cross-platform OpenGL utility and windowing
//! library.
//!
//! For the engine itself look next door in `components/servo/lib.rs`.
//!
//! [glutin]: https://github.com/tomaka/glutin
#![feature(start, core_intrinsics)]
#[cfg(target_os = "android")]
extern crate android_injected_glue;
extern crate backtrace;
// The window backed by glutin
extern crate glutin_app as app;
#[macro_use]
extern crate log;
// The Servo engine
extern crate servo;
#[cfg(not(target_os = "android"))]
#[macro_use]
extern crate sig;
use backtrace::Backtrace;
use servo::Servo;
use servo::compositing::windowing::WindowEvent;
#[cfg(target_os = "android")]
use servo::config;
use servo::config::opts::{self, ArgumentParsingResult, parse_url_or_filename};
use servo::config::servo_version;
use servo::ipc_channel::ipc;
use servo::servo_config::prefs::PREFS;
use servo::servo_url::ServoUrl;
use std::env;
use std::panic;
use std::process;
use std::rc::Rc;
use std::thread;
pub mod platform {
#[cfg(target_os = "macos")]
pub use platform::macos::deinit;
#[cfg(target_os = "macos")]
pub mod macos;
#[cfg(not(target_os = "macos"))]
pub fn deinit() {}
}
#[cfg(not(target_os = "android"))]
fn install_crash_handler() {
use backtrace::Backtrace;
use sig::ffi::Sig;
use std::intrinsics::abort;
use std::thread;
fn handler(_sig: i32) {
let name = thread::current()
.name()
.map(|n| format!(" for thread \"{}\"", n))
.unwrap_or("".to_owned());
println!("Stack trace{}\n{:?}", name, Backtrace::new());
unsafe {
// N.B. Using process::abort() here causes the crash handler to be
// triggered recursively.
abort();
}
}
signal!(Sig::SEGV, handler); // handle segfaults
signal!(Sig::ILL, handler); // handle stack overflow and unsupported CPUs
signal!(Sig::IOT, handler); // handle double panics
signal!(Sig::BUS, handler); // handle invalid memory access
}
#[cfg(target_os = "android")]
fn install_crash_handler() {}
fn main() {
install_crash_handler();
// Parse the command line options and store them globally
let opts_result = opts::from_cmdline_args(&*args());
let content_process_token = if let ArgumentParsingResult::ContentProcess(token) = opts_result {
Some(token)
} else {
if opts::get().is_running_problem_test && ::std::env::var("RUST_LOG").is_err() {
::std::env::set_var("RUST_LOG", "compositing::constellation");
}
None
};
// TODO: once log-panics is released, can this be replaced by
// log_panics::init()?
panic::set_hook(Box::new(|info| {
warn!("Panic hook called.");
let msg = match info.payload().downcast_ref::<&'static str>() {
Some(s) => *s,
None => {
match info.payload().downcast_ref::<String>() {
Some(s) => &**s,
None => "Box<Any>",
}
},
};
let current_thread = thread::current();
let name = current_thread.name().unwrap_or("<unnamed>");
if let Some(location) = info.location() {
println!("{} (thread {}, at {}:{})",
msg,
name,
location.file(),
location.line());
} else {
println!("{} (thread {})", msg, name);
}
if env::var("RUST_BACKTRACE").is_ok() {
println!("{:?}", Backtrace::new());
}
error!("{}", msg);
}));
setup_logging();
if let Some(token) = content_process_token {
return servo::run_content_process(token);
}
if opts::get().is_printing_version {
println!("{}", servo_version());
process::exit(0);
}
let window = app::create_window(None);
    // If the url is not provided, we fall back to the homepage in PREFS,
    // or to a blank page if the homepage is not set either.
let cwd = env::current_dir().unwrap();
let cmdline_url = opts::get().url.clone();
let pref_url = PREFS.get("shell.homepage").as_string()
.and_then(|str| parse_url_or_filename(&cwd, str).ok());
let blank_url = ServoUrl::parse("about:blank").ok();
let target_url = cmdline_url.or(pref_url).or(blank_url).unwrap();
    // Our wrapper around `Servo` that also implements some
    // callbacks required by the glutin window implementation.
let mut servo_wrapper = ServoWrapper {
servo: Servo::new(window.clone())
};
let (sender, receiver) = ipc::channel().unwrap();
servo_wrapper.servo.handle_events(vec![WindowEvent::NewBrowser(target_url, sender)]);
let browser_id = receiver.recv().unwrap();
window.set_browser_id(browser_id);
servo_wrapper.servo.handle_events(vec![WindowEvent::SelectBrowser(browser_id)]);
servo_wrapper.servo.setup_logging();
register_glutin_resize_handler(&window, &mut servo_wrapper);
// Feed events from the window to the browser until the browser
// says to stop.
loop {
let should_continue = servo_wrapper.servo.handle_events(window.wait_events());
        if !should_continue {
break;
}
}
unregister_glutin_resize_handler(&window);
platform::deinit()
}
fn register_glutin_resize_handler(window: &Rc<app::window::Window>, browser: &mut ServoWrapper) {
unsafe {
window.set_nested_event_loop_listener(browser);
}
}
fn unregister_glutin_resize_handler(window: &Rc<app::window::Window>) {
unsafe {
window.remove_nested_event_loop_listener();
}
}
struct ServoWrapper {
servo: Servo<app::window::Window>,
}
impl app::NestedEventLoopListener for ServoWrapper {
fn handle_event_from_nested_event_loop(&mut self, event: WindowEvent) -> bool {
let is_resize = match event {
WindowEvent::Resize(..) => true,
_ => false,
};
        if !self.servo.handle_events(vec![event]) {
return false;
}
if is_resize {
self.servo.repaint_synchronously()
}
true
}
}
#[cfg(target_os = "android")]
fn setup_logging() {
// Piping logs from stdout/stderr to logcat happens in android_injected_glue.
::std::env::set_var("RUST_LOG", "error");
unsafe { android_injected_glue::ffi::app_dummy() };
}
#[cfg(not(target_os = "android"))]
fn setup_logging() {}
#[cfg(target_os = "android")]
/// Attempt to read parameters from a file since they are not passed to us in Android environments.
/// The first line should be the "servo" argument and the last should be the URL to load.
/// Blank lines and those beginning with a '#' are ignored.
/// Each line should be a separate parameter as would be parsed by the shell.
/// For example, "servo -p 10 http://en.wikipedia.org/wiki/Rust" would take 4 lines.
fn args() -> Vec<String> {
use std::error::Error;
use std::fs::File;
use std::io::{BufRead, BufReader};
let mut params_file = config::basedir::default_config_dir().unwrap();
params_file.push("android_params");
match File::open(params_file.to_str().unwrap()) {
Ok(f) => {
let mut vec = Vec::new();
let file = BufReader::new(&f);
for line in file.lines() {
let l = line.unwrap().trim().to_owned();
// ignore blank lines and those that start with a '#'
match l.is_empty() || l.as_bytes()[0] == b'#' {
true => (),
false => vec.push(l),
}
}
vec
},
Err(e) => {
debug!("Failed to open params file '{}': {}",
params_file.to_str().unwrap(),
Error::description(&e));
vec!["servo".to_owned(), "http://en.wikipedia.org/wiki/Rust".to_owned()]
},
}
}
#[cfg(not(target_os = "android"))]
fn args() -> Vec<String> {
use std::env;
env::args().collect()
}
#[cfg(target_os = "android")]
#[no_mangle]
#[inline(never)]
#[allow(non_snake_case)]
pub extern "C" fn
|
(app: *mut ()) {
android_injected_glue::android_main2(app as *mut _, move |_, _| main());
}
|
android_main
|
identifier_name
|
error.rs
|
//! Types representing various errors that can occur in a Rocket application.
use std::{io, fmt};
use std::sync::atomic::{Ordering, AtomicBool};
use http::hyper;
/// [unstable] Error type for Rocket. Likely to change.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Error {
/// The request method was bad.
BadMethod,
/// The value could not be parsed.
BadParse,
/// There was no such route.
NoRoute, // TODO: Add a chain of routes attempted.
/// The error was internal.
Internal,
/// The requested key/index does not exist.
NoKey,
}
/// The kind of launch error that occurred.
///
/// In almost every instance, a launch error occurs because of an I/O error;
/// this is represented by the `Io` variant. A launch error may also occur
/// because of ill-defined routes that lead to collisions or because a fairing
/// encountered an error; these are represented by the `Collision` and
/// `FailedFairing` variants, respectively. The `Unknown` variant captures all
/// other kinds of launch errors.
#[derive(Debug)]
pub enum LaunchErrorKind {
Io(io::Error),
Collision,
FailedFairing,
Unknown(Box<::std::error::Error + Send + Sync>)
}
/// An error that occurs during launch.
///
/// A `LaunchError` is returned by
/// [rocket::launch](/rocket/struct.Rocket.html#method.launch) when launching an
/// application fails for some reason.
///
/// # Panics
///
/// A value of this type panics if it is dropped without first being inspected.
/// An _inspection_ occurs when any method is called. For instance, if
/// `println!("Error: {}", e)` is called, where `e: LaunchError`, the
/// `Display::fmt` method being called by `println!` results in `e` being marked
/// as inspected; a subsequent `drop` of the value will _not_ result in a panic.
/// The following snippet illustrates this:
///
/// ```rust
/// # if false {
/// let error = rocket::ignite().launch();
///
/// // This line is only reached if launching failed. This "inspects" the error.
/// println!("Launch failed! Error: {}", error);
///
/// // This call to drop (explicit here for demonstration) will do nothing.
/// drop(error);
/// # }
/// ```
///
/// When a value of this type panics, the corresponding error message is pretty
/// printed to the console. The following illustrates this:
///
/// ```rust
/// # if false {
/// let error = rocket::ignite().launch();
///
/// // This call to drop (explicit here for demonstration) will result in
/// // `error` being pretty-printed to the console along with a `panic!`.
/// drop(error);
/// # }
/// ```
///
/// # Usage
///
/// A `LaunchError` value should usually be allowed to `drop` without
/// inspection. There are two exceptions to this suggestion.
///
/// 1. If you are writing a library or high-level application on top of
/// Rocket, you likely want to inspect the value before it drops to avoid a
/// Rocket-specific `panic!`. This typically means simply printing the
/// value.
///
/// 2. You want to display your own error messages.
pub struct LaunchError {
handled: AtomicBool,
kind: LaunchErrorKind
}
impl LaunchError {
#[inline(always)]
fn new(kind: LaunchErrorKind) -> LaunchError {
LaunchError { handled: AtomicBool::new(false), kind: kind }
}
#[inline(always)]
fn was_handled(&self) -> bool {
self.handled.load(Ordering::Acquire)
}
#[inline(always)]
fn mark_handled(&self) {
self.handled.store(true, Ordering::Release)
}
/// Retrieve the `kind` of the launch error.
///
/// # Example
///
/// ```rust
/// # if false {
/// let error = rocket::ignite().launch();
///
/// // This line is only reached if launch failed.
/// let error_kind = error.kind();
/// # }
/// ```
#[inline]
pub fn kind(&self) -> &LaunchErrorKind {
self.mark_handled();
&self.kind
}
}
impl From<LaunchErrorKind> for LaunchError {
#[inline]
fn
|
(kind: LaunchErrorKind) -> LaunchError {
LaunchError::new(kind)
}
}
impl From<hyper::Error> for LaunchError {
#[inline]
fn from(error: hyper::Error) -> LaunchError {
match error {
hyper::Error::Io(e) => LaunchError::new(LaunchErrorKind::Io(e)),
e => LaunchError::new(LaunchErrorKind::Unknown(Box::new(e)))
}
}
}
impl From<io::Error> for LaunchError {
#[inline]
fn from(error: io::Error) -> LaunchError {
LaunchError::new(LaunchErrorKind::Io(error))
}
}
impl fmt::Display for LaunchErrorKind {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
LaunchErrorKind::Io(ref e) => write!(f, "I/O error: {}", e),
LaunchErrorKind::Collision => write!(f, "route collisions detected"),
LaunchErrorKind::FailedFairing => write!(f, "a launch fairing failed"),
LaunchErrorKind::Unknown(ref e) => write!(f, "unknown error: {}", e)
}
}
}
impl fmt::Debug for LaunchError {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.mark_handled();
write!(f, "{:?}", self.kind())
}
}
impl fmt::Display for LaunchError {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.mark_handled();
write!(f, "{}", self.kind())
}
}
impl ::std::error::Error for LaunchError {
#[inline]
fn description(&self) -> &str {
self.mark_handled();
match *self.kind() {
            LaunchErrorKind::Io(_) => "an I/O error occurred during launch",
LaunchErrorKind::Collision => "route collisions were detected",
LaunchErrorKind::FailedFairing => "a launch fairing reported an error",
            LaunchErrorKind::Unknown(_) => "an unknown error occurred during launch"
}
}
}
impl Drop for LaunchError {
fn drop(&mut self) {
if self.was_handled() {
return
}
match *self.kind() {
LaunchErrorKind::Io(ref e) => {
error!("Rocket failed to launch due to an I/O error.");
panic!("{}", e);
}
LaunchErrorKind::Collision => {
error!("Rocket failed to launch due to routing collisions.");
panic!("route collisions detected");
}
LaunchErrorKind::FailedFairing => {
error!("Rocket failed to launch due to a failing fairing.");
panic!("launch fairing failure");
}
LaunchErrorKind::Unknown(ref e) => {
error!("Rocket failed to launch due to an unknown error.");
panic!("{}", e);
}
}
}
}
|
from
|
identifier_name
|
error.rs
|
//! Types representing various errors that can occur in a Rocket application.
use std::{io, fmt};
use std::sync::atomic::{Ordering, AtomicBool};
use http::hyper;
/// [unstable] Error type for Rocket. Likely to change.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Error {
/// The request method was bad.
BadMethod,
/// The value could not be parsed.
BadParse,
/// There was no such route.
NoRoute, // TODO: Add a chain of routes attempted.
/// The error was internal.
Internal,
/// The requested key/index does not exist.
NoKey,
}
/// The kind of launch error that occurred.
///
/// In almost every instance, a launch error occurs because of an I/O error;
/// this is represented by the `Io` variant. A launch error may also occur
/// because of ill-defined routes that lead to collisions or because a fairing
/// encountered an error; these are represented by the `Collision` and
/// `FailedFairing` variants, respectively. The `Unknown` variant captures all
/// other kinds of launch errors.
#[derive(Debug)]
pub enum LaunchErrorKind {
Io(io::Error),
Collision,
FailedFairing,
Unknown(Box<::std::error::Error + Send + Sync>)
}
/// An error that occurs during launch.
///
/// A `LaunchError` is returned by
/// [rocket::launch](/rocket/struct.Rocket.html#method.launch) when launching an
/// application fails for some reason.
///
/// # Panics
///
/// A value of this type panics if it is dropped without first being inspected.
/// An _inspection_ occurs when any method is called. For instance, if
/// `println!("Error: {}", e)` is called, where `e: LaunchError`, the
/// `Display::fmt` method being called by `println!` results in `e` being marked
/// as inspected; a subsequent `drop` of the value will _not_ result in a panic.
/// The following snippet illustrates this:
///
/// ```rust
/// # if false {
/// let error = rocket::ignite().launch();
///
/// // This line is only reached if launching failed. This "inspects" the error.
/// println!("Launch failed! Error: {}", error);
///
/// // This call to drop (explicit here for demonstration) will do nothing.
/// drop(error);
/// # }
/// ```
///
/// When a value of this type panics, the corresponding error message is pretty
/// printed to the console. The following illustrates this:
///
/// ```rust
/// # if false {
/// let error = rocket::ignite().launch();
///
/// // This call to drop (explicit here for demonstration) will result in
/// // `error` being pretty-printed to the console along with a `panic!`.
/// drop(error);
/// # }
/// ```
///
/// # Usage
///
/// A `LaunchError` value should usually be allowed to `drop` without
/// inspection. There are two exceptions to this suggestion.
///
/// 1. If you are writing a library or high-level application on top of
/// Rocket, you likely want to inspect the value before it drops to avoid a
/// Rocket-specific `panic!`. This typically means simply printing the
/// value.
///
/// 2. You want to display your own error messages.
pub struct LaunchError {
handled: AtomicBool,
kind: LaunchErrorKind
}
impl LaunchError {
#[inline(always)]
fn new(kind: LaunchErrorKind) -> LaunchError {
LaunchError { handled: AtomicBool::new(false), kind: kind }
}
#[inline(always)]
fn was_handled(&self) -> bool {
self.handled.load(Ordering::Acquire)
}
#[inline(always)]
fn mark_handled(&self) {
self.handled.store(true, Ordering::Release)
}
/// Retrieve the `kind` of the launch error.
///
/// # Example
///
/// ```rust
/// # if false {
/// let error = rocket::ignite().launch();
///
/// // This line is only reached if launch failed.
/// let error_kind = error.kind();
/// # }
/// ```
#[inline]
pub fn kind(&self) -> &LaunchErrorKind {
self.mark_handled();
&self.kind
}
}
impl From<LaunchErrorKind> for LaunchError {
#[inline]
fn from(kind: LaunchErrorKind) -> LaunchError {
LaunchError::new(kind)
}
}
impl From<hyper::Error> for LaunchError {
#[inline]
fn from(error: hyper::Error) -> LaunchError {
match error {
hyper::Error::Io(e) => LaunchError::new(LaunchErrorKind::Io(e)),
e => LaunchError::new(LaunchErrorKind::Unknown(Box::new(e)))
}
}
}
impl From<io::Error> for LaunchError {
|
LaunchError::new(LaunchErrorKind::Io(error))
}
}
impl fmt::Display for LaunchErrorKind {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
LaunchErrorKind::Io(ref e) => write!(f, "I/O error: {}", e),
LaunchErrorKind::Collision => write!(f, "route collisions detected"),
LaunchErrorKind::FailedFairing => write!(f, "a launch fairing failed"),
LaunchErrorKind::Unknown(ref e) => write!(f, "unknown error: {}", e)
}
}
}
impl fmt::Debug for LaunchError {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.mark_handled();
write!(f, "{:?}", self.kind())
}
}
impl fmt::Display for LaunchError {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.mark_handled();
write!(f, "{}", self.kind())
}
}
impl ::std::error::Error for LaunchError {
#[inline]
fn description(&self) -> &str {
self.mark_handled();
match *self.kind() {
            LaunchErrorKind::Io(_) => "an I/O error occurred during launch",
LaunchErrorKind::Collision => "route collisions were detected",
LaunchErrorKind::FailedFairing => "a launch fairing reported an error",
            LaunchErrorKind::Unknown(_) => "an unknown error occurred during launch"
}
}
}
impl Drop for LaunchError {
fn drop(&mut self) {
if self.was_handled() {
return
}
match *self.kind() {
LaunchErrorKind::Io(ref e) => {
error!("Rocket failed to launch due to an I/O error.");
panic!("{}", e);
}
LaunchErrorKind::Collision => {
error!("Rocket failed to launch due to routing collisions.");
panic!("route collisions detected");
}
LaunchErrorKind::FailedFairing => {
error!("Rocket failed to launch due to a failing fairing.");
panic!("launch fairing failure");
}
LaunchErrorKind::Unknown(ref e) => {
error!("Rocket failed to launch due to an unknown error.");
panic!("{}", e);
}
}
}
}
|
#[inline]
fn from(error: io::Error) -> LaunchError {
|
random_line_split
|
error.rs
|
//! Types representing various errors that can occur in a Rocket application.
use std::{io, fmt};
use std::sync::atomic::{Ordering, AtomicBool};
use http::hyper;
/// [unstable] Error type for Rocket. Likely to change.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Error {
/// The request method was bad.
BadMethod,
/// The value could not be parsed.
BadParse,
/// There was no such route.
NoRoute, // TODO: Add a chain of routes attempted.
/// The error was internal.
Internal,
/// The requested key/index does not exist.
NoKey,
}
/// The kind of launch error that occurred.
///
/// In almost every instance, a launch error occurs because of an I/O error;
/// this is represented by the `Io` variant. A launch error may also occur
/// because of ill-defined routes that lead to collisions or because a fairing
/// encountered an error; these are represented by the `Collision` and
/// `FailedFairing` variants, respectively. The `Unknown` variant captures all
/// other kinds of launch errors.
#[derive(Debug)]
pub enum LaunchErrorKind {
Io(io::Error),
Collision,
FailedFairing,
Unknown(Box<::std::error::Error + Send + Sync>)
}
/// An error that occurs during launch.
///
/// A `LaunchError` is returned by
/// [rocket::launch](/rocket/struct.Rocket.html#method.launch) when launching an
/// application fails for some reason.
///
/// # Panics
///
/// A value of this type panics if it is dropped without first being inspected.
/// An _inspection_ occurs when any method is called. For instance, if
/// `println!("Error: {}", e)` is called, where `e: LaunchError`, the
/// `Display::fmt` method being called by `println!` results in `e` being marked
/// as inspected; a subsequent `drop` of the value will _not_ result in a panic.
/// The following snippet illustrates this:
///
/// ```rust
/// # if false {
/// let error = rocket::ignite().launch();
///
/// // This line is only reached if launching failed. This "inspects" the error.
/// println!("Launch failed! Error: {}", error);
///
/// // This call to drop (explicit here for demonstration) will do nothing.
/// drop(error);
/// # }
/// ```
///
/// When a value of this type panics, the corresponding error message is pretty
/// printed to the console. The following illustrates this:
///
/// ```rust
/// # if false {
/// let error = rocket::ignite().launch();
///
/// // This call to drop (explicit here for demonstration) will result in
/// // `error` being pretty-printed to the console along with a `panic!`.
/// drop(error);
/// # }
/// ```
///
/// # Usage
///
/// A `LaunchError` value should usually be allowed to `drop` without
/// inspection. There are two exceptions to this suggestion.
///
/// 1. If you are writing a library or high-level application on top of
/// Rocket, you likely want to inspect the value before it drops to avoid a
/// Rocket-specific `panic!`. This typically means simply printing the
/// value.
///
/// 2. You want to display your own error messages.
pub struct LaunchError {
handled: AtomicBool,
kind: LaunchErrorKind
}
impl LaunchError {
#[inline(always)]
fn new(kind: LaunchErrorKind) -> LaunchError {
LaunchError { handled: AtomicBool::new(false), kind: kind }
}
#[inline(always)]
fn was_handled(&self) -> bool {
self.handled.load(Ordering::Acquire)
}
#[inline(always)]
fn mark_handled(&self) {
self.handled.store(true, Ordering::Release)
}
/// Retrieve the `kind` of the launch error.
///
/// # Example
///
/// ```rust
/// # if false {
/// let error = rocket::ignite().launch();
///
/// // This line is only reached if launch failed.
/// let error_kind = error.kind();
/// # }
/// ```
#[inline]
pub fn kind(&self) -> &LaunchErrorKind {
self.mark_handled();
&self.kind
}
}
impl From<LaunchErrorKind> for LaunchError {
#[inline]
fn from(kind: LaunchErrorKind) -> LaunchError {
LaunchError::new(kind)
}
}
impl From<hyper::Error> for LaunchError {
#[inline]
fn from(error: hyper::Error) -> LaunchError {
match error {
hyper::Error::Io(e) => LaunchError::new(LaunchErrorKind::Io(e)),
e => LaunchError::new(LaunchErrorKind::Unknown(Box::new(e)))
}
}
}
impl From<io::Error> for LaunchError {
#[inline]
fn from(error: io::Error) -> LaunchError {
LaunchError::new(LaunchErrorKind::Io(error))
}
}
impl fmt::Display for LaunchErrorKind {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
LaunchErrorKind::Io(ref e) => write!(f, "I/O error: {}", e),
LaunchErrorKind::Collision => write!(f, "route collisions detected"),
LaunchErrorKind::FailedFairing => write!(f, "a launch fairing failed"),
LaunchErrorKind::Unknown(ref e) => write!(f, "unknown error: {}", e)
}
}
}
impl fmt::Debug for LaunchError {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.mark_handled();
write!(f, "{:?}", self.kind())
}
}
impl fmt::Display for LaunchError {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.mark_handled();
write!(f, "{}", self.kind())
}
}
impl ::std::error::Error for LaunchError {
#[inline]
fn description(&self) -> &str {
self.mark_handled();
match *self.kind() {
LaunchErrorKind::Io(_) => "an I/O error occurred during launch",
LaunchErrorKind::Collision => "route collisions were detected",
LaunchErrorKind::FailedFairing => "a launch fairing reported an error",
LaunchErrorKind::Unknown(_) => "an unknown error occurred during launch"
}
}
}
impl Drop for LaunchError {
fn drop(&mut self) {
if self.was_handled() {
return
}
match *self.kind() {
LaunchErrorKind::Io(ref e) => {
error!("Rocket failed to launch due to an I/O error.");
panic!("{}", e);
}
LaunchErrorKind::Collision =>
|
LaunchErrorKind::FailedFairing => {
error!("Rocket failed to launch due to a failing fairing.");
panic!("launch fairing failure");
}
LaunchErrorKind::Unknown(ref e) => {
error!("Rocket failed to launch due to an unknown error.");
panic!("{}", e);
}
}
}
}
|
{
error!("Rocket failed to launch due to routing collisions.");
panic!("route collisions detected");
}
|
conditional_block
|
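The doc comments in the entry above spell out `LaunchError`'s panic-on-drop contract. As a minimal usage sketch (not part of the dataset row above; the `rocket::error` import path and the free-standing function are assumptions), inspecting the error through `kind()` before it drops avoids the panic:

use rocket::error::LaunchErrorKind;

// Assumes launching has failed and `launch()` has returned a `LaunchError`.
fn report_launch_failure() {
    let error = rocket::ignite().launch();
    // Calling any method (here `kind()`) marks the error as inspected,
    // so the later implicit drop will not panic.
    match *error.kind() {
        LaunchErrorKind::Io(ref e) => eprintln!("launch I/O error: {}", e),
        LaunchErrorKind::Collision => eprintln!("route collisions detected"),
        LaunchErrorKind::FailedFairing => eprintln!("a launch fairing failed"),
        LaunchErrorKind::Unknown(ref e) => eprintln!("unknown launch error: {}", e),
    }
}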
mouse.rs
|
//! Checking the state of mouse-related properties, namely the mouse cursor's position, `n`-clicks and scrolling.
use geometry::Point;
use bear_lib_terminal_sys as ffi;
/// Amount of steps the mouse wheel scrolled in the last [`Event::MouseScroll`](../../enum.Event.html#variant.MouseScroll).
///
/// Negative values indicate an "up" scroll.
|
/// Get the mouse cursor's position in cells.
pub fn position() -> Point {
Point::new(ffi::state(ffi::TK_MOUSE_X), ffi::state(ffi::TK_MOUSE_Y))
}
/// Get the mouse cursor's position in pixels.
pub fn pixel_position() -> Point {
Point::new(ffi::state(ffi::TK_MOUSE_PIXEL_X), ffi::state(ffi::TK_MOUSE_PIXEL_Y))
}
/// Amount of fast consecutive clicks for the [`Event::KeyPressed`](../../enum.Event.html#variant.KeyPressed)
/// with [`key: Mouse*`](../../enum.Event.html#variant.KeyPressed).
pub fn clicks() -> i32 {
ffi::state(ffi::TK_MOUSE_CLICKS)
}
|
///
/// Positive values indicate a "down" scroll.
pub fn scroll() -> i32 {
ffi::state(ffi::TK_MOUSE_WHEEL)
}
|
random_line_split
|
mouse.rs
|
//! Checking the state of mouse-related properties, namely the mouse cursor's position, `n`-clicks and scrolling.
use geometry::Point;
use bear_lib_terminal_sys as ffi;
/// Amount of steps the mouse wheel scrolled in the last [`Event::MouseScroll`](../../enum.Event.html#variant.MouseScroll).
///
/// Negative values indicate an "up" scroll.
///
/// Positive values indicate a "down" scroll.
pub fn
|
() -> i32 {
ffi::state(ffi::TK_MOUSE_WHEEL)
}
/// Get the mouse cursor's position in cells.
pub fn position() -> Point {
Point::new(ffi::state(ffi::TK_MOUSE_X), ffi::state(ffi::TK_MOUSE_Y))
}
/// Get the mouse cursor's position in pixels.
pub fn pixel_position() -> Point {
Point::new(ffi::state(ffi::TK_MOUSE_PIXEL_X), ffi::state(ffi::TK_MOUSE_PIXEL_Y))
}
/// Amount of fast consecutive clicks for the [`Event::KeyPressed`](../../enum.Event.html#variant.KeyPressed)
/// with [`key: Mouse*`](../../enum.Event.html#variant.KeyPressed).
pub fn clicks() -> i32 {
ffi::state(ffi::TK_MOUSE_CLICKS)
}
|
scroll
|
identifier_name
|
mouse.rs
|
//! Checking the state of mouse-related properties, namely the mouse cursor's position, `n`-clicks and scrolling.
use geometry::Point;
use bear_lib_terminal_sys as ffi;
/// Amount of steps the mouse wheel scrolled in the last [`Event::MouseScroll`](../../enum.Event.html#variant.MouseScroll).
///
/// Negative values indicate an "up" scroll.
///
/// Positive values indicate a "down" scroll.
pub fn scroll() -> i32 {
ffi::state(ffi::TK_MOUSE_WHEEL)
}
/// Get the mouse cursor's position in cells.
pub fn position() -> Point {
Point::new(ffi::state(ffi::TK_MOUSE_X), ffi::state(ffi::TK_MOUSE_Y))
}
/// Get the mouse cursor's position in pixels.
pub fn pixel_position() -> Point {
Point::new(ffi::state(ffi::TK_MOUSE_PIXEL_X), ffi::state(ffi::TK_MOUSE_PIXEL_Y))
}
/// Amount of fast consecutive clicks for the [`Event::KeyPressed`](../../enum.Event.html#variant.KeyPressed)
/// with [`key: Mouse*`](../../enum.Event.html#variant.KeyPressed).
pub fn clicks() -> i32
|
{
ffi::state(ffi::TK_MOUSE_CLICKS)
}
|
identifier_body
|
|
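A small polling sketch for the accessors defined above (illustrative only: the `mouse` module path and `Point`'s `x`/`y` fields are assumptions about how the crate exposes this file):

fn report_mouse_state() {
    // Cell and pixel coordinates of the cursor.
    let cell = mouse::position();
    let pixel = mouse::pixel_position();
    println!("cursor at cell ({}, {}), pixel ({}, {})", cell.x, cell.y, pixel.x, pixel.y);
    // Click count of the last mouse key event and wheel steps of the last scroll event.
    println!("clicks: {}, wheel steps: {}", mouse::clicks(), mouse::scroll());
}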
functions.rs
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Declaration of built-in (scalar) functions.
//! This module contains built-in functions' enumeration and metadata.
//!
//! Generally, a function has:
//! * a signature
//! * a return type, that is a function of the incoming arguments' types
//! * the computation, that must accept each valid signature
//!
//! * Signature: see `Signature`
//! * Return type: a function `(arg_types) -> return_type`. E.g. for sqrt, ([f32]) -> f32, ([f64]) -> f64.
//!
//! This module also has a set of coercion rules to improve user experience: if an argument i32 is passed
//! to a function that supports f64, it is coerced to f64.
use super::{
type_coercion::{coerce, data_types},
ColumnarValue, PhysicalExpr,
};
use crate::error::{DataFusionError, Result};
use crate::physical_plan::array_expressions;
use crate::physical_plan::datetime_expressions;
use crate::physical_plan::math_expressions;
use crate::physical_plan::string_expressions;
use arrow::{
array::ArrayRef,
compute::kernels::length::length,
datatypes::TimeUnit,
datatypes::{DataType, Field, Schema},
record_batch::RecordBatch,
};
use fmt::{Debug, Formatter};
use std::{fmt, str::FromStr, sync::Arc};
/// A function's signature, which defines the function's supported argument types.
#[derive(Debug, Clone)]
pub enum Signature {
/// arbitrary number of arguments of a common type out of a list of valid types
// A function such as `concat` is `Variadic(vec![DataType::Utf8, DataType::LargeUtf8])`
Variadic(Vec<DataType>),
/// arbitrary number of arguments of an arbitrary but equal type
// A function such as `array` is `VariadicEqual`
// The first argument decides the type used for coercion
VariadicEqual,
/// fixed number of arguments of an arbitrary but equal type out of a list of valid types
// A function of one argument of f64 is `Uniform(1, vec![DataType::Float64])`
// A function of two arguments of f64 or f32 is `Uniform(2, vec![DataType::Float32, DataType::Float64])`
Uniform(usize, Vec<DataType>),
/// exact number of arguments of an exact type
Exact(Vec<DataType>),
/// fixed number of arguments of arbitrary types
Any(usize),
}
/// Scalar function
pub type ScalarFunctionImplementation =
Arc<dyn Fn(&[ArrayRef]) -> Result<ArrayRef> + Send + Sync>;
/// A function's return type
pub type ReturnTypeFunction =
Arc<dyn Fn(&[DataType]) -> Result<Arc<DataType>> + Send + Sync>;
/// Enum of all built-in scalar functions
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum BuiltinScalarFunction {
/// sqrt
Sqrt,
/// sin
Sin,
/// cos
Cos,
/// tan
Tan,
/// asin
Asin,
/// acos
Acos,
/// atan
Atan,
/// exp
Exp,
/// log, also known as ln
Log,
/// log2
Log2,
/// log10
Log10,
/// floor
Floor,
/// ceil
Ceil,
/// round
Round,
/// trunc
Trunc,
/// abs
Abs,
/// signum
Signum,
/// length
Length,
/// concat
Concat,
/// to_timestamp
ToTimestamp,
/// construct an array from columns
Array,
}
impl fmt::Display for BuiltinScalarFunction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// lowercase of the debug.
write!(f, "{}", format!("{:?}", self).to_lowercase())
}
}
impl FromStr for BuiltinScalarFunction {
type Err = DataFusionError;
fn from_str(name: &str) -> Result<BuiltinScalarFunction> {
Ok(match name {
"sqrt" => BuiltinScalarFunction::Sqrt,
"sin" => BuiltinScalarFunction::Sin,
"cos" => BuiltinScalarFunction::Cos,
"tan" => BuiltinScalarFunction::Tan,
"asin" => BuiltinScalarFunction::Asin,
"acos" => BuiltinScalarFunction::Acos,
"atan" => BuiltinScalarFunction::Atan,
"exp" => BuiltinScalarFunction::Exp,
"log" => BuiltinScalarFunction::Log,
"log2" => BuiltinScalarFunction::Log2,
"log10" => BuiltinScalarFunction::Log10,
"floor" => BuiltinScalarFunction::Floor,
"ceil" => BuiltinScalarFunction::Ceil,
"round" => BuiltinScalarFunction::Round,
"trunc" => BuiltinScalarFunction::Trunc,
"abs" => BuiltinScalarFunction::Abs,
"signum" => BuiltinScalarFunction::Signum,
"length" => BuiltinScalarFunction::Length,
"concat" => BuiltinScalarFunction::Concat,
"to_timestamp" => BuiltinScalarFunction::ToTimestamp,
"array" => BuiltinScalarFunction::Array,
_ => {
return Err(DataFusionError::Plan(format!(
"There is no built-in function named {}",
name
)))
}
})
}
}
/// Returns the datatype of the scalar function
pub fn return_type(
fun: &BuiltinScalarFunction,
arg_types: &Vec<DataType>,
) -> Result<DataType> {
// Note that this function *must* return the same type that the respective physical expression returns
// or the execution panics.
// verify that this is a valid set of data types for this function
data_types(&arg_types, &signature(fun))?;
if arg_types.len() == 0 {
// functions currently cannot be evaluated without arguments, as they can't
// know the number of rows to return.
return Err(DataFusionError::Plan(
format!("Function '{}' requires at least one argument", fun).to_string(),
));
}
// the return type of the built-in function. Eventually there
// will be built-in functions whose return type depends on the
// incoming type.
match fun {
BuiltinScalarFunction::Length => Ok(match arg_types[0] {
DataType::LargeUtf8 => DataType::Int64,
DataType::Utf8 => DataType::Int32,
_ => {
// this error is internal as `data_types` should have captured this.
return Err(DataFusionError::Internal(
"The length function can only accept strings.".to_string(),
));
}
}),
BuiltinScalarFunction::Concat => Ok(DataType::Utf8),
BuiltinScalarFunction::ToTimestamp => {
Ok(DataType::Timestamp(TimeUnit::Nanosecond, None))
}
BuiltinScalarFunction::Array => Ok(DataType::FixedSizeList(
Box::new(Field::new("item", arg_types[0].clone(), true)),
arg_types.len() as i32,
)),
_ => Ok(DataType::Float64),
}
}
/// Create a physical (function) expression.
/// This function errors when `args` can't be coerced to a valid argument type of the function.
pub fn create_physical_expr(
fun: &BuiltinScalarFunction,
args: &Vec<Arc<dyn PhysicalExpr>>,
input_schema: &Schema,
) -> Result<Arc<dyn PhysicalExpr>> {
let fun_expr: ScalarFunctionImplementation = Arc::new(match fun {
BuiltinScalarFunction::Sqrt => math_expressions::sqrt,
BuiltinScalarFunction::Sin => math_expressions::sin,
BuiltinScalarFunction::Cos => math_expressions::cos,
BuiltinScalarFunction::Tan => math_expressions::tan,
BuiltinScalarFunction::Asin => math_expressions::asin,
|
BuiltinScalarFunction::Log => math_expressions::ln,
BuiltinScalarFunction::Log2 => math_expressions::log2,
BuiltinScalarFunction::Log10 => math_expressions::log10,
BuiltinScalarFunction::Floor => math_expressions::floor,
BuiltinScalarFunction::Ceil => math_expressions::ceil,
BuiltinScalarFunction::Round => math_expressions::round,
BuiltinScalarFunction::Trunc => math_expressions::trunc,
BuiltinScalarFunction::Abs => math_expressions::abs,
BuiltinScalarFunction::Signum => math_expressions::signum,
BuiltinScalarFunction::Length => |args| Ok(length(args[0].as_ref())?),
BuiltinScalarFunction::Concat => {
|args| Ok(Arc::new(string_expressions::concatenate(args)?))
}
BuiltinScalarFunction::ToTimestamp => {
|args| Ok(Arc::new(datetime_expressions::to_timestamp(args)?))
}
BuiltinScalarFunction::Array => |args| Ok(array_expressions::array(args)?),
});
// coerce
let args = coerce(args, input_schema, &signature(fun))?;
let arg_types = args
.iter()
.map(|e| e.data_type(input_schema))
.collect::<Result<Vec<_>>>()?;
Ok(Arc::new(ScalarFunctionExpr::new(
&format!("{}", fun),
fun_expr,
args,
&return_type(&fun, &arg_types)?,
)))
}
/// the signatures supported by the function `fun`.
fn signature(fun: &BuiltinScalarFunction) -> Signature {
// note: the physical expression must accept the type returned by this function or the execution panics.
// for now, the list is small, as we do not have many built-in functions.
match fun {
BuiltinScalarFunction::Length => {
Signature::Uniform(1, vec![DataType::Utf8, DataType::LargeUtf8])
}
BuiltinScalarFunction::Concat => Signature::Variadic(vec![DataType::Utf8]),
BuiltinScalarFunction::ToTimestamp => Signature::Uniform(1, vec![DataType::Utf8]),
BuiltinScalarFunction::Array => {
Signature::Variadic(array_expressions::SUPPORTED_ARRAY_TYPES.to_vec())
}
// math expressions expect 1 argument of type f64 or f32
// priority is given to f64 because e.g. `sqrt(1i32)` is in IR (real numbers) and thus we
// return the best approximation for it (in f64).
// We accept f32 because in this case it is clear that the best approximation
// will be as good as the number of digits in the number
_ => Signature::Uniform(1, vec![DataType::Float64, DataType::Float32]),
}
}
/// Physical expression of a scalar function
pub struct ScalarFunctionExpr {
fun: ScalarFunctionImplementation,
name: String,
args: Vec<Arc<dyn PhysicalExpr>>,
return_type: DataType,
}
impl Debug for ScalarFunctionExpr {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.debug_struct("ScalarFunctionExpr")
.field("fun", &"<FUNC>")
.field("name", &self.name)
.field("args", &self.args)
.field("return_type", &self.return_type)
.finish()
}
}
impl ScalarFunctionExpr {
/// Create a new Scalar function
pub fn new(
name: &str,
fun: ScalarFunctionImplementation,
args: Vec<Arc<dyn PhysicalExpr>>,
return_type: &DataType,
) -> Self {
Self {
fun,
name: name.to_owned(),
args,
return_type: return_type.clone(),
}
}
}
impl fmt::Display for ScalarFunctionExpr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}({})",
self.name,
self.args
.iter()
.map(|e| format!("{}", e))
.collect::<Vec<String>>()
.join(", ")
)
}
}
impl PhysicalExpr for ScalarFunctionExpr {
fn data_type(&self, _input_schema: &Schema) -> Result<DataType> {
Ok(self.return_type.clone())
}
fn nullable(&self, _input_schema: &Schema) -> Result<bool> {
Ok(true)
}
fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
// evaluate the arguments
let inputs = self
.args
.iter()
.map(|e| e.evaluate(batch).map(|v| v.into_array(batch.num_rows())))
.collect::<Result<Vec<_>>>()?;
// evaluate the function
let fun = self.fun.as_ref();
(fun)(&inputs).map(|a| ColumnarValue::Array(a))
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{error::Result, physical_plan::expressions::lit, scalar::ScalarValue};
use arrow::{
array::{ArrayRef, FixedSizeListArray, Float64Array, Int32Array, StringArray},
datatypes::Field,
record_batch::RecordBatch,
};
fn generic_test_math(value: ScalarValue, expected: &str) -> Result<()> {
// any type works here: we evaluate against a literal of `value`
let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
let columns: Vec<ArrayRef> = vec![Arc::new(Int32Array::from(vec![1]))];
let arg = lit(value);
let expr =
create_physical_expr(&BuiltinScalarFunction::Exp, &vec![arg], &schema)?;
// type is correct
assert_eq!(expr.data_type(&schema)?, DataType::Float64);
// evaluate works
let batch = RecordBatch::try_new(Arc::new(schema.clone()), columns)?;
let result = expr.evaluate(&batch)?.into_array(batch.num_rows());
// downcast works
let result = result.as_any().downcast_ref::<Float64Array>().unwrap();
// value is correct
assert_eq!(format!("{}", result.value(0)), expected);
Ok(())
}
#[test]
fn test_math_function() -> Result<()> {
// 2.71828182845904523536... : https://oeis.org/A001113
let exp_f64 = "2.718281828459045";
let exp_f32 = "2.7182817459106445";
generic_test_math(ScalarValue::from(1i32), exp_f64)?;
generic_test_math(ScalarValue::from(1u32), exp_f64)?;
generic_test_math(ScalarValue::from(1u64), exp_f64)?;
generic_test_math(ScalarValue::from(1f64), exp_f64)?;
generic_test_math(ScalarValue::from(1f32), exp_f32)?;
Ok(())
}
fn test_concat(value: ScalarValue, expected: &str) -> Result<()> {
// any type works here: we evaluate against a literal of `value`
let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
let columns: Vec<ArrayRef> = vec![Arc::new(Int32Array::from(vec![1]))];
// concat(value, value)
let expr = create_physical_expr(
&BuiltinScalarFunction::Concat,
&vec![lit(value.clone()), lit(value)],
&schema,
)?;
// type is correct
assert_eq!(expr.data_type(&schema)?, DataType::Utf8);
// evaluate works
let batch = RecordBatch::try_new(Arc::new(schema.clone()), columns)?;
let result = expr.evaluate(&batch)?.into_array(batch.num_rows());
// downcast works
let result = result.as_any().downcast_ref::<StringArray>().unwrap();
// value is correct
assert_eq!(format!("{}", result.value(0)), expected);
Ok(())
}
#[test]
fn test_concat_utf8() -> Result<()> {
test_concat(ScalarValue::Utf8(Some("aa".to_string())), "aaaa")
}
#[test]
fn test_concat_error() -> Result<()> {
let result = return_type(&BuiltinScalarFunction::Concat, &vec![]);
if let Ok(_) = result {
Err(DataFusionError::Plan(
"Function 'concat' cannot accept zero arguments".to_string(),
))
} else {
Ok(())
}
}
fn generic_test_array(
value1: ScalarValue,
value2: ScalarValue,
expected_type: DataType,
expected: &str,
) -> Result<()> {
// any type works here: we evaluate against a literal of `value`
let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
let columns: Vec<ArrayRef> = vec![Arc::new(Int32Array::from(vec![1]))];
let expr = create_physical_expr(
&BuiltinScalarFunction::Array,
&vec![lit(value1.clone()), lit(value2.clone())],
&schema,
)?;
// type is correct
assert_eq!(
expr.data_type(&schema)?,
// type equals to a common coercion
DataType::FixedSizeList(Box::new(Field::new("item", expected_type, true)), 2)
);
// evaluate works
let batch = RecordBatch::try_new(Arc::new(schema.clone()), columns)?;
let result = expr.evaluate(&batch)?.into_array(batch.num_rows());
// downcast works
let result = result
.as_any()
.downcast_ref::<FixedSizeListArray>()
.unwrap();
// value is correct
assert_eq!(format!("{:?}", result.value(0)), expected);
Ok(())
}
#[test]
fn test_array() -> Result<()> {
generic_test_array(
ScalarValue::Utf8(Some("aa".to_string())),
ScalarValue::Utf8(Some("aa".to_string())),
DataType::Utf8,
"StringArray\n[\n \"aa\",\n \"aa\",\n]",
)?;
// different types, to validate that casting happens
generic_test_array(
ScalarValue::from(1u32),
ScalarValue::from(1u64),
DataType::UInt64,
"PrimitiveArray<UInt64>\n[\n 1,\n 1,\n]",
)?;
// different types (another order), to validate that casting happens
generic_test_array(
ScalarValue::from(1u64),
ScalarValue::from(1u32),
DataType::UInt64,
"PrimitiveArray<UInt64>\n[\n 1,\n 1,\n]",
)
}
}
|
BuiltinScalarFunction::Acos => math_expressions::acos,
BuiltinScalarFunction::Atan => math_expressions::atan,
BuiltinScalarFunction::Exp => math_expressions::exp,
|
random_line_split
|
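The `Signature` variants above are explained only through comments. As an illustrative sketch (the functions named here are hypothetical and not part of the module), two additional signatures could be declared like this:

use arrow::datatypes::DataType;

fn example_signatures() -> (Signature, Signature) {
    // Hypothetical `starts_with(string, prefix)`: two arguments that must share
    // one of the listed string types.
    let starts_with = Signature::Uniform(2, vec![DataType::Utf8, DataType::LargeUtf8]);
    // Hypothetical `left(string, n)`: exactly one Utf8 argument and one Int64 argument.
    let left = Signature::Exact(vec![DataType::Utf8, DataType::Int64]);
    (starts_with, left)
}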
functions.rs
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Declaration of built-in (scalar) functions.
//! This module contains built-in functions' enumeration and metadata.
//!
//! Generally, a function has:
//! * a signature
//! * a return type, that is a function of the incoming arguments' types
//! * the computation, that must accept each valid signature
//!
//! * Signature: see `Signature`
//! * Return type: a function `(arg_types) -> return_type`. E.g. for sqrt, ([f32]) -> f32, ([f64]) -> f64.
//!
//! This module also has a set of coercion rules to improve user experience: if an argument i32 is passed
//! to a function that supports f64, it is coerced to f64.
use super::{
type_coercion::{coerce, data_types},
ColumnarValue, PhysicalExpr,
};
use crate::error::{DataFusionError, Result};
use crate::physical_plan::array_expressions;
use crate::physical_plan::datetime_expressions;
use crate::physical_plan::math_expressions;
use crate::physical_plan::string_expressions;
use arrow::{
array::ArrayRef,
compute::kernels::length::length,
datatypes::TimeUnit,
datatypes::{DataType, Field, Schema},
record_batch::RecordBatch,
};
use fmt::{Debug, Formatter};
use std::{fmt, str::FromStr, sync::Arc};
/// A function's signature, which defines the function's supported argument types.
#[derive(Debug, Clone)]
pub enum
|
{
/// arbitrary number of arguments of a common type out of a list of valid types
// A function such as `concat` is `Variadic(vec![DataType::Utf8, DataType::LargeUtf8])`
Variadic(Vec<DataType>),
/// arbitrary number of arguments of an arbitrary but equal type
// A function such as `array` is `VariadicEqual`
// The first argument decides the type used for coercion
VariadicEqual,
/// fixed number of arguments of an arbitrary but equal type out of a list of valid types
// A function of one argument of f64 is `Uniform(1, vec![DataType::Float64])`
// A function of two arguments of f64 or f32 is `Uniform(2, vec![DataType::Float32, DataType::Float64])`
Uniform(usize, Vec<DataType>),
/// exact number of arguments of an exact type
Exact(Vec<DataType>),
/// fixed number of arguments of arbitrary types
Any(usize),
}
/// Scalar function
pub type ScalarFunctionImplementation =
Arc<dyn Fn(&[ArrayRef]) -> Result<ArrayRef> + Send + Sync>;
/// A function's return type
pub type ReturnTypeFunction =
Arc<dyn Fn(&[DataType]) -> Result<Arc<DataType>> + Send + Sync>;
/// Enum of all built-in scalar functions
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum BuiltinScalarFunction {
/// sqrt
Sqrt,
/// sin
Sin,
/// cos
Cos,
/// tan
Tan,
/// asin
Asin,
/// acos
Acos,
/// atan
Atan,
/// exp
Exp,
/// log, also known as ln
Log,
/// log2
Log2,
/// log10
Log10,
/// floor
Floor,
/// ceil
Ceil,
/// round
Round,
/// trunc
Trunc,
/// abs
Abs,
/// signum
Signum,
/// length
Length,
/// concat
Concat,
/// to_timestamp
ToTimestamp,
/// construct an array from columns
Array,
}
impl fmt::Display for BuiltinScalarFunction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// lowercase of the debug.
write!(f, "{}", format!("{:?}", self).to_lowercase())
}
}
impl FromStr for BuiltinScalarFunction {
type Err = DataFusionError;
fn from_str(name: &str) -> Result<BuiltinScalarFunction> {
Ok(match name {
"sqrt" => BuiltinScalarFunction::Sqrt,
"sin" => BuiltinScalarFunction::Sin,
"cos" => BuiltinScalarFunction::Cos,
"tan" => BuiltinScalarFunction::Tan,
"asin" => BuiltinScalarFunction::Asin,
"acos" => BuiltinScalarFunction::Acos,
"atan" => BuiltinScalarFunction::Atan,
"exp" => BuiltinScalarFunction::Exp,
"log" => BuiltinScalarFunction::Log,
"log2" => BuiltinScalarFunction::Log2,
"log10" => BuiltinScalarFunction::Log10,
"floor" => BuiltinScalarFunction::Floor,
"ceil" => BuiltinScalarFunction::Ceil,
"round" => BuiltinScalarFunction::Round,
"trunc" => BuiltinScalarFunction::Trunc,
"abs" => BuiltinScalarFunction::Abs,
"signum" => BuiltinScalarFunction::Signum,
"length" => BuiltinScalarFunction::Length,
"concat" => BuiltinScalarFunction::Concat,
"to_timestamp" => BuiltinScalarFunction::ToTimestamp,
"array" => BuiltinScalarFunction::Array,
_ => {
return Err(DataFusionError::Plan(format!(
"There is no built-in function named {}",
name
)))
}
})
}
}
/// Returns the datatype of the scalar function
pub fn return_type(
fun: &BuiltinScalarFunction,
arg_types: &Vec<DataType>,
) -> Result<DataType> {
// Note that this function *must* return the same type that the respective physical expression returns
// or the execution panics.
// verify that this is a valid set of data types for this function
data_types(&arg_types, &signature(fun))?;
if arg_types.len() == 0 {
// functions currently cannot be evaluated without arguments, as they can't
// know the number of rows to return.
return Err(DataFusionError::Plan(
format!("Function '{}' requires at least one argument", fun).to_string(),
));
}
// the return type of the built-in function. Eventually there
// will be built-in functions whose return type depends on the
// incoming type.
match fun {
BuiltinScalarFunction::Length => Ok(match arg_types[0] {
DataType::LargeUtf8 => DataType::Int64,
DataType::Utf8 => DataType::Int32,
_ => {
// this error is internal as `data_types` should have captured this.
return Err(DataFusionError::Internal(
"The length function can only accept strings.".to_string(),
));
}
}),
BuiltinScalarFunction::Concat => Ok(DataType::Utf8),
BuiltinScalarFunction::ToTimestamp => {
Ok(DataType::Timestamp(TimeUnit::Nanosecond, None))
}
BuiltinScalarFunction::Array => Ok(DataType::FixedSizeList(
Box::new(Field::new("item", arg_types[0].clone(), true)),
arg_types.len() as i32,
)),
_ => Ok(DataType::Float64),
}
}
/// Create a physical (function) expression.
/// This function errors when `args` can't be coerced to a valid argument type of the function.
pub fn create_physical_expr(
fun: &BuiltinScalarFunction,
args: &Vec<Arc<dyn PhysicalExpr>>,
input_schema: &Schema,
) -> Result<Arc<dyn PhysicalExpr>> {
let fun_expr: ScalarFunctionImplementation = Arc::new(match fun {
BuiltinScalarFunction::Sqrt => math_expressions::sqrt,
BuiltinScalarFunction::Sin => math_expressions::sin,
BuiltinScalarFunction::Cos => math_expressions::cos,
BuiltinScalarFunction::Tan => math_expressions::tan,
BuiltinScalarFunction::Asin => math_expressions::asin,
BuiltinScalarFunction::Acos => math_expressions::acos,
BuiltinScalarFunction::Atan => math_expressions::atan,
BuiltinScalarFunction::Exp => math_expressions::exp,
BuiltinScalarFunction::Log => math_expressions::ln,
BuiltinScalarFunction::Log2 => math_expressions::log2,
BuiltinScalarFunction::Log10 => math_expressions::log10,
BuiltinScalarFunction::Floor => math_expressions::floor,
BuiltinScalarFunction::Ceil => math_expressions::ceil,
BuiltinScalarFunction::Round => math_expressions::round,
BuiltinScalarFunction::Trunc => math_expressions::trunc,
BuiltinScalarFunction::Abs => math_expressions::abs,
BuiltinScalarFunction::Signum => math_expressions::signum,
BuiltinScalarFunction::Length => |args| Ok(length(args[0].as_ref())?),
BuiltinScalarFunction::Concat => {
|args| Ok(Arc::new(string_expressions::concatenate(args)?))
}
BuiltinScalarFunction::ToTimestamp => {
|args| Ok(Arc::new(datetime_expressions::to_timestamp(args)?))
}
BuiltinScalarFunction::Array => |args| Ok(array_expressions::array(args)?),
});
// coerce
let args = coerce(args, input_schema, &signature(fun))?;
let arg_types = args
.iter()
.map(|e| e.data_type(input_schema))
.collect::<Result<Vec<_>>>()?;
Ok(Arc::new(ScalarFunctionExpr::new(
&format!("{}", fun),
fun_expr,
args,
&return_type(&fun, &arg_types)?,
)))
}
/// the signatures supported by the function `fun`.
fn signature(fun: &BuiltinScalarFunction) -> Signature {
// note: the physical expression must accept the type returned by this function or the execution panics.
// for now, the list is small, as we do not have many built-in functions.
match fun {
BuiltinScalarFunction::Length => {
Signature::Uniform(1, vec![DataType::Utf8, DataType::LargeUtf8])
}
BuiltinScalarFunction::Concat => Signature::Variadic(vec![DataType::Utf8]),
BuiltinScalarFunction::ToTimestamp => Signature::Uniform(1, vec![DataType::Utf8]),
BuiltinScalarFunction::Array => {
Signature::Variadic(array_expressions::SUPPORTED_ARRAY_TYPES.to_vec())
}
// math expressions expect 1 argument of type f64 or f32
// priority is given to f64 because e.g. `sqrt(1i32)` is in IR (real numbers) and thus we
// return the best approximation for it (in f64).
// We accept f32 because in this case it is clear that the best approximation
// will be as good as the number of digits in the number
_ => Signature::Uniform(1, vec![DataType::Float64, DataType::Float32]),
}
}
/// Physical expression of a scalar function
pub struct ScalarFunctionExpr {
fun: ScalarFunctionImplementation,
name: String,
args: Vec<Arc<dyn PhysicalExpr>>,
return_type: DataType,
}
impl Debug for ScalarFunctionExpr {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.debug_struct("ScalarFunctionExpr")
.field("fun", &"<FUNC>")
.field("name", &self.name)
.field("args", &self.args)
.field("return_type", &self.return_type)
.finish()
}
}
impl ScalarFunctionExpr {
/// Create a new Scalar function
pub fn new(
name: &str,
fun: ScalarFunctionImplementation,
args: Vec<Arc<dyn PhysicalExpr>>,
return_type: &DataType,
) -> Self {
Self {
fun,
name: name.to_owned(),
args,
return_type: return_type.clone(),
}
}
}
impl fmt::Display for ScalarFunctionExpr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}({})",
self.name,
self.args
.iter()
.map(|e| format!("{}", e))
.collect::<Vec<String>>()
.join(", ")
)
}
}
impl PhysicalExpr for ScalarFunctionExpr {
fn data_type(&self, _input_schema: &Schema) -> Result<DataType> {
Ok(self.return_type.clone())
}
fn nullable(&self, _input_schema: &Schema) -> Result<bool> {
Ok(true)
}
fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
// evaluate the arguments
let inputs = self
.args
.iter()
.map(|e| e.evaluate(batch).map(|v| v.into_array(batch.num_rows())))
.collect::<Result<Vec<_>>>()?;
// evaluate the function
let fun = self.fun.as_ref();
(fun)(&inputs).map(|a| ColumnarValue::Array(a))
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{error::Result, physical_plan::expressions::lit, scalar::ScalarValue};
use arrow::{
array::{ArrayRef, FixedSizeListArray, Float64Array, Int32Array, StringArray},
datatypes::Field,
record_batch::RecordBatch,
};
fn generic_test_math(value: ScalarValue, expected: &str) -> Result<()> {
// any type works here: we evaluate against a literal of `value`
let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
let columns: Vec<ArrayRef> = vec![Arc::new(Int32Array::from(vec![1]))];
let arg = lit(value);
let expr =
create_physical_expr(&BuiltinScalarFunction::Exp, &vec![arg], &schema)?;
// type is correct
assert_eq!(expr.data_type(&schema)?, DataType::Float64);
// evaluate works
let batch = RecordBatch::try_new(Arc::new(schema.clone()), columns)?;
let result = expr.evaluate(&batch)?.into_array(batch.num_rows());
// downcast works
let result = result.as_any().downcast_ref::<Float64Array>().unwrap();
// value is correct
assert_eq!(format!("{}", result.value(0)), expected);
Ok(())
}
#[test]
fn test_math_function() -> Result<()> {
// 2.71828182845904523536... : https://oeis.org/A001113
let exp_f64 = "2.718281828459045";
let exp_f32 = "2.7182817459106445";
generic_test_math(ScalarValue::from(1i32), exp_f64)?;
generic_test_math(ScalarValue::from(1u32), exp_f64)?;
generic_test_math(ScalarValue::from(1u64), exp_f64)?;
generic_test_math(ScalarValue::from(1f64), exp_f64)?;
generic_test_math(ScalarValue::from(1f32), exp_f32)?;
Ok(())
}
fn test_concat(value: ScalarValue, expected: &str) -> Result<()> {
// any type works here: we evaluate against a literal of `value`
let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
let columns: Vec<ArrayRef> = vec![Arc::new(Int32Array::from(vec![1]))];
// concat(value, value)
let expr = create_physical_expr(
&BuiltinScalarFunction::Concat,
&vec![lit(value.clone()), lit(value)],
&schema,
)?;
// type is correct
assert_eq!(expr.data_type(&schema)?, DataType::Utf8);
// evaluate works
let batch = RecordBatch::try_new(Arc::new(schema.clone()), columns)?;
let result = expr.evaluate(&batch)?.into_array(batch.num_rows());
// downcast works
let result = result.as_any().downcast_ref::<StringArray>().unwrap();
// value is correct
assert_eq!(format!("{}", result.value(0)), expected);
Ok(())
}
#[test]
fn test_concat_utf8() -> Result<()> {
test_concat(ScalarValue::Utf8(Some("aa".to_string())), "aaaa")
}
#[test]
fn test_concat_error() -> Result<()> {
let result = return_type(&BuiltinScalarFunction::Concat, &vec![]);
if let Ok(_) = result {
Err(DataFusionError::Plan(
"Function 'concat' cannot accept zero arguments".to_string(),
))
} else {
Ok(())
}
}
fn generic_test_array(
value1: ScalarValue,
value2: ScalarValue,
expected_type: DataType,
expected: &str,
) -> Result<()> {
// any type works here: we evaluate against a literal of `value`
let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
let columns: Vec<ArrayRef> = vec![Arc::new(Int32Array::from(vec![1]))];
let expr = create_physical_expr(
&BuiltinScalarFunction::Array,
&vec![lit(value1.clone()), lit(value2.clone())],
&schema,
)?;
// type is correct
assert_eq!(
expr.data_type(&schema)?,
// type equals to a common coercion
DataType::FixedSizeList(Box::new(Field::new("item", expected_type, true)), 2)
);
// evaluate works
let batch = RecordBatch::try_new(Arc::new(schema.clone()), columns)?;
let result = expr.evaluate(&batch)?.into_array(batch.num_rows());
// downcast works
let result = result
.as_any()
.downcast_ref::<FixedSizeListArray>()
.unwrap();
// value is correct
assert_eq!(format!("{:?}", result.value(0)), expected);
Ok(())
}
#[test]
fn test_array() -> Result<()> {
generic_test_array(
ScalarValue::Utf8(Some("aa".to_string())),
ScalarValue::Utf8(Some("aa".to_string())),
DataType::Utf8,
"StringArray\n[\n \"aa\",\n \"aa\",\n]",
)?;
// different types, to validate that casting happens
generic_test_array(
ScalarValue::from(1u32),
ScalarValue::from(1u64),
DataType::UInt64,
"PrimitiveArray<UInt64>\n[\n 1,\n 1,\n]",
)?;
// different types (another order), to validate that casting happens
generic_test_array(
ScalarValue::from(1u64),
ScalarValue::from(1u32),
DataType::UInt64,
"PrimitiveArray<UInt64>\n[\n 1,\n 1,\n]",
)
}
}
|
Signature
|
identifier_name
|
functions.rs
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Declaration of built-in (scalar) functions.
//! This module contains built-in functions' enumeration and metadata.
//!
//! Generally, a function has:
//! * a signature
//! * a return type, that is a function of the incoming arguments' types
//! * the computation, that must accept each valid signature
//!
//! * Signature: see `Signature`
//! * Return type: a function `(arg_types) -> return_type`. E.g. for sqrt, ([f32]) -> f32, ([f64]) -> f64.
//!
//! This module also has a set of coercion rules to improve user experience: if an argument i32 is passed
//! to a function that supports f64, it is coerced to f64.
use super::{
type_coercion::{coerce, data_types},
ColumnarValue, PhysicalExpr,
};
use crate::error::{DataFusionError, Result};
use crate::physical_plan::array_expressions;
use crate::physical_plan::datetime_expressions;
use crate::physical_plan::math_expressions;
use crate::physical_plan::string_expressions;
use arrow::{
array::ArrayRef,
compute::kernels::length::length,
datatypes::TimeUnit,
datatypes::{DataType, Field, Schema},
record_batch::RecordBatch,
};
use fmt::{Debug, Formatter};
use std::{fmt, str::FromStr, sync::Arc};
/// A function's signature, which defines the function's supported argument types.
#[derive(Debug, Clone)]
pub enum Signature {
/// arbitrary number of arguments of a common type out of a list of valid types
// A function such as `concat` is `Variadic(vec![DataType::Utf8, DataType::LargeUtf8])`
Variadic(Vec<DataType>),
/// arbitrary number of arguments of an arbitrary but equal type
// A function such as `array` is `VariadicEqual`
// The first argument decides the type used for coercion
VariadicEqual,
/// fixed number of arguments of an arbitrary but equal type out of a list of valid types
// A function of one argument of f64 is `Uniform(1, vec![DataType::Float64])`
// A function of two arguments of f64 or f32 is `Uniform(2, vec![DataType::Float32, DataType::Float64])`
Uniform(usize, Vec<DataType>),
/// exact number of arguments of an exact type
Exact(Vec<DataType>),
/// fixed number of arguments of arbitrary types
Any(usize),
}
/// Scalar function
pub type ScalarFunctionImplementation =
Arc<dyn Fn(&[ArrayRef]) -> Result<ArrayRef> + Send + Sync>;
/// A function's return type
pub type ReturnTypeFunction =
Arc<dyn Fn(&[DataType]) -> Result<Arc<DataType>> + Send + Sync>;
/// Enum of all built-in scalar functions
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum BuiltinScalarFunction {
/// sqrt
Sqrt,
/// sin
Sin,
/// cos
Cos,
/// tan
Tan,
/// asin
Asin,
/// acos
Acos,
/// atan
Atan,
/// exp
Exp,
/// log, also known as ln
Log,
/// log2
Log2,
/// log10
Log10,
/// floor
Floor,
/// ceil
Ceil,
/// round
Round,
/// trunc
Trunc,
/// abs
Abs,
/// signum
Signum,
/// length
Length,
/// concat
Concat,
/// to_timestamp
ToTimestamp,
/// construct an array from columns
Array,
}
impl fmt::Display for BuiltinScalarFunction {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// lowercase of the debug.
write!(f, "{}", format!("{:?}", self).to_lowercase())
}
}
impl FromStr for BuiltinScalarFunction {
type Err = DataFusionError;
fn from_str(name: &str) -> Result<BuiltinScalarFunction> {
Ok(match name {
"sqrt" => BuiltinScalarFunction::Sqrt,
"sin" => BuiltinScalarFunction::Sin,
"cos" => BuiltinScalarFunction::Cos,
"tan" => BuiltinScalarFunction::Tan,
"asin" => BuiltinScalarFunction::Asin,
"acos" => BuiltinScalarFunction::Acos,
"atan" => BuiltinScalarFunction::Atan,
"exp" => BuiltinScalarFunction::Exp,
"log" => BuiltinScalarFunction::Log,
"log2" => BuiltinScalarFunction::Log2,
"log10" => BuiltinScalarFunction::Log10,
"floor" => BuiltinScalarFunction::Floor,
"ceil" => BuiltinScalarFunction::Ceil,
"round" => BuiltinScalarFunction::Round,
"trunc" => BuiltinScalarFunction::Trunc,
"abs" => BuiltinScalarFunction::Abs,
"signum" => BuiltinScalarFunction::Signum,
"length" => BuiltinScalarFunction::Length,
"concat" => BuiltinScalarFunction::Concat,
"to_timestamp" => BuiltinScalarFunction::ToTimestamp,
"array" => BuiltinScalarFunction::Array,
_ => {
return Err(DataFusionError::Plan(format!(
"There is no built-in function named {}",
name
)))
}
})
}
}
/// Returns the datatype of the scalar function
pub fn return_type(
fun: &BuiltinScalarFunction,
arg_types: &Vec<DataType>,
) -> Result<DataType> {
// Note that this function *must* return the same type that the respective physical expression returns
// or the execution panics.
// verify that this is a valid set of data types for this function
data_types(&arg_types, &signature(fun))?;
if arg_types.len() == 0 {
// functions currently cannot be evaluated without arguments, as they can't
// know the number of rows to return.
return Err(DataFusionError::Plan(
format!("Function '{}' requires at least one argument", fun).to_string(),
));
}
// the return type of the built-in function. Eventually there
// will be built-in functions whose return type depends on the
// incoming type.
match fun {
BuiltinScalarFunction::Length => Ok(match arg_types[0] {
DataType::LargeUtf8 => DataType::Int64,
DataType::Utf8 => DataType::Int32,
_ => {
// this error is internal as `data_types` should have captured this.
return Err(DataFusionError::Internal(
"The length function can only accept strings.".to_string(),
));
}
}),
BuiltinScalarFunction::Concat => Ok(DataType::Utf8),
BuiltinScalarFunction::ToTimestamp => {
Ok(DataType::Timestamp(TimeUnit::Nanosecond, None))
}
BuiltinScalarFunction::Array => Ok(DataType::FixedSizeList(
Box::new(Field::new("item", arg_types[0].clone(), true)),
arg_types.len() as i32,
)),
_ => Ok(DataType::Float64),
}
}
/// Create a physical (function) expression.
/// This function errors when `args` can't be coerced to a valid argument type of the function.
pub fn create_physical_expr(
fun: &BuiltinScalarFunction,
args: &Vec<Arc<dyn PhysicalExpr>>,
input_schema: &Schema,
) -> Result<Arc<dyn PhysicalExpr>> {
let fun_expr: ScalarFunctionImplementation = Arc::new(match fun {
BuiltinScalarFunction::Sqrt => math_expressions::sqrt,
BuiltinScalarFunction::Sin => math_expressions::sin,
BuiltinScalarFunction::Cos => math_expressions::cos,
BuiltinScalarFunction::Tan => math_expressions::tan,
BuiltinScalarFunction::Asin => math_expressions::asin,
BuiltinScalarFunction::Acos => math_expressions::acos,
BuiltinScalarFunction::Atan => math_expressions::atan,
BuiltinScalarFunction::Exp => math_expressions::exp,
BuiltinScalarFunction::Log => math_expressions::ln,
BuiltinScalarFunction::Log2 => math_expressions::log2,
BuiltinScalarFunction::Log10 => math_expressions::log10,
BuiltinScalarFunction::Floor => math_expressions::floor,
BuiltinScalarFunction::Ceil => math_expressions::ceil,
BuiltinScalarFunction::Round => math_expressions::round,
BuiltinScalarFunction::Trunc => math_expressions::trunc,
BuiltinScalarFunction::Abs => math_expressions::abs,
BuiltinScalarFunction::Signum => math_expressions::signum,
BuiltinScalarFunction::Length => |args| Ok(length(args[0].as_ref())?),
BuiltinScalarFunction::Concat => {
|args| Ok(Arc::new(string_expressions::concatenate(args)?))
}
BuiltinScalarFunction::ToTimestamp => {
|args| Ok(Arc::new(datetime_expressions::to_timestamp(args)?))
}
BuiltinScalarFunction::Array => |args| Ok(array_expressions::array(args)?),
});
// coerce
let args = coerce(args, input_schema, &signature(fun))?;
let arg_types = args
.iter()
.map(|e| e.data_type(input_schema))
.collect::<Result<Vec<_>>>()?;
Ok(Arc::new(ScalarFunctionExpr::new(
&format!("{}", fun),
fun_expr,
args,
&return_type(&fun, &arg_types)?,
)))
}
/// the signatures supported by the function `fun`.
fn signature(fun: &BuiltinScalarFunction) -> Signature {
// note: the physical expression must accept the type returned by this function or the execution panics.
// for now, the list is small, as we do not have many built-in functions.
match fun {
BuiltinScalarFunction::Length => {
Signature::Uniform(1, vec![DataType::Utf8, DataType::LargeUtf8])
}
BuiltinScalarFunction::Concat => Signature::Variadic(vec![DataType::Utf8]),
BuiltinScalarFunction::ToTimestamp => Signature::Uniform(1, vec![DataType::Utf8]),
BuiltinScalarFunction::Array => {
Signature::Variadic(array_expressions::SUPPORTED_ARRAY_TYPES.to_vec())
}
// math expressions expect 1 argument of type f64 or f32
// priority is given to f64 because e.g. `sqrt(1i32)` is in IR (real numbers) and thus we
// return the best approximation for it (in f64).
// We accept f32 because in this case it is clear that the best approximation
// will be as good as the number of digits in the number
_ => Signature::Uniform(1, vec![DataType::Float64, DataType::Float32]),
}
}
/// Physical expression of a scalar function
pub struct ScalarFunctionExpr {
fun: ScalarFunctionImplementation,
name: String,
args: Vec<Arc<dyn PhysicalExpr>>,
return_type: DataType,
}
impl Debug for ScalarFunctionExpr {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.debug_struct("ScalarFunctionExpr")
.field("fun", &"<FUNC>")
.field("name", &self.name)
.field("args", &self.args)
.field("return_type", &self.return_type)
.finish()
}
}
impl ScalarFunctionExpr {
/// Create a new Scalar function
pub fn new(
name: &str,
fun: ScalarFunctionImplementation,
args: Vec<Arc<dyn PhysicalExpr>>,
return_type: &DataType,
) -> Self
|
}
impl fmt::Display for ScalarFunctionExpr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}({})",
self.name,
self.args
.iter()
.map(|e| format!("{}", e))
.collect::<Vec<String>>()
.join(", ")
)
}
}
impl PhysicalExpr for ScalarFunctionExpr {
fn data_type(&self, _input_schema: &Schema) -> Result<DataType> {
Ok(self.return_type.clone())
}
fn nullable(&self, _input_schema: &Schema) -> Result<bool> {
Ok(true)
}
fn evaluate(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
// evaluate the arguments
let inputs = self
.args
.iter()
.map(|e| e.evaluate(batch).map(|v| v.into_array(batch.num_rows())))
.collect::<Result<Vec<_>>>()?;
// evaluate the function
let fun = self.fun.as_ref();
(fun)(&inputs).map(|a| ColumnarValue::Array(a))
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{error::Result, physical_plan::expressions::lit, scalar::ScalarValue};
use arrow::{
array::{ArrayRef, FixedSizeListArray, Float64Array, Int32Array, StringArray},
datatypes::Field,
record_batch::RecordBatch,
};
fn generic_test_math(value: ScalarValue, expected: &str) -> Result<()> {
// any type works here: we evaluate against a literal of `value`
let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
let columns: Vec<ArrayRef> = vec![Arc::new(Int32Array::from(vec![1]))];
let arg = lit(value);
let expr =
create_physical_expr(&BuiltinScalarFunction::Exp, &vec![arg], &schema)?;
// type is correct
assert_eq!(expr.data_type(&schema)?, DataType::Float64);
// evaluate works
let batch = RecordBatch::try_new(Arc::new(schema.clone()), columns)?;
let result = expr.evaluate(&batch)?.into_array(batch.num_rows());
// downcast works
let result = result.as_any().downcast_ref::<Float64Array>().unwrap();
// value is correct
assert_eq!(format!("{}", result.value(0)), expected);
Ok(())
}
#[test]
fn test_math_function() -> Result<()> {
// 2.71828182845904523536... : https://oeis.org/A001113
let exp_f64 = "2.718281828459045";
let exp_f32 = "2.7182817459106445";
generic_test_math(ScalarValue::from(1i32), exp_f64)?;
generic_test_math(ScalarValue::from(1u32), exp_f64)?;
generic_test_math(ScalarValue::from(1u64), exp_f64)?;
generic_test_math(ScalarValue::from(1f64), exp_f64)?;
generic_test_math(ScalarValue::from(1f32), exp_f32)?;
Ok(())
}
fn test_concat(value: ScalarValue, expected: &str) -> Result<()> {
// any type works here: we evaluate against a literal of `value`
let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
let columns: Vec<ArrayRef> = vec![Arc::new(Int32Array::from(vec![1]))];
// concat(value, value)
let expr = create_physical_expr(
&BuiltinScalarFunction::Concat,
&vec![lit(value.clone()), lit(value)],
&schema,
)?;
// type is correct
assert_eq!(expr.data_type(&schema)?, DataType::Utf8);
// evaluate works
let batch = RecordBatch::try_new(Arc::new(schema.clone()), columns)?;
let result = expr.evaluate(&batch)?.into_array(batch.num_rows());
// downcast works
let result = result.as_any().downcast_ref::<StringArray>().unwrap();
// value is correct
assert_eq!(format!("{}", result.value(0)), expected);
Ok(())
}
#[test]
fn test_concat_utf8() -> Result<()> {
test_concat(ScalarValue::Utf8(Some("aa".to_string())), "aaaa")
}
#[test]
fn test_concat_error() -> Result<()> {
let result = return_type(&BuiltinScalarFunction::Concat, &vec![]);
if let Ok(_) = result {
Err(DataFusionError::Plan(
"Function 'concat' cannot accept zero arguments".to_string(),
))
} else {
Ok(())
}
}
fn generic_test_array(
value1: ScalarValue,
value2: ScalarValue,
expected_type: DataType,
expected: &str,
) -> Result<()> {
// any type works here: we evaluate against a literal of `value`
let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
let columns: Vec<ArrayRef> = vec![Arc::new(Int32Array::from(vec![1]))];
let expr = create_physical_expr(
&BuiltinScalarFunction::Array,
&vec![lit(value1.clone()), lit(value2.clone())],
&schema,
)?;
// type is correct
assert_eq!(
expr.data_type(&schema)?,
// type equals to a common coercion
DataType::FixedSizeList(Box::new(Field::new("item", expected_type, true)), 2)
);
// evaluate works
let batch = RecordBatch::try_new(Arc::new(schema.clone()), columns)?;
let result = expr.evaluate(&batch)?.into_array(batch.num_rows());
// downcast works
let result = result
.as_any()
.downcast_ref::<FixedSizeListArray>()
.unwrap();
// value is correct
assert_eq!(format!("{:?}", result.value(0)), expected);
Ok(())
}
#[test]
fn test_array() -> Result<()> {
generic_test_array(
ScalarValue::Utf8(Some("aa".to_string())),
ScalarValue::Utf8(Some("aa".to_string())),
DataType::Utf8,
"StringArray\n[\n \"aa\",\n \"aa\",\n]",
)?;
// different types, to validate that casting happens
generic_test_array(
ScalarValue::from(1u32),
ScalarValue::from(1u64),
DataType::UInt64,
"PrimitiveArray<UInt64>\n[\n 1,\n 1,\n]",
)?;
// different types (another order), to validate that casting happens
generic_test_array(
ScalarValue::from(1u64),
ScalarValue::from(1u32),
DataType::UInt64,
"PrimitiveArray<UInt64>\n[\n 1,\n 1,\n]",
)
}
}
|
{
Self {
fun,
name: name.to_owned(),
args,
return_type: return_type.clone(),
}
}
|
identifier_body
|
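As a brief usage sketch of the name lookup and typing functions above (assumes the module's `Result` alias, `BuiltinScalarFunction`, and `return_type` are in scope; the wrapper function itself is hypothetical):

use std::str::FromStr;
use arrow::datatypes::DataType;

fn sqrt_return_type() -> Result<DataType> {
    // "sqrt" resolves to BuiltinScalarFunction::Sqrt via the FromStr impl above.
    let fun = BuiltinScalarFunction::from_str("sqrt")?;
    // Math functions use the Uniform(1, [Float64, Float32]) signature, and
    // return_type reports Float64 for a Float64 argument.
    return_type(&fun, &vec![DataType::Float64])
}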
main.rs
|
let t1 = std::time::Instant::now();
println!("{:?}", $func);
let t2 = std::time::Instant::now().duration_since(t1);
println!("{}", t2.as_secs() as f64 + t2.subsec_nanos() as f64 / 1000000000.00);
})
}
fn main() {
fn sum_of_squares(n: usize) -> usize {
(1..(n + 1)).map(|x| x * x).fold(0, |acc, item| acc + item)
}
fn square_of_sums(n: usize) -> usize {
(1..(n + 1)).fold(0, |acc, item| acc + item).pow(2)
}
fn s_s(n: usize) -> usize {
square_of_sums(n) - sum_of_squares(n)
}
timeit!(s_s(100));
}
|
macro_rules! timeit {
($func:expr) => ({
|
random_line_split
|
|
main.rs
|
macro_rules! timeit {
($func:expr) => ({
let t1 = std::time::Instant::now();
println!("{:?}", $func);
let t2 = std::time::Instant::now().duration_since(t1);
println!("{}", t2.as_secs() as f64 + t2.subsec_nanos() as f64 / 1000000000.00);
})
}
fn main() {
fn sum_of_squares(n: usize) -> usize {
(1..(n + 1)).map(|x| x * x).fold(0, |acc, item| acc + item)
}
fn
|
(n: usize) -> usize {
(1..(n + 1)).fold(0, |acc, item| acc + item).pow(2)
}
fn s_s(n: usize) -> usize {
square_of_sums(n) - sum_of_squares(n)
}
timeit!(s_s(100));
}
|
square_of_sums
|
identifier_name
|
main.rs
|
macro_rules! timeit {
($func:expr) => ({
let t1 = std::time::Instant::now();
println!("{:?}", $func);
let t2 = std::time::Instant::now().duration_since(t1);
println!("{}", t2.as_secs() as f64 + t2.subsec_nanos() as f64 / 1000000000.00);
})
}
fn main() {
fn sum_of_squares(n: usize) -> usize {
(1..(n + 1)).map(|x| x * x).fold(0, |acc, item| acc + item)
}
fn square_of_sums(n: usize) -> usize {
(1..(n + 1)).fold(0, |acc, item| acc + item).pow(2)
}
fn s_s(n: usize) -> usize
|
timeit!(s_s(100));
}
|
{
square_of_sums(n) - sum_of_squares(n)
}
|
identifier_body
|
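For reference, the value the program above prints can be checked by hand: 1 + 2 + ... + 100 = 5050, so square_of_sums(100) = 5050^2 = 25_502_500, while sum_of_squares(100) = 100 * 101 * 201 / 6 = 338_350, giving s_s(100) = 25_164_150. A standalone sketch of the same check (the helpers are restated inline because the originals are nested inside `main`):

fn main() {
    let sum_of_squares: usize = (1..=100).map(|x| x * x).sum();
    let square_of_sums: usize = (1..=100).sum::<usize>().pow(2);
    // 25_502_500 - 338_350 = 25_164_150
    assert_eq!(square_of_sums - sum_of_squares, 25_164_150);
}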
test.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Code that generates a test runner to run all the tests in a crate
#![allow(dead_code)]
#![allow(unused_imports)]
use driver::session::Session;
use front::config;
use front::std_inject::with_version;
use metadata::creader::Loader;
use std::cell::RefCell;
use std::slice;
use std::vec::Vec;
use std::vec;
use syntax::ast_util::*;
use syntax::attr::AttrMetaMethods;
use syntax::attr;
use syntax::codemap::{DUMMY_SP, Span, ExpnInfo, NameAndSpan, MacroAttribute};
use syntax::codemap;
use syntax::ext::base::ExtCtxt;
use syntax::ext::expand::ExpansionConfig;
use syntax::fold::Folder;
use syntax::fold;
use syntax::owned_slice::OwnedSlice;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::print::pprust;
use syntax::{ast, ast_util};
use syntax::util::small_vector::SmallVector;
struct Test {
span: Span,
path: Vec<ast::Ident>,
bench: bool,
ignore: bool,
should_fail: bool
}
struct TestCtxt<'a> {
sess: &'a Session,
path: RefCell<Vec<ast::Ident>>,
ext_cx: ExtCtxt<'a>,
testfns: RefCell<Vec<Test> >,
is_test_crate: bool,
config: ast::CrateConfig,
}
// Traverse the crate, collecting all the test functions, eliding any
// existing main functions, and synthesizing a main test harness
pub fn modify_for_testing(sess: &Session,
krate: ast::Crate) -> ast::Crate {
// We generate the test harness when building in the 'test'
// configuration, either with the '--test' or '--cfg test'
// command line options.
let should_test = attr::contains_name(krate.config.as_slice(), "test");
if should_test {
generate_test_harness(sess, krate)
} else {
strip_test_functions(krate)
}
}
struct TestHarnessGenerator<'a> {
cx: TestCtxt<'a>,
}
impl<'a> fold::Folder for TestHarnessGenerator<'a> {
fn fold_crate(&mut self, c: ast::Crate) -> ast::Crate {
let folded = fold::noop_fold_crate(c, self);
// Add a special __test module to the crate that will contain code
// generated for the test harness
ast::Crate {
module: add_test_module(&self.cx, &folded.module),
.. folded
}
}
fn fold_item(&mut self, i: @ast::Item) -> SmallVector<@ast::Item> {
self.cx.path.borrow_mut().push(i.ident);
debug!("current path: {}",
ast_util::path_name_i(self.cx.path.borrow().as_slice()));
if is_test_fn(&self.cx, i) || is_bench_fn(&self.cx, i) {
match i.node {
ast::ItemFn(_, ast::UnsafeFn, _, _, _) => {
let sess = self.cx.sess;
sess.span_fatal(i.span,
"unsafe functions cannot be used for \
tests");
}
_ => {
debug!("this is a test function");
let test = Test {
span: i.span,
path: self.cx.path.borrow().clone(),
bench: is_bench_fn(&self.cx, i),
ignore: is_ignored(&self.cx, i),
should_fail: should_fail(i)
};
self.cx.testfns.borrow_mut().push(test);
// debug!("have {} test/bench functions",
// cx.testfns.len());
}
}
}
let res = fold::noop_fold_item(i, self);
self.cx.path.borrow_mut().pop();
res
}
fn fold_mod(&mut self, m: &ast::Mod) -> ast::Mod {
// Remove any #[main] from the AST so it doesn't clash with
// the one we're going to add. Only if compiling an executable.
fn nomain(cx: &TestCtxt, item: @ast::Item) -> @ast::Item {
if !cx.sess.building_library.get() {
@ast::Item {
attrs: item.attrs.iter().filter_map(|attr| {
if !attr.name().equiv(&("main")) {
Some(*attr)
} else {
None
}
}).collect(),
.. (*item).clone()
}
} else {
item
}
}
let mod_nomain = ast::Mod {
view_items: m.view_items.clone(),
items: m.items.iter().map(|i| nomain(&self.cx, *i)).collect(),
};
fold::noop_fold_mod(&mod_nomain, self)
}
}
fn generate_test_harness(sess: &Session, krate: ast::Crate)
-> ast::Crate {
let loader = &mut Loader::new(sess);
let mut cx: TestCtxt = TestCtxt {
sess: sess,
ext_cx: ExtCtxt::new(&sess.parse_sess, sess.opts.cfg.clone(),
ExpansionConfig {
loader: loader,
deriving_hash_type_parameter: false,
crate_id: from_str("test").unwrap(),
}),
path: RefCell::new(Vec::new()),
testfns: RefCell::new(Vec::new()),
is_test_crate: is_test_crate(&krate),
config: krate.config.clone(),
};
cx.ext_cx.bt_push(ExpnInfo {
call_site: DUMMY_SP,
callee: NameAndSpan {
name: ~"test",
format: MacroAttribute,
span: None
}
});
let mut fold = TestHarnessGenerator {
cx: cx
};
let res = fold.fold_crate(krate);
fold.cx.ext_cx.bt_pop();
return res;
}
fn strip_test_functions(krate: ast::Crate) -> ast::Crate {
// When not compiling with --test we should not compile the
// #[test] functions
config::strip_items(krate, |attrs| {
!attr::contains_name(attrs.as_slice(), "test") &&
!attr::contains_name(attrs.as_slice(), "bench")
})
}
fn is_test_fn(cx: &TestCtxt, i: @ast::Item) -> bool {
let has_test_attr = attr::contains_name(i.attrs.as_slice(), "test");
fn has_test_signature(i: @ast::Item) -> bool
|
    if has_test_attr && !has_test_signature(i) {
let sess = cx.sess;
sess.span_err(
i.span,
"functions used as tests must have signature fn() -> ()."
);
}
return has_test_attr && has_test_signature(i);
}
fn is_bench_fn(cx: &TestCtxt, i: @ast::Item) -> bool {
let has_bench_attr = attr::contains_name(i.attrs.as_slice(), "bench");
fn has_test_signature(i: @ast::Item) -> bool {
match i.node {
ast::ItemFn(ref decl, _, _, ref generics, _) => {
let input_cnt = decl.inputs.len();
let no_output = match decl.output.node {
ast::TyNil => true,
_ => false
};
let tparm_cnt = generics.ty_params.len();
// NB: inadequate check, but we're running
// well before resolve, can't get too deep.
input_cnt == 1u
&& no_output && tparm_cnt == 0u
}
_ => false
}
}
    if has_bench_attr && !has_test_signature(i) {
let sess = cx.sess;
sess.span_err(i.span, "functions used as benches must have signature \
`fn(&mut Bencher) -> ()`");
}
return has_bench_attr && has_test_signature(i);
}
fn is_ignored(cx: &TestCtxt, i: @ast::Item) -> bool {
i.attrs.iter().any(|attr| {
// check ignore(cfg(foo, bar))
attr.name().equiv(&("ignore")) && match attr.meta_item_list() {
Some(ref cfgs) => {
attr::test_cfg(cx.config.as_slice(), cfgs.iter().map(|x| *x))
}
None => true
}
})
}
fn should_fail(i: @ast::Item) -> bool {
attr::contains_name(i.attrs.as_slice(), "should_fail")
}
fn add_test_module(cx: &TestCtxt, m: &ast::Mod) -> ast::Mod {
let testmod = mk_test_module(cx);
ast::Mod {
items: m.items.clone().append_one(testmod),
..(*m).clone()
}
}
/*
We're going to be building a module that looks more or less like:
mod __test {
#![!resolve_unexported]
extern crate test (name = "test", vers = "...");
fn main() {
test::test_main_static(::os::args(), tests)
}
static tests : &'static [test::TestDescAndFn] = &[
... the list of tests in the crate...
];
}
*/
fn mk_std(cx: &TestCtxt) -> ast::ViewItem {
let id_test = token::str_to_ident("test");
let (vi, vis) = if cx.is_test_crate {
(ast::ViewItemUse(
vec!(@nospan(ast::ViewPathSimple(id_test,
path_node(vec!(id_test)),
ast::DUMMY_NODE_ID)))),
ast::Public)
} else {
(ast::ViewItemExternCrate(id_test,
with_version("test"),
ast::DUMMY_NODE_ID),
ast::Inherited)
};
ast::ViewItem {
node: vi,
attrs: Vec::new(),
vis: vis,
span: DUMMY_SP
}
}
fn mk_test_module(cx: &TestCtxt) -> @ast::Item {
// Link to test crate
let view_items = vec!(mk_std(cx));
// A constant vector of test descriptors.
let tests = mk_tests(cx);
// The synthesized main function which will call the console test runner
// with our list of tests
let mainfn = (quote_item!(&cx.ext_cx,
pub fn main() {
#![main]
#![allow(deprecated_owned_vector)]
test::test_main_static(::std::os::args(), TESTS);
}
)).unwrap();
let testmod = ast::Mod {
view_items: view_items,
items: vec!(mainfn, tests),
};
let item_ = ast::ItemMod(testmod);
// This attribute tells resolve to let us call unexported functions
let resolve_unexported_str = InternedString::new("!resolve_unexported");
let resolve_unexported_attr =
attr::mk_attr(attr::mk_word_item(resolve_unexported_str));
let item = ast::Item {
ident: token::str_to_ident("__test"),
attrs: vec!(resolve_unexported_attr),
id: ast::DUMMY_NODE_ID,
node: item_,
vis: ast::Public,
span: DUMMY_SP,
};
debug!("Synthetic test module:\n{}\n", pprust::item_to_str(&item));
return @item;
}
fn nospan<T>(t: T) -> codemap::Spanned<T> {
codemap::Spanned { node: t, span: DUMMY_SP }
}
fn path_node(ids: Vec<ast::Ident> ) -> ast::Path {
ast::Path {
span: DUMMY_SP,
global: false,
segments: ids.move_iter().map(|identifier| ast::PathSegment {
identifier: identifier,
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}).collect()
}
}
fn path_node_global(ids: Vec<ast::Ident> ) -> ast::Path {
ast::Path {
span: DUMMY_SP,
global: true,
segments: ids.move_iter().map(|identifier| ast::PathSegment {
identifier: identifier,
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}).collect()
}
}
fn mk_tests(cx: &TestCtxt) -> @ast::Item {
// The vector of test_descs for this crate
let test_descs = mk_test_descs(cx);
(quote_item!(&cx.ext_cx,
pub static TESTS : &'static [self::test::TestDescAndFn] =
$test_descs
;
)).unwrap()
}
fn is_test_crate(krate: &ast::Crate) -> bool {
match attr::find_crateid(krate.attrs.as_slice()) {
Some(ref s) if "test" == s.name => true,
_ => false
}
}
fn mk_test_descs(cx: &TestCtxt) -> @ast::Expr {
debug!("building test vector from {} tests", cx.testfns.borrow().len());
@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprVstore(@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprVec(cx.testfns.borrow().iter().map(|test| {
mk_test_desc_and_fn_rec(cx, test)
}).collect()),
span: DUMMY_SP,
}, ast::ExprVstoreSlice),
span: DUMMY_SP,
}
}
fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr {
let span = test.span;
let path = test.path.clone();
debug!("encoding {}", ast_util::path_name_i(path.as_slice()));
let name_lit: ast::Lit =
nospan(ast::LitStr(token::intern_and_get_ident(
ast_util::path_name_i(path.as_slice())),
ast::CookedStr));
let name_expr = @ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprLit(@name_lit),
span: span
};
let fn_path = path_node_global(path);
let fn_expr = @ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(fn_path),
span: span,
};
let t_expr = if test.bench {
quote_expr!(&cx.ext_cx, self::test::StaticBenchFn($fn_expr) )
} else {
quote_expr!(&cx.ext_cx, self::test::StaticTestFn($fn_expr) )
};
let ignore_expr = if test.ignore {
quote_expr!(&cx.ext_cx, true )
} else {
quote_expr!(&cx.ext_cx, false )
};
let fail_expr = if test.should_fail {
quote_expr!(&cx.ext_cx, true )
} else {
quote_expr!(&cx.ext_cx, false )
};
let e = quote_expr!(&cx.ext_cx,
self::test::TestDescAndFn {
desc: self::test::TestDesc {
name: self::test::StaticTestName($name_expr),
ignore: $ignore_expr,
should_fail: $fail_expr
},
testfn: $t_expr,
}
);
e
}
|
{
match &i.node {
&ast::ItemFn(ref decl, _, _, ref generics, _) => {
let no_output = match decl.output.node {
ast::TyNil => true,
_ => false
};
decl.inputs.is_empty()
&& no_output
&& !generics.is_parameterized()
}
_ => false
}
}
|
identifier_body
|
test.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Code that generates a test runner to run all the tests in a crate
#![allow(dead_code)]
#![allow(unused_imports)]
use driver::session::Session;
use front::config;
use front::std_inject::with_version;
use metadata::creader::Loader;
use std::cell::RefCell;
use std::slice;
use std::vec::Vec;
use std::vec;
use syntax::ast_util::*;
use syntax::attr::AttrMetaMethods;
use syntax::attr;
use syntax::codemap::{DUMMY_SP, Span, ExpnInfo, NameAndSpan, MacroAttribute};
use syntax::codemap;
use syntax::ext::base::ExtCtxt;
use syntax::ext::expand::ExpansionConfig;
use syntax::fold::Folder;
use syntax::fold;
use syntax::owned_slice::OwnedSlice;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::print::pprust;
use syntax::{ast, ast_util};
use syntax::util::small_vector::SmallVector;
struct Test {
span: Span,
path: Vec<ast::Ident>,
bench: bool,
ignore: bool,
should_fail: bool
}
struct TestCtxt<'a> {
sess: &'a Session,
path: RefCell<Vec<ast::Ident>>,
ext_cx: ExtCtxt<'a>,
testfns: RefCell<Vec<Test> >,
is_test_crate: bool,
config: ast::CrateConfig,
}
// Traverse the crate, collecting all the test functions, eliding any
// existing main functions, and synthesizing a main test harness
pub fn modify_for_testing(sess: &Session,
krate: ast::Crate) -> ast::Crate {
// We generate the test harness when building in the 'test'
// configuration, either with the '--test' or '--cfg test'
// command line options.
let should_test = attr::contains_name(krate.config.as_slice(), "test");
if should_test {
generate_test_harness(sess, krate)
} else {
strip_test_functions(krate)
}
}
struct TestHarnessGenerator<'a> {
cx: TestCtxt<'a>,
}
impl<'a> fold::Folder for TestHarnessGenerator<'a> {
fn fold_crate(&mut self, c: ast::Crate) -> ast::Crate {
let folded = fold::noop_fold_crate(c, self);
// Add a special __test module to the crate that will contain code
// generated for the test harness
ast::Crate {
module: add_test_module(&self.cx, &folded.module),
.. folded
}
}
fn fold_item(&mut self, i: @ast::Item) -> SmallVector<@ast::Item> {
self.cx.path.borrow_mut().push(i.ident);
debug!("current path: {}",
ast_util::path_name_i(self.cx.path.borrow().as_slice()));
if is_test_fn(&self.cx, i) || is_bench_fn(&self.cx, i) {
match i.node {
ast::ItemFn(_, ast::UnsafeFn, _, _, _) => {
let sess = self.cx.sess;
sess.span_fatal(i.span,
"unsafe functions cannot be used for \
tests");
}
_ => {
debug!("this is a test function");
let test = Test {
span: i.span,
path: self.cx.path.borrow().clone(),
bench: is_bench_fn(&self.cx, i),
ignore: is_ignored(&self.cx, i),
should_fail: should_fail(i)
};
self.cx.testfns.borrow_mut().push(test);
// debug!("have {} test/bench functions",
// cx.testfns.len());
}
}
}
let res = fold::noop_fold_item(i, self);
self.cx.path.borrow_mut().pop();
res
}
fn fold_mod(&mut self, m: &ast::Mod) -> ast::Mod {
// Remove any #[main] from the AST so it doesn't clash with
// the one we're going to add. Only if compiling an executable.
fn nomain(cx: &TestCtxt, item: @ast::Item) -> @ast::Item {
            if !cx.sess.building_library.get() {
@ast::Item {
attrs: item.attrs.iter().filter_map(|attr| {
                    if !attr.name().equiv(&("main")) {
Some(*attr)
} else {
None
}
}).collect(),
.. (*item).clone()
}
} else {
item
}
}
let mod_nomain = ast::Mod {
view_items: m.view_items.clone(),
items: m.items.iter().map(|i| nomain(&self.cx, *i)).collect(),
};
fold::noop_fold_mod(&mod_nomain, self)
}
}
fn generate_test_harness(sess: &Session, krate: ast::Crate)
-> ast::Crate {
let loader = &mut Loader::new(sess);
let mut cx: TestCtxt = TestCtxt {
sess: sess,
ext_cx: ExtCtxt::new(&sess.parse_sess, sess.opts.cfg.clone(),
ExpansionConfig {
loader: loader,
deriving_hash_type_parameter: false,
crate_id: from_str("test").unwrap(),
}),
path: RefCell::new(Vec::new()),
testfns: RefCell::new(Vec::new()),
is_test_crate: is_test_crate(&krate),
config: krate.config.clone(),
};
cx.ext_cx.bt_push(ExpnInfo {
call_site: DUMMY_SP,
callee: NameAndSpan {
name: ~"test",
format: MacroAttribute,
span: None
}
});
let mut fold = TestHarnessGenerator {
cx: cx
};
let res = fold.fold_crate(krate);
fold.cx.ext_cx.bt_pop();
return res;
}
fn strip_test_functions(krate: ast::Crate) -> ast::Crate {
// When not compiling with --test we should not compile the
// #[test] functions
config::strip_items(krate, |attrs| {
!attr::contains_name(attrs.as_slice(), "test") &&
!attr::contains_name(attrs.as_slice(), "bench")
})
}
fn is_test_fn(cx: &TestCtxt, i: @ast::Item) -> bool {
let has_test_attr = attr::contains_name(i.attrs.as_slice(), "test");
fn has_test_signature(i: @ast::Item) -> bool {
match &i.node {
&ast::ItemFn(ref decl, _, _, ref generics, _) => {
let no_output = match decl.output.node {
ast::TyNil => true,
_ => false
};
decl.inputs.is_empty()
&& no_output
            && !generics.is_parameterized()
}
_ => false
}
}
    if has_test_attr && !has_test_signature(i) {
let sess = cx.sess;
sess.span_err(
i.span,
"functions used as tests must have signature fn() -> ()."
);
}
return has_test_attr && has_test_signature(i);
}
fn is_bench_fn(cx: &TestCtxt, i: @ast::Item) -> bool {
let has_bench_attr = attr::contains_name(i.attrs.as_slice(), "bench");
fn has_test_signature(i: @ast::Item) -> bool {
match i.node {
ast::ItemFn(ref decl, _, _, ref generics, _) => {
let input_cnt = decl.inputs.len();
let no_output = match decl.output.node {
ast::TyNil => true,
_ => false
};
let tparm_cnt = generics.ty_params.len();
// NB: inadequate check, but we're running
// well before resolve, can't get too deep.
input_cnt == 1u
&& no_output && tparm_cnt == 0u
}
_ => false
}
}
    if has_bench_attr && !has_test_signature(i) {
let sess = cx.sess;
sess.span_err(i.span, "functions used as benches must have signature \
`fn(&mut Bencher) -> ()`");
}
return has_bench_attr && has_test_signature(i);
}
fn is_ignored(cx: &TestCtxt, i: @ast::Item) -> bool {
i.attrs.iter().any(|attr| {
// check ignore(cfg(foo, bar))
attr.name().equiv(&("ignore")) && match attr.meta_item_list() {
Some(ref cfgs) => {
attr::test_cfg(cx.config.as_slice(), cfgs.iter().map(|x| *x))
}
None => true
}
})
}
fn should_fail(i: @ast::Item) -> bool {
attr::contains_name(i.attrs.as_slice(), "should_fail")
}
fn add_test_module(cx: &TestCtxt, m: &ast::Mod) -> ast::Mod {
let testmod = mk_test_module(cx);
ast::Mod {
items: m.items.clone().append_one(testmod),
..(*m).clone()
}
}
/*
We're going to be building a module that looks more or less like:
mod __test {
#![!resolve_unexported]
extern crate test (name = "test", vers = "...");
fn main() {
test::test_main_static(::os::args(), tests)
}
static tests : &'static [test::TestDescAndFn] = &[
... the list of tests in the crate...
];
}
*/
fn mk_std(cx: &TestCtxt) -> ast::ViewItem {
let id_test = token::str_to_ident("test");
let (vi, vis) = if cx.is_test_crate {
(ast::ViewItemUse(
vec!(@nospan(ast::ViewPathSimple(id_test,
path_node(vec!(id_test)),
ast::DUMMY_NODE_ID)))),
ast::Public)
} else {
(ast::ViewItemExternCrate(id_test,
with_version("test"),
ast::DUMMY_NODE_ID),
ast::Inherited)
};
ast::ViewItem {
node: vi,
attrs: Vec::new(),
vis: vis,
span: DUMMY_SP
}
}
fn mk_test_module(cx: &TestCtxt) -> @ast::Item {
// Link to test crate
let view_items = vec!(mk_std(cx));
// A constant vector of test descriptors.
let tests = mk_tests(cx);
// The synthesized main function which will call the console test runner
// with our list of tests
let mainfn = (quote_item!(&cx.ext_cx,
pub fn main() {
#![main]
#![allow(deprecated_owned_vector)]
test::test_main_static(::std::os::args(), TESTS);
}
)).unwrap();
let testmod = ast::Mod {
view_items: view_items,
items: vec!(mainfn, tests),
};
let item_ = ast::ItemMod(testmod);
// This attribute tells resolve to let us call unexported functions
let resolve_unexported_str = InternedString::new("!resolve_unexported");
let resolve_unexported_attr =
attr::mk_attr(attr::mk_word_item(resolve_unexported_str));
let item = ast::Item {
ident: token::str_to_ident("__test"),
attrs: vec!(resolve_unexported_attr),
id: ast::DUMMY_NODE_ID,
node: item_,
vis: ast::Public,
span: DUMMY_SP,
};
debug!("Synthetic test module:\n{}\n", pprust::item_to_str(&item));
return @item;
}
fn
|
<T>(t: T) -> codemap::Spanned<T> {
codemap::Spanned { node: t, span: DUMMY_SP }
}
fn path_node(ids: Vec<ast::Ident> ) -> ast::Path {
ast::Path {
span: DUMMY_SP,
global: false,
segments: ids.move_iter().map(|identifier| ast::PathSegment {
identifier: identifier,
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}).collect()
}
}
fn path_node_global(ids: Vec<ast::Ident> ) -> ast::Path {
ast::Path {
span: DUMMY_SP,
global: true,
segments: ids.move_iter().map(|identifier| ast::PathSegment {
identifier: identifier,
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}).collect()
}
}
fn mk_tests(cx: &TestCtxt) -> @ast::Item {
// The vector of test_descs for this crate
let test_descs = mk_test_descs(cx);
(quote_item!(&cx.ext_cx,
pub static TESTS : &'static [self::test::TestDescAndFn] =
$test_descs
;
)).unwrap()
}
fn is_test_crate(krate: &ast::Crate) -> bool {
match attr::find_crateid(krate.attrs.as_slice()) {
Some(ref s) if "test" == s.name => true,
_ => false
}
}
fn mk_test_descs(cx: &TestCtxt) -> @ast::Expr {
debug!("building test vector from {} tests", cx.testfns.borrow().len());
@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprVstore(@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprVec(cx.testfns.borrow().iter().map(|test| {
mk_test_desc_and_fn_rec(cx, test)
}).collect()),
span: DUMMY_SP,
}, ast::ExprVstoreSlice),
span: DUMMY_SP,
}
}
fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr {
let span = test.span;
let path = test.path.clone();
debug!("encoding {}", ast_util::path_name_i(path.as_slice()));
let name_lit: ast::Lit =
nospan(ast::LitStr(token::intern_and_get_ident(
ast_util::path_name_i(path.as_slice())),
ast::CookedStr));
let name_expr = @ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprLit(@name_lit),
span: span
};
let fn_path = path_node_global(path);
let fn_expr = @ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(fn_path),
span: span,
};
let t_expr = if test.bench {
quote_expr!(&cx.ext_cx, self::test::StaticBenchFn($fn_expr) )
} else {
quote_expr!(&cx.ext_cx, self::test::StaticTestFn($fn_expr) )
};
let ignore_expr = if test.ignore {
quote_expr!(&cx.ext_cx, true )
} else {
quote_expr!(&cx.ext_cx, false )
};
let fail_expr = if test.should_fail {
quote_expr!(&cx.ext_cx, true )
} else {
quote_expr!(&cx.ext_cx, false )
};
let e = quote_expr!(&cx.ext_cx,
self::test::TestDescAndFn {
desc: self::test::TestDesc {
name: self::test::StaticTestName($name_expr),
ignore: $ignore_expr,
should_fail: $fail_expr
},
testfn: $t_expr,
}
);
e
}
|
nospan
|
identifier_name
|
test.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Code that generates a test runner to run all the tests in a crate
#![allow(dead_code)]
#![allow(unused_imports)]
use driver::session::Session;
use front::config;
use front::std_inject::with_version;
use metadata::creader::Loader;
use std::cell::RefCell;
use std::slice;
use std::vec::Vec;
use std::vec;
use syntax::ast_util::*;
use syntax::attr::AttrMetaMethods;
use syntax::attr;
use syntax::codemap::{DUMMY_SP, Span, ExpnInfo, NameAndSpan, MacroAttribute};
use syntax::codemap;
use syntax::ext::base::ExtCtxt;
use syntax::ext::expand::ExpansionConfig;
use syntax::fold::Folder;
use syntax::fold;
use syntax::owned_slice::OwnedSlice;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::print::pprust;
use syntax::{ast, ast_util};
use syntax::util::small_vector::SmallVector;
struct Test {
span: Span,
path: Vec<ast::Ident>,
bench: bool,
ignore: bool,
should_fail: bool
}
struct TestCtxt<'a> {
sess: &'a Session,
path: RefCell<Vec<ast::Ident>>,
ext_cx: ExtCtxt<'a>,
testfns: RefCell<Vec<Test> >,
is_test_crate: bool,
config: ast::CrateConfig,
}
// Traverse the crate, collecting all the test functions, eliding any
// existing main functions, and synthesizing a main test harness
pub fn modify_for_testing(sess: &Session,
krate: ast::Crate) -> ast::Crate {
// We generate the test harness when building in the 'test'
// configuration, either with the '--test' or '--cfg test'
// command line options.
let should_test = attr::contains_name(krate.config.as_slice(), "test");
if should_test {
generate_test_harness(sess, krate)
} else {
strip_test_functions(krate)
}
}
struct TestHarnessGenerator<'a> {
cx: TestCtxt<'a>,
}
impl<'a> fold::Folder for TestHarnessGenerator<'a> {
fn fold_crate(&mut self, c: ast::Crate) -> ast::Crate {
let folded = fold::noop_fold_crate(c, self);
// Add a special __test module to the crate that will contain code
// generated for the test harness
ast::Crate {
module: add_test_module(&self.cx, &folded.module),
.. folded
}
}
fn fold_item(&mut self, i: @ast::Item) -> SmallVector<@ast::Item> {
self.cx.path.borrow_mut().push(i.ident);
debug!("current path: {}",
ast_util::path_name_i(self.cx.path.borrow().as_slice()));
if is_test_fn(&self.cx, i) || is_bench_fn(&self.cx, i) {
match i.node {
ast::ItemFn(_, ast::UnsafeFn, _, _, _) => {
let sess = self.cx.sess;
sess.span_fatal(i.span,
"unsafe functions cannot be used for \
tests");
}
_ =>
|
}
}
let res = fold::noop_fold_item(i, self);
self.cx.path.borrow_mut().pop();
res
}
fn fold_mod(&mut self, m: &ast::Mod) -> ast::Mod {
// Remove any #[main] from the AST so it doesn't clash with
// the one we're going to add. Only if compiling an executable.
fn nomain(cx: &TestCtxt, item: @ast::Item) -> @ast::Item {
            if !cx.sess.building_library.get() {
@ast::Item {
attrs: item.attrs.iter().filter_map(|attr| {
                    if !attr.name().equiv(&("main")) {
Some(*attr)
} else {
None
}
}).collect(),
.. (*item).clone()
}
} else {
item
}
}
let mod_nomain = ast::Mod {
view_items: m.view_items.clone(),
items: m.items.iter().map(|i| nomain(&self.cx, *i)).collect(),
};
fold::noop_fold_mod(&mod_nomain, self)
}
}
fn generate_test_harness(sess: &Session, krate: ast::Crate)
-> ast::Crate {
let loader = &mut Loader::new(sess);
let mut cx: TestCtxt = TestCtxt {
sess: sess,
ext_cx: ExtCtxt::new(&sess.parse_sess, sess.opts.cfg.clone(),
ExpansionConfig {
loader: loader,
deriving_hash_type_parameter: false,
crate_id: from_str("test").unwrap(),
}),
path: RefCell::new(Vec::new()),
testfns: RefCell::new(Vec::new()),
is_test_crate: is_test_crate(&krate),
config: krate.config.clone(),
};
cx.ext_cx.bt_push(ExpnInfo {
call_site: DUMMY_SP,
callee: NameAndSpan {
name: ~"test",
format: MacroAttribute,
span: None
}
});
let mut fold = TestHarnessGenerator {
cx: cx
};
let res = fold.fold_crate(krate);
fold.cx.ext_cx.bt_pop();
return res;
}
fn strip_test_functions(krate: ast::Crate) -> ast::Crate {
// When not compiling with --test we should not compile the
// #[test] functions
config::strip_items(krate, |attrs| {
!attr::contains_name(attrs.as_slice(), "test") &&
!attr::contains_name(attrs.as_slice(), "bench")
})
}
fn is_test_fn(cx: &TestCtxt, i: @ast::Item) -> bool {
let has_test_attr = attr::contains_name(i.attrs.as_slice(), "test");
fn has_test_signature(i: @ast::Item) -> bool {
match &i.node {
&ast::ItemFn(ref decl, _, _, ref generics, _) => {
let no_output = match decl.output.node {
ast::TyNil => true,
_ => false
};
decl.inputs.is_empty()
&& no_output
            && !generics.is_parameterized()
}
_ => false
}
}
    if has_test_attr && !has_test_signature(i) {
let sess = cx.sess;
sess.span_err(
i.span,
"functions used as tests must have signature fn() -> ()."
);
}
return has_test_attr && has_test_signature(i);
}
fn is_bench_fn(cx: &TestCtxt, i: @ast::Item) -> bool {
let has_bench_attr = attr::contains_name(i.attrs.as_slice(), "bench");
fn has_test_signature(i: @ast::Item) -> bool {
match i.node {
ast::ItemFn(ref decl, _, _, ref generics, _) => {
let input_cnt = decl.inputs.len();
let no_output = match decl.output.node {
ast::TyNil => true,
_ => false
};
let tparm_cnt = generics.ty_params.len();
// NB: inadequate check, but we're running
// well before resolve, can't get too deep.
input_cnt == 1u
&& no_output && tparm_cnt == 0u
}
_ => false
}
}
    if has_bench_attr && !has_test_signature(i) {
let sess = cx.sess;
sess.span_err(i.span, "functions used as benches must have signature \
`fn(&mut Bencher) -> ()`");
}
return has_bench_attr && has_test_signature(i);
}
fn is_ignored(cx: &TestCtxt, i: @ast::Item) -> bool {
i.attrs.iter().any(|attr| {
// check ignore(cfg(foo, bar))
attr.name().equiv(&("ignore")) && match attr.meta_item_list() {
Some(ref cfgs) => {
attr::test_cfg(cx.config.as_slice(), cfgs.iter().map(|x| *x))
}
None => true
}
})
}
fn should_fail(i: @ast::Item) -> bool {
attr::contains_name(i.attrs.as_slice(), "should_fail")
}
fn add_test_module(cx: &TestCtxt, m: &ast::Mod) -> ast::Mod {
let testmod = mk_test_module(cx);
ast::Mod {
items: m.items.clone().append_one(testmod),
..(*m).clone()
}
}
/*
We're going to be building a module that looks more or less like:
mod __test {
#![!resolve_unexported]
extern crate test (name = "test", vers = "...");
fn main() {
test::test_main_static(::os::args(), tests)
}
static tests : &'static [test::TestDescAndFn] = &[
... the list of tests in the crate...
];
}
*/
fn mk_std(cx: &TestCtxt) -> ast::ViewItem {
let id_test = token::str_to_ident("test");
let (vi, vis) = if cx.is_test_crate {
(ast::ViewItemUse(
vec!(@nospan(ast::ViewPathSimple(id_test,
path_node(vec!(id_test)),
ast::DUMMY_NODE_ID)))),
ast::Public)
} else {
(ast::ViewItemExternCrate(id_test,
with_version("test"),
ast::DUMMY_NODE_ID),
ast::Inherited)
};
ast::ViewItem {
node: vi,
attrs: Vec::new(),
vis: vis,
span: DUMMY_SP
}
}
fn mk_test_module(cx: &TestCtxt) -> @ast::Item {
// Link to test crate
let view_items = vec!(mk_std(cx));
// A constant vector of test descriptors.
let tests = mk_tests(cx);
// The synthesized main function which will call the console test runner
// with our list of tests
let mainfn = (quote_item!(&cx.ext_cx,
pub fn main() {
#![main]
#![allow(deprecated_owned_vector)]
test::test_main_static(::std::os::args(), TESTS);
}
)).unwrap();
let testmod = ast::Mod {
view_items: view_items,
items: vec!(mainfn, tests),
};
let item_ = ast::ItemMod(testmod);
// This attribute tells resolve to let us call unexported functions
let resolve_unexported_str = InternedString::new("!resolve_unexported");
let resolve_unexported_attr =
attr::mk_attr(attr::mk_word_item(resolve_unexported_str));
let item = ast::Item {
ident: token::str_to_ident("__test"),
attrs: vec!(resolve_unexported_attr),
id: ast::DUMMY_NODE_ID,
node: item_,
vis: ast::Public,
span: DUMMY_SP,
};
debug!("Synthetic test module:\n{}\n", pprust::item_to_str(&item));
return @item;
}
fn nospan<T>(t: T) -> codemap::Spanned<T> {
codemap::Spanned { node: t, span: DUMMY_SP }
}
fn path_node(ids: Vec<ast::Ident> ) -> ast::Path {
ast::Path {
span: DUMMY_SP,
global: false,
segments: ids.move_iter().map(|identifier| ast::PathSegment {
identifier: identifier,
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}).collect()
}
}
fn path_node_global(ids: Vec<ast::Ident> ) -> ast::Path {
ast::Path {
span: DUMMY_SP,
global: true,
segments: ids.move_iter().map(|identifier| ast::PathSegment {
identifier: identifier,
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}).collect()
}
}
fn mk_tests(cx: &TestCtxt) -> @ast::Item {
// The vector of test_descs for this crate
let test_descs = mk_test_descs(cx);
(quote_item!(&cx.ext_cx,
pub static TESTS : &'static [self::test::TestDescAndFn] =
$test_descs
;
)).unwrap()
}
fn is_test_crate(krate: &ast::Crate) -> bool {
match attr::find_crateid(krate.attrs.as_slice()) {
Some(ref s) if "test" == s.name => true,
_ => false
}
}
fn mk_test_descs(cx: &TestCtxt) -> @ast::Expr {
debug!("building test vector from {} tests", cx.testfns.borrow().len());
@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprVstore(@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprVec(cx.testfns.borrow().iter().map(|test| {
mk_test_desc_and_fn_rec(cx, test)
}).collect()),
span: DUMMY_SP,
}, ast::ExprVstoreSlice),
span: DUMMY_SP,
}
}
fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr {
let span = test.span;
let path = test.path.clone();
debug!("encoding {}", ast_util::path_name_i(path.as_slice()));
let name_lit: ast::Lit =
nospan(ast::LitStr(token::intern_and_get_ident(
ast_util::path_name_i(path.as_slice())),
ast::CookedStr));
let name_expr = @ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprLit(@name_lit),
span: span
};
let fn_path = path_node_global(path);
let fn_expr = @ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(fn_path),
span: span,
};
let t_expr = if test.bench {
quote_expr!(&cx.ext_cx, self::test::StaticBenchFn($fn_expr) )
} else {
quote_expr!(&cx.ext_cx, self::test::StaticTestFn($fn_expr) )
};
let ignore_expr = if test.ignore {
quote_expr!(&cx.ext_cx, true )
} else {
quote_expr!(&cx.ext_cx, false )
};
let fail_expr = if test.should_fail {
quote_expr!(&cx.ext_cx, true )
} else {
quote_expr!(&cx.ext_cx, false )
};
let e = quote_expr!(&cx.ext_cx,
self::test::TestDescAndFn {
desc: self::test::TestDesc {
name: self::test::StaticTestName($name_expr),
ignore: $ignore_expr,
should_fail: $fail_expr
},
testfn: $t_expr,
}
);
e
}
|
{
debug!("this is a test function");
let test = Test {
span: i.span,
path: self.cx.path.borrow().clone(),
bench: is_bench_fn(&self.cx, i),
ignore: is_ignored(&self.cx, i),
should_fail: should_fail(i)
};
self.cx.testfns.borrow_mut().push(test);
// debug!("have {} test/bench functions",
// cx.testfns.len());
}
|
conditional_block
|
test.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Code that generates a test runner to run all the tests in a crate
#![allow(dead_code)]
#![allow(unused_imports)]
use driver::session::Session;
use front::config;
use front::std_inject::with_version;
use metadata::creader::Loader;
use std::cell::RefCell;
use std::slice;
use std::vec::Vec;
use std::vec;
use syntax::ast_util::*;
use syntax::attr::AttrMetaMethods;
use syntax::attr;
use syntax::codemap::{DUMMY_SP, Span, ExpnInfo, NameAndSpan, MacroAttribute};
use syntax::codemap;
use syntax::ext::base::ExtCtxt;
use syntax::ext::expand::ExpansionConfig;
use syntax::fold::Folder;
use syntax::fold;
use syntax::owned_slice::OwnedSlice;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::print::pprust;
use syntax::{ast, ast_util};
use syntax::util::small_vector::SmallVector;
struct Test {
span: Span,
path: Vec<ast::Ident>,
bench: bool,
ignore: bool,
should_fail: bool
}
struct TestCtxt<'a> {
sess: &'a Session,
path: RefCell<Vec<ast::Ident>>,
ext_cx: ExtCtxt<'a>,
testfns: RefCell<Vec<Test> >,
is_test_crate: bool,
config: ast::CrateConfig,
}
// Traverse the crate, collecting all the test functions, eliding any
// existing main functions, and synthesizing a main test harness
pub fn modify_for_testing(sess: &Session,
krate: ast::Crate) -> ast::Crate {
// We generate the test harness when building in the 'test'
// configuration, either with the '--test' or '--cfg test'
// command line options.
let should_test = attr::contains_name(krate.config.as_slice(), "test");
if should_test {
generate_test_harness(sess, krate)
} else {
strip_test_functions(krate)
}
}
struct TestHarnessGenerator<'a> {
cx: TestCtxt<'a>,
}
impl<'a> fold::Folder for TestHarnessGenerator<'a> {
fn fold_crate(&mut self, c: ast::Crate) -> ast::Crate {
let folded = fold::noop_fold_crate(c, self);
// Add a special __test module to the crate that will contain code
// generated for the test harness
ast::Crate {
module: add_test_module(&self.cx, &folded.module),
.. folded
}
}
fn fold_item(&mut self, i: @ast::Item) -> SmallVector<@ast::Item> {
self.cx.path.borrow_mut().push(i.ident);
debug!("current path: {}",
ast_util::path_name_i(self.cx.path.borrow().as_slice()));
if is_test_fn(&self.cx, i) || is_bench_fn(&self.cx, i) {
match i.node {
ast::ItemFn(_, ast::UnsafeFn, _, _, _) => {
let sess = self.cx.sess;
sess.span_fatal(i.span,
"unsafe functions cannot be used for \
tests");
}
_ => {
debug!("this is a test function");
let test = Test {
span: i.span,
path: self.cx.path.borrow().clone(),
bench: is_bench_fn(&self.cx, i),
ignore: is_ignored(&self.cx, i),
should_fail: should_fail(i)
};
self.cx.testfns.borrow_mut().push(test);
// debug!("have {} test/bench functions",
// cx.testfns.len());
}
}
}
let res = fold::noop_fold_item(i, self);
self.cx.path.borrow_mut().pop();
res
}
fn fold_mod(&mut self, m: &ast::Mod) -> ast::Mod {
// Remove any #[main] from the AST so it doesn't clash with
// the one we're going to add. Only if compiling an executable.
fn nomain(cx: &TestCtxt, item: @ast::Item) -> @ast::Item {
            if !cx.sess.building_library.get() {
@ast::Item {
attrs: item.attrs.iter().filter_map(|attr| {
                    if !attr.name().equiv(&("main")) {
Some(*attr)
} else {
None
}
}).collect(),
.. (*item).clone()
}
} else {
item
}
}
let mod_nomain = ast::Mod {
view_items: m.view_items.clone(),
items: m.items.iter().map(|i| nomain(&self.cx, *i)).collect(),
};
fold::noop_fold_mod(&mod_nomain, self)
}
}
fn generate_test_harness(sess: &Session, krate: ast::Crate)
-> ast::Crate {
let loader = &mut Loader::new(sess);
let mut cx: TestCtxt = TestCtxt {
sess: sess,
ext_cx: ExtCtxt::new(&sess.parse_sess, sess.opts.cfg.clone(),
ExpansionConfig {
loader: loader,
deriving_hash_type_parameter: false,
crate_id: from_str("test").unwrap(),
}),
path: RefCell::new(Vec::new()),
testfns: RefCell::new(Vec::new()),
is_test_crate: is_test_crate(&krate),
config: krate.config.clone(),
};
cx.ext_cx.bt_push(ExpnInfo {
call_site: DUMMY_SP,
callee: NameAndSpan {
name: ~"test",
format: MacroAttribute,
span: None
}
});
let mut fold = TestHarnessGenerator {
cx: cx
};
let res = fold.fold_crate(krate);
fold.cx.ext_cx.bt_pop();
return res;
}
fn strip_test_functions(krate: ast::Crate) -> ast::Crate {
// When not compiling with --test we should not compile the
// #[test] functions
config::strip_items(krate, |attrs| {
!attr::contains_name(attrs.as_slice(), "test") &&
!attr::contains_name(attrs.as_slice(), "bench")
})
}
fn is_test_fn(cx: &TestCtxt, i: @ast::Item) -> bool {
let has_test_attr = attr::contains_name(i.attrs.as_slice(), "test");
fn has_test_signature(i: @ast::Item) -> bool {
match &i.node {
&ast::ItemFn(ref decl, _, _, ref generics, _) => {
|
ast::TyNil => true,
_ => false
};
decl.inputs.is_empty()
&& no_output
            && !generics.is_parameterized()
}
_ => false
}
}
    if has_test_attr && !has_test_signature(i) {
let sess = cx.sess;
sess.span_err(
i.span,
"functions used as tests must have signature fn() -> ()."
);
}
return has_test_attr && has_test_signature(i);
}
fn is_bench_fn(cx: &TestCtxt, i: @ast::Item) -> bool {
let has_bench_attr = attr::contains_name(i.attrs.as_slice(), "bench");
fn has_test_signature(i: @ast::Item) -> bool {
match i.node {
ast::ItemFn(ref decl, _, _, ref generics, _) => {
let input_cnt = decl.inputs.len();
let no_output = match decl.output.node {
ast::TyNil => true,
_ => false
};
let tparm_cnt = generics.ty_params.len();
// NB: inadequate check, but we're running
// well before resolve, can't get too deep.
input_cnt == 1u
&& no_output && tparm_cnt == 0u
}
_ => false
}
}
    if has_bench_attr && !has_test_signature(i) {
let sess = cx.sess;
sess.span_err(i.span, "functions used as benches must have signature \
`fn(&mut Bencher) -> ()`");
}
return has_bench_attr && has_test_signature(i);
}
fn is_ignored(cx: &TestCtxt, i: @ast::Item) -> bool {
i.attrs.iter().any(|attr| {
// check ignore(cfg(foo, bar))
attr.name().equiv(&("ignore")) && match attr.meta_item_list() {
Some(ref cfgs) => {
attr::test_cfg(cx.config.as_slice(), cfgs.iter().map(|x| *x))
}
None => true
}
})
}
fn should_fail(i: @ast::Item) -> bool {
attr::contains_name(i.attrs.as_slice(), "should_fail")
}
fn add_test_module(cx: &TestCtxt, m: &ast::Mod) -> ast::Mod {
let testmod = mk_test_module(cx);
ast::Mod {
items: m.items.clone().append_one(testmod),
..(*m).clone()
}
}
/*
We're going to be building a module that looks more or less like:
mod __test {
#![!resolve_unexported]
extern crate test (name = "test", vers = "...");
fn main() {
test::test_main_static(::os::args(), tests)
}
static tests : &'static [test::TestDescAndFn] = &[
... the list of tests in the crate...
];
}
*/
fn mk_std(cx: &TestCtxt) -> ast::ViewItem {
let id_test = token::str_to_ident("test");
let (vi, vis) = if cx.is_test_crate {
(ast::ViewItemUse(
vec!(@nospan(ast::ViewPathSimple(id_test,
path_node(vec!(id_test)),
ast::DUMMY_NODE_ID)))),
ast::Public)
} else {
(ast::ViewItemExternCrate(id_test,
with_version("test"),
ast::DUMMY_NODE_ID),
ast::Inherited)
};
ast::ViewItem {
node: vi,
attrs: Vec::new(),
vis: vis,
span: DUMMY_SP
}
}
fn mk_test_module(cx: &TestCtxt) -> @ast::Item {
// Link to test crate
let view_items = vec!(mk_std(cx));
// A constant vector of test descriptors.
let tests = mk_tests(cx);
// The synthesized main function which will call the console test runner
// with our list of tests
let mainfn = (quote_item!(&cx.ext_cx,
pub fn main() {
#![main]
#![allow(deprecated_owned_vector)]
test::test_main_static(::std::os::args(), TESTS);
}
)).unwrap();
let testmod = ast::Mod {
view_items: view_items,
items: vec!(mainfn, tests),
};
let item_ = ast::ItemMod(testmod);
// This attribute tells resolve to let us call unexported functions
let resolve_unexported_str = InternedString::new("!resolve_unexported");
let resolve_unexported_attr =
attr::mk_attr(attr::mk_word_item(resolve_unexported_str));
let item = ast::Item {
ident: token::str_to_ident("__test"),
attrs: vec!(resolve_unexported_attr),
id: ast::DUMMY_NODE_ID,
node: item_,
vis: ast::Public,
span: DUMMY_SP,
};
debug!("Synthetic test module:\n{}\n", pprust::item_to_str(&item));
return @item;
}
fn nospan<T>(t: T) -> codemap::Spanned<T> {
codemap::Spanned { node: t, span: DUMMY_SP }
}
fn path_node(ids: Vec<ast::Ident> ) -> ast::Path {
ast::Path {
span: DUMMY_SP,
global: false,
segments: ids.move_iter().map(|identifier| ast::PathSegment {
identifier: identifier,
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}).collect()
}
}
fn path_node_global(ids: Vec<ast::Ident> ) -> ast::Path {
ast::Path {
span: DUMMY_SP,
global: true,
segments: ids.move_iter().map(|identifier| ast::PathSegment {
identifier: identifier,
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}).collect()
}
}
fn mk_tests(cx: &TestCtxt) -> @ast::Item {
// The vector of test_descs for this crate
let test_descs = mk_test_descs(cx);
(quote_item!(&cx.ext_cx,
pub static TESTS : &'static [self::test::TestDescAndFn] =
$test_descs
;
)).unwrap()
}
fn is_test_crate(krate: &ast::Crate) -> bool {
match attr::find_crateid(krate.attrs.as_slice()) {
Some(ref s) if "test" == s.name => true,
_ => false
}
}
fn mk_test_descs(cx: &TestCtxt) -> @ast::Expr {
debug!("building test vector from {} tests", cx.testfns.borrow().len());
@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprVstore(@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprVec(cx.testfns.borrow().iter().map(|test| {
mk_test_desc_and_fn_rec(cx, test)
}).collect()),
span: DUMMY_SP,
}, ast::ExprVstoreSlice),
span: DUMMY_SP,
}
}
fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr {
let span = test.span;
let path = test.path.clone();
debug!("encoding {}", ast_util::path_name_i(path.as_slice()));
let name_lit: ast::Lit =
nospan(ast::LitStr(token::intern_and_get_ident(
ast_util::path_name_i(path.as_slice())),
ast::CookedStr));
let name_expr = @ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprLit(@name_lit),
span: span
};
let fn_path = path_node_global(path);
let fn_expr = @ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(fn_path),
span: span,
};
let t_expr = if test.bench {
quote_expr!(&cx.ext_cx, self::test::StaticBenchFn($fn_expr) )
} else {
quote_expr!(&cx.ext_cx, self::test::StaticTestFn($fn_expr) )
};
let ignore_expr = if test.ignore {
quote_expr!(&cx.ext_cx, true )
} else {
quote_expr!(&cx.ext_cx, false )
};
let fail_expr = if test.should_fail {
quote_expr!(&cx.ext_cx, true )
} else {
quote_expr!(&cx.ext_cx, false )
};
let e = quote_expr!(&cx.ext_cx,
self::test::TestDescAndFn {
desc: self::test::TestDesc {
name: self::test::StaticTestName($name_expr),
ignore: $ignore_expr,
should_fail: $fail_expr
},
testfn: $t_expr,
}
);
e
}
|
let no_output = match decl.output.node {
|
random_line_split
|
length.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Animation implementation for various length-related types.
use super::{Animate, Procedure};
use crate::values::computed::length::LengthPercentage;
use crate::values::computed::Percentage;
use style_traits::values::specified::AllowedNumericType;
/// <https://drafts.csswg.org/css-transitions/#animtype-lpcalc>
impl Animate for LengthPercentage {
#[inline]
fn animate(&self, other: &Self, procedure: Procedure) -> Result<Self, ()>
|
percentage,
AllowedNumericType::All,
))
}
}
|
{
let animate_percentage_half = |this: Option<Percentage>, other: Option<Percentage>| {
if this.is_none() && other.is_none() {
return Ok(None);
}
let this = this.unwrap_or_default();
let other = other.unwrap_or_default();
Ok(Some(this.animate(&other, procedure)?))
};
let length = self
.unclamped_length()
.animate(&other.unclamped_length(), procedure)?;
let percentage =
animate_percentage_half(self.specified_percentage(), other.specified_percentage())?;
// Gets clamped as needed after the animation if needed, so no need to
// specify any particular AllowedNumericType.
Ok(LengthPercentage::new_calc(
length,
|
identifier_body
|
length.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Animation implementation for various length-related types.
use super::{Animate, Procedure};
use crate::values::computed::length::LengthPercentage;
use crate::values::computed::Percentage;
use style_traits::values::specified::AllowedNumericType;
/// <https://drafts.csswg.org/css-transitions/#animtype-lpcalc>
impl Animate for LengthPercentage {
#[inline]
fn animate(&self, other: &Self, procedure: Procedure) -> Result<Self, ()> {
let animate_percentage_half = |this: Option<Percentage>, other: Option<Percentage>| {
if this.is_none() && other.is_none()
|
let this = this.unwrap_or_default();
let other = other.unwrap_or_default();
Ok(Some(this.animate(&other, procedure)?))
};
let length = self
.unclamped_length()
.animate(&other.unclamped_length(), procedure)?;
let percentage =
animate_percentage_half(self.specified_percentage(), other.specified_percentage())?;
// Gets clamped as needed after the animation if needed, so no need to
// specify any particular AllowedNumericType.
Ok(LengthPercentage::new_calc(
length,
percentage,
AllowedNumericType::All,
))
}
}
|
{
return Ok(None);
}
|
conditional_block
|
length.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Animation implementation for various length-related types.
use super::{Animate, Procedure};
use crate::values::computed::length::LengthPercentage;
use crate::values::computed::Percentage;
use style_traits::values::specified::AllowedNumericType;
/// <https://drafts.csswg.org/css-transitions/#animtype-lpcalc>
impl Animate for LengthPercentage {
#[inline]
fn animate(&self, other: &Self, procedure: Procedure) -> Result<Self, ()> {
let animate_percentage_half = |this: Option<Percentage>, other: Option<Percentage>| {
if this.is_none() && other.is_none() {
return Ok(None);
}
let this = this.unwrap_or_default();
let other = other.unwrap_or_default();
Ok(Some(this.animate(&other, procedure)?))
};
let length = self
.unclamped_length()
.animate(&other.unclamped_length(), procedure)?;
let percentage =
animate_percentage_half(self.specified_percentage(), other.specified_percentage())?;
|
// Gets clamped as needed after the animation if needed, so no need to
// specify any particular AllowedNumericType.
Ok(LengthPercentage::new_calc(
length,
percentage,
AllowedNumericType::All,
))
}
}
|
random_line_split
|
|
length.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Animation implementation for various length-related types.
use super::{Animate, Procedure};
use crate::values::computed::length::LengthPercentage;
use crate::values::computed::Percentage;
use style_traits::values::specified::AllowedNumericType;
/// <https://drafts.csswg.org/css-transitions/#animtype-lpcalc>
impl Animate for LengthPercentage {
#[inline]
fn
|
(&self, other: &Self, procedure: Procedure) -> Result<Self, ()> {
let animate_percentage_half = |this: Option<Percentage>, other: Option<Percentage>| {
if this.is_none() && other.is_none() {
return Ok(None);
}
let this = this.unwrap_or_default();
let other = other.unwrap_or_default();
Ok(Some(this.animate(&other, procedure)?))
};
let length = self
.unclamped_length()
.animate(&other.unclamped_length(), procedure)?;
let percentage =
animate_percentage_half(self.specified_percentage(), other.specified_percentage())?;
// Gets clamped as needed after the animation if needed, so no need to
// specify any particular AllowedNumericType.
Ok(LengthPercentage::new_calc(
length,
percentage,
AllowedNumericType::All,
))
}
}
|
animate
|
identifier_name
|
customevent.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::CustomEventBinding;
use dom::bindings::codegen::Bindings::CustomEventBinding::CustomEventMethods;
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::InheritTypes::{EventCast, CustomEventDerived};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{Root, MutHeapJSVal};
use dom::bindings::utils::reflect_dom_object;
use dom::event::{Event, EventTypeId};
use js::jsapi::{JSContext, HandleValue};
use js::jsval::JSVal;
use util::str::DOMString;
// https://dom.spec.whatwg.org/#interface-customevent
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct CustomEvent {
event: Event,
#[ignore_heap_size_of = "Defined in rust-mozjs"]
detail: MutHeapJSVal,
}
impl CustomEventDerived for Event {
fn is_customevent(&self) -> bool {
*self.type_id() == EventTypeId::CustomEvent
}
}
impl CustomEvent {
fn new_inherited(type_id: EventTypeId) -> CustomEvent {
CustomEvent {
event: Event::new_inherited(type_id),
detail: MutHeapJSVal::new(),
}
}
pub fn new_uninitialized(global: GlobalRef) -> Root<CustomEvent> {
reflect_dom_object(box CustomEvent::new_inherited(EventTypeId::CustomEvent),
global,
CustomEventBinding::Wrap)
}
pub fn new(global: GlobalRef,
type_: DOMString,
bubbles: bool,
cancelable: bool,
detail: HandleValue) -> Root<CustomEvent> {
let ev = CustomEvent::new_uninitialized(global);
ev.r().InitCustomEvent(global.get_cx(), type_, bubbles, cancelable, detail);
ev
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
                       init: &CustomEventBinding::CustomEventInit) -> Fallible<Root<CustomEvent>> {
Ok(CustomEvent::new(global,
type_,
init.parent.bubbles,
init.parent.cancelable,
HandleValue { ptr: &init.detail }))
}
}
impl<'a> CustomEventMethods for &'a CustomEvent {
// https://dom.spec.whatwg.org/#dom-customevent-detail
fn Detail(self, _cx: *mut JSContext) -> JSVal {
self.detail.get()
}
// https://dom.spec.whatwg.org/#dom-customevent-initcustomevent
fn InitCustomEvent(self,
_cx: *mut JSContext,
type_: DOMString,
can_bubble: bool,
cancelable: bool,
detail: HandleValue) {
let event = EventCast::from_ref(self);
if event.dispatching()
|
self.detail.set(detail.get());
event.InitEvent(type_, can_bubble, cancelable);
}
}
|
{
return;
}
|
conditional_block
|
customevent.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::CustomEventBinding;
use dom::bindings::codegen::Bindings::CustomEventBinding::CustomEventMethods;
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::InheritTypes::{EventCast, CustomEventDerived};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{Root, MutHeapJSVal};
use dom::bindings::utils::reflect_dom_object;
use dom::event::{Event, EventTypeId};
use js::jsapi::{JSContext, HandleValue};
use js::jsval::JSVal;
use util::str::DOMString;
// https://dom.spec.whatwg.org/#interface-customevent
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct CustomEvent {
event: Event,
#[ignore_heap_size_of = "Defined in rust-mozjs"]
detail: MutHeapJSVal,
}
impl CustomEventDerived for Event {
fn is_customevent(&self) -> bool {
*self.type_id() == EventTypeId::CustomEvent
}
}
impl CustomEvent {
fn new_inherited(type_id: EventTypeId) -> CustomEvent {
CustomEvent {
event: Event::new_inherited(type_id),
detail: MutHeapJSVal::new(),
}
}
pub fn new_uninitialized(global: GlobalRef) -> Root<CustomEvent> {
reflect_dom_object(box CustomEvent::new_inherited(EventTypeId::CustomEvent),
global,
CustomEventBinding::Wrap)
}
pub fn new(global: GlobalRef,
|
type_: DOMString,
bubbles: bool,
cancelable: bool,
detail: HandleValue) -> Root<CustomEvent> {
let ev = CustomEvent::new_uninitialized(global);
ev.r().InitCustomEvent(global.get_cx(), type_, bubbles, cancelable, detail);
ev
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
                       init: &CustomEventBinding::CustomEventInit) -> Fallible<Root<CustomEvent>> {
Ok(CustomEvent::new(global,
type_,
init.parent.bubbles,
init.parent.cancelable,
HandleValue { ptr: &init.detail }))
}
}
impl<'a> CustomEventMethods for &'a CustomEvent {
// https://dom.spec.whatwg.org/#dom-customevent-detail
fn Detail(self, _cx: *mut JSContext) -> JSVal {
self.detail.get()
}
// https://dom.spec.whatwg.org/#dom-customevent-initcustomevent
fn InitCustomEvent(self,
_cx: *mut JSContext,
type_: DOMString,
can_bubble: bool,
cancelable: bool,
detail: HandleValue) {
let event = EventCast::from_ref(self);
if event.dispatching() {
return;
}
self.detail.set(detail.get());
event.InitEvent(type_, can_bubble, cancelable);
}
}
|
random_line_split
|
|
customevent.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::CustomEventBinding;
use dom::bindings::codegen::Bindings::CustomEventBinding::CustomEventMethods;
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::InheritTypes::{EventCast, CustomEventDerived};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{Root, MutHeapJSVal};
use dom::bindings::utils::reflect_dom_object;
use dom::event::{Event, EventTypeId};
use js::jsapi::{JSContext, HandleValue};
use js::jsval::JSVal;
use util::str::DOMString;
// https://dom.spec.whatwg.org/#interface-customevent
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct CustomEvent {
event: Event,
#[ignore_heap_size_of = "Defined in rust-mozjs"]
detail: MutHeapJSVal,
}
impl CustomEventDerived for Event {
fn is_customevent(&self) -> bool {
*self.type_id() == EventTypeId::CustomEvent
}
}
impl CustomEvent {
fn new_inherited(type_id: EventTypeId) -> CustomEvent {
CustomEvent {
event: Event::new_inherited(type_id),
detail: MutHeapJSVal::new(),
}
}
pub fn new_uninitialized(global: GlobalRef) -> Root<CustomEvent> {
reflect_dom_object(box CustomEvent::new_inherited(EventTypeId::CustomEvent),
global,
CustomEventBinding::Wrap)
}
pub fn new(global: GlobalRef,
type_: DOMString,
bubbles: bool,
cancelable: bool,
detail: HandleValue) -> Root<CustomEvent> {
let ev = CustomEvent::new_uninitialized(global);
ev.r().InitCustomEvent(global.get_cx(), type_, bubbles, cancelable, detail);
ev
}
pub fn Constructor(global: GlobalRef,
type_: DOMString,
                       init: &CustomEventBinding::CustomEventInit) -> Fallible<Root<CustomEvent>> {
Ok(CustomEvent::new(global,
type_,
init.parent.bubbles,
init.parent.cancelable,
HandleValue { ptr: &init.detail }))
}
}
impl<'a> CustomEventMethods for &'a CustomEvent {
// https://dom.spec.whatwg.org/#dom-customevent-detail
fn Detail(self, _cx: *mut JSContext) -> JSVal {
self.detail.get()
}
// https://dom.spec.whatwg.org/#dom-customevent-initcustomevent
fn InitCustomEvent(self,
_cx: *mut JSContext,
type_: DOMString,
can_bubble: bool,
cancelable: bool,
detail: HandleValue)
|
}
|
{
let event = EventCast::from_ref(self);
if event.dispatching() {
return;
}
self.detail.set(detail.get());
event.InitEvent(type_, can_bubble, cancelable);
}
|
identifier_body
|
customevent.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::CustomEventBinding;
use dom::bindings::codegen::Bindings::CustomEventBinding::CustomEventMethods;
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::InheritTypes::{EventCast, CustomEventDerived};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{Root, MutHeapJSVal};
use dom::bindings::utils::reflect_dom_object;
use dom::event::{Event, EventTypeId};
use js::jsapi::{JSContext, HandleValue};
use js::jsval::JSVal;
use util::str::DOMString;
// https://dom.spec.whatwg.org/#interface-customevent
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct CustomEvent {
event: Event,
#[ignore_heap_size_of = "Defined in rust-mozjs"]
detail: MutHeapJSVal,
}
impl CustomEventDerived for Event {
fn is_customevent(&self) -> bool {
*self.type_id() == EventTypeId::CustomEvent
}
}
impl CustomEvent {
fn new_inherited(type_id: EventTypeId) -> CustomEvent {
CustomEvent {
event: Event::new_inherited(type_id),
detail: MutHeapJSVal::new(),
}
}
pub fn new_uninitialized(global: GlobalRef) -> Root<CustomEvent> {
reflect_dom_object(box CustomEvent::new_inherited(EventTypeId::CustomEvent),
global,
CustomEventBinding::Wrap)
}
pub fn new(global: GlobalRef,
type_: DOMString,
bubbles: bool,
cancelable: bool,
detail: HandleValue) -> Root<CustomEvent> {
let ev = CustomEvent::new_uninitialized(global);
ev.r().InitCustomEvent(global.get_cx(), type_, bubbles, cancelable, detail);
ev
}
pub fn
|
(global: GlobalRef,
type_: DOMString,
                       init: &CustomEventBinding::CustomEventInit) -> Fallible<Root<CustomEvent>> {
Ok(CustomEvent::new(global,
type_,
init.parent.bubbles,
init.parent.cancelable,
HandleValue { ptr: &init.detail }))
}
}
impl<'a> CustomEventMethods for &'a CustomEvent {
// https://dom.spec.whatwg.org/#dom-customevent-detail
fn Detail(self, _cx: *mut JSContext) -> JSVal {
self.detail.get()
}
// https://dom.spec.whatwg.org/#dom-customevent-initcustomevent
fn InitCustomEvent(self,
_cx: *mut JSContext,
type_: DOMString,
can_bubble: bool,
cancelable: bool,
detail: HandleValue) {
let event = EventCast::from_ref(self);
if event.dispatching() {
return;
}
self.detail.set(detail.get());
event.InitEvent(type_, can_bubble, cancelable);
}
}
|
Constructor
|
identifier_name
|