file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
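The four `fim_type` values that appear in the rows below are `random_line_split`, `identifier_body`, `identifier_name`, and `conditional_block`. As a minimal sketch only (assuming the usual fill-in-the-middle convention that concatenating `prefix`, `middle`, and `suffix` reproduces the original file, which the rows below follow), a row can be modeled like this; the `FimRow` type is purely illustrative and not part of the dataset:

```rust
/// Illustrative only: one row of the table below, under the assumption that
/// `prefix + middle + suffix` reconstructs the original source file.
struct FimRow<'a> {
    file_name: &'a str,
    prefix: &'a str,
    middle: &'a str,
    suffix: &'a str,
    /// One of "random_line_split", "identifier_body",
    /// "identifier_name", "conditional_block".
    fim_type: &'a str,
}

impl<'a> FimRow<'a> {
    /// Rebuild the full file contents from the three text columns.
    fn reconstruct(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }
}
```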
restyle_damage.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The restyle damage is a hint that tells layout which kind of operations may
//! be needed in presence of incremental style changes.
#![deny(missing_docs)]
use computed_values::display;
use heapsize::HeapSizeOf;
use matching::{StyleChange, StyleDifference};
use properties::ComputedValues;
use std::fmt;
bitflags! {
#[doc = "Individual layout actions that may be necessary after restyling."]
pub flags ServoRestyleDamage: u8 {
#[doc = "Repaint the node itself."]
#[doc = "Currently unused; need to decide how this propagates."]
const REPAINT = 0x01,
#[doc = "The stacking-context-relative position of this node or its descendants has \
changed."]
#[doc = "Propagates both up and down the flow tree."]
const REPOSITION = 0x02,
#[doc = "Recompute the overflow regions (bounding box of object and all descendants)."]
#[doc = "Propagates down the flow tree because the computation is bottom-up."]
const STORE_OVERFLOW = 0x04,
#[doc = "Recompute intrinsic inline_sizes (minimum and preferred)."]
#[doc = "Propagates down the flow tree because the computation is"]
#[doc = "bottom-up."]
const BUBBLE_ISIZES = 0x08,
#[doc = "Recompute actual inline-sizes and block-sizes, only taking out-of-flow children \
into account. \
|
#[doc = "Recompute actual inline_sizes and block_sizes."]
#[doc = "Propagates up the flow tree because the computation is"]
#[doc = "top-down."]
const REFLOW = 0x20,
#[doc = "Re-resolve generated content. \
Propagates up the flow tree because the computation is inorder."]
const RESOLVE_GENERATED_CONTENT = 0x40,
#[doc = "The entire flow needs to be reconstructed."]
const RECONSTRUCT_FLOW = 0x80
}
}
impl HeapSizeOf for ServoRestyleDamage {
fn heap_size_of_children(&self) -> usize { 0 }
}
impl ServoRestyleDamage {
/// Compute the `StyleDifference` (including the appropriate restyle damage)
/// for a given style change between `old` and `new`.
pub fn compute_style_difference(
old: &ComputedValues,
new: &ComputedValues,
) -> StyleDifference {
let damage = compute_damage(old, new);
let change = if damage.is_empty() {
StyleChange::Unchanged
} else {
// FIXME(emilio): Differentiate between reset and inherited
// properties here, and set `reset_only` appropriately so the
// optimization to skip the cascade in those cases applies.
StyleChange::Changed { reset_only: false }
};
StyleDifference::new(damage, change)
}
/// Returns a bitmask that represents a flow that needs to be rebuilt and
/// reflowed.
///
/// FIXME(bholley): Do we ever actually need this? Shouldn't
/// RECONSTRUCT_FLOW imply everything else?
pub fn rebuild_and_reflow() -> ServoRestyleDamage {
REPAINT | REPOSITION | STORE_OVERFLOW | BUBBLE_ISIZES | REFLOW_OUT_OF_FLOW | REFLOW |
RECONSTRUCT_FLOW
}
/// Returns a bitmask indicating that the frame needs to be reconstructed.
pub fn reconstruct() -> ServoRestyleDamage {
RECONSTRUCT_FLOW
}
/// Supposing a flow has the given `position` property and this damage,
/// returns the damage that we should add to the *parent* of this flow.
pub fn damage_for_parent(self, child_is_absolutely_positioned: bool) -> ServoRestyleDamage {
if child_is_absolutely_positioned {
self & (REPAINT | REPOSITION | STORE_OVERFLOW | REFLOW_OUT_OF_FLOW |
RESOLVE_GENERATED_CONTENT)
} else {
self & (REPAINT | REPOSITION | STORE_OVERFLOW | REFLOW | REFLOW_OUT_OF_FLOW |
RESOLVE_GENERATED_CONTENT)
}
}
/// Supposing the *parent* of a flow with the given `position` property has
/// this damage, returns the damage that we should add to this flow.
pub fn damage_for_child(self,
parent_is_absolutely_positioned: bool,
child_is_absolutely_positioned: bool)
-> ServoRestyleDamage {
match (parent_is_absolutely_positioned, child_is_absolutely_positioned) {
(false, true) => {
// Absolute children are out-of-flow and therefore insulated from changes.
//
// FIXME(pcwalton): Au contraire, if the containing block dimensions change!
self & (REPAINT | REPOSITION)
}
(true, false) => {
// Changing the position of an absolutely-positioned block requires us to reflow
// its kids.
if self.contains(REFLOW_OUT_OF_FLOW) {
self | REFLOW
} else {
self
}
}
_ => {
// TODO(pcwalton): Take floatedness into account.
self & (REPAINT | REPOSITION | REFLOW)
}
}
}
}
impl Default for ServoRestyleDamage {
fn default() -> Self {
Self::empty()
}
}
impl fmt::Display for ServoRestyleDamage {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
let mut first_elem = true;
let to_iter =
[ (REPAINT, "Repaint")
, (REPOSITION, "Reposition")
, (STORE_OVERFLOW, "StoreOverflow")
, (BUBBLE_ISIZES, "BubbleISizes")
, (REFLOW_OUT_OF_FLOW, "ReflowOutOfFlow")
, (REFLOW, "Reflow")
, (RESOLVE_GENERATED_CONTENT, "ResolveGeneratedContent")
, (RECONSTRUCT_FLOW, "ReconstructFlow")
];
for &(damage, damage_str) in &to_iter {
if self.contains(damage) {
if !first_elem { write!(f, " | ")?; }
write!(f, "{}", damage_str)?;
first_elem = false;
}
}
if first_elem {
write!(f, "NoDamage")?;
}
Ok(())
}
}
// NB: We need the braces inside the RHS due to Rust #8012. This particular
// version of this macro might be safe anyway, but we want to avoid silent
// breakage on modifications.
macro_rules! add_if_not_equal(
($old:ident, $new:ident, $damage:ident,
[ $($effect:ident),* ], [ $($style_struct_getter:ident.$name:ident),* ]) => ({
if $( ($old.$style_struct_getter().$name != $new.$style_struct_getter().$name) )||* {
$damage.insert($($effect)|*);
true
} else {
false
}
})
);
fn compute_damage(old: &ComputedValues, new: &ComputedValues) -> ServoRestyleDamage {
let mut damage = ServoRestyleDamage::empty();
// This should check every CSS property, as enumerated in the fields of
// http://doc.servo.org/style/properties/struct.ComputedValues.html
// FIXME: Test somehow that every property is included.
add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, BUBBLE_ISIZES, REFLOW_OUT_OF_FLOW,
REFLOW, RECONSTRUCT_FLOW], [
get_box.clear, get_box.float, get_box.display, get_box.position, get_counters.content,
get_counters.counter_reset, get_counters.counter_increment,
get_inheritedbox._servo_under_display_none,
get_list.quotes, get_list.list_style_type,
// If these text or font properties change, we need to reconstruct the flow so that
// text shaping is re-run.
get_inheritedtext.letter_spacing, get_inheritedtext.text_rendering,
get_inheritedtext.text_transform, get_inheritedtext.word_spacing,
get_inheritedtext.overflow_wrap, get_inheritedtext.text_justify,
get_inheritedtext.white_space, get_inheritedtext.word_break, get_text.text_overflow,
get_font.font_family, get_font.font_style, get_font.font_variant_caps, get_font.font_weight,
get_font.font_size, get_font.font_stretch,
get_inheritedbox.direction, get_inheritedbox.writing_mode,
get_text.text_decoration_line, get_text.unicode_bidi,
get_inheritedtable.empty_cells, get_inheritedtable.caption_side,
get_column.column_width, get_column.column_count
]) || (new.get_box().display == display::T::inline &&
add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, BUBBLE_ISIZES,
REFLOW_OUT_OF_FLOW, REFLOW, RECONSTRUCT_FLOW], [
// For inline boxes only, border/padding styles are used in flow construction (to decide
// whether to create fragments for empty flows).
get_border.border_top_width, get_border.border_right_width,
get_border.border_bottom_width, get_border.border_left_width,
get_padding.padding_top, get_padding.padding_right,
get_padding.padding_bottom, get_padding.padding_left
])) || add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, BUBBLE_ISIZES,
REFLOW_OUT_OF_FLOW, REFLOW],
[get_border.border_top_width, get_border.border_right_width,
get_border.border_bottom_width, get_border.border_left_width,
get_margin.margin_top, get_margin.margin_right,
get_margin.margin_bottom, get_margin.margin_left,
get_padding.padding_top, get_padding.padding_right,
get_padding.padding_bottom, get_padding.padding_left,
get_position.width, get_position.height,
get_inheritedtext.line_height,
get_inheritedtext.text_align, get_inheritedtext.text_indent,
get_table.table_layout,
get_inheritedtable.border_collapse,
get_inheritedtable.border_spacing,
get_column.column_gap,
get_position.flex_direction,
get_position.flex_wrap,
get_position.justify_content,
get_position.align_items,
get_position.align_content,
get_position.order,
get_position.flex_basis,
get_position.flex_grow,
get_position.flex_shrink,
get_position.align_self
]) || add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, REFLOW_OUT_OF_FLOW], [
get_position.top, get_position.left,
get_position.right, get_position.bottom,
get_effects.opacity,
get_box.transform, get_box.transform_style, get_box.transform_origin,
get_box.perspective, get_box.perspective_origin
]) || add_if_not_equal!(old, new, damage,
[REPAINT], [
get_color.color, get_background.background_color,
get_background.background_image, get_background.background_position_x,
get_background.background_position_y, get_background.background_repeat,
get_background.background_attachment, get_background.background_clip,
get_background.background_origin, get_background.background_size,
get_border.border_top_color, get_border.border_right_color,
get_border.border_bottom_color, get_border.border_left_color,
get_border.border_top_style, get_border.border_right_style,
get_border.border_bottom_style, get_border.border_left_style,
get_border.border_top_left_radius, get_border.border_top_right_radius,
get_border.border_bottom_left_radius, get_border.border_bottom_right_radius,
get_position.z_index, get_box._servo_overflow_clip_box,
get_inheritedtext._servo_text_decorations_in_effect,
get_pointing.cursor, get_pointing.pointer_events,
get_effects.box_shadow, get_effects.clip, get_inheritedtext.text_shadow, get_effects.filter,
get_effects.mix_blend_mode, get_inheritedbox.image_rendering,
// Note: May require REFLOW et al. if `visibility: collapse` is implemented.
get_inheritedbox.visibility
]);
// Paint worklets may depend on custom properties,
// so if they have changed we should repaint.
if old.get_custom_properties() != new.get_custom_properties() {
damage.insert(REPAINT);
}
// If the layer requirements of this flow have changed due to the value
// of the transform, then reflow is required to rebuild the layers.
if old.transform_requires_layer() != new.transform_requires_layer() {
damage.insert(ServoRestyleDamage::rebuild_and_reflow());
}
damage
}
|
Propagates up the flow tree because the computation is top-down."]
const REFLOW_OUT_OF_FLOW = 0x10,
|
random_line_split
|
in-band-lifetimes.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(warnings)]
#![feature(in_band_lifetimes)]
fn foo(x: &'x u8) -> &'x u8 { x }
fn foo2(x: &'a u8, y: &u8) -> &'a u8 { x }
fn check_in_band_can_be_late_bound() {
let _: for<'x> fn(&'x u8, &u8) -> &'x u8 = foo2;
}
struct ForInherentNoParams;
impl ForInherentNoParams {
fn foo(x: &'a u32, y: &u32) -> &'a u32 { x }
}
struct X<'a>(&'a u8);
impl<'a> X<'a> {
fn inner(&self) -> &'a u8 {
self.0
}
fn same_lifetime_as_parameter(&mut self, x: &'a u8) {
self.0 = x;
}
}
impl X<'b> {
fn inner_2(&self) -> &'b u8 {
self.0
}
fn reference_already_introduced_in_band_from_method_with_explicit_binders<'a>(
&'b self, x: &'a u32
) {}
}
struct Y<T>(T);
impl Y<&'a u8> {
fn inner(&self) -> &'a u8 {
self.0
}
}
trait MyTrait<'a> {
fn my_lifetime(&self) -> &'a u8;
fn any_lifetime() -> &'b u8;
fn borrowed_lifetime(&'b self) -> &'b u8;
fn default_impl(&self, x: &'b u32, y: &u32) -> &'b u32 { x }
fn in_band_def_explicit_impl(&self, x: &'b u8);
}
impl MyTrait<'a> for Y<&'a u8> {
fn my_lifetime(&self) -> &'a u8 { self.0 }
fn any_lifetime() -> &'b u8 { &0 }
fn borrowed_lifetime(&'b self) -> &'b u8 { &*self.0 }
fn in_band_def_explicit_impl<'b>(&self, x: &'b u8) {}
}
fn test_hrtb_defined_lifetime_where<F>(_: F) where for<'a> F: Fn(&'a u8) {}
fn test_hrtb_defined_lifetime_polytraitref<F>(_: F) where F: for<'a> Fn(&'a u8) {}
fn reference_in_band_from_locals(x: &'test u32) -> &'test u32 {
let y: &'test u32 = x;
y
}
fn in_generics_in_band<T: MyTrait<'a>>(x: &T) {}
fn where_clause_in_band<T>(x: &T) where T: MyTrait<'a> {}
fn impl_trait_in_band(x: &impl MyTrait<'a>) {}
// Tests around using in-band lifetimes within existential traits.
trait FunkyTrait<'a> { }
impl<'a, T> FunkyTrait<'a> for T { }
fn existential_impl_trait_in_band_outlives(x: &'a u32) -> impl ::std::fmt::Debug + 'a {
x
}
fn existential_impl_trait_in_band_param(x: &'a u32) -> impl FunkyTrait<'a> {
x
}
fn existential_impl_trait_in_band_param_static(x: &'a u32) -> impl FunkyTrait<'static> + 'a {
x
}
fn existential_impl_trait_in_band_param_outlives(x: &'a u32) -> impl FunkyTrait<'a> + 'a {
x
}
fn existential_impl_trait_in_band_higher_ranked(x: &'a u32) -> impl for<'b> FunkyTrait<'b> + 'a {
x
}
fn main()
|
{}
|
identifier_body
|
|
in-band-lifetimes.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(warnings)]
#![feature(in_band_lifetimes)]
fn foo(x: &'x u8) -> &'x u8 { x }
fn foo2(x: &'a u8, y: &u8) -> &'a u8 { x }
fn check_in_band_can_be_late_bound() {
let _: for<'x> fn(&'x u8, &u8) -> &'x u8 = foo2;
}
struct ForInherentNoParams;
impl ForInherentNoParams {
fn
|
(x: &'a u32, y: &u32) -> &'a u32 { x }
}
struct X<'a>(&'a u8);
impl<'a> X<'a> {
fn inner(&self) -> &'a u8 {
self.0
}
fn same_lifetime_as_parameter(&mut self, x: &'a u8) {
self.0 = x;
}
}
impl X<'b> {
fn inner_2(&self) -> &'b u8 {
self.0
}
fn reference_already_introduced_in_band_from_method_with_explicit_binders<'a>(
&'b self, x: &'a u32
) {}
}
struct Y<T>(T);
impl Y<&'a u8> {
fn inner(&self) -> &'a u8 {
self.0
}
}
trait MyTrait<'a> {
fn my_lifetime(&self) -> &'a u8;
fn any_lifetime() -> &'b u8;
fn borrowed_lifetime(&'b self) -> &'b u8;
fn default_impl(&self, x: &'b u32, y: &u32) -> &'b u32 { x }
fn in_band_def_explicit_impl(&self, x: &'b u8);
}
impl MyTrait<'a> for Y<&'a u8> {
fn my_lifetime(&self) -> &'a u8 { self.0 }
fn any_lifetime() -> &'b u8 { &0 }
fn borrowed_lifetime(&'b self) -> &'b u8 { &*self.0 }
fn in_band_def_explicit_impl<'b>(&self, x: &'b u8) {}
}
fn test_hrtb_defined_lifetime_where<F>(_: F) where for<'a> F: Fn(&'a u8) {}
fn test_hrtb_defined_lifetime_polytraitref<F>(_: F) where F: for<'a> Fn(&'a u8) {}
fn reference_in_band_from_locals(x: &'test u32) -> &'test u32 {
let y: &'test u32 = x;
y
}
fn in_generics_in_band<T: MyTrait<'a>>(x: &T) {}
fn where_clause_in_band<T>(x: &T) where T: MyTrait<'a> {}
fn impl_trait_in_band(x: &impl MyTrait<'a>) {}
// Tests around using in-band lifetimes within existential traits.
trait FunkyTrait<'a> { }
impl<'a, T> FunkyTrait<'a> for T { }
fn existential_impl_trait_in_band_outlives(x: &'a u32) -> impl ::std::fmt::Debug + 'a {
x
}
fn existential_impl_trait_in_band_param(x: &'a u32) -> impl FunkyTrait<'a> {
x
}
fn existential_impl_trait_in_band_param_static(x: &'a u32) -> impl FunkyTrait<'static> + 'a {
x
}
fn existential_impl_trait_in_band_param_outlives(x: &'a u32) -> impl FunkyTrait<'a> + 'a {
x
}
fn existential_impl_trait_in_band_higher_ranked(x: &'a u32) -> impl for<'b> FunkyTrait<'b> + 'a {
x
}
fn main() {}
|
foo
|
identifier_name
|
in-band-lifetimes.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(warnings)]
#![feature(in_band_lifetimes)]
fn foo(x: &'x u8) -> &'x u8 { x }
fn foo2(x: &'a u8, y: &u8) -> &'a u8 { x }
fn check_in_band_can_be_late_bound() {
let _: for<'x> fn(&'x u8, &u8) -> &'x u8 = foo2;
}
struct ForInherentNoParams;
impl ForInherentNoParams {
fn foo(x: &'a u32, y: &u32) -> &'a u32 { x }
}
struct X<'a>(&'a u8);
impl<'a> X<'a> {
fn inner(&self) -> &'a u8 {
self.0
}
fn same_lifetime_as_parameter(&mut self, x: &'a u8) {
self.0 = x;
}
}
impl X<'b> {
fn inner_2(&self) -> &'b u8 {
self.0
}
fn reference_already_introduced_in_band_from_method_with_explicit_binders<'a>(
&'b self, x: &'a u32
) {}
}
struct Y<T>(T);
impl Y<&'a u8> {
fn inner(&self) -> &'a u8 {
self.0
}
}
trait MyTrait<'a> {
fn my_lifetime(&self) -> &'a u8;
fn any_lifetime() -> &'b u8;
fn borrowed_lifetime(&'b self) -> &'b u8;
fn default_impl(&self, x: &'b u32, y: &u32) -> &'b u32 { x }
fn in_band_def_explicit_impl(&self, x: &'b u8);
}
impl MyTrait<'a> for Y<&'a u8> {
fn my_lifetime(&self) -> &'a u8 { self.0 }
fn any_lifetime() -> &'b u8 { &0 }
fn borrowed_lifetime(&'b self) -> &'b u8 { &*self.0 }
fn in_band_def_explicit_impl<'b>(&self, x: &'b u8) {}
}
fn test_hrtb_defined_lifetime_where<F>(_: F) where for<'a> F: Fn(&'a u8) {}
fn test_hrtb_defined_lifetime_polytraitref<F>(_: F) where F: for<'a> Fn(&'a u8) {}
fn reference_in_band_from_locals(x: &'test u32) -> &'test u32 {
let y: &'test u32 = x;
y
}
fn in_generics_in_band<T: MyTrait<'a>>(x: &T) {}
fn where_clause_in_band<T>(x: &T) where T: MyTrait<'a> {}
fn impl_trait_in_band(x: &impl MyTrait<'a>) {}
// Tests around using in-band lifetimes within existential traits.
trait FunkyTrait<'a> { }
impl<'a, T> FunkyTrait<'a> for T { }
fn existential_impl_trait_in_band_outlives(x: &'a u32) -> impl ::std::fmt::Debug + 'a {
x
}
fn existential_impl_trait_in_band_param(x: &'a u32) -> impl FunkyTrait<'a> {
x
}
fn existential_impl_trait_in_band_param_static(x: &'a u32) -> impl FunkyTrait<'static> + 'a {
x
}
fn existential_impl_trait_in_band_param_outlives(x: &'a u32) -> impl FunkyTrait<'a> + 'a {
x
}
fn existential_impl_trait_in_band_higher_ranked(x: &'a u32) -> impl for<'b> FunkyTrait<'b> + 'a {
x
}
fn main() {}
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
random_line_split
|
struct_change_field_name.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test incremental compilation tracking where we change field names
// in between revisions (hashing should be stable).
// revisions:rpass1 cfail2
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#[cfg(rpass1)]
pub struct X {
pub x: u32
}
#[cfg(cfail2)]
pub struct X {
pub y: u32
}
pub struct EmbedX {
x: X
}
pub struct Y {
pub y: char
}
#[rustc_dirty(label="TypeckTables", cfg="cfail2")]
pub fn use_X() -> u32 {
let x: X = X { x: 22 };
//[cfail2]~^ ERROR struct `X` has no field named `x`
x.x as u32
//[cfail2]~^ ERROR no field `x` on type `X`
}
#[rustc_dirty(label="TypeckTables", cfg="cfail2")]
pub fn use_EmbedX(embed: EmbedX) -> u32
|
#[rustc_clean(label="TypeckTables", cfg="cfail2")]
pub fn use_Y() {
let x: Y = Y { y: 'c' };
}
pub fn main() { }
|
{
embed.x.x as u32
//[cfail2]~^ ERROR no field `x` on type `X`
}
|
identifier_body
|
struct_change_field_name.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test incremental compilation tracking where we change field names
// in between revisions (hashing should be stable).
// revisions:rpass1 cfail2
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#[cfg(rpass1)]
pub struct X {
pub x: u32
}
#[cfg(cfail2)]
pub struct
|
{
pub y: u32
}
pub struct EmbedX {
x: X
}
pub struct Y {
pub y: char
}
#[rustc_dirty(label="TypeckTables", cfg="cfail2")]
pub fn use_X() -> u32 {
let x: X = X { x: 22 };
//[cfail2]~^ ERROR struct `X` has no field named `x`
x.x as u32
//[cfail2]~^ ERROR no field `x` on type `X`
}
#[rustc_dirty(label="TypeckTables", cfg="cfail2")]
pub fn use_EmbedX(embed: EmbedX) -> u32 {
embed.x.x as u32
//[cfail2]~^ ERROR no field `x` on type `X`
}
#[rustc_clean(label="TypeckTables", cfg="cfail2")]
pub fn use_Y() {
let x: Y = Y { y: 'c' };
}
pub fn main() { }
|
X
|
identifier_name
|
struct_change_field_name.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test incremental compilation tracking where we change field names
// in between revisions (hashing should be stable).
// revisions:rpass1 cfail2
// compile-flags: -Z query-dep-graph
#![feature(rustc_attrs)]
#[cfg(rpass1)]
pub struct X {
pub x: u32
}
#[cfg(cfail2)]
pub struct X {
pub y: u32
}
pub struct EmbedX {
x: X
}
pub struct Y {
pub y: char
}
#[rustc_dirty(label="TypeckTables", cfg="cfail2")]
pub fn use_X() -> u32 {
let x: X = X { x: 22 };
//[cfail2]~^ ERROR struct `X` has no field named `x`
x.x as u32
//[cfail2]~^ ERROR no field `x` on type `X`
}
|
embed.x.x as u32
//[cfail2]~^ ERROR no field `x` on type `X`
}
#[rustc_clean(label="TypeckTables", cfg="cfail2")]
pub fn use_Y() {
let x: Y = Y { y: 'c' };
}
pub fn main() { }
|
#[rustc_dirty(label="TypeckTables", cfg="cfail2")]
pub fn use_EmbedX(embed: EmbedX) -> u32 {
|
random_line_split
|
task-comm-3.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(std_misc)]
// no-pretty-expanded FIXME #15189
use std::thread;
use std::sync::mpsc::{channel, Sender};
pub fn main() { println!("===== WITHOUT THREADS ====="); test00(); }
fn test00_start(ch: &Sender<isize>, message: isize, count: isize)
|
fn test00() {
let number_of_tasks: isize = 16;
let number_of_messages: isize = 4;
println!("Creating tasks");
let (tx, rx) = channel();
let mut i: isize = 0;
// Create and spawn threads...
let mut results = Vec::new();
while i < number_of_tasks {
let tx = tx.clone();
results.push(thread::spawn({
let i = i;
move|| {
test00_start(&tx, i, number_of_messages)
}
}));
i = i + 1;
}
// Read from spawned threads...
let mut sum = 0;
for _r in &results {
i = 0;
while i < number_of_messages {
let value = rx.recv().unwrap();
sum += value;
i = i + 1;
}
}
// Join spawned threads...
for r in results { r.join(); }
println!("Completed: Final number is: ");
println!("{}", sum);
// assert (sum == (((number_of_threads * (number_of_threads - 1)) / 2) *
// number_of_messages));
assert_eq!(sum, 480);
}
|
{
println!("Starting test00_start");
let mut i: isize = 0;
while i < count {
println!("Sending Message");
ch.send(message + 0).unwrap();
i = i + 1;
}
println!("Ending test00_start");
}
|
identifier_body
|
task-comm-3.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(std_misc)]
// no-pretty-expanded FIXME #15189
use std::thread;
use std::sync::mpsc::{channel, Sender};
pub fn main() { println!("===== WITHOUT THREADS ====="); test00(); }
fn test00_start(ch: &Sender<isize>, message: isize, count: isize) {
println!("Starting test00_start");
let mut i: isize = 0;
while i < count {
println!("Sending Message");
ch.send(message + 0).unwrap();
i = i + 1;
}
println!("Ending test00_start");
}
fn test00() {
let number_of_tasks: isize = 16;
let number_of_messages: isize = 4;
println!("Creating tasks");
let (tx, rx) = channel();
let mut i: isize = 0;
// Create and spawn threads...
let mut results = Vec::new();
while i < number_of_tasks {
let tx = tx.clone();
results.push(thread::spawn({
let i = i;
move|| {
test00_start(&tx, i, number_of_messages)
}
}));
i = i + 1;
}
// Read from spawned threads...
let mut sum = 0;
for _r in &results {
|
let value = rx.recv().unwrap();
sum += value;
i = i + 1;
}
}
// Join spawned threads...
for r in results { r.join(); }
println!("Completed: Final number is: ");
println!("{}", sum);
// assert (sum == (((number_of_threads * (number_of_threads - 1)) / 2) *
// number_of_messages));
assert_eq!(sum, 480);
}
|
i = 0;
while i < number_of_messages {
|
random_line_split
|
task-comm-3.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(std_misc)]
// no-pretty-expanded FIXME #15189
use std::thread;
use std::sync::mpsc::{channel, Sender};
pub fn main() { println!("===== WITHOUT THREADS ====="); test00(); }
fn test00_start(ch: &Sender<isize>, message: isize, count: isize) {
println!("Starting test00_start");
let mut i: isize = 0;
while i < count {
println!("Sending Message");
ch.send(message + 0).unwrap();
i = i + 1;
}
println!("Ending test00_start");
}
fn
|
() {
let number_of_tasks: isize = 16;
let number_of_messages: isize = 4;
println!("Creating tasks");
let (tx, rx) = channel();
let mut i: isize = 0;
// Create and spawn threads...
let mut results = Vec::new();
while i < number_of_tasks {
let tx = tx.clone();
results.push(thread::spawn({
let i = i;
move|| {
test00_start(&tx, i, number_of_messages)
}
}));
i = i + 1;
}
// Read from spawned threads...
let mut sum = 0;
for _r in &results {
i = 0;
while i < number_of_messages {
let value = rx.recv().unwrap();
sum += value;
i = i + 1;
}
}
// Join spawned threads...
for r in results { r.join(); }
println!("Completed: Final number is: ");
println!("{}", sum);
// assert (sum == (((number_of_threads * (number_of_threads - 1)) / 2) *
// number_of_messages));
assert_eq!(sum, 480);
}
|
test00
|
identifier_name
|
namespace.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use gecko_bindings::structs::nsIAtom;
use std::borrow::Borrow;
use std::fmt;
use std::ops::Deref;
use string_cache::{Atom, WeakAtom};
#[macro_export]
macro_rules! ns {
() => { $crate::string_cache::Namespace(atom!("")) }
}
|
#[derive(Hash)]
pub struct WeakNamespace(WeakAtom);
impl Deref for Namespace {
type Target = WeakNamespace;
#[inline]
fn deref(&self) -> &WeakNamespace {
let weak: *const WeakAtom = &*self.0;
unsafe {
&*(weak as *const WeakNamespace)
}
}
}
impl fmt::Display for Namespace {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(w)
}
}
impl Borrow<WeakNamespace> for Namespace {
#[inline]
fn borrow(&self) -> &WeakNamespace {
self
}
}
impl WeakNamespace {
#[inline]
pub unsafe fn new<'a>(atom: *mut nsIAtom) -> &'a Self {
&*(atom as *const WeakNamespace)
}
#[inline]
pub fn clone(&self) -> Namespace {
Namespace(self.0.clone())
}
}
impl Eq for WeakNamespace {}
impl PartialEq for WeakNamespace {
#[inline]
fn eq(&self, other: &Self) -> bool {
let weak: *const WeakNamespace = self;
let other: *const WeakNamespace = other;
weak == other
}
}
|
#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
pub struct Namespace(pub Atom);
|
random_line_split
|
namespace.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use gecko_bindings::structs::nsIAtom;
use std::borrow::Borrow;
use std::fmt;
use std::ops::Deref;
use string_cache::{Atom, WeakAtom};
#[macro_export]
macro_rules! ns {
() => { $crate::string_cache::Namespace(atom!("")) }
}
#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
pub struct Namespace(pub Atom);
#[derive(Hash)]
pub struct WeakNamespace(WeakAtom);
impl Deref for Namespace {
type Target = WeakNamespace;
#[inline]
fn deref(&self) -> &WeakNamespace {
let weak: *const WeakAtom = &*self.0;
unsafe {
&*(weak as *const WeakNamespace)
}
}
}
impl fmt::Display for Namespace {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result
|
}
impl Borrow<WeakNamespace> for Namespace {
#[inline]
fn borrow(&self) -> &WeakNamespace {
self
}
}
impl WeakNamespace {
#[inline]
pub unsafe fn new<'a>(atom: *mut nsIAtom) -> &'a Self {
&*(atom as *const WeakNamespace)
}
#[inline]
pub fn clone(&self) -> Namespace {
Namespace(self.0.clone())
}
}
impl Eq for WeakNamespace {}
impl PartialEq for WeakNamespace {
#[inline]
fn eq(&self, other: &Self) -> bool {
let weak: *const WeakNamespace = self;
let other: *const WeakNamespace = other;
weak == other
}
}
|
{
self.0.fmt(w)
}
|
identifier_body
|
namespace.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use gecko_bindings::structs::nsIAtom;
use std::borrow::Borrow;
use std::fmt;
use std::ops::Deref;
use string_cache::{Atom, WeakAtom};
#[macro_export]
macro_rules! ns {
() => { $crate::string_cache::Namespace(atom!("")) }
}
#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
pub struct Namespace(pub Atom);
#[derive(Hash)]
pub struct
|
(WeakAtom);
impl Deref for Namespace {
type Target = WeakNamespace;
#[inline]
fn deref(&self) -> &WeakNamespace {
let weak: *const WeakAtom = &*self.0;
unsafe {
&*(weak as *const WeakNamespace)
}
}
}
impl fmt::Display for Namespace {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(w)
}
}
impl Borrow<WeakNamespace> for Namespace {
#[inline]
fn borrow(&self) -> &WeakNamespace {
self
}
}
impl WeakNamespace {
#[inline]
pub unsafe fn new<'a>(atom: *mut nsIAtom) -> &'a Self {
&*(atom as *const WeakNamespace)
}
#[inline]
pub fn clone(&self) -> Namespace {
Namespace(self.0.clone())
}
}
impl Eq for WeakNamespace {}
impl PartialEq for WeakNamespace {
#[inline]
fn eq(&self, other: &Self) -> bool {
let weak: *const WeakNamespace = self;
let other: *const WeakNamespace = other;
weak == other
}
}
|
WeakNamespace
|
identifier_name
|
header_struct.rs
|
// Copyright 2016-17 Alexander Reece
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::io;
use common::Class;
pub struct HeaderStructWriter<'a> {
class: &'a Class,
}
impl<'a> HeaderStructWriter<'a> {
pub fn new(class: &'a Class) -> Self {
HeaderStructWriter {
class: class
}
}
pub fn write_to<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
if self.class.fields().is_empty() {
return Ok(());
}
try!(writeln!(writer, "\n// Generated by primalgen::spec::frame_payload_enum::ClassEnumWriter"));
try!(self.write_struct(writer));
try!(self.write_inherent_impl(writer));
Ok(())
}
fn write_struct<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
try!(writeln!(writer, "#[derive(Debug, Default)]"));
let lifetimes = if self.class.has_field_lifetimes() { "<'a>" } else { "" };
try!(writeln!(writer, "pub struct Properties{} {{", lifetimes));
for field in self.class.fields() {
try!(writeln!(writer, "{}: Option<{}>,", field.var_name(), field.ty().cow_definition("a")));
}
try!(writeln!(writer, "}} // struct Properties"));
Ok(())
}
fn write_inherent_impl<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
let lifetimes = if self.class.has_field_lifetimes() { "<'a>" } else { "" };
try!(writeln!(writer, "\nimpl{0} Properties{0} {{", lifetimes));
try!(self.write_flag_bits(writer));
try!(self.write_getters(writer));
try!(writeln!(writer, "}} // impl Properties"));
Ok(())
}
fn write_flag_bits<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
try!(writeln!(writer, "fn flag_bits(&self) -> ::bit_vec::BitVec {{"));
let padding_bytes = if self.class.fields().len() % 8 != 0 { 1 } else { 0 };
let num_bits = (self.class.fields().len() / 8 + padding_bytes) * 8;
try!(writeln!(writer, "let mut flags = ::bit_vec::BitVec::from_elem({}, false);", num_bits));
let mut bit_num = 0;
for field in self.class.fields() {
try!(writeln!(
writer,
"flags.set({}, self.{}.is_some());",
bit_num,
field.var_name()
));
bit_num += 1;
}
try!(writeln!(writer, "flags"));
try!(writeln!(writer, "}} // fn flag_bits()"));
Ok(())
}
fn write_getters<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
|
}
|
{
if self.class.fields().is_empty() {
return Ok(());
}
try!(writeln!(writer, "impl_properties! {{"));
for field in self.class.fields() {
try!(write!(writer, "({0}, {0}_mut, set_{0}, take_{0}) -> ", field.var_name()));
let ty = field.ty().borrowed_type();
try!(match (field.ty().is_copy(), field.ty().is_owned()) {
(true, _) => writeln!(writer, "Option< {} >,", ty),
(_, true) => writeln!(writer, "Option< &{} >,", ty),
_ => writeln!(writer, "Option< Cow<{}> >,", ty)
});
}
try!(writeln!(writer, "}} // impl_properties"));
Ok(())
}
|
identifier_body
|
header_struct.rs
|
// Copyright 2016-17 Alexander Reece
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::io;
use common::Class;
pub struct HeaderStructWriter<'a> {
class: &'a Class,
}
impl<'a> HeaderStructWriter<'a> {
pub fn new(class: &'a Class) -> Self {
HeaderStructWriter {
class: class
}
}
pub fn write_to<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
if self.class.fields().is_empty() {
return Ok(());
}
try!(writeln!(writer, "\n// Generated by primalgen::spec::frame_payload_enum::ClassEnumWriter"));
try!(self.write_struct(writer));
try!(self.write_inherent_impl(writer));
Ok(())
}
fn write_struct<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
try!(writeln!(writer, "#[derive(Debug, Default)]"));
let lifetimes = if self.class.has_field_lifetimes() { "<'a>" } else { "" };
try!(writeln!(writer, "pub struct Properties{} {{", lifetimes));
for field in self.class.fields() {
try!(writeln!(writer, "{}: Option<{}>,", field.var_name(), field.ty().cow_definition("a")));
}
try!(writeln!(writer, "}} // struct Properties"));
Ok(())
}
fn write_inherent_impl<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
let lifetimes = if self.class.has_field_lifetimes() { "<'a>" } else { "" };
try!(writeln!(writer, "\nimpl{0} Properties{0} {{", lifetimes));
try!(self.write_flag_bits(writer));
try!(self.write_getters(writer));
try!(writeln!(writer, "}} // impl Properties"));
Ok(())
}
fn write_flag_bits<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
try!(writeln!(writer, "fn flag_bits(&self) -> ::bit_vec::BitVec {{"));
let padding_bytes = if self.class.fields().len() % 8 != 0 { 1 } else { 0 };
let num_bits = (self.class.fields().len() / 8 + padding_bytes) * 8;
try!(writeln!(writer, "let mut flags = ::bit_vec::BitVec::from_elem({}, false);", num_bits));
let mut bit_num = 0;
for field in self.class.fields() {
try!(writeln!(
writer,
"flags.set({}, self.{}.is_some());",
bit_num,
field.var_name()
));
bit_num += 1;
}
try!(writeln!(writer, "flags"));
try!(writeln!(writer, "}} // fn flag_bits()"));
Ok(())
}
fn write_getters<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
if self.class.fields().is_empty()
|
try!(writeln!(writer, "impl_properties! {{"));
for field in self.class.fields() {
try!(write!(writer, "({0}, {0}_mut, set_{0}, take_{0}) -> ", field.var_name()));
let ty = field.ty().borrowed_type();
try!(match (field.ty().is_copy(), field.ty().is_owned()) {
(true, _) => writeln!(writer, "Option< {} >,", ty),
(_, true) => writeln!(writer, "Option< &{} >,", ty),
_ => writeln!(writer, "Option< Cow<{}> >,", ty)
});
}
try!(writeln!(writer, "}} // impl_properties"));
Ok(())
}
}
|
{
return Ok(());
}
|
conditional_block
|
header_struct.rs
|
// Copyright 2016-17 Alexander Reece
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::io;
use common::Class;
pub struct
|
<'a> {
class: &'a Class,
}
impl<'a> HeaderStructWriter<'a> {
pub fn new(class: &'a Class) -> Self {
HeaderStructWriter {
class: class
}
}
pub fn write_to<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
if self.class.fields().is_empty() {
return Ok(());
}
try!(writeln!(writer, "\n// Generated by primalgen::spec::frame_payload_enum::ClassEnumWriter"));
try!(self.write_struct(writer));
try!(self.write_inherent_impl(writer));
Ok(())
}
fn write_struct<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
try!(writeln!(writer, "#[derive(Debug, Default)]"));
let lifetimes = if self.class.has_field_lifetimes() { "<'a>" } else { "" };
try!(writeln!(writer, "pub struct Properties{} {{", lifetimes));
for field in self.class.fields() {
try!(writeln!(writer, "{}: Option<{}>,", field.var_name(), field.ty().cow_definition("a")));
}
try!(writeln!(writer, "}} // struct Properties"));
Ok(())
}
fn write_inherent_impl<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
let lifetimes = if self.class.has_field_lifetimes() { "<'a>" } else { "" };
try!(writeln!(writer, "\nimpl{0} Properties{0} {{", lifetimes));
try!(self.write_flag_bits(writer));
try!(self.write_getters(writer));
try!(writeln!(writer, "}} // impl Properties"));
Ok(())
}
fn write_flag_bits<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
try!(writeln!(writer, "fn flag_bits(&self) -> ::bit_vec::BitVec {{"));
let padding_bytes = if self.class.fields().len() % 8 != 0 { 1 } else { 0 };
let num_bits = (self.class.fields().len() / 8 + padding_bytes) * 8;
try!(writeln!(writer, "let mut flags = ::bit_vec::BitVec::from_elem({}, false);", num_bits));
let mut bit_num = 0;
for field in self.class.fields() {
try!(writeln!(
writer,
"flags.set({}, self.{}.is_some());",
bit_num,
field.var_name()
));
bit_num += 1;
}
try!(writeln!(writer, "flags"));
try!(writeln!(writer, "}} // fn flag_bits()"));
Ok(())
}
fn write_getters<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
if self.class.fields().is_empty() {
return Ok(());
}
try!(writeln!(writer, "impl_properties! {{"));
for field in self.class.fields() {
try!(write!(writer, "({0}, {0}_mut, set_{0}, take_{0}) -> ", field.var_name()));
let ty = field.ty().borrowed_type();
try!(match (field.ty().is_copy(), field.ty().is_owned()) {
(true, _) => writeln!(writer, "Option< {} >,", ty),
(_, true) => writeln!(writer, "Option< &{} >,", ty),
_ => writeln!(writer, "Option< Cow<{}> >,", ty)
});
}
try!(writeln!(writer, "}} // impl_properties"));
Ok(())
}
}
|
HeaderStructWriter
|
identifier_name
|
header_struct.rs
|
// Copyright 2016-17 Alexander Reece
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::io;
use common::Class;
pub struct HeaderStructWriter<'a> {
class: &'a Class,
}
impl<'a> HeaderStructWriter<'a> {
pub fn new(class: &'a Class) -> Self {
HeaderStructWriter {
class: class
}
}
pub fn write_to<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
if self.class.fields().is_empty() {
return Ok(());
}
try!(writeln!(writer, "\n// Generated by primalgen::spec::frame_payload_enum::ClassEnumWriter"));
try!(self.write_struct(writer));
try!(self.write_inherent_impl(writer));
Ok(())
}
fn write_struct<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
try!(writeln!(writer, "#[derive(Debug, Default)]"));
let lifetimes = if self.class.has_field_lifetimes() { "<'a>" } else { "" };
try!(writeln!(writer, "pub struct Properties{} {{", lifetimes));
for field in self.class.fields() {
try!(writeln!(writer, "{}: Option<{}>,", field.var_name(), field.ty().cow_definition("a")));
}
try!(writeln!(writer, "}} // struct Properties"));
Ok(())
}
fn write_inherent_impl<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
let lifetimes = if self.class.has_field_lifetimes() { "<'a>" } else { "" };
try!(writeln!(writer, "\nimpl{0} Properties{0} {{", lifetimes));
try!(self.write_flag_bits(writer));
try!(self.write_getters(writer));
try!(writeln!(writer, "}} // impl Properties"));
Ok(())
}
fn write_flag_bits<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
try!(writeln!(writer, "fn flag_bits(&self) -> ::bit_vec::BitVec {{"));
let padding_bytes = if self.class.fields().len() % 8 != 0 { 1 } else { 0 };
let num_bits = (self.class.fields().len() / 8 + padding_bytes) * 8;
try!(writeln!(writer, "let mut flags = ::bit_vec::BitVec::from_elem({}, false);", num_bits));
let mut bit_num = 0;
for field in self.class.fields() {
try!(writeln!(
writer,
"flags.set({}, self.{}.is_some());",
bit_num,
field.var_name()
));
bit_num += 1;
}
try!(writeln!(writer, "flags"));
try!(writeln!(writer, "}} // fn flag_bits()"));
Ok(())
}
fn write_getters<W>(&self, writer: &mut W) -> io::Result<()>
where W: io::Write
{
if self.class.fields().is_empty() {
return Ok(());
}
try!(writeln!(writer, "impl_properties! {{"));
for field in self.class.fields() {
try!(write!(writer, "({0}, {0}_mut, set_{0}, take_{0}) -> ", field.var_name()));
let ty = field.ty().borrowed_type();
|
(_, true) => writeln!(writer, "Option< &{} >,", ty),
_ => writeln!(writer, "Option< Cow<{}> >,", ty)
});
}
try!(writeln!(writer, "}} // impl_properties"));
Ok(())
}
}
|
try!(match (field.ty().is_copy(), field.ty().is_owned()) {
(true, _) => writeln!(writer, "Option< {} >,", ty),
|
random_line_split
|
io.rs
|
use {EventSet, Selector, PollOpt, Token};
use bytes::{Buf, MutBuf};
// Re-export the io::Result / Error types for convenience
pub use std::io::{Read, Write, Result, Error};
/// A value that may be registered with an `EventLoop`
pub trait Evented {
#[doc(hidden)]
fn register(&self, selector: &mut Selector, token: Token, interest: EventSet, opts: PollOpt) -> Result<()>;
#[doc(hidden)]
fn reregister(&self, selector: &mut Selector, token: Token, interest: EventSet, opts: PollOpt) -> Result<()>;
#[doc(hidden)]
fn deregister(&self, selector: &mut Selector) -> Result<()>;
}
pub trait TryRead {
fn try_read_buf<B: MutBuf>(&mut self, buf: &mut B) -> Result<Option<usize>>
where Self : Sized
{
// Reads the length of the slice supplied by buf.mut_bytes into the buffer
// This is not guaranteed to consume an entire datagram or segment.
// If your protocol is msg based (instead of continuous stream) you should
// ensure that your buffer is large enough to hold an entire segment (1532 bytes if not jumbo
// frames)
let res = self.try_read(buf.mut_bytes());
if let Ok(Some(cnt)) = res
|
res
}
fn try_read(&mut self, buf: &mut [u8]) -> Result<Option<usize>>;
}
pub trait TryWrite {
fn try_write_buf<B: Buf>(&mut self, buf: &mut B) -> Result<Option<usize>>
where Self : Sized
{
let res = self.try_write(buf.bytes());
if let Ok(Some(cnt)) = res {
buf.advance(cnt);
}
res
}
fn try_write(&mut self, buf: &[u8]) -> Result<Option<usize>>;
}
impl<T: Read> TryRead for T {
fn try_read(&mut self, dst: &mut [u8]) -> Result<Option<usize>> {
self.read(dst)
.map(|cnt| Some(cnt))
.or_else(to_non_block)
}
}
impl<T: Write> TryWrite for T {
fn try_write(&mut self, src: &[u8]) -> Result<Option<usize>> {
self.write(src)
.map(|cnt| Some(cnt))
.or_else(to_non_block)
}
}
pub trait TryAccept {
type Output;
fn accept(&self) -> Result<Option<Self::Output>>;
}
/*
*
* ===== Helpers =====
*
*/
pub fn to_non_block<T>(err: Error) -> Result<Option<T>> {
use std::io::ErrorKind::WouldBlock;
if let WouldBlock = err.kind() {
return Ok(None);
}
Err(err)
}
|
{
buf.advance(cnt);
}
|
conditional_block
|
io.rs
|
use {EventSet, Selector, PollOpt, Token};
use bytes::{Buf, MutBuf};
// Re-export the io::Result / Error types for convenience
pub use std::io::{Read, Write, Result, Error};
/// A value that may be registered with an `EventLoop`
pub trait Evented {
#[doc(hidden)]
fn register(&self, selector: &mut Selector, token: Token, interest: EventSet, opts: PollOpt) -> Result<()>;
#[doc(hidden)]
fn reregister(&self, selector: &mut Selector, token: Token, interest: EventSet, opts: PollOpt) -> Result<()>;
#[doc(hidden)]
fn deregister(&self, selector: &mut Selector) -> Result<()>;
}
pub trait TryRead {
fn
|
<B: MutBuf>(&mut self, buf: &mut B) -> Result<Option<usize>>
where Self : Sized
{
// Reads the length of the slice supplied by buf.mut_bytes into the buffer
// This is not guaranteed to consume an entire datagram or segment.
// If your protocol is msg based (instead of continuous stream) you should
// ensure that your buffer is large enough to hold an entire segment (1532 bytes if not jumbo
// frames)
let res = self.try_read(buf.mut_bytes());
if let Ok(Some(cnt)) = res {
buf.advance(cnt);
}
res
}
fn try_read(&mut self, buf: &mut [u8]) -> Result<Option<usize>>;
}
pub trait TryWrite {
fn try_write_buf<B: Buf>(&mut self, buf: &mut B) -> Result<Option<usize>>
where Self : Sized
{
let res = self.try_write(buf.bytes());
if let Ok(Some(cnt)) = res {
buf.advance(cnt);
}
res
}
fn try_write(&mut self, buf: &[u8]) -> Result<Option<usize>>;
}
impl<T: Read> TryRead for T {
fn try_read(&mut self, dst: &mut [u8]) -> Result<Option<usize>> {
self.read(dst)
.map(|cnt| Some(cnt))
.or_else(to_non_block)
}
}
impl<T: Write> TryWrite for T {
fn try_write(&mut self, src: &[u8]) -> Result<Option<usize>> {
self.write(src)
.map(|cnt| Some(cnt))
.or_else(to_non_block)
}
}
pub trait TryAccept {
type Output;
fn accept(&self) -> Result<Option<Self::Output>>;
}
/*
*
* ===== Helpers =====
*
*/
pub fn to_non_block<T>(err: Error) -> Result<Option<T>> {
use std::io::ErrorKind::WouldBlock;
if let WouldBlock = err.kind() {
return Ok(None);
}
Err(err)
}
|
try_read_buf
|
identifier_name
|
io.rs
|
use {EventSet, Selector, PollOpt, Token};
use bytes::{Buf, MutBuf};
// Re-export the io::Result / Error types for convenience
pub use std::io::{Read, Write, Result, Error};
/// A value that may be registered with an `EventLoop`
pub trait Evented {
#[doc(hidden)]
fn register(&self, selector: &mut Selector, token: Token, interest: EventSet, opts: PollOpt) -> Result<()>;
#[doc(hidden)]
fn reregister(&self, selector: &mut Selector, token: Token, interest: EventSet, opts: PollOpt) -> Result<()>;
#[doc(hidden)]
fn deregister(&self, selector: &mut Selector) -> Result<()>;
}
pub trait TryRead {
fn try_read_buf<B: MutBuf>(&mut self, buf: &mut B) -> Result<Option<usize>>
where Self : Sized
{
// Reads the length of the slice supplied by buf.mut_bytes into the buffer
// This is not guaranteed to consume an entire datagram or segment.
// If your protocol is msg based (instead of continuous stream) you should
// ensure that your buffer is large enough to hold an entire segment (1532 bytes if not jumbo
// frames)
let res = self.try_read(buf.mut_bytes());
if let Ok(Some(cnt)) = res {
buf.advance(cnt);
}
res
}
fn try_read(&mut self, buf: &mut [u8]) -> Result<Option<usize>>;
}
pub trait TryWrite {
fn try_write_buf<B: Buf>(&mut self, buf: &mut B) -> Result<Option<usize>>
where Self : Sized
{
let res = self.try_write(buf.bytes());
if let Ok(Some(cnt)) = res {
buf.advance(cnt);
}
res
}
fn try_write(&mut self, buf: &[u8]) -> Result<Option<usize>>;
}
impl<T: Read> TryRead for T {
fn try_read(&mut self, dst: &mut [u8]) -> Result<Option<usize>>
|
}
impl<T: Write> TryWrite for T {
fn try_write(&mut self, src: &[u8]) -> Result<Option<usize>> {
self.write(src)
.map(|cnt| Some(cnt))
.or_else(to_non_block)
}
}
pub trait TryAccept {
type Output;
fn accept(&self) -> Result<Option<Self::Output>>;
}
/*
*
* ===== Helpers =====
*
*/
pub fn to_non_block<T>(err: Error) -> Result<Option<T>> {
use std::io::ErrorKind::WouldBlock;
if let WouldBlock = err.kind() {
return Ok(None);
}
Err(err)
}
|
{
self.read(dst)
.map(|cnt| Some(cnt))
.or_else(to_non_block)
}
|
identifier_body
|
io.rs
|
use {EventSet, Selector, PollOpt, Token};
use bytes::{Buf, MutBuf};
// Re-export the io::Result / Error types for convenience
pub use std::io::{Read, Write, Result, Error};
/// A value that may be registered with an `EventLoop`
pub trait Evented {
#[doc(hidden)]
fn register(&self, selector: &mut Selector, token: Token, interest: EventSet, opts: PollOpt) -> Result<()>;
#[doc(hidden)]
fn reregister(&self, selector: &mut Selector, token: Token, interest: EventSet, opts: PollOpt) -> Result<()>;
#[doc(hidden)]
fn deregister(&self, selector: &mut Selector) -> Result<()>;
|
where Self : Sized
{
// Reads the length of the slice supplied by buf.mut_bytes into the buffer
// This is not guaranteed to consume an entire datagram or segment.
// If your protocol is msg based (instead of continuous stream) you should
// ensure that your buffer is large enough to hold an entire segment (1532 bytes if not jumbo
// frames)
let res = self.try_read(buf.mut_bytes());
if let Ok(Some(cnt)) = res {
buf.advance(cnt);
}
res
}
fn try_read(&mut self, buf: &mut [u8]) -> Result<Option<usize>>;
}
pub trait TryWrite {
fn try_write_buf<B: Buf>(&mut self, buf: &mut B) -> Result<Option<usize>>
where Self : Sized
{
let res = self.try_write(buf.bytes());
if let Ok(Some(cnt)) = res {
buf.advance(cnt);
}
res
}
fn try_write(&mut self, buf: &[u8]) -> Result<Option<usize>>;
}
impl<T: Read> TryRead for T {
fn try_read(&mut self, dst: &mut [u8]) -> Result<Option<usize>> {
self.read(dst)
.map(|cnt| Some(cnt))
.or_else(to_non_block)
}
}
impl<T: Write> TryWrite for T {
fn try_write(&mut self, src: &[u8]) -> Result<Option<usize>> {
self.write(src)
.map(|cnt| Some(cnt))
.or_else(to_non_block)
}
}
pub trait TryAccept {
type Output;
fn accept(&self) -> Result<Option<Self::Output>>;
}
/*
*
* ===== Helpers =====
*
*/
pub fn to_non_block<T>(err: Error) -> Result<Option<T>> {
use std::io::ErrorKind::WouldBlock;
if let WouldBlock = err.kind() {
return Ok(None);
}
Err(err)
}
|
}
pub trait TryRead {
fn try_read_buf<B: MutBuf>(&mut self, buf: &mut B) -> Result<Option<usize>>
|
random_line_split
|
sprite_displayer.rs
|
use nalgebra::na::{ Eye, Mat4 };
use std::sync::{ Arc, Mutex };
use std::num;
use super::managed_display::{ ManagedDisplay, Texture };
use super::Drawable;
static vertexShader: &'static str = "
#version 110
uniform mat4 uMatrix;
attribute vec2 iPosition;
attribute vec2 iTexCoords;
varying vec2 vTexCoords;
void main() {
gl_Position = vec4(iPosition, 0.0, 1.0) * uMatrix;
vTexCoords = iTexCoords;
}
";
static fragmentShader: &'static str = "
#version 110
uniform sampler2D uTexture;
varying vec2 vTexCoords;
void main() {
gl_FragColor = texture2D(uTexture, vTexCoords);
}
";
pub struct SpriteDisplayer {
display: Arc<ManagedDisplay>,
insideMatrix: Mat4<f32>,
texture: Texture,
vertexBuffer: super::raw::VertexBuffer,
indexBuffer: super::raw::IndexBuffer,
program: super::raw::Program,
uniforms: Mutex<super::raw::ProgramUniforms>
}
impl SpriteDisplayer {
pub fn new(display: Arc<ManagedDisplay>, resourceName: &str) -> Result<SpriteDisplayer, String>
|
Ok(SpriteDisplayer {
display: display,
insideMatrix: Eye::new_identity(4),
texture: texture,
vertexBuffer: vb,
indexBuffer: ib,
program: program,
uniforms: Mutex::new(uniforms)
})
}
pub fn set_rectangle_coords(&mut self, leftCoord: Option<f32>, topCoord: Option<f32>, rightCoord: Option<f32>, bottomCoord: Option<f32>) {
self.insideMatrix = Eye::new_identity(4);
let heightToWidthRatio = (self.texture.get_height() as f32) / (self.texture.get_width() as f32);
let (leftCoord, topCoord, rightCoord, bottomCoord) =
match (leftCoord, topCoord, rightCoord, bottomCoord) {
(Some(l), Some(t), Some(r), Some(b))
=> (l, t, r, b),
(Some(l), None, Some(r), Some(b))
=> (l, b + heightToWidthRatio * num::abs(r - l), r, b),
(Some(l), Some(t), Some(r), None)
=> (l, t, r, t - heightToWidthRatio * num::abs(r - l)),
(None, Some(t), Some(r), Some(b))
=> (r - (t - b) / heightToWidthRatio, t, r, b),
(Some(l), Some(t), None, Some(b))
=> (l, t, l + (t - b) / heightToWidthRatio, b),
(None, None, Some(r), Some(b))
=> (-r, -b, r, b),
(None, Some(t), None, Some(b))
=> (-0.5 * num::abs(t - b) / heightToWidthRatio, t, 0.5 * num::abs(t - b) / heightToWidthRatio, b),
(None, Some(t), Some(r), None)
=> (-r, t, r, -t),
(Some(l), None, None, Some(b))
=> (l, -b, -l, b),
(Some(l), None, Some(r), None)
=> (l, 0.5 * num::abs(r - l) * heightToWidthRatio, r, -0.5 * num::abs(r - l) * heightToWidthRatio),
(Some(l), Some(t), None, None)
=> (l, t, -l, -t),
(Some(l), None, None, None)
=> (l, 0.5 * num::abs(l * 2.0) * heightToWidthRatio, -l, -0.5 * num::abs(l * 2.0) * heightToWidthRatio),
(None, None, Some(r), None)
=> (-r, 0.5 * num::abs(r * 2.0) * heightToWidthRatio, r, -0.5 * num::abs(r * 2.0) * heightToWidthRatio),
(None, Some(t), None, None)
=> (-0.5 * num::abs(t * 2.0) / heightToWidthRatio, t, 0.5 * num::abs(t * 2.0) / heightToWidthRatio, -t),
(None, None, None, Some(b))
=> (-0.5 * num::abs(b * 2.0) / heightToWidthRatio, -b, 0.5 * num::abs(b * 2.0) / heightToWidthRatio, b),
(None, None, None, None)
=> (-0.5, -0.5, 0.5, 0.5)
};
self.insideMatrix.m11 = (rightCoord - leftCoord) / 2.0;
self.insideMatrix.m41 = (rightCoord + leftCoord) / 2.0;
self.insideMatrix.m22 = (topCoord - bottomCoord) / 2.0;
self.insideMatrix.m42 = (topCoord + bottomCoord) / 2.0;
}
pub fn set_resource(&mut self, resourceName: &str)
{
self.texture = self.display.load_texture(resourceName).unwrap();
self.uniforms.lock().set_texture("uTexture", self.texture.deref());
}
}
impl Drawable for SpriteDisplayer {
fn draw(&self, matrix: &Mat4<f32>) {
let mut uniforms = self.uniforms.lock();
uniforms.set_value("uMatrix", self.insideMatrix * matrix.clone());
self.display.draw(&self.vertexBuffer, &self.indexBuffer, &self.program, uniforms.deref());
}
}
|
{
let texture = try!(display.load_texture(resourceName));
let vs = display.build_shader(super::raw::GLSL, super::raw::Vertex, vertexShader, "main").unwrap();
let fs = display.build_shader(super::raw::GLSL, super::raw::Fragment, fragmentShader, "main").unwrap();
let program = display.build_program(&[ &vs, &fs ]).unwrap();
let mut uniforms = program.build_uniforms();
let vb = display.build_vertex_buffer2(
&[
( (-1.0 as f32, -1.0 as f32), (0.0 as f32, 1.0 as f32) ),
( (-1.0 as f32, 1.0 as f32), (0.0 as f32, 0.0 as f32) ),
( ( 1.0 as f32, 1.0 as f32), (1.0 as f32, 0.0 as f32) ),
( ( 1.0 as f32, -1.0 as f32), (1.0 as f32, 1.0 as f32) )
],
&[ "iPosition", "iTexCoords" ]
);
let ib = display.build_index_buffer(super::raw::TrianglesList, &[ 0 as u16, 1, 2, 0, 2, 3 ]);
uniforms.set_texture("uTexture", texture.deref());
|
identifier_body
|
sprite_displayer.rs
|
use nalgebra::na::{ Eye, Mat4 };
use std::sync::{ Arc, Mutex };
use std::num;
use super::managed_display::{ ManagedDisplay, Texture };
use super::Drawable;
static vertexShader: &'static str = "
#version 110
uniform mat4 uMatrix;
attribute vec2 iPosition;
attribute vec2 iTexCoords;
varying vec2 vTexCoords;
void main() {
gl_Position = vec4(iPosition, 0.0, 1.0) * uMatrix;
vTexCoords = iTexCoords;
}
";
static fragmentShader: &'static str = "
#version 110
uniform sampler2D uTexture;
varying vec2 vTexCoords;
void main() {
gl_FragColor = texture2D(uTexture, vTexCoords);
}
";
pub struct SpriteDisplayer {
display: Arc<ManagedDisplay>,
insideMatrix: Mat4<f32>,
texture: Texture,
vertexBuffer: super::raw::VertexBuffer,
indexBuffer: super::raw::IndexBuffer,
program: super::raw::Program,
uniforms: Mutex<super::raw::ProgramUniforms>
}
impl SpriteDisplayer {
pub fn new(display: Arc<ManagedDisplay>, resourceName: &str) -> Result<SpriteDisplayer, String> {
let texture = try!(display.load_texture(resourceName));
let vs = display.build_shader(super::raw::GLSL, super::raw::Vertex, vertexShader, "main").unwrap();
let fs = display.build_shader(super::raw::GLSL, super::raw::Fragment, fragmentShader, "main").unwrap();
let program = display.build_program(&[ &vs, &fs ]).unwrap();
let mut uniforms = program.build_uniforms();
let vb = display.build_vertex_buffer2(
&[
( (-1.0 as f32, -1.0 as f32), (0.0 as f32, 1.0 as f32) ),
( (-1.0 as f32, 1.0 as f32), (0.0 as f32, 0.0 as f32) ),
( ( 1.0 as f32, 1.0 as f32), (1.0 as f32, 0.0 as f32) ),
( ( 1.0 as f32, -1.0 as f32), (1.0 as f32, 1.0 as f32) )
],
&[ "iPosition", "iTexCoords" ]
);
let ib = display.build_index_buffer(super::raw::TrianglesList, &[ 0 as u16, 1, 2, 0, 2, 3 ]);
uniforms.set_texture("uTexture", texture.deref());
Ok(SpriteDisplayer {
display: display,
insideMatrix: Eye::new_identity(4),
texture: texture,
vertexBuffer: vb,
indexBuffer: ib,
program: program,
uniforms: Mutex::new(uniforms)
})
}
pub fn set_rectangle_coords(&mut self, leftCoord: Option<f32>, topCoord: Option<f32>, rightCoord: Option<f32>, bottomCoord: Option<f32>) {
self.insideMatrix = Eye::new_identity(4);
let heightToWidthRatio = (self.texture.get_height() as f32) / (self.texture.get_width() as f32);
let (leftCoord, topCoord, rightCoord, bottomCoord) =
match (leftCoord, topCoord, rightCoord, bottomCoord) {
(Some(l), Some(t), Some(r), Some(b))
=> (l, t, r, b),
(Some(l), None, Some(r), Some(b))
=> (l, b + heightToWidthRatio * num::abs(r - l), r, b),
(Some(l), Some(t), Some(r), None)
=> (l, t, r, t - heightToWidthRatio * num::abs(r - l)),
(None, Some(t), Some(r), Some(b))
=> (r - (t - b) / heightToWidthRatio, t, r, b),
(Some(l), Some(t), None, Some(b))
=> (l, t, l + (t - b) / heightToWidthRatio, b),
(None, None, Some(r), Some(b))
=> (-r, -b, r, b),
(None, Some(t), None, Some(b))
=> (-0.5 * num::abs(t - b) / heightToWidthRatio, t, 0.5 * num::abs(t - b) / heightToWidthRatio, b),
(None, Some(t), Some(r), None)
=> (-r, t, r, -t),
(Some(l), None, None, Some(b))
=> (l, -b, -l, b),
(Some(l), None, Some(r), None)
=> (l, 0.5 * num::abs(r - l) * heightToWidthRatio, r, -0.5 * num::abs(r - l) * heightToWidthRatio),
(Some(l), Some(t), None, None)
=> (l, t, -l, -t),
(Some(l), None, None, None)
=> (l, 0.5 * num::abs(l * 2.0) * heightToWidthRatio, -l, -0.5 * num::abs(l * 2.0) * heightToWidthRatio),
(None, None, Some(r), None)
=> (-r, 0.5 * num::abs(r * 2.0) * heightToWidthRatio, r, -0.5 * num::abs(r * 2.0) * heightToWidthRatio),
(None, Some(t), None, None)
=> (-0.5 * num::abs(t * 2.0) / heightToWidthRatio, t, 0.5 * num::abs(t * 2.0) / heightToWidthRatio, -t),
(None, None, None, Some(b))
=> (-0.5 * num::abs(b * 2.0) / heightToWidthRatio, -b, 0.5 * num::abs(b * 2.0) / heightToWidthRatio, b),
(None, None, None, None)
=> (-0.5, -0.5, 0.5, 0.5)
};
self.insideMatrix.m11 = (rightCoord - leftCoord) / 2.0;
self.insideMatrix.m41 = (rightCoord + leftCoord) / 2.0;
self.insideMatrix.m22 = (topCoord - bottomCoord) / 2.0;
self.insideMatrix.m42 = (topCoord + bottomCoord) / 2.0;
}
pub fn set_resource(&mut self, resourceName: &str)
{
self.texture = self.display.load_texture(resourceName).unwrap();
self.uniforms.lock().set_texture("uTexture", self.texture.deref());
}
}
impl Drawable for SpriteDisplayer {
fn draw(&self, matrix: &Mat4<f32>) {
let mut uniforms = self.uniforms.lock();
|
uniforms.set_value("uMatrix", self.insideMatrix * matrix.clone());
self.display.draw(&self.vertexBuffer, &self.indexBuffer, &self.program, uniforms.deref());
}
}
|
random_line_split
|
|
sprite_displayer.rs
|
use nalgebra::na::{ Eye, Mat4 };
use std::sync::{ Arc, Mutex };
use std::num;
use super::managed_display::{ ManagedDisplay, Texture };
use super::Drawable;
static vertexShader: &'static str = "
#version 110
uniform mat4 uMatrix;
attribute vec2 iPosition;
attribute vec2 iTexCoords;
varying vec2 vTexCoords;
void main() {
gl_Position = vec4(iPosition, 0.0, 1.0) * uMatrix;
vTexCoords = iTexCoords;
}
";
static fragmentShader: &'static str = "
#version 110
uniform sampler2D uTexture;
varying vec2 vTexCoords;
void main() {
gl_FragColor = texture2D(uTexture, vTexCoords);
}
";
pub struct SpriteDisplayer {
display: Arc<ManagedDisplay>,
insideMatrix: Mat4<f32>,
texture: Texture,
vertexBuffer: super::raw::VertexBuffer,
indexBuffer: super::raw::IndexBuffer,
program: super::raw::Program,
uniforms: Mutex<super::raw::ProgramUniforms>
}
impl SpriteDisplayer {
pub fn
|
(display: Arc<ManagedDisplay>, resourceName: &str) -> Result<SpriteDisplayer, String> {
let texture = try!(display.load_texture(resourceName));
let vs = display.build_shader(super::raw::GLSL, super::raw::Vertex, vertexShader, "main").unwrap();
let fs = display.build_shader(super::raw::GLSL, super::raw::Fragment, fragmentShader, "main").unwrap();
let program = display.build_program(&[ &vs, &fs ]).unwrap();
let mut uniforms = program.build_uniforms();
let vb = display.build_vertex_buffer2(
&[
( (-1.0 as f32, -1.0 as f32), (0.0 as f32, 1.0 as f32) ),
( (-1.0 as f32, 1.0 as f32), (0.0 as f32, 0.0 as f32) ),
( ( 1.0 as f32, 1.0 as f32), (1.0 as f32, 0.0 as f32) ),
( ( 1.0 as f32, -1.0 as f32), (1.0 as f32, 1.0 as f32) )
],
&[ "iPosition", "iTexCoords" ]
);
let ib = display.build_index_buffer(super::raw::TrianglesList, &[ 0 as u16, 1, 2, 0, 2, 3 ]);
uniforms.set_texture("uTexture", texture.deref());
Ok(SpriteDisplayer {
display: display,
insideMatrix: Eye::new_identity(4),
texture: texture,
vertexBuffer: vb,
indexBuffer: ib,
program: program,
uniforms: Mutex::new(uniforms)
})
}
pub fn set_rectangle_coords(&mut self, leftCoord: Option<f32>, topCoord: Option<f32>, rightCoord: Option<f32>, bottomCoord: Option<f32>) {
self.insideMatrix = Eye::new_identity(4);
let heightToWidthRatio = (self.texture.get_height() as f32) / (self.texture.get_width() as f32);
let (leftCoord, topCoord, rightCoord, bottomCoord) =
match (leftCoord, topCoord, rightCoord, bottomCoord) {
(Some(l), Some(t), Some(r), Some(b))
=> (l, t, r, b),
(Some(l), None, Some(r), Some(b))
=> (l, b + heightToWidthRatio * num::abs(r - l), r, b),
(Some(l), Some(t), Some(r), None)
=> (l, t, r, t - heightToWidthRatio * num::abs(r - l)),
(None, Some(t), Some(r), Some(b))
=> (r - (t - b) / heightToWidthRatio, t, r, b),
(Some(l), Some(t), None, Some(b))
=> (l, t, l + (t - b) / heightToWidthRatio, b),
(None, None, Some(r), Some(b))
=> (-r, -b, r, b),
(None, Some(t), None, Some(b))
=> (-0.5 * num::abs(t - b) / heightToWidthRatio, t, 0.5 * num::abs(t - b) / heightToWidthRatio, b),
(None, Some(t), Some(r), None)
=> (-r, t, r, -t),
(Some(l), None, None, Some(b))
=> (l, -b, -l, b),
(Some(l), None, Some(r), None)
=> (l, 0.5 * num::abs(r - l) * heightToWidthRatio, r, -0.5 * num::abs(r - l) * heightToWidthRatio),
(Some(l), Some(t), None, None)
=> (l, t, -l, -t),
(Some(l), None, None, None)
=> (l, 0.5 * num::abs(l * 2.0) * heightToWidthRatio, -l, -0.5 * num::abs(l * 2.0) * heightToWidthRatio),
(None, None, Some(r), None)
=> (-r, 0.5 * num::abs(r * 2.0) * heightToWidthRatio, r, -0.5 * num::abs(r * 2.0) * heightToWidthRatio),
(None, Some(t), None, None)
=> (-0.5 * num::abs(t * 2.0) / heightToWidthRatio, t, 0.5 * num::abs(t * 2.0) / heightToWidthRatio, -t),
(None, None, None, Some(b))
=> (-0.5 * num::abs(b * 2.0) / heightToWidthRatio, -b, 0.5 * num::abs(b * 2.0) / heightToWidthRatio, b),
(None, None, None, None)
=> (-0.5, -0.5, 0.5, 0.5)
};
self.insideMatrix.m11 = (rightCoord - leftCoord) / 2.0;
self.insideMatrix.m41 = (rightCoord + leftCoord) / 2.0;
self.insideMatrix.m22 = (topCoord - bottomCoord) / 2.0;
self.insideMatrix.m42 = (topCoord + bottomCoord) / 2.0;
}
pub fn set_resource(&mut self, resourceName: &str)
{
self.texture = self.display.load_texture(resourceName).unwrap();
self.uniforms.lock().set_texture("uTexture", self.texture.deref());
}
}
impl Drawable for SpriteDisplayer {
fn draw(&self, matrix: &Mat4<f32>) {
let mut uniforms = self.uniforms.lock();
uniforms.set_value("uMatrix", self.insideMatrix * matrix.clone());
self.display.draw(&self.vertexBuffer, &self.indexBuffer, &self.program, uniforms.deref());
}
}
|
new
|
identifier_name
|
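A minimal standalone sketch of the coordinate completion performed by `set_rectangle_coords` above, for the case where only the left and right edges are given. The helper name and the tuple return type are illustrative, not part of the crate:
fn complete_vertical(left: f32, right: f32, height_to_width: f32) -> (f32, f32, f32, f32) {
    // Mirrors the (Some(l), None, Some(r), None) arm: the top and bottom edges are
    // chosen symmetrically around y = 0 so the rectangle keeps the texture's
    // height/width ratio.
    let half_height = 0.5 * (right - left).abs() * height_to_width;
    (left, half_height, right, -half_height) // (left, top, right, bottom)
}
fn main() {
    // A texture twice as wide as it is tall (ratio 0.5) spanning x in [-1, 1]
    // gets y in [-0.5, 0.5].
    assert_eq!(complete_vertical(-1.0, 1.0, 0.5), (-1.0, 0.5, 1.0, -0.5));
}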
lib.rs
|
pub struct Driver {
age: u8,
}
impl Driver {
pub fn new(age: u8) -> Driver {
Driver { age }
}
pub fn get_age(&self) -> u8 {
self.age
}
}
pub trait Car {
fn drive_car(&self);
}
pub struct RealCar {}
impl Car for RealCar {
fn drive_car(&self) {
println!("Car has been driven!");
}
}
pub struct ProxyCar {
driver: Driver,
real_car: RealCar,
}
impl Car for ProxyCar {
fn drive_car(&self) {
if self.driver.age <= 16
|
else {
self.real_car.drive_car();
}
}
}
impl ProxyCar {
pub fn new(driver: Driver) -> ProxyCar {
ProxyCar {
driver,
real_car: RealCar {},
}
}
}
|
{
println!("Sorry, the driver is too young to drive.")
}
|
conditional_block
|
lib.rs
|
pub struct Driver {
age: u8,
}
impl Driver {
pub fn new(age: u8) -> Driver {
Driver { age }
}
pub fn get_age(&self) -> u8 {
self.age
}
}
pub trait Car {
fn drive_car(&self);
}
pub struct
|
{}
impl Car for RealCar {
fn drive_car(&self) {
println!("Car has been driven!");
}
}
pub struct ProxyCar {
driver: Driver,
real_car: RealCar,
}
impl Car for ProxyCar {
fn drive_car(&self) {
if self.driver.age <= 16 {
println!("Sorry, the driver is too young to drive.")
} else {
self.real_car.drive_car();
}
}
}
impl ProxyCar {
pub fn new(driver: Driver) -> ProxyCar {
ProxyCar {
driver,
real_car: RealCar {},
}
}
}
|
RealCar
|
identifier_name
|
lib.rs
|
pub struct Driver {
age: u8,
}
impl Driver {
pub fn new(age: u8) -> Driver
|
pub fn get_age(&self) -> u8 {
self.age
}
}
pub trait Car {
fn drive_car(&self);
}
pub struct RealCar {}
impl Car for RealCar {
fn drive_car(&self) {
println!("Car has been driven!");
}
}
pub struct ProxyCar {
driver: Driver,
real_car: RealCar,
}
impl Car for ProxyCar {
fn drive_car(&self) {
if self.driver.age <= 16 {
println!("Sorry, the driver is too young to drive.")
} else {
self.real_car.drive_car();
}
}
}
impl ProxyCar {
pub fn new(driver: Driver) -> ProxyCar {
ProxyCar {
driver,
real_car: RealCar {},
}
}
}
|
{
Driver { age }
}
|
identifier_body
|
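A short usage sketch for the proxy pattern shown in lib.rs above (assumes `Driver`, `ProxyCar`, and the `Car` trait are in scope):
fn main() {
    let teen = ProxyCar::new(Driver::new(15));
    teen.drive_car();   // age <= 16, so only the "too young" message is printed
    let adult = ProxyCar::new(Driver::new(30));
    adult.drive_car();  // delegates to the wrapped RealCar: "Car has been driven!"
}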
builder.rs
|
use anyhow::Context;
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Deserialize)]
#[allow(non_camel_case_types)]
pub enum Arch {
#[serde(rename = "i686")]
I686,
#[serde(rename = "x86_64")]
X86_64,
#[serde(rename = "arm")]
ARM,
#[serde(rename = "armv6h")]
ARMV6H,
#[serde(rename = "armv7h")]
ARMV7H,
#[serde(rename = "aarch64")]
AARCH64,
}
impl std::fmt::Display for Arch {
fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
match *self {
Arch::I686 => "i686".fmt(f),
Arch::X86_64 => "x86_64".fmt(f),
Arch::ARM => "arm".fmt(f),
Arch::ARMV6H => "armv6h".fmt(f),
Arch::ARMV7H => "armv7h".fmt(f),
Arch::AARCH64 => "aarch64".fmt(f),
}
}
}
#[derive(Debug, Clone)]
pub struct ChrootHelper<'a> {
chroot_dir: &'a str,
#[allow(dead_code)]
arch: Arch,
}
impl<'a> ChrootHelper<'a> {
pub fn new(chroot_dir: &'a str, arch: Arch) -> Self {
ChrootHelper { chroot_dir, arch }
}
pub async fn makechrootpkg<P, Q, R, S>(
&self,
package_dir: P,
srcdest: Q,
pkgdest: R,
logdest: S,
) -> Result<(), anyhow::Error>
where
P: AsRef<std::path::Path>,
Q: AsRef<std::path::Path>,
R: AsRef<std::path::Path>,
S: AsRef<std::path::Path>,
{
|
let current_dir = current_dir_buf.as_path();
let mut srcdest_arg = std::ffi::OsString::from("SRCDEST=");
srcdest_arg.push(current_dir.join(srcdest));
let mut pkgdest_arg = std::ffi::OsString::from("PKGDEST=");
pkgdest_arg.push(current_dir.join(pkgdest));
let mut logdest_arg = std::ffi::OsString::from("LOGDEST=");
logdest_arg.push(current_dir.join(logdest));
let mut cmd = tokio::process::Command::new("sudo");
cmd.current_dir(package_dir)
.arg("env")
.arg(srcdest_arg)
.arg(pkgdest_arg)
.arg(logdest_arg)
.arg("makechrootpkg")
.arg("-cur")
.arg(current_dir.join(self.chroot_dir));
log::info!("{:?}", cmd);
let status = cmd.status().await?;
if status.success() {
Ok(())
} else {
Err(anyhow::anyhow!("makechrootpkg failed"))
}
}
}
#[derive(Debug, Clone)]
pub struct Builder<'a> {
signer: Option<&'a super::signer::Signer<'a>>,
srcdest: &'a str,
logdest: &'a str,
}
impl<'a> Builder<'a> {
pub fn new(
signer: Option<&'a super::signer::Signer<'a>>,
srcdest: &'a str,
logdest: &'a str,
) -> Self {
Builder {
signer,
srcdest,
logdest,
}
}
pub async fn build_package<P, Q>(
&self,
package_dir: P,
repo_dir: Q,
chroot_helper: &ChrootHelper<'a>,
) -> Result<Vec<std::path::PathBuf>, anyhow::Error>
where
P: AsRef<std::path::Path>,
Q: AsRef<std::path::Path>,
{
let package_dir = package_dir.as_ref();
let tempdir = tempdir::TempDir::new("guzuta-pkgdest")?;
let pkgdest = tempdir.path();
chroot_helper
.makechrootpkg(package_dir, self.srcdest, pkgdest, self.logdest)
.await?;
let mut dir = tokio::fs::read_dir(pkgdest).await?;
let mut futures_unordered = futures::stream::FuturesUnordered::new();
while let Some(entry) = dir.next_entry().await? {
let dest = repo_dir.as_ref().join(entry.file_name());
futures_unordered.push(async move {
let symlink_package_path = package_dir.join(entry.file_name());
if symlink_package_path.read_link().is_ok() {
// Unlink symlink created by makechrootpkg
log::info!("Unlink symlink {}", symlink_package_path.display());
tokio::fs::remove_file(symlink_package_path).await?;
}
log::info!("Copy {} to {}", entry.path().display(), dest.display());
tokio::fs::copy(entry.path(), &dest)
.await
.with_context(|| {
format!("Unable to copy file {:?} to {:?}", entry.path(), dest)
})?;
if let Some(signer) = self.signer {
let mut sig_dest = dest.clone().into_os_string();
sig_dest.push(".sig");
signer.sign(&dest, sig_dest).await?;
}
Ok::<_, anyhow::Error>(dest)
});
}
use futures::StreamExt as _;
let mut paths = vec![];
while let Some(path) = futures_unordered.next().await {
paths.push(path?);
}
Ok(paths)
}
}
|
let current_dir_buf = std::env::current_dir()?;
|
random_line_split
|
builder.rs
|
use anyhow::Context;
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Deserialize)]
#[allow(non_camel_case_types)]
pub enum Arch {
#[serde(rename = "i686")]
I686,
#[serde(rename = "x86_64")]
X86_64,
#[serde(rename = "arm")]
ARM,
#[serde(rename = "armv6h")]
ARMV6H,
#[serde(rename = "armv7h")]
ARMV7H,
#[serde(rename = "aarch64")]
AARCH64,
}
impl std::fmt::Display for Arch {
fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
match *self {
Arch::I686 => "i686".fmt(f),
Arch::X86_64 => "x86_64".fmt(f),
Arch::ARM => "arm".fmt(f),
Arch::ARMV6H => "armv6h".fmt(f),
Arch::ARMV7H => "armv7h".fmt(f),
Arch::AARCH64 => "aarch64".fmt(f),
}
}
}
#[derive(Debug, Clone)]
pub struct ChrootHelper<'a> {
chroot_dir: &'a str,
#[allow(dead_code)]
arch: Arch,
}
impl<'a> ChrootHelper<'a> {
pub fn new(chroot_dir: &'a str, arch: Arch) -> Self {
ChrootHelper { chroot_dir, arch }
}
pub async fn makechrootpkg<P, Q, R, S>(
&self,
package_dir: P,
srcdest: Q,
pkgdest: R,
logdest: S,
) -> Result<(), anyhow::Error>
where
P: AsRef<std::path::Path>,
Q: AsRef<std::path::Path>,
R: AsRef<std::path::Path>,
S: AsRef<std::path::Path>,
{
let current_dir_buf = std::env::current_dir()?;
let current_dir = current_dir_buf.as_path();
let mut srcdest_arg = std::ffi::OsString::from("SRCDEST=");
srcdest_arg.push(current_dir.join(srcdest));
let mut pkgdest_arg = std::ffi::OsString::from("PKGDEST=");
pkgdest_arg.push(current_dir.join(pkgdest));
let mut logdest_arg = std::ffi::OsString::from("LOGDEST=");
logdest_arg.push(current_dir.join(logdest));
let mut cmd = tokio::process::Command::new("sudo");
cmd.current_dir(package_dir)
.arg("env")
.arg(srcdest_arg)
.arg(pkgdest_arg)
.arg(logdest_arg)
.arg("makechrootpkg")
.arg("-cur")
.arg(current_dir.join(self.chroot_dir));
log::info!("{:?}", cmd);
let status = cmd.status().await?;
if status.success() {
Ok(())
} else {
Err(anyhow::anyhow!("makechrootpkg failed"))
}
}
}
#[derive(Debug, Clone)]
pub struct Builder<'a> {
signer: Option<&'a super::signer::Signer<'a>>,
srcdest: &'a str,
logdest: &'a str,
}
impl<'a> Builder<'a> {
pub fn new(
signer: Option<&'a super::signer::Signer<'a>>,
srcdest: &'a str,
logdest: &'a str,
) -> Self {
Builder {
signer,
srcdest,
logdest,
}
}
pub async fn build_package<P, Q>(
&self,
package_dir: P,
repo_dir: Q,
chroot_helper: &ChrootHelper<'a>,
) -> Result<Vec<std::path::PathBuf>, anyhow::Error>
where
P: AsRef<std::path::Path>,
Q: AsRef<std::path::Path>,
{
let package_dir = package_dir.as_ref();
let tempdir = tempdir::TempDir::new("guzuta-pkgdest")?;
let pkgdest = tempdir.path();
chroot_helper
.makechrootpkg(package_dir, self.srcdest, pkgdest, self.logdest)
.await?;
let mut dir = tokio::fs::read_dir(pkgdest).await?;
let mut futures_unordered = futures::stream::FuturesUnordered::new();
while let Some(entry) = dir.next_entry().await? {
let dest = repo_dir.as_ref().join(entry.file_name());
futures_unordered.push(async move {
let symlink_package_path = package_dir.join(entry.file_name());
if symlink_package_path.read_link().is_ok() {
// Unlink symlink created by makechrootpkg
log::info!("Unlink symlink {}", symlink_package_path.display());
tokio::fs::remove_file(symlink_package_path).await?;
}
log::info!("Copy {} to {}", entry.path().display(), dest.display());
tokio::fs::copy(entry.path(), &dest)
.await
.with_context(|| {
format!("Unable to copy file {:?} to {:?}", entry.path(), dest)
})?;
if let Some(signer) = self.signer
|
Ok::<_, anyhow::Error>(dest)
});
}
use futures::StreamExt as _;
let mut paths = vec![];
while let Some(path) = futures_unordered.next().await {
paths.push(path?);
}
Ok(paths)
}
}
|
{
let mut sig_dest = dest.clone().into_os_string();
sig_dest.push(".sig");
signer.sign(&dest, sig_dest).await?;
}
|
conditional_block
|
builder.rs
|
use anyhow::Context;
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Deserialize)]
#[allow(non_camel_case_types)]
pub enum Arch {
#[serde(rename = "i686")]
I686,
#[serde(rename = "x86_64")]
X86_64,
#[serde(rename = "arm")]
ARM,
#[serde(rename = "armv6h")]
ARMV6H,
#[serde(rename = "armv7h")]
ARMV7H,
#[serde(rename = "aarch64")]
AARCH64,
}
impl std::fmt::Display for Arch {
fn
|
(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
match *self {
Arch::I686 => "i686".fmt(f),
Arch::X86_64 => "x86_64".fmt(f),
Arch::ARM => "arm".fmt(f),
Arch::ARMV6H => "armv6h".fmt(f),
Arch::ARMV7H => "armv7h".fmt(f),
Arch::AARCH64 => "aarch64".fmt(f),
}
}
}
#[derive(Debug, Clone)]
pub struct ChrootHelper<'a> {
chroot_dir: &'a str,
#[allow(dead_code)]
arch: Arch,
}
impl<'a> ChrootHelper<'a> {
pub fn new(chroot_dir: &'a str, arch: Arch) -> Self {
ChrootHelper { chroot_dir, arch }
}
pub async fn makechrootpkg<P, Q, R, S>(
&self,
package_dir: P,
srcdest: Q,
pkgdest: R,
logdest: S,
) -> Result<(), anyhow::Error>
where
P: AsRef<std::path::Path>,
Q: AsRef<std::path::Path>,
R: AsRef<std::path::Path>,
S: AsRef<std::path::Path>,
{
let current_dir_buf = std::env::current_dir()?;
let current_dir = current_dir_buf.as_path();
let mut srcdest_arg = std::ffi::OsString::from("SRCDEST=");
srcdest_arg.push(current_dir.join(srcdest));
let mut pkgdest_arg = std::ffi::OsString::from("PKGDEST=");
pkgdest_arg.push(current_dir.join(pkgdest));
let mut logdest_arg = std::ffi::OsString::from("LOGDEST=");
logdest_arg.push(current_dir.join(logdest));
let mut cmd = tokio::process::Command::new("sudo");
cmd.current_dir(package_dir)
.arg("env")
.arg(srcdest_arg)
.arg(pkgdest_arg)
.arg(logdest_arg)
.arg("makechrootpkg")
.arg("-cur")
.arg(current_dir.join(self.chroot_dir));
log::info!("{:?}", cmd);
let status = cmd.status().await?;
if status.success() {
Ok(())
} else {
Err(anyhow::anyhow!("makechrootpkg failed"))
}
}
}
#[derive(Debug, Clone)]
pub struct Builder<'a> {
signer: Option<&'a super::signer::Signer<'a>>,
srcdest: &'a str,
logdest: &'a str,
}
impl<'a> Builder<'a> {
pub fn new(
signer: Option<&'a super::signer::Signer<'a>>,
srcdest: &'a str,
logdest: &'a str,
) -> Self {
Builder {
signer,
srcdest,
logdest,
}
}
pub async fn build_package<P, Q>(
&self,
package_dir: P,
repo_dir: Q,
chroot_helper: &ChrootHelper<'a>,
) -> Result<Vec<std::path::PathBuf>, anyhow::Error>
where
P: AsRef<std::path::Path>,
Q: AsRef<std::path::Path>,
{
let package_dir = package_dir.as_ref();
let tempdir = tempdir::TempDir::new("guzuta-pkgdest")?;
let pkgdest = tempdir.path();
chroot_helper
.makechrootpkg(package_dir, self.srcdest, pkgdest, self.logdest)
.await?;
let mut dir = tokio::fs::read_dir(pkgdest).await?;
let mut futures_unordered = futures::stream::FuturesUnordered::new();
while let Some(entry) = dir.next_entry().await? {
let dest = repo_dir.as_ref().join(entry.file_name());
futures_unordered.push(async move {
let symlink_package_path = package_dir.join(entry.file_name());
if symlink_package_path.read_link().is_ok() {
// Unlink symlink created by makechrootpkg
log::info!("Unlink symlink {}", symlink_package_path.display());
tokio::fs::remove_file(symlink_package_path).await?;
}
log::info!("Copy {} to {}", entry.path().display(), dest.display());
tokio::fs::copy(entry.path(), &dest)
.await
.with_context(|| {
format!("Unable to copy file {:?} to {:?}", entry.path(), dest)
})?;
if let Some(signer) = self.signer {
let mut sig_dest = dest.clone().into_os_string();
sig_dest.push(".sig");
signer.sign(&dest, sig_dest).await?;
}
Ok::<_, anyhow::Error>(dest)
});
}
use futures::StreamExt as _;
let mut paths = vec![];
while let Some(path) = futures_unordered.next().await {
paths.push(path?);
}
Ok(paths)
}
}
|
fmt
|
identifier_name
|
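A hypothetical driver for the builder.rs types above. The directory names and the `#[tokio::main]` runtime setup are placeholder assumptions; only `ChrootHelper`, `Builder`, `Arch`, and `build_package` come from the file itself:
#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
    let chroot = ChrootHelper::new("chroot-x86_64", Arch::X86_64);
    let builder = Builder::new(None, "sources", "logs"); // build without a package signer
    let built = builder.build_package("pkgbuilds/hello", "repo", &chroot).await?;
    for path in built {
        println!("built {}", path.display());
    }
    Ok(())
}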
isatty.rs
|
//! Helper module which provides a function to test
//! if stdout is a tty.
cfg_if::cfg_if! {
if #[cfg(unix)] {
pub fn stdout_isatty() -> bool {
unsafe { libc::isatty(libc::STDOUT_FILENO) != 0 }
}
} else if #[cfg(windows)] {
pub fn stdout_isatty() -> bool {
type DWORD = u32;
type BOOL = i32;
type HANDLE = *mut u8;
type LPDWORD = *mut u32;
const STD_OUTPUT_HANDLE: DWORD = -11i32 as DWORD;
extern "system" {
fn GetStdHandle(which: DWORD) -> HANDLE;
fn GetConsoleMode(hConsoleHandle: HANDLE, lpMode: LPDWORD) -> BOOL;
}
unsafe {
let handle = GetStdHandle(STD_OUTPUT_HANDLE);
let mut out = 0;
GetConsoleMode(handle, &mut out) != 0
}
}
} else {
|
}
}
}
|
// FIXME: Implement isatty on SGX
pub fn stdout_isatty() -> bool {
false
|
random_line_split
|
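A minimal sketch of the usual reason for `stdout_isatty`: only emit ANSI colour codes when stdout is an interactive terminal. The escape sequence here is illustrative:
fn main() {
    if stdout_isatty() {
        println!("\x1b[32mok\x1b[0m"); // interactive terminal: colour is safe
    } else {
        println!("ok");                // piped or redirected: keep the output plain
    }
}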
specular_reflection.rs
|
//! Defines a BRDF that describes specular reflection
use std::f32;
use enum_set::EnumSet;
use linalg::Vector;
use film::Colorf;
use bxdf::{self, BxDF, BxDFType};
use bxdf::fresnel::Fresnel;
/// Specular reflection BRDF that implements a specularly reflective material model
|
fresnel: Box<Fresnel + Send + Sync>
}
impl SpecularReflection {
/// Create a specularly reflective BRDF with the reflective color and Fresnel term
pub fn new(c: &Colorf, fresnel: Box<Fresnel + Send + Sync>) -> SpecularReflection {
SpecularReflection { reflectance: *c, fresnel: fresnel }
}
}
impl BxDF for SpecularReflection {
fn bxdf_type(&self) -> EnumSet<BxDFType> {
let mut e = EnumSet::new();
e.insert(BxDFType::Specular);
e.insert(BxDFType::Reflection);
e
}
/// We'll never exactly hit the specular reflection direction with some pair of
/// directions, so this just returns black. Use `sample` instead
fn eval(&self, _: &Vector, _: &Vector) -> Colorf { Colorf::broadcast(0.0) }
/// Sampling the specular BRDF just returns the specular reflection direction
/// for the light leaving along `w_o`
fn sample(&self, w_o: &Vector, _: &(f32, f32)) -> (Colorf, Vector, f32) {
if w_o.z != 0.0 {
let w_i = Vector::new(-w_o.x, -w_o.y, w_o.z);
let c = self.fresnel.fresnel(-bxdf::cos_theta(w_o)) * self.reflectance / f32::abs(bxdf::cos_theta(&w_i));
(c, w_i, 1.0)
} else {
(Colorf::black(), Vector::broadcast(0.0), 0.0)
}
}
}
|
pub struct SpecularReflection {
/// Color of the reflective material
reflectance: Colorf,
/// Fresnel term for the reflection model
|
random_line_split
|
specular_reflection.rs
|
//! Defines a BRDF that describes specular reflection
use std::f32;
use enum_set::EnumSet;
use linalg::Vector;
use film::Colorf;
use bxdf::{self, BxDF, BxDFType};
use bxdf::fresnel::Fresnel;
/// Specular reflection BRDF that implements a specularly reflective material model
pub struct SpecularReflection {
/// Color of the reflective material
reflectance: Colorf,
/// Fresnel term for the reflection model
fresnel: Box<Fresnel + Send + Sync>
}
impl SpecularReflection {
/// Create a specularly reflective BRDF with the reflective color and Fresnel term
pub fn
|
(c: &Colorf, fresnel: Box<Fresnel + Send + Sync>) -> SpecularReflection {
SpecularReflection { reflectance: *c, fresnel: fresnel }
}
}
impl BxDF for SpecularReflection {
fn bxdf_type(&self) -> EnumSet<BxDFType> {
let mut e = EnumSet::new();
e.insert(BxDFType::Specular);
e.insert(BxDFType::Reflection);
e
}
/// We'll never exactly hit the specular reflection direction with some pair of
/// directions, so this just returns black. Use `sample` instead
fn eval(&self, _: &Vector, _: &Vector) -> Colorf { Colorf::broadcast(0.0) }
/// Sampling the specular BRDF just returns the specular reflection direction
/// for the light leaving along `w_o`
fn sample(&self, w_o: &Vector, _: &(f32, f32)) -> (Colorf, Vector, f32) {
if w_o.z != 0.0 {
let w_i = Vector::new(-w_o.x, -w_o.y, w_o.z);
let c = self.fresnel.fresnel(-bxdf::cos_theta(w_o)) * self.reflectance / f32::abs(bxdf::cos_theta(&w_i));
(c, w_i, 1.0)
} else {
(Colorf::black(), Vector::broadcast(0.0), 0.0)
}
}
}
|
new
|
identifier_name
|
specular_reflection.rs
|
//! Defines a BRDF that describes specular reflection
use std::f32;
use enum_set::EnumSet;
use linalg::Vector;
use film::Colorf;
use bxdf::{self, BxDF, BxDFType};
use bxdf::fresnel::Fresnel;
/// Specular reflection BRDF that implements a specularly reflective material model
pub struct SpecularReflection {
/// Color of the reflective material
reflectance: Colorf,
/// Fresnel term for the reflection model
fresnel: Box<Fresnel + Send + Sync>
}
impl SpecularReflection {
/// Create a specularly reflective BRDF with the reflective color and Fresnel term
pub fn new(c: &Colorf, fresnel: Box<Fresnel + Send + Sync>) -> SpecularReflection {
SpecularReflection { reflectance: *c, fresnel: fresnel }
}
}
impl BxDF for SpecularReflection {
fn bxdf_type(&self) -> EnumSet<BxDFType> {
let mut e = EnumSet::new();
e.insert(BxDFType::Specular);
e.insert(BxDFType::Reflection);
e
}
/// We'll never exactly hit the specular reflection direction with some pair of
/// directions, so this just returns black. Use `sample` instead
fn eval(&self, _: &Vector, _: &Vector) -> Colorf { Colorf::broadcast(0.0) }
/// Sampling the specular BRDF just returns the specular reflection direction
/// for the light leaving along `w_o`
fn sample(&self, w_o: &Vector, _: &(f32, f32)) -> (Colorf, Vector, f32) {
if w_o.z != 0.0
|
else {
(Colorf::black(), Vector::broadcast(0.0), 0.0)
}
}
}
|
{
let w_i = Vector::new(-w_o.x, -w_o.y, w_o.z);
let c = self.fresnel.fresnel(-bxdf::cos_theta(w_o)) * self.reflectance / f32::abs(bxdf::cos_theta(&w_i));
(c, w_i, 1.0)
}
|
conditional_block
|
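An illustrative check of the reflection rule used by `sample` above; the tuple-based vector is a stand-in for the crate's `linalg::Vector`. In the local shading frame the normal is (0, 0, 1), so mirroring about it just negates the x and y components and preserves the cosine with the normal:
fn reflect_about_normal(w_o: (f32, f32, f32)) -> (f32, f32, f32) {
    (-w_o.0, -w_o.1, w_o.2)
}
fn main() {
    let w_o = (0.3, -0.4, 0.866);
    let w_i = reflect_about_normal(w_o);
    assert_eq!(w_i.2, w_o.2); // the z (cos theta) component is unchanged
    assert_eq!((w_i.0, w_i.1), (-w_o.0, -w_o.1));
}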
TestLog1p.rs
|
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
* limitations under the License.
*/
// Don't edit this file! It is auto-generated by frameworks/rs/api/generate.sh.
#pragma version(1)
#pragma rs java_package_name(android.renderscript.cts)
float __attribute__((kernel)) testLog1pFloatFloat(float inV) {
return log1p(inV);
}
float2 __attribute__((kernel)) testLog1pFloat2Float2(float2 inV) {
return log1p(inV);
}
float3 __attribute__((kernel)) testLog1pFloat3Float3(float3 inV) {
return log1p(inV);
}
float4 __attribute__((kernel)) testLog1pFloat4Float4(float4 inV) {
return log1p(inV);
}
half __attribute__((kernel)) testLog1pHalfHalf(half inV) {
return log1p(inV);
}
half2 __attribute__((kernel)) testLog1pHalf2Half2(half2 inV) {
return log1p(inV);
}
half3 __attribute__((kernel)) testLog1pHalf3Half3(half3 inV) {
return log1p(inV);
}
half4 __attribute__((kernel)) testLog1pHalf4Half4(half4 inV) {
return log1p(inV);
}
|
* See the License for the specific language governing permissions and
|
random_line_split
|
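A small Rust illustration of why the kernels above use log1p rather than log(1 + x): for tiny x the naive form loses most of its accuracy, while `ln_1p` (Rust's log1p) does not.
fn main() {
    let x = 1.0e-10_f64;
    let naive = (1.0 + x).ln(); // 1 + x is already rounded, so several digits are lost
    let exact = x.ln_1p();      // accurate to full precision (~1.0e-10)
    println!("naive = {:e}, ln_1p = {:e}", naive, exact);
}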
request_response.rs
|
use ansi_term::Colour::*;
use crate::{MismatchResult, display_result};
use pact_matching::models::RequestResponseInteraction;
use pact_matching::Mismatch;
pub fn display_request_response_result(
errors: &mut Vec<(Option<String>, String, Option<MismatchResult>)>,
interaction: &RequestResponseInteraction,
match_result: &Result<Option<String>, MismatchResult>,
description: &String
)
|
Red.paint("FAILED")
} else {
Green.paint("OK")
};
let header_results = match interaction.response.headers {
Some(ref h) => Some(h.iter().map(|(k, v)| {
(k.clone(), v.join(", "), if mismatches.iter().any(|m| {
match *m {
Mismatch::HeaderMismatch { ref key, .. } => k == key,
_ => false
}
}) {
Red.paint("FAILED")
} else {
Green.paint("OK")
})
}).collect()),
None => None
};
let body_result = if mismatches.iter().any(|m| m.mismatch_type() == "BodyMismatch" ||
m.mismatch_type() == "BodyTypeMismatch") {
Red.paint("FAILED")
} else {
Green.paint("OK")
};
display_result(interaction.response.status, status_result, header_results, body_result);
errors.push((interaction.id.clone(), description.clone(), Some(err.clone())));
}
}
}
}
|
{
match match_result {
Ok(id) => {
display_result(
interaction.response.status,
Green.paint("OK"),
interaction.response.headers.clone().map(|h| h.iter().map(|(k, v)| {
(k.clone(), v.join(", "), Green.paint("OK"))
}).collect()), Green.paint("OK")
);
errors.push((id.clone(), description.clone(), None));
},
Err(ref err) => match *err {
MismatchResult::Error(ref err_des, _) => {
println!(" {}", Red.paint(format!("Request Failed - {}", err_des)));
errors.push((err.interaction_id().clone(), description.clone(), Some(err.clone())));
},
MismatchResult::Mismatches { ref mismatches, .. } => {
let description = description.to_owned() + " returns a response which ";
let status_result = if mismatches.iter().any(|m| m.mismatch_type() == "StatusMismatch") {
|
identifier_body
|
request_response.rs
|
use ansi_term::Colour::*;
|
use crate::{MismatchResult, display_result};
use pact_matching::models::RequestResponseInteraction;
use pact_matching::Mismatch;
pub fn display_request_response_result(
errors: &mut Vec<(Option<String>, String, Option<MismatchResult>)>,
interaction: &RequestResponseInteraction,
match_result: &Result<Option<String>, MismatchResult>,
description: &String
) {
match match_result {
Ok(id) => {
display_result(
interaction.response.status,
Green.paint("OK"),
interaction.response.headers.clone().map(|h| h.iter().map(|(k, v)| {
(k.clone(), v.join(", "), Green.paint("OK"))
}).collect()), Green.paint("OK")
);
errors.push((id.clone(), description.clone(), None));
},
Err(ref err) => match *err {
MismatchResult::Error(ref err_des, _) => {
println!(" {}", Red.paint(format!("Request Failed - {}", err_des)));
errors.push((err.interaction_id().clone(), description.clone(), Some(err.clone())));
},
MismatchResult::Mismatches { ref mismatches, .. } => {
let description = description.to_owned() + " returns a response which ";
let status_result = if mismatches.iter().any(|m| m.mismatch_type() == "StatusMismatch") {
Red.paint("FAILED")
} else {
Green.paint("OK")
};
let header_results = match interaction.response.headers {
Some(ref h) => Some(h.iter().map(|(k, v)| {
(k.clone(), v.join(", "), if mismatches.iter().any(|m| {
match *m {
Mismatch::HeaderMismatch { ref key, .. } => k == key,
_ => false
}
}) {
Red.paint("FAILED")
} else {
Green.paint("OK")
})
}).collect()),
None => None
};
let body_result = if mismatches.iter().any(|m| m.mismatch_type() == "BodyMismatch" ||
m.mismatch_type() == "BodyTypeMismatch") {
Red.paint("FAILED")
} else {
Green.paint("OK")
};
display_result(interaction.response.status, status_result, header_results, body_result);
errors.push((interaction.id.clone(), description.clone(), Some(err.clone())));
}
}
}
}
|
random_line_split
|
|
request_response.rs
|
use ansi_term::Colour::*;
use crate::{MismatchResult, display_result};
use pact_matching::models::RequestResponseInteraction;
use pact_matching::Mismatch;
pub fn
|
(
errors: &mut Vec<(Option<String>, String, Option<MismatchResult>)>,
interaction: &RequestResponseInteraction,
match_result: &Result<Option<String>, MismatchResult>,
description: &String
) {
match match_result {
Ok(id) => {
display_result(
interaction.response.status,
Green.paint("OK"),
interaction.response.headers.clone().map(|h| h.iter().map(|(k, v)| {
(k.clone(), v.join(", "), Green.paint("OK"))
}).collect()), Green.paint("OK")
);
errors.push((id.clone(), description.clone(), None));
},
Err(ref err) => match *err {
MismatchResult::Error(ref err_des, _) => {
println!(" {}", Red.paint(format!("Request Failed - {}", err_des)));
errors.push((err.interaction_id().clone(), description.clone(), Some(err.clone())));
},
MismatchResult::Mismatches { ref mismatches, .. } => {
let description = description.to_owned() + " returns a response which ";
let status_result = if mismatches.iter().any(|m| m.mismatch_type() == "StatusMismatch") {
Red.paint("FAILED")
} else {
Green.paint("OK")
};
let header_results = match interaction.response.headers {
Some(ref h) => Some(h.iter().map(|(k, v)| {
(k.clone(), v.join(", "), if mismatches.iter().any(|m| {
match *m {
Mismatch::HeaderMismatch { ref key, .. } => k == key,
_ => false
}
}) {
Red.paint("FAILED")
} else {
Green.paint("OK")
})
}).collect()),
None => None
};
let body_result = if mismatches.iter().any(|m| m.mismatch_type() == "BodyMismatch" ||
m.mismatch_type() == "BodyTypeMismatch") {
Red.paint("FAILED")
} else {
Green.paint("OK")
};
display_result(interaction.response.status, status_result, header_results, body_result);
errors.push((interaction.id.clone(), description.clone(), Some(err.clone())));
}
}
}
}
|
display_request_response_result
|
identifier_name
|
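A hedged sketch of the colouring pattern used by `display_request_response_result` above: a check is painted FAILED when any mismatch of the relevant kind exists, and OK otherwise. The helper and the plain string list are illustrative; the real code matches on `Mismatch` values:
use ansi_term::Colour::{Green, Red};
fn status_label(mismatch_types: &[&str], kind: &str) -> ansi_term::ANSIString<'static> {
    if mismatch_types.iter().any(|t| *t == kind) {
        Red.paint("FAILED")
    } else {
        Green.paint("OK")
    }
}
fn main() {
    let mismatches = ["BodyMismatch"];
    println!("status {}", status_label(&mismatches, "StatusMismatch")); // OK
    println!("body   {}", status_label(&mismatches, "BodyMismatch"));   // FAILED
}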
unwind.rs
|
Handling in LLVM" (llvm.org/docs/ExceptionHandling.html) and
//! documents linked from it.
//! These are also good reads:
//! http://theofilos.cs.columbia.edu/blog/2013/09/22/base_abi/
//! http://monoinfinito.wordpress.com/series/exception-handling-in-c/
//! http://www.airs.com/blog/index.php?s=exception+frames
//!
//! ## A brief summary
//!
//! Exception handling happens in two phases: a search phase and a cleanup phase.
//!
//! In both phases the unwinder walks stack frames from top to bottom using
//! information from the stack frame unwind sections of the current process's
//! modules ("module" here refers to an OS module, i.e. an executable or a
//! dynamic library).
//!
//! For each stack frame, it invokes the associated "personality routine", whose
//! address is also stored in the unwind info section.
//!
//! In the search phase, the job of a personality routine is to examine exception
//! object being thrown, and to decide whether it should be caught at that stack
//! frame. Once the handler frame has been identified, cleanup phase begins.
//!
//! In the cleanup phase, personality routines invoke cleanup code associated
//! with their stack frames (i.e. destructors). Once stack has been unwound down
//! to the handler frame level, unwinding stops and the last personality routine
//! transfers control to its catch block.
//!
//! ## Frame unwind info registration
//!
//! Each module has its own frame unwind info section (usually ".eh_frame"), and
//! unwinder needs to know about all of them in order for unwinding to be able to
//! cross module boundaries.
//!
//! On some platforms, like Linux, this is achieved by dynamically enumerating
//! currently loaded modules via the dl_iterate_phdr() API and finding all
//! .eh_frame sections.
//!
//! Others, like Windows, require modules to actively register their unwind info
//! sections by calling __register_frame_info() API at startup. In the latter
//! case it is essential that there is only one copy of the unwinder runtime in
//! the process. This is usually achieved by linking to the dynamic version of
//! the unwind runtime.
//!
//! Currently Rust uses unwind runtime provided by libgcc.
use prelude::v1::*;
use any::Any;
use cell::Cell;
use cmp;
use failure;
use fmt;
use intrinsics;
use libc::c_void;
use mem;
use sync::atomic::{self, Ordering};
use sync::{Once, ONCE_INIT};
use rt::libunwind as uw;
struct Exception {
uwe: uw::_Unwind_Exception,
cause: Option<Box<Any + Send>>,
}
pub type Callback = fn(msg: &(Any + Send), file: &'static str, line: uint);
// Variables used for invoking callbacks when a thread starts to unwind.
//
// For more information, see below.
const MAX_CALLBACKS: uint = 16;
static CALLBACKS: [atomic::AtomicUsize; MAX_CALLBACKS] =
[atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT];
static CALLBACK_CNT: atomic::AtomicUsize = atomic::ATOMIC_USIZE_INIT;
thread_local! { static PANICKING: Cell<bool> = Cell::new(false) }
/// Invoke a closure, capturing the cause of panic if one occurs.
///
/// This function will return `None` if the closure did not panic, and will
/// return `Some(cause)` if the closure panics. The `cause` returned is the
/// object with which panic was originally invoked.
///
/// This function also is unsafe for a variety of reasons:
///
/// * This is not safe to call in a nested fashion. The unwinding
/// interface for Rust is designed to have at most one try/catch block per
/// thread, not multiple. No runtime checking is currently performed to uphold
/// this invariant, so this function is not safe. A nested try/catch block
/// may result in corruption of the outer try/catch block's state, especially
/// if this is used within a thread itself.
///
/// * It is not sound to trigger unwinding while already unwinding. Rust threads
/// have runtime checks in place to ensure this invariant, but it is not
/// guaranteed that a rust thread is in place when invoking this function.
/// Unwinding twice can lead to resource leaks where some destructors are not
/// run.
pub unsafe fn try<F: FnOnce()>(f: F) -> Result<(), Box<Any + Send>> {
let mut f = Some(f);
let prev = PANICKING.with(|s| s.get());
PANICKING.with(|s| s.set(false));
let ep = rust_try(try_fn::<F>, &mut f as *mut _ as *mut c_void);
PANICKING.with(|s| s.set(prev));
return if ep.is_null() {
Ok(())
} else {
let my_ep = ep as *mut Exception;
rtdebug!("caught {}", (*my_ep).uwe.exception_class);
let cause = (*my_ep).cause.take();
uw::_Unwind_DeleteException(ep);
Err(cause.unwrap())
};
extern fn try_fn<F: FnOnce()>(opt_closure: *mut c_void) {
let opt_closure = opt_closure as *mut Option<F>;
unsafe { (*opt_closure).take().unwrap()(); }
}
#[link(name = "rustrt_native", kind = "static")]
#[cfg(not(test))]
extern {}
extern {
// Rust's try-catch
// When f(...) returns normally, the return value is null.
// When f(...) throws, the return value is a pointer to the caught
// exception object.
fn rust_try(f: extern fn(*mut c_void),
data: *mut c_void) -> *mut uw::_Unwind_Exception;
}
}
/// Determines whether the current thread is unwinding because of panic.
pub fn panicking() -> bool {
PANICKING.with(|s| s.get())
}
// An uninlined, unmangled function upon which to slap yer breakpoints
#[inline(never)]
#[no_mangle]
fn rust_panic(cause: Box<Any + Send>) -> ! {
rtdebug!("begin_unwind()");
unsafe {
let exception = box Exception {
uwe: uw::_Unwind_Exception {
exception_class: rust_exception_class(),
exception_cleanup: exception_cleanup,
private: [0; uw::unwinder_private_data_size],
},
cause: Some(cause),
};
let error = uw::_Unwind_RaiseException(mem::transmute(exception));
rtabort!("Could not unwind stack, error = {}", error as int)
}
extern fn exception_cleanup(_unwind_code: uw::_Unwind_Reason_Code,
exception: *mut uw::_Unwind_Exception) {
rtdebug!("exception_cleanup()");
unsafe {
let _: Box<Exception> = mem::transmute(exception);
}
}
}
// Rust's exception class identifier. This is used by personality routines to
// determine whether the exception was thrown by their own runtime.
fn rust_exception_class() -> uw::_Unwind_Exception_Class {
// M O Z \0 R U S T -- vendor, language
0x4d4f5a_00_52555354
}
// We could implement our personality routine in pure Rust, however exception
// info decoding is tedious. More importantly, personality routines have to
// handle various platform quirks, which are not fun to maintain. For this
// reason, we attempt to reuse personality routine of the C language:
// __gcc_personality_v0.
//
// Since C does not support exception catching, __gcc_personality_v0 simply
// always returns _URC_CONTINUE_UNWIND in search phase, and always returns
// _URC_INSTALL_CONTEXT (i.e. "invoke cleanup code") in cleanup phase.
//
// This is pretty close to Rust's exception handling approach, except that Rust
// does have a single "catch-all" handler at the bottom of each thread's stack.
// So we have two versions of the personality routine:
// - rust_eh_personality, used by all cleanup landing pads, which never catches,
// so the behavior of __gcc_personality_v0 is perfectly adequate there, and
// - rust_eh_personality_catch, used only by rust_try(), which always catches.
//
// Note, however, that for implementation simplicity, rust_eh_personality_catch
// lacks code to install a landing pad, so in order to obtain exception object
// pointer (which it needs to return upstream), rust_try() employs another trick:
// it calls into the nested rust_try_inner(), whose landing pad does not resume
// unwinds. Instead, it extracts the exception pointer and performs a "normal"
// return.
//
// See also: rt/rust_try.ll
#[cfg(all(not(target_arch = "arm"),
not(all(windows, target_arch = "x86_64")),
not(test)))]
#[doc(hidden)]
pub mod eabi {
use rt::libunwind as uw;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
extern fn rust_eh_personality(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(version, actions, exception_class, ue_header,
context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
_version: c_int,
actions: uw::_Unwind_Action,
_exception_class: uw::_Unwind_Exception_Class,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
uw::_URC_INSTALL_CONTEXT
}
}
}
// iOS on armv7 is using SjLj exceptions and therefore requires to use
// a specialized personality routine: __gcc_personality_sj0
#[cfg(all(target_os = "ios", target_arch = "arm", not(test)))]
#[doc(hidden)]
pub mod eabi {
use rt::libunwind as uw;
use libc::c_int;
extern "C" {
fn __gcc_personality_sj0(version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_sj0(version, actions, exception_class, ue_header,
context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
_version: c_int,
actions: uw::_Unwind_Action,
_exception_class: uw::_Unwind_Exception_Class,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
unsafe {
__gcc_personality_sj0(_version, actions, _exception_class, _ue_header,
_context)
}
}
}
}
// ARM EHABI uses a slightly different personality routine signature,
// but otherwise works the same.
#[cfg(all(target_arch = "arm", not(target_os = "ios"), not(test)))]
#[doc(hidden)]
pub mod eabi {
use rt::libunwind as uw;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(state: uw::_Unwind_State,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
extern "C" fn rust_eh_personality(
state: uw::_Unwind_State,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(state, ue_header, context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
state: uw::_Unwind_State,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
if (state as c_int & uw::_US_ACTION_MASK as c_int)
== uw::_US_VIRTUAL_UNWIND_FRAME as c_int { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
uw::_URC_INSTALL_CONTEXT
}
}
}
// Win64 SEH (see http://msdn.microsoft.com/en-us/library/1eyas8tf.aspx)
//
// This looks a bit convoluted because rather than implementing a native SEH handler,
// GCC reuses the same personality routine as for the other architectures by wrapping it
// with an "API translator" layer (_GCC_specific_handler).
#[cfg(all(windows, target_arch = "x86_64", not(test)))]
#[doc(hidden)]
#[allow(non_camel_case_types, non_snake_case)]
pub mod eabi {
pub use self::EXCEPTION_DISPOSITION::*;
use rt::libunwind as uw;
use libc::{c_void, c_int};
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct EXCEPTION_RECORD;
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct CONTEXT;
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct DISPATCHER_CONTEXT;
#[repr(C)]
#[derive(Copy)]
pub enum EXCEPTION_DISPOSITION {
ExceptionContinueExecution,
ExceptionContinueSearch,
ExceptionNestedException,
ExceptionCollidedUnwind
}
type _Unwind_Personality_Fn =
extern "C" fn(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code;
extern "C" {
fn __gcc_personality_seh0(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT
) -> EXCEPTION_DISPOSITION;
fn _GCC_specific_handler(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT,
personality: _Unwind_Personality_Fn
) -> EXCEPTION_DISPOSITION;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
extern "C" fn rust_eh_personality(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT
) -> EXCEPTION_DISPOSITION
{
unsafe {
__gcc_personality_seh0(exceptionRecord, establisherFrame,
contextRecord, dispatcherContext)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT
) -> EXCEPTION_DISPOSITION
{
extern "C" fn inner(
_version: c_int,
actions: uw::_Unwind_Action,
_exception_class: uw::_Unwind_Exception_Class,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
uw::_URC_INSTALL_CONTEXT
}
}
unsafe {
_GCC_specific_handler(exceptionRecord, establisherFrame,
contextRecord, dispatcherContext,
inner)
}
}
}
#[cfg(not(test))]
/// Entry point of panic from the libcore crate.
#[lang = "panic_fmt"]
pub extern fn rust_begin_unwind(msg: fmt::Arguments,
file: &'static str, line: uint) -> ! {
begin_unwind_fmt(msg, &(file, line))
}
/// The entry point for unwinding with a formatted message.
///
/// This is designed to reduce the amount of code required at the call
/// site as much as possible (so that `panic!()` has as low an impact
/// on (e.g.) the inlining of other functions as possible), by moving
/// the actual formatting into this shared place.
#[inline(never)] #[cold]
pub fn begin_unwind_fmt(msg: fmt::Arguments, file_line: &(&'static str, uint)) -> ! {
use fmt::Writer;
// We do two allocations here, unfortunately. But (a) they're
// required with the current scheme, and (b) we don't handle
// panic + OOM properly anyway (see comment in begin_unwind
// below).
let mut s = String::new();
let _ = write!(&mut s, "{}", msg);
begin_unwind_inner(box s, file_line)
}
/// This is the entry point of unwinding for panic!() and assert!().
#[inline(never)] #[cold] // avoid code bloat at the call sites as much as possible
pub fn begin_unwind<M: Any + Send>(msg: M, file_line: &(&'static str, uint)) -> ! {
// Note that this should be the only allocation performed in this code path.
// Currently this means that panic!() on OOM will invoke this code path,
// but then again we're not really ready for panic on OOM anyway. If
// we do start doing this, then we should propagate this allocation to
// be performed in the parent of this thread instead of the thread that's
// panicking.
// see below for why we do the `Any` coercion here.
begin_unwind_inner(box msg, file_line)
}
/// The core of the unwinding.
///
/// This is non-generic to avoid instantiation bloat in other crates
/// (which makes compilation of small crates noticeably slower). (Note:
/// we need the `Any` object anyway, we're not just creating it to
/// avoid being generic.)
///
/// Doing this split took the LLVM IR line counts of `fn main() { panic!()
/// }` from ~1900/3700 (-O/no opts) to 180/590.
#[inline(never)] #[cold] // this is the slow path, please never inline this
fn begin_unwind_inner(msg: Box<Any + Send>, file_line: &(&'static str, uint)) -> ! {
// Make sure the default failure handler is registered before we look at the
// callbacks.
static INIT: Once = ONCE_INIT;
INIT.call_once(|| unsafe { register(failure::on_fail); });
// First, invoke the user-defined callbacks triggered on thread panic.
//
// By the time that we see a callback has been registered (by reading
// MAX_CALLBACKS), the actual callback itself may have not been stored yet,
// so we just chalk it up to a race condition and move on to the next
// callback. Additionally, CALLBACK_CNT may briefly be higher than
// MAX_CALLBACKS, so we're sure to clamp it as necessary.
let callbacks = {
let amt = CALLBACK_CNT.load(Ordering::SeqCst);
&CALLBACKS[..cmp::min(amt, MAX_CALLBACKS)]
};
for cb in callbacks.iter() {
match cb.load(Ordering::SeqCst) {
0 => {}
n => {
let f: Callback = unsafe { mem::transmute(n) };
let (file, line) = *file_line;
f(&*msg, file, line);
}
}
};
// Now that we've run all the necessary unwind callbacks, we actually
|
// unwinding or otherwise exiting the thread cleanly.
rterrln!("thread panicked while panicking. aborting.");
unsafe { intrinsics::abort() }
}
PANICKING.with(|s| s.set(true));
rust_panic(msg);
}
/// Register a callback to be invoked when a thread unwinds.
///
/// This is an unsafe and experimental API which allows for an arbitrary
/// callback to be invoked when a thread panics. This callback is invoked on both
/// the initial unwinding and a double unwinding if one occurs. Additionally,
/// the local `Task` will be in place for the duration of the callback, and
/// the
|
// perform the unwinding.
if panicking() {
// If a thread panics while it's already unwinding then we
// have limited options. Currently our preference is to
// just abort. In the future we may consider resuming
|
random_line_split
|
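A hypothetical sketch of the `try`/`panicking` API documented above (pre-1.0 libstd internals; not something user code would normally call):
fn demo() {
    // `try` runs the closure and captures the panic payload instead of letting the
    // unwind escape; see the safety caveats in its doc comment above.
    let result = unsafe { try(|| panic!("boom")) };
    assert!(result.is_err()); // the closure panicked, so its cause comes back as Err
    assert!(!panicking());    // once `try` returns, this thread is no longer unwinding
}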
unwind.rs
|
LLVM" (llvm.org/docs/ExceptionHandling.html) and
//! documents linked from it.
//! These are also good reads:
//! http://theofilos.cs.columbia.edu/blog/2013/09/22/base_abi/
//! http://monoinfinito.wordpress.com/series/exception-handling-in-c/
//! http://www.airs.com/blog/index.php?s=exception+frames
//!
//! ## A brief summary
//!
//! Exception handling happens in two phases: a search phase and a cleanup phase.
//!
//! In both phases the unwinder walks stack frames from top to bottom using
//! information from the stack frame unwind sections of the current process's
//! modules ("module" here refers to an OS module, i.e. an executable or a
//! dynamic library).
//!
//! For each stack frame, it invokes the associated "personality routine", whose
//! address is also stored in the unwind info section.
//!
//! In the search phase, the job of a personality routine is to examine exception
//! object being thrown, and to decide whether it should be caught at that stack
//! frame. Once the handler frame has been identified, cleanup phase begins.
//!
//! In the cleanup phase, personality routines invoke cleanup code associated
//! with their stack frames (i.e. destructors). Once stack has been unwound down
//! to the handler frame level, unwinding stops and the last personality routine
//! transfers control to its catch block.
//!
//! ## Frame unwind info registration
//!
//! Each module has its own frame unwind info section (usually ".eh_frame"), and
//! unwinder needs to know about all of them in order for unwinding to be able to
//! cross module boundaries.
//!
//! On some platforms, like Linux, this is achieved by dynamically enumerating
//! currently loaded modules via the dl_iterate_phdr() API and finding all
//! .eh_frame sections.
//!
//! Others, like Windows, require modules to actively register their unwind info
//! sections by calling __register_frame_info() API at startup. In the latter
//! case it is essential that there is only one copy of the unwinder runtime in
//! the process. This is usually achieved by linking to the dynamic version of
//! the unwind runtime.
//!
//! Currently Rust uses unwind runtime provided by libgcc.
use prelude::v1::*;
use any::Any;
use cell::Cell;
use cmp;
use failure;
use fmt;
use intrinsics;
use libc::c_void;
use mem;
use sync::atomic::{self, Ordering};
use sync::{Once, ONCE_INIT};
use rt::libunwind as uw;
struct Exception {
uwe: uw::_Unwind_Exception,
cause: Option<Box<Any + Send>>,
}
pub type Callback = fn(msg: &(Any + Send), file: &'static str, line: uint);
// Variables used for invoking callbacks when a thread starts to unwind.
//
// For more information, see below.
const MAX_CALLBACKS: uint = 16;
static CALLBACKS: [atomic::AtomicUsize; MAX_CALLBACKS] =
[atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT];
static CALLBACK_CNT: atomic::AtomicUsize = atomic::ATOMIC_USIZE_INIT;
thread_local! { static PANICKING: Cell<bool> = Cell::new(false) }
/// Invoke a closure, capturing the cause of panic if one occurs.
///
/// This function will return `None` if the closure did not panic, and will
/// return `Some(cause)` if the closure panics. The `cause` returned is the
/// object with which panic was originally invoked.
///
/// This function is also unsafe for a variety of reasons:
///
/// * This is not safe to call in a nested fashion. The unwinding
/// interface for Rust is designed to have at most one try/catch block per
/// thread, not multiple. No runtime checking is currently performed to uphold
/// this invariant, so this function is not safe. A nested try/catch block
/// may result in corruption of the outer try/catch block's state, especially
/// if this is used within a thread itself.
///
/// * It is not sound to trigger unwinding while already unwinding. Rust threads
/// have runtime checks in place to ensure this invariant, but it is not
/// guaranteed that a rust thread is in place when invoking this function.
/// Unwinding twice can lead to resource leaks where some destructors are not
/// run.
pub unsafe fn try<F: FnOnce()>(f: F) -> Result<(), Box<Any + Send>> {
let mut f = Some(f);
let prev = PANICKING.with(|s| s.get());
PANICKING.with(|s| s.set(false));
let ep = rust_try(try_fn::<F>, &mut f as *mut _ as *mut c_void);
PANICKING.with(|s| s.set(prev));
return if ep.is_null() {
Ok(())
} else {
let my_ep = ep as *mut Exception;
rtdebug!("caught {}", (*my_ep).uwe.exception_class);
let cause = (*my_ep).cause.take();
uw::_Unwind_DeleteException(ep);
Err(cause.unwrap())
};
extern fn try_fn<F: FnOnce()>(opt_closure: *mut c_void) {
let opt_closure = opt_closure as *mut Option<F>;
unsafe { (*opt_closure).take().unwrap()(); }
}
#[link(name = "rustrt_native", kind = "static")]
#[cfg(not(test))]
extern {}
extern {
// Rust's try-catch
// When f(...) returns normally, the return value is null.
// When f(...) throws, the return value is a pointer to the caught
// exception object.
fn rust_try(f: extern fn(*mut c_void),
data: *mut c_void) -> *mut uw::_Unwind_Exception;
}
}
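// --- Editor's illustrative sketch (not part of the original source) ---
// A minimal, hedged example of how `try` above could be used to run a closure
// and detect a caught panic; `demo_catch` is a hypothetical name used only for
// illustration, and the payload handling is kept deliberately simple.
//
//     fn demo_catch() -> bool {
//         // Returns true if the closure panicked and its payload was captured.
//         let result = unsafe { try(|| panic!("boom")) };
//         match result {
//             Ok(()) => false,
//             Err(_cause) => true, // `_cause` is the boxed `Any + Send` payload
//         }
//     }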
/// Determines whether the current thread is unwinding because of panic.
pub fn panicking() -> bool {
PANICKING.with(|s| s.get())
}
// An uninlined, unmangled function upon which to slap yer breakpoints
#[inline(never)]
#[no_mangle]
fn rust_panic(cause: Box<Any + Send>) -> ! {
rtdebug!("begin_unwind()");
unsafe {
let exception = box Exception {
uwe: uw::_Unwind_Exception {
exception_class: rust_exception_class(),
exception_cleanup: exception_cleanup,
private: [0; uw::unwinder_private_data_size],
},
cause: Some(cause),
};
let error = uw::_Unwind_RaiseException(mem::transmute(exception));
rtabort!("Could not unwind stack, error = {}", error as int)
}
extern fn exception_cleanup(_unwind_code: uw::_Unwind_Reason_Code,
exception: *mut uw::_Unwind_Exception) {
rtdebug!("exception_cleanup()");
unsafe {
let _: Box<Exception> = mem::transmute(exception);
}
}
}
// Rust's exception class identifier. This is used by personality routines to
// determine whether the exception was thrown by their own runtime.
fn rust_exception_class() -> uw::_Unwind_Exception_Class {
// M O Z \0 R U S T -- vendor, language
0x4d4f5a_00_52555354
}
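// Editor's note (added): the magic constant above is simply the ASCII bytes
// "MOZ\0RUST" packed big-endian into a u64. As a worked check (illustrative
// only, not part of the original source):
//
//     assert_eq!(rust_exception_class(),
//                ('M' as u64) << 56 | ('O' as u64) << 48 | ('Z' as u64) << 40 |
//                ('R' as u64) << 24 | ('U' as u64) << 16 |
//                ('S' as u64) << 8  | ('T' as u64));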
// We could implement our personality routine in pure Rust; however, exception
// info decoding is tedious. More importantly, personality routines have to
// handle various platform quirks, which are not fun to maintain. For this
// reason, we attempt to reuse the personality routine of the C language:
// __gcc_personality_v0.
//
// Since C does not support exception catching, __gcc_personality_v0 simply
// always returns _URC_CONTINUE_UNWIND in the search phase, and always returns
// _URC_INSTALL_CONTEXT (i.e. "invoke cleanup code") in the cleanup phase.
//
// This is pretty close to Rust's exception handling approach, except that Rust
// does have a single "catch-all" handler at the bottom of each thread's stack.
// So we have two versions of the personality routine:
// - rust_eh_personality, used by all cleanup landing pads, which never catches,
// so the behavior of __gcc_personality_v0 is perfectly adequate there, and
// - rust_eh_personality_catch, used only by rust_try(), which always catches.
//
// Note, however, that for implementation simplicity, rust_eh_personality_catch
// lacks code to install a landing pad, so in order to obtain the exception
// object pointer (which it needs to return upstream), rust_try() employs another
// trick: it calls into the nested rust_try_inner(), whose landing pad does not
// resume unwinding. Instead, it extracts the exception pointer and performs a
// "normal" return.
//
// See also: rt/rust_try.ll
#[cfg(all(not(target_arch = "arm"),
not(all(windows, target_arch = "x86_64")),
not(test)))]
#[doc(hidden)]
pub mod eabi {
use rt::libunwind as uw;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
extern fn rust_eh_personality(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(version, actions, exception_class, ue_header,
context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
_version: c_int,
actions: uw::_Unwind_Action,
_exception_class: uw::_Unwind_Exception_Class,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
        if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
uw::_URC_INSTALL_CONTEXT
}
}
}
// iOS on armv7 uses SjLj exceptions and therefore requires a specialized
// personality routine: __gcc_personality_sj0
#[cfg(all(target_os = "ios", target_arch = "arm", not(test)))]
#[doc(hidden)]
pub mod eabi {
use rt::libunwind as uw;
use libc::c_int;
extern "C" {
fn __gcc_personality_sj0(version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_sj0(version, actions, exception_class, ue_header,
context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
_version: c_int,
actions: uw::_Unwind_Action,
_exception_class: uw::_Unwind_Exception_Class,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
        if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
unsafe {
__gcc_personality_sj0(_version, actions, _exception_class, _ue_header,
_context)
}
}
}
}
// ARM EHABI uses a slightly different personality routine signature,
// but otherwise works the same.
#[cfg(all(target_arch = "arm", not(target_os = "ios"), not(test)))]
#[doc(hidden)]
pub mod eabi {
use rt::libunwind as uw;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(state: uw::_Unwind_State,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
extern "C" fn rust_eh_personality(
state: uw::_Unwind_State,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(state, ue_header, context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
state: uw::_Unwind_State,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
if (state as c_int & uw::_US_ACTION_MASK as c_int)
== uw::_US_VIRTUAL_UNWIND_FRAME as c_int { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
uw::_URC_INSTALL_CONTEXT
}
}
}
// Win64 SEH (see http://msdn.microsoft.com/en-us/library/1eyas8tf.aspx)
//
// This looks a bit convoluted because rather than implementing a native SEH handler,
// GCC reuses the same personality routine as for the other architectures by wrapping it
// with an "API translator" layer (_GCC_specific_handler).
#[cfg(all(windows, target_arch = "x86_64", not(test)))]
#[doc(hidden)]
#[allow(non_camel_case_types, non_snake_case)]
pub mod eabi {
pub use self::EXCEPTION_DISPOSITION::*;
use rt::libunwind as uw;
use libc::{c_void, c_int};
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct EXCEPTION_RECORD;
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct CONTEXT;
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct DISPATCHER_CONTEXT;
#[repr(C)]
#[derive(Copy)]
pub enum EXCEPTION_DISPOSITION {
ExceptionContinueExecution,
ExceptionContinueSearch,
ExceptionNestedException,
ExceptionCollidedUnwind
}
type _Unwind_Personality_Fn =
extern "C" fn(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code;
extern "C" {
fn __gcc_personality_seh0(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT
) -> EXCEPTION_DISPOSITION;
fn _GCC_specific_handler(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT,
personality: _Unwind_Personality_Fn
) -> EXCEPTION_DISPOSITION;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
extern "C" fn rust_eh_personality(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT
) -> EXCEPTION_DISPOSITION
{
unsafe {
__gcc_personality_seh0(exceptionRecord, establisherFrame,
contextRecord, dispatcherContext)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT
) -> EXCEPTION_DISPOSITION
{
extern "C" fn inner(
_version: c_int,
actions: uw::_Unwind_Action,
_exception_class: uw::_Unwind_Exception_Class,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
            if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
uw::_URC_INSTALL_CONTEXT
}
}
unsafe {
_GCC_specific_handler(exceptionRecord, establisherFrame,
contextRecord, dispatcherContext,
inner)
}
}
}
#[cfg(not(test))]
/// Entry point of panic from the libcore crate.
#[lang = "panic_fmt"]
pub extern fn rust_begin_unwind(msg: fmt::Arguments,
                                file: &'static str, line: uint) -> ! {
begin_unwind_fmt(msg, &(file, line))
}
/// The entry point for unwinding with a formatted message.
///
/// This is designed to reduce the amount of code required at the call
/// site as much as possible (so that `panic!()` has as low an impact
/// on (e.g.) the inlining of other functions as possible), by moving
/// the actual formatting into this shared place.
#[inline(never)] #[cold]
pub fn begin_unwind_fmt(msg: fmt::Arguments, file_line: &(&'static str, uint)) -> ! {
use fmt::Writer;
// We do two allocations here, unfortunately. But (a) they're
// required with the current scheme, and (b) we don't handle
// panic + OOM properly anyway (see comment in begin_unwind
// below).
let mut s = String::new();
let _ = write!(&mut s, "{}", msg);
begin_unwind_inner(box s, file_line)
}
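// Editor's note (added): a rough, hedged sketch of how the panic! macro of this
// era reaches the entry points above and below; the exact macro expansion
// differs in detail and is shown only for orientation.
//
//     // panic!("x = {}", x) expands approximately to:
//     begin_unwind_fmt(format_args!("x = {}", x), &(file!(), line!()))
//
//     // panic!("plain message") expands approximately to:
//     begin_unwind("plain message", &(file!(), line!()))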
/// This is the entry point of unwinding for panic!() and assert!().
#[inline(never)] #[cold] // avoid code bloat at the call sites as much as possible
pub fn begin_unwind<M: Any + Send>(msg: M, file_line: &(&'static str, uint)) -> ! {
// Note that this should be the only allocation performed in this code path.
// Currently this means that panic!() on OOM will invoke this code path,
// but then again we're not really ready for panic on OOM anyway. If
// we do start doing this, then we should propagate this allocation to
// be performed in the parent of this thread instead of the thread that's
// panicking.
// see below for why we do the `Any` coercion here.
begin_unwind_inner(box msg, file_line)
}
/// The core of the unwinding.
///
/// This is non-generic to avoid instantiation bloat in other crates
/// (which makes compilation of small crates noticeably slower). (Note:
/// we need the `Any` object anyway, we're not just creating it to
/// avoid being generic.)
///
/// Doing this split took the LLVM IR line counts of `fn main() { panic!()
/// }` from ~1900/3700 (-O/no opts) to 180/590.
#[inline(never)] #[cold] // this is the slow path, please never inline this
fn begin_unwind_inner(msg: Box<Any + Send>, file_line: &(&'static str, uint)) -> !
|
n => {
let f: Callback = unsafe { mem::transmute(n) };
let (file, line) = *file_line;
f(&*msg, file, line);
}
}
};
// Now that we've run all the necessary unwind callbacks, we actually
// perform the unwinding.
if panicking() {
// If a thread panics while it's already unwinding then we
// have limited options. Currently our preference is to
// just abort. In the future we may consider resuming
// unwinding or otherwise exiting the thread cleanly.
rterrln!("thread panicked while panicking. aborting.");
unsafe { intrinsics::abort() }
}
PANICKING.with(|s| s.set(true));
rust_panic(msg);
}
/// Register a callback to be invoked when a thread unwinds.
///
/// This is an unsafe and experimental API which allows for an arbitrary
/// callback to be invoked when a thread panics. This callback is invoked on both
/// the initial unwinding and a double unwinding if one occurs. Additionally,
/// the local `Task` will be in place for the duration of the callback, and
///
|
{
// Make sure the default failure handler is registered before we look at the
// callbacks.
static INIT: Once = ONCE_INIT;
INIT.call_once(|| unsafe { register(failure::on_fail); });
    // First, invoke the user-defined callbacks triggered on thread panic.
    //
    // By the time that we see a callback has been registered (by reading
    // CALLBACK_CNT), the actual callback itself may not have been stored yet,
    // so we just chalk it up to a race condition and move on to the next
    // callback. Additionally, CALLBACK_CNT may briefly be higher than
    // MAX_CALLBACKS, so we make sure to clamp it as necessary.
let callbacks = {
let amt = CALLBACK_CNT.load(Ordering::SeqCst);
&CALLBACKS[..cmp::min(amt, MAX_CALLBACKS)]
};
for cb in callbacks.iter() {
match cb.load(Ordering::SeqCst) {
0 => {}
|
identifier_body
|
unwind.rs
|
LLVM" (llvm.org/docs/ExceptionHandling.html) and
//! documents linked from it.
//! These are also good reads:
//! http://theofilos.cs.columbia.edu/blog/2013/09/22/base_abi/
//! http://monoinfinito.wordpress.com/series/exception-handling-in-c/
//! http://www.airs.com/blog/index.php?s=exception+frames
//!
//! ## A brief summary
//!
//! Exception handling happens in two phases: a search phase and a cleanup phase.
//!
//! In both phases the unwinder walks stack frames from top to bottom using
//! information from the stack frame unwind sections of the current process's
//! modules ("module" here refers to an OS module, i.e. an executable or a
//! dynamic library).
//!
//! For each stack frame, it invokes the associated "personality routine", whose
//! address is also stored in the unwind info section.
//!
//! In the search phase, the job of a personality routine is to examine the
//! exception object being thrown and to decide whether it should be caught at
//! that stack frame. Once the handler frame has been identified, the cleanup
//! phase begins.
//!
//! In the cleanup phase, personality routines invoke the cleanup code associated
//! with their stack frames (i.e. destructors). Once the stack has been unwound
//! down to the handler frame level, unwinding stops and the last personality
//! routine transfers control to its catch block.
//!
//! ## Frame unwind info registration
//!
//! Each module has its own frame unwind info section (usually ".eh_frame"), and
//! the unwinder needs to know about all of them in order for unwinding to be
//! able to cross module boundaries.
//!
//! On some platforms, like Linux, this is achieved by dynamically enumerating
//! the currently loaded modules via the dl_iterate_phdr() API and finding all
//! .eh_frame sections.
//!
//! Others, like Windows, require modules to actively register their unwind info
//! sections by calling the __register_frame_info() API at startup. In the latter
//! case it is essential that there is only one copy of the unwinder runtime in
//! the process. This is usually achieved by linking to the dynamic version of
//! the unwind runtime.
//!
//! Currently Rust uses the unwind runtime provided by libgcc.
use prelude::v1::*;
use any::Any;
use cell::Cell;
use cmp;
use failure;
use fmt;
use intrinsics;
use libc::c_void;
use mem;
use sync::atomic::{self, Ordering};
use sync::{Once, ONCE_INIT};
use rt::libunwind as uw;
struct Exception {
uwe: uw::_Unwind_Exception,
cause: Option<Box<Any + Send>>,
}
pub type Callback = fn(msg: &(Any + Send), file: &'static str, line: uint);
// Variables used for invoking callbacks when a thread starts to unwind.
//
// For more information, see below.
const MAX_CALLBACKS: uint = 16;
static CALLBACKS: [atomic::AtomicUsize; MAX_CALLBACKS] =
[atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT];
static CALLBACK_CNT: atomic::AtomicUsize = atomic::ATOMIC_USIZE_INIT;
thread_local! { static PANICKING: Cell<bool> = Cell::new(false) }
/// Invoke a closure, capturing the cause of panic if one occurs.
///
/// This function will return `None` if the closure did not panic, and will
/// return `Some(cause)` if the closure panics. The `cause` returned is the
/// object with which panic was originally invoked.
///
/// This function is also unsafe for a variety of reasons:
///
/// * This is not safe to call in a nested fashion. The unwinding
/// interface for Rust is designed to have at most one try/catch block per
/// thread, not multiple. No runtime checking is currently performed to uphold
/// this invariant, so this function is not safe. A nested try/catch block
/// may result in corruption of the outer try/catch block's state, especially
/// if this is used within a thread itself.
///
/// * It is not sound to trigger unwinding while already unwinding. Rust threads
/// have runtime checks in place to ensure this invariant, but it is not
/// guaranteed that a rust thread is in place when invoking this function.
/// Unwinding twice can lead to resource leaks where some destructors are not
/// run.
pub unsafe fn try<F: FnOnce()>(f: F) -> Result<(), Box<Any + Send>> {
let mut f = Some(f);
let prev = PANICKING.with(|s| s.get());
PANICKING.with(|s| s.set(false));
let ep = rust_try(try_fn::<F>, &mut f as *mut _ as *mut c_void);
PANICKING.with(|s| s.set(prev));
return if ep.is_null() {
Ok(())
} else {
let my_ep = ep as *mut Exception;
rtdebug!("caught {}", (*my_ep).uwe.exception_class);
let cause = (*my_ep).cause.take();
uw::_Unwind_DeleteException(ep);
Err(cause.unwrap())
};
extern fn try_fn<F: FnOnce()>(opt_closure: *mut c_void) {
let opt_closure = opt_closure as *mut Option<F>;
unsafe { (*opt_closure).take().unwrap()(); }
}
#[link(name = "rustrt_native", kind = "static")]
#[cfg(not(test))]
extern {}
extern {
// Rust's try-catch
// When f(...) returns normally, the return value is null.
// When f(...) throws, the return value is a pointer to the caught
// exception object.
fn rust_try(f: extern fn(*mut c_void),
data: *mut c_void) -> *mut uw::_Unwind_Exception;
}
}
/// Determines whether the current thread is unwinding because of panic.
pub fn panicking() -> bool {
PANICKING.with(|s| s.get())
}
// An uninlined, unmangled function upon which to slap yer breakpoints
#[inline(never)]
#[no_mangle]
fn rust_panic(cause: Box<Any + Send>) -> ! {
rtdebug!("begin_unwind()");
unsafe {
let exception = box Exception {
uwe: uw::_Unwind_Exception {
exception_class: rust_exception_class(),
exception_cleanup: exception_cleanup,
private: [0; uw::unwinder_private_data_size],
},
cause: Some(cause),
};
let error = uw::_Unwind_RaiseException(mem::transmute(exception));
rtabort!("Could not unwind stack, error = {}", error as int)
}
extern fn exception_cleanup(_unwind_code: uw::_Unwind_Reason_Code,
exception: *mut uw::_Unwind_Exception) {
rtdebug!("exception_cleanup()");
unsafe {
let _: Box<Exception> = mem::transmute(exception);
}
}
}
// Rust's exception class identifier. This is used by personality routines to
// determine whether the exception was thrown by their own runtime.
fn rust_exception_class() -> uw::_Unwind_Exception_Class {
// M O Z \0 R U S T -- vendor, language
0x4d4f5a_00_52555354
}
// We could implement our personality routine in pure Rust; however, exception
// info decoding is tedious. More importantly, personality routines have to
// handle various platform quirks, which are not fun to maintain. For this
// reason, we attempt to reuse the personality routine of the C language:
// __gcc_personality_v0.
//
// Since C does not support exception catching, __gcc_personality_v0 simply
// always returns _URC_CONTINUE_UNWIND in the search phase, and always returns
// _URC_INSTALL_CONTEXT (i.e. "invoke cleanup code") in the cleanup phase.
//
// This is pretty close to Rust's exception handling approach, except that Rust
// does have a single "catch-all" handler at the bottom of each thread's stack.
// So we have two versions of the personality routine:
// - rust_eh_personality, used by all cleanup landing pads, which never catches,
// so the behavior of __gcc_personality_v0 is perfectly adequate there, and
// - rust_eh_personality_catch, used only by rust_try(), which always catches.
//
// Note, however, that for implementation simplicity, rust_eh_personality_catch
// lacks code to install a landing pad, so in order to obtain the exception
// object pointer (which it needs to return upstream), rust_try() employs another
// trick: it calls into the nested rust_try_inner(), whose landing pad does not
// resume unwinding. Instead, it extracts the exception pointer and performs a
// "normal" return.
//
// See also: rt/rust_try.ll
#[cfg(all(not(target_arch = "arm"),
not(all(windows, target_arch = "x86_64")),
not(test)))]
#[doc(hidden)]
pub mod eabi {
use rt::libunwind as uw;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
extern fn rust_eh_personality(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(version, actions, exception_class, ue_header,
context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
_version: c_int,
actions: uw::_Unwind_Action,
_exception_class: uw::_Unwind_Exception_Class,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
        if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
uw::_URC_INSTALL_CONTEXT
}
}
}
// iOS on armv7 uses SjLj exceptions and therefore requires a specialized
// personality routine: __gcc_personality_sj0
#[cfg(all(target_os = "ios", target_arch = "arm", not(test)))]
#[doc(hidden)]
pub mod eabi {
use rt::libunwind as uw;
use libc::c_int;
extern "C" {
fn __gcc_personality_sj0(version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_sj0(version, actions, exception_class, ue_header,
context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
_version: c_int,
actions: uw::_Unwind_Action,
_exception_class: uw::_Unwind_Exception_Class,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
        if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
unsafe {
__gcc_personality_sj0(_version, actions, _exception_class, _ue_header,
_context)
}
}
}
}
// ARM EHABI uses a slightly different personality routine signature,
// but otherwise works the same.
#[cfg(all(target_arch = "arm", not(target_os = "ios"), not(test)))]
#[doc(hidden)]
pub mod eabi {
use rt::libunwind as uw;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(state: uw::_Unwind_State,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
extern "C" fn rust_eh_personality(
state: uw::_Unwind_State,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(state, ue_header, context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
state: uw::_Unwind_State,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
if (state as c_int & uw::_US_ACTION_MASK as c_int)
== uw::_US_VIRTUAL_UNWIND_FRAME as c_int { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
uw::_URC_INSTALL_CONTEXT
}
}
}
// Win64 SEH (see http://msdn.microsoft.com/en-us/library/1eyas8tf.aspx)
//
// This looks a bit convoluted because rather than implementing a native SEH handler,
// GCC reuses the same personality routine as for the other architectures by wrapping it
// with an "API translator" layer (_GCC_specific_handler).
#[cfg(all(windows, target_arch = "x86_64", not(test)))]
#[doc(hidden)]
#[allow(non_camel_case_types, non_snake_case)]
pub mod eabi {
pub use self::EXCEPTION_DISPOSITION::*;
use rt::libunwind as uw;
use libc::{c_void, c_int};
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct EXCEPTION_RECORD;
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct CONTEXT;
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct DISPATCHER_CONTEXT;
#[repr(C)]
#[derive(Copy)]
pub enum EXCEPTION_DISPOSITION {
ExceptionContinueExecution,
ExceptionContinueSearch,
ExceptionNestedException,
ExceptionCollidedUnwind
}
type _Unwind_Personality_Fn =
extern "C" fn(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code;
extern "C" {
fn __gcc_personality_seh0(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT
) -> EXCEPTION_DISPOSITION;
fn _GCC_specific_handler(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT,
personality: _Unwind_Personality_Fn
) -> EXCEPTION_DISPOSITION;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
extern "C" fn rust_eh_personality(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT
) -> EXCEPTION_DISPOSITION
{
unsafe {
__gcc_personality_seh0(exceptionRecord, establisherFrame,
contextRecord, dispatcherContext)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT
) -> EXCEPTION_DISPOSITION
{
extern "C" fn inner(
_version: c_int,
actions: uw::_Unwind_Action,
_exception_class: uw::_Unwind_Exception_Class,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
            if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else
|
}
unsafe {
_GCC_specific_handler(exceptionRecord, establisherFrame,
contextRecord, dispatcherContext,
inner)
}
}
}
#[cfg(not(test))]
/// Entry point of panic from the libcore crate.
#[lang = "panic_fmt"]
pub extern fn rust_begin_unwind(msg: fmt::Arguments,
                                file: &'static str, line: uint) -> ! {
begin_unwind_fmt(msg, &(file, line))
}
/// The entry point for unwinding with a formatted message.
///
/// This is designed to reduce the amount of code required at the call
/// site as much as possible (so that `panic!()` has as low an impact
/// on (e.g.) the inlining of other functions as possible), by moving
/// the actual formatting into this shared place.
#[inline(never)] #[cold]
pub fn begin_unwind_fmt(msg: fmt::Arguments, file_line: &(&'static str, uint)) -> ! {
use fmt::Writer;
// We do two allocations here, unfortunately. But (a) they're
// required with the current scheme, and (b) we don't handle
// panic + OOM properly anyway (see comment in begin_unwind
// below).
let mut s = String::new();
let _ = write!(&mut s, "{}", msg);
begin_unwind_inner(box s, file_line)
}
/// This is the entry point of unwinding for panic!() and assert!().
#[inline(never)] #[cold] // avoid code bloat at the call sites as much as possible
pub fn begin_unwind<M: Any + Send>(msg: M, file_line: &(&'static str, uint)) -> ! {
// Note that this should be the only allocation performed in this code path.
// Currently this means that panic!() on OOM will invoke this code path,
// but then again we're not really ready for panic on OOM anyway. If
// we do start doing this, then we should propagate this allocation to
// be performed in the parent of this thread instead of the thread that's
// panicking.
// see below for why we do the `Any` coercion here.
begin_unwind_inner(box msg, file_line)
}
/// The core of the unwinding.
///
/// This is non-generic to avoid instantiation bloat in other crates
/// (which makes compilation of small crates noticeably slower). (Note:
/// we need the `Any` object anyway, we're not just creating it to
/// avoid being generic.)
///
/// Doing this split took the LLVM IR line counts of `fn main() { panic!()
/// }` from ~1900/3700 (-O/no opts) to 180/590.
#[inline(never)] #[cold] // this is the slow path, please never inline this
fn begin_unwind_inner(msg: Box<Any + Send>, file_line: &(&'static str, uint)) -> ! {
// Make sure the default failure handler is registered before we look at the
// callbacks.
static INIT: Once = ONCE_INIT;
INIT.call_once(|| unsafe { register(failure::on_fail); });
    // First, invoke the user-defined callbacks triggered on thread panic.
    //
    // By the time that we see a callback has been registered (by reading
    // CALLBACK_CNT), the actual callback itself may not have been stored yet,
    // so we just chalk it up to a race condition and move on to the next
    // callback. Additionally, CALLBACK_CNT may briefly be higher than
    // MAX_CALLBACKS, so we make sure to clamp it as necessary.
let callbacks = {
let amt = CALLBACK_CNT.load(Ordering::SeqCst);
&CALLBACKS[..cmp::min(amt, MAX_CALLBACKS)]
};
for cb in callbacks.iter() {
match cb.load(Ordering::SeqCst) {
0 => {}
n => {
let f: Callback = unsafe { mem::transmute(n) };
let (file, line) = *file_line;
f(&*msg, file, line);
}
}
};
// Now that we've run all the necessary unwind callbacks, we actually
// perform the unwinding.
if panicking() {
// If a thread panics while it's already unwinding then we
// have limited options. Currently our preference is to
// just abort. In the future we may consider resuming
// unwinding or otherwise exiting the thread cleanly.
rterrln!("thread panicked while panicking. aborting.");
unsafe { intrinsics::abort() }
}
PANICKING.with(|s| s.set(true));
rust_panic(msg);
}
/// Register a callback to be invoked when a thread unwinds.
///
/// This is an unsafe and experimental API which allows for an arbitrary
/// callback to be invoked when a thread panics. This callback is invoked on both
/// the initial unwinding and a double unwinding if one occurs. Additionally,
/// the local `Task` will be in place for the duration of the callback, and
///
|
{ // cleanup phase
uw::_URC_INSTALL_CONTEXT
}
|
conditional_block
|
unwind.rs
|
LLVM" (llvm.org/docs/ExceptionHandling.html) and
//! documents linked from it.
//! These are also good reads:
//! http://theofilos.cs.columbia.edu/blog/2013/09/22/base_abi/
//! http://monoinfinito.wordpress.com/series/exception-handling-in-c/
//! http://www.airs.com/blog/index.php?s=exception+frames
//!
//! ## A brief summary
//!
//! Exception handling happens in two phases: a search phase and a cleanup phase.
//!
//! In both phases the unwinder walks stack frames from top to bottom using
//! information from the stack frame unwind sections of the current process's
//! modules ("module" here refers to an OS module, i.e. an executable or a
//! dynamic library).
//!
//! For each stack frame, it invokes the associated "personality routine", whose
//! address is also stored in the unwind info section.
//!
//! In the search phase, the job of a personality routine is to examine the
//! exception object being thrown and to decide whether it should be caught at
//! that stack frame. Once the handler frame has been identified, the cleanup
//! phase begins.
//!
//! In the cleanup phase, personality routines invoke the cleanup code associated
//! with their stack frames (i.e. destructors). Once the stack has been unwound
//! down to the handler frame level, unwinding stops and the last personality
//! routine transfers control to its catch block.
//!
//! ## Frame unwind info registration
//!
//! Each module has its own frame unwind info section (usually ".eh_frame"), and
//! the unwinder needs to know about all of them in order for unwinding to be
//! able to cross module boundaries.
//!
//! On some platforms, like Linux, this is achieved by dynamically enumerating
//! the currently loaded modules via the dl_iterate_phdr() API and finding all
//! .eh_frame sections.
//!
//! Others, like Windows, require modules to actively register their unwind info
//! sections by calling the __register_frame_info() API at startup. In the latter
//! case it is essential that there is only one copy of the unwinder runtime in
//! the process. This is usually achieved by linking to the dynamic version of
//! the unwind runtime.
//!
//! Currently Rust uses the unwind runtime provided by libgcc.
use prelude::v1::*;
use any::Any;
use cell::Cell;
use cmp;
use failure;
use fmt;
use intrinsics;
use libc::c_void;
use mem;
use sync::atomic::{self, Ordering};
use sync::{Once, ONCE_INIT};
use rt::libunwind as uw;
struct Exception {
uwe: uw::_Unwind_Exception,
cause: Option<Box<Any + Send>>,
}
pub type Callback = fn(msg: &(Any + Send), file: &'static str, line: uint);
// Variables used for invoking callbacks when a thread starts to unwind.
//
// For more information, see below.
const MAX_CALLBACKS: uint = 16;
static CALLBACKS: [atomic::AtomicUsize; MAX_CALLBACKS] =
[atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT,
atomic::ATOMIC_USIZE_INIT, atomic::ATOMIC_USIZE_INIT];
static CALLBACK_CNT: atomic::AtomicUsize = atomic::ATOMIC_USIZE_INIT;
thread_local! { static PANICKING: Cell<bool> = Cell::new(false) }
/// Invoke a closure, capturing the cause of panic if one occurs.
///
/// This function will return `None` if the closure did not panic, and will
/// return `Some(cause)` if the closure panics. The `cause` returned is the
/// object with which panic was originally invoked.
///
/// This function is also unsafe for a variety of reasons:
///
/// * This is not safe to call in a nested fashion. The unwinding
/// interface for Rust is designed to have at most one try/catch block per
/// thread, not multiple. No runtime checking is currently performed to uphold
/// this invariant, so this function is not safe. A nested try/catch block
/// may result in corruption of the outer try/catch block's state, especially
/// if this is used within a thread itself.
///
/// * It is not sound to trigger unwinding while already unwinding. Rust threads
/// have runtime checks in place to ensure this invariant, but it is not
/// guaranteed that a rust thread is in place when invoking this function.
/// Unwinding twice can lead to resource leaks where some destructors are not
/// run.
pub unsafe fn try<F: FnOnce()>(f: F) -> Result<(), Box<Any + Send>> {
let mut f = Some(f);
let prev = PANICKING.with(|s| s.get());
PANICKING.with(|s| s.set(false));
let ep = rust_try(try_fn::<F>, &mut f as *mut _ as *mut c_void);
PANICKING.with(|s| s.set(prev));
return if ep.is_null() {
Ok(())
} else {
let my_ep = ep as *mut Exception;
rtdebug!("caught {}", (*my_ep).uwe.exception_class);
let cause = (*my_ep).cause.take();
uw::_Unwind_DeleteException(ep);
Err(cause.unwrap())
};
extern fn try_fn<F: FnOnce()>(opt_closure: *mut c_void) {
let opt_closure = opt_closure as *mut Option<F>;
unsafe { (*opt_closure).take().unwrap()(); }
}
#[link(name = "rustrt_native", kind = "static")]
#[cfg(not(test))]
extern {}
extern {
// Rust's try-catch
// When f(...) returns normally, the return value is null.
// When f(...) throws, the return value is a pointer to the caught
// exception object.
fn rust_try(f: extern fn(*mut c_void),
data: *mut c_void) -> *mut uw::_Unwind_Exception;
}
}
/// Determines whether the current thread is unwinding because of panic.
pub fn panicking() -> bool {
PANICKING.with(|s| s.get())
}
// An uninlined, unmangled function upon which to slap yer breakpoints
#[inline(never)]
#[no_mangle]
fn rust_panic(cause: Box<Any + Send>) -> ! {
rtdebug!("begin_unwind()");
unsafe {
let exception = box Exception {
uwe: uw::_Unwind_Exception {
exception_class: rust_exception_class(),
exception_cleanup: exception_cleanup,
private: [0; uw::unwinder_private_data_size],
},
cause: Some(cause),
};
let error = uw::_Unwind_RaiseException(mem::transmute(exception));
rtabort!("Could not unwind stack, error = {}", error as int)
}
extern fn exception_cleanup(_unwind_code: uw::_Unwind_Reason_Code,
exception: *mut uw::_Unwind_Exception) {
rtdebug!("exception_cleanup()");
unsafe {
let _: Box<Exception> = mem::transmute(exception);
}
}
}
// Rust's exception class identifier. This is used by personality routines to
// determine whether the exception was thrown by their own runtime.
fn rust_exception_class() -> uw::_Unwind_Exception_Class {
// M O Z \0 R U S T -- vendor, language
0x4d4f5a_00_52555354
}
// We could implement our personality routine in pure Rust; however, exception
// info decoding is tedious. More importantly, personality routines have to
// handle various platform quirks, which are not fun to maintain. For this
// reason, we attempt to reuse the personality routine of the C language:
// __gcc_personality_v0.
//
// Since C does not support exception catching, __gcc_personality_v0 simply
// always returns _URC_CONTINUE_UNWIND in the search phase, and always returns
// _URC_INSTALL_CONTEXT (i.e. "invoke cleanup code") in the cleanup phase.
//
// This is pretty close to Rust's exception handling approach, except that Rust
// does have a single "catch-all" handler at the bottom of each thread's stack.
// So we have two versions of the personality routine:
// - rust_eh_personality, used by all cleanup landing pads, which never catches,
// so the behavior of __gcc_personality_v0 is perfectly adequate there, and
// - rust_eh_personality_catch, used only by rust_try(), which always catches.
//
// Note, however, that for implementation simplicity, rust_eh_personality_catch
// lacks code to install a landing pad, so in order to obtain the exception
// object pointer (which it needs to return upstream), rust_try() employs another
// trick: it calls into the nested rust_try_inner(), whose landing pad does not
// resume unwinding. Instead, it extracts the exception pointer and performs a
// "normal" return.
//
// See also: rt/rust_try.ll
#[cfg(all(not(target_arch = "arm"),
not(all(windows, target_arch = "x86_64")),
not(test)))]
#[doc(hidden)]
pub mod eabi {
use rt::libunwind as uw;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
extern fn rust_eh_personality(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(version, actions, exception_class, ue_header,
context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
_version: c_int,
actions: uw::_Unwind_Action,
_exception_class: uw::_Unwind_Exception_Class,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
        if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
uw::_URC_INSTALL_CONTEXT
}
}
}
// iOS on armv7 uses SjLj exceptions and therefore requires a specialized
// personality routine: __gcc_personality_sj0
#[cfg(all(target_os = "ios", target_arch = "arm", not(test)))]
#[doc(hidden)]
pub mod eabi {
use rt::libunwind as uw;
use libc::c_int;
extern "C" {
fn __gcc_personality_sj0(version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn
|
(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_sj0(version, actions, exception_class, ue_header,
context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
_version: c_int,
actions: uw::_Unwind_Action,
_exception_class: uw::_Unwind_Exception_Class,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
        if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
unsafe {
__gcc_personality_sj0(_version, actions, _exception_class, _ue_header,
_context)
}
}
}
}
// ARM EHABI uses a slightly different personality routine signature,
// but otherwise works the same.
#[cfg(all(target_arch = "arm", not(target_os = "ios"), not(test)))]
#[doc(hidden)]
pub mod eabi {
use rt::libunwind as uw;
use libc::c_int;
extern "C" {
fn __gcc_personality_v0(state: uw::_Unwind_State,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context)
-> uw::_Unwind_Reason_Code;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
extern "C" fn rust_eh_personality(
state: uw::_Unwind_State,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
unsafe {
__gcc_personality_v0(state, ue_header, context)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
state: uw::_Unwind_State,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
if (state as c_int & uw::_US_ACTION_MASK as c_int)
== uw::_US_VIRTUAL_UNWIND_FRAME as c_int { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
uw::_URC_INSTALL_CONTEXT
}
}
}
// Win64 SEH (see http://msdn.microsoft.com/en-us/library/1eyas8tf.aspx)
//
// This looks a bit convoluted because rather than implementing a native SEH handler,
// GCC reuses the same personality routine as for the other architectures by wrapping it
// with an "API translator" layer (_GCC_specific_handler).
#[cfg(all(windows, target_arch = "x86_64", not(test)))]
#[doc(hidden)]
#[allow(non_camel_case_types, non_snake_case)]
pub mod eabi {
pub use self::EXCEPTION_DISPOSITION::*;
use rt::libunwind as uw;
use libc::{c_void, c_int};
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct EXCEPTION_RECORD;
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct CONTEXT;
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct DISPATCHER_CONTEXT;
#[repr(C)]
#[derive(Copy)]
pub enum EXCEPTION_DISPOSITION {
ExceptionContinueExecution,
ExceptionContinueSearch,
ExceptionNestedException,
ExceptionCollidedUnwind
}
type _Unwind_Personality_Fn =
extern "C" fn(
version: c_int,
actions: uw::_Unwind_Action,
exception_class: uw::_Unwind_Exception_Class,
ue_header: *mut uw::_Unwind_Exception,
context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code;
extern "C" {
fn __gcc_personality_seh0(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT
) -> EXCEPTION_DISPOSITION;
fn _GCC_specific_handler(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT,
personality: _Unwind_Personality_Fn
) -> EXCEPTION_DISPOSITION;
}
#[lang="eh_personality"]
#[no_mangle] // referenced from rust_try.ll
extern "C" fn rust_eh_personality(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT
) -> EXCEPTION_DISPOSITION
{
unsafe {
__gcc_personality_seh0(exceptionRecord, establisherFrame,
contextRecord, dispatcherContext)
}
}
#[no_mangle] // referenced from rust_try.ll
pub extern "C" fn rust_eh_personality_catch(
exceptionRecord: *mut EXCEPTION_RECORD,
establisherFrame: *mut c_void,
contextRecord: *mut CONTEXT,
dispatcherContext: *mut DISPATCHER_CONTEXT
) -> EXCEPTION_DISPOSITION
{
extern "C" fn inner(
_version: c_int,
actions: uw::_Unwind_Action,
_exception_class: uw::_Unwind_Exception_Class,
_ue_header: *mut uw::_Unwind_Exception,
_context: *mut uw::_Unwind_Context
) -> uw::_Unwind_Reason_Code
{
            if (actions as c_int & uw::_UA_SEARCH_PHASE as c_int) != 0 { // search phase
uw::_URC_HANDLER_FOUND // catch!
}
else { // cleanup phase
uw::_URC_INSTALL_CONTEXT
}
}
unsafe {
_GCC_specific_handler(exceptionRecord, establisherFrame,
contextRecord, dispatcherContext,
inner)
}
}
}
#[cfg(not(test))]
/// Entry point of panic from the libcore crate.
#[lang = "panic_fmt"]
pub extern fn rust_begin_unwind(msg: fmt::Arguments,
                                file: &'static str, line: uint) -> ! {
begin_unwind_fmt(msg, &(file, line))
}
/// The entry point for unwinding with a formatted message.
///
/// This is designed to reduce the amount of code required at the call
/// site as much as possible (so that `panic!()` has as low an impact
/// on (e.g.) the inlining of other functions as possible), by moving
/// the actual formatting into this shared place.
#[inline(never)] #[cold]
pub fn begin_unwind_fmt(msg: fmt::Arguments, file_line: &(&'static str, uint)) -> ! {
use fmt::Writer;
// We do two allocations here, unfortunately. But (a) they're
// required with the current scheme, and (b) we don't handle
// panic + OOM properly anyway (see comment in begin_unwind
// below).
let mut s = String::new();
let _ = write!(&mut s, "{}", msg);
begin_unwind_inner(box s, file_line)
}
/// This is the entry point of unwinding for panic!() and assert!().
#[inline(never)] #[cold] // avoid code bloat at the call sites as much as possible
pub fn begin_unwind<M: Any + Send>(msg: M, file_line: &(&'static str, uint)) -> ! {
// Note that this should be the only allocation performed in this code path.
// Currently this means that panic!() on OOM will invoke this code path,
// but then again we're not really ready for panic on OOM anyway. If
// we do start doing this, then we should propagate this allocation to
// be performed in the parent of this thread instead of the thread that's
// panicking.
// see below for why we do the `Any` coercion here.
begin_unwind_inner(box msg, file_line)
}
/// The core of the unwinding.
///
/// This is non-generic to avoid instantiation bloat in other crates
/// (which makes compilation of small crates noticeably slower). (Note:
/// we need the `Any` object anyway, we're not just creating it to
/// avoid being generic.)
///
/// Doing this split took the LLVM IR line counts of `fn main() { panic!()
/// }` from ~1900/3700 (-O/no opts) to 180/590.
#[inline(never)] #[cold] // this is the slow path, please never inline this
fn begin_unwind_inner(msg: Box<Any + Send>, file_line: &(&'static str, uint)) -> ! {
// Make sure the default failure handler is registered before we look at the
// callbacks.
static INIT: Once = ONCE_INIT;
INIT.call_once(|| unsafe { register(failure::on_fail); });
    // First, invoke the user-defined callbacks triggered on thread panic.
    //
    // By the time that we see a callback has been registered (by reading
    // CALLBACK_CNT), the actual callback itself may not have been stored yet,
    // so we just chalk it up to a race condition and move on to the next
    // callback. Additionally, CALLBACK_CNT may briefly be higher than
    // MAX_CALLBACKS, so we make sure to clamp it as necessary.
let callbacks = {
let amt = CALLBACK_CNT.load(Ordering::SeqCst);
&CALLBACKS[..cmp::min(amt, MAX_CALLBACKS)]
};
for cb in callbacks.iter() {
match cb.load(Ordering::SeqCst) {
0 => {}
n => {
let f: Callback = unsafe { mem::transmute(n) };
let (file, line) = *file_line;
f(&*msg, file, line);
}
}
};
// Now that we've run all the necessary unwind callbacks, we actually
// perform the unwinding.
if panicking() {
// If a thread panics while it's already unwinding then we
// have limited options. Currently our preference is to
// just abort. In the future we may consider resuming
// unwinding or otherwise exiting the thread cleanly.
rterrln!("thread panicked while panicking. aborting.");
unsafe { intrinsics::abort() }
}
PANICKING.with(|s| s.set(true));
rust_panic(msg);
}
/// Register a callback to be invoked when a thread unwinds.
///
/// This is an unsafe and experimental API which allows for an arbitrary
/// callback to be invoked when a thread panics. This callback is invoked on both
/// the initial unwinding and a double unwinding if one occurs. Additionally,
/// the local `Task` will be in place for the duration of the callback, and
///
|
rust_eh_personality
|
identifier_name
|
lib.rs
|
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(exit_status)]
#![feature(set_stdio)]
#![feature(libc)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(std_misc)]
#![feature(test)]
#![feature(unicode)]
#![feature(path_ext)]
#![feature(path_relative_from)]
#![feature(slice_patterns)]
extern crate arena;
extern crate getopts;
extern crate libc;
extern crate rustc;
extern crate rustc_trans;
extern crate rustc_driver;
extern crate rustc_resolve;
extern crate rustc_lint;
extern crate rustc_back;
extern crate serialize;
extern crate syntax;
extern crate test as testing;
extern crate rustc_unicode;
#[macro_use] extern crate log;
extern crate serialize as rustc_serialize; // used by deriving
use std::cell::RefCell;
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{self, Read, Write};
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::mpsc::channel;
use externalfiles::ExternalHtml;
use serialize::Decodable;
use serialize::json::{self, Json};
use rustc::session::search_paths::SearchPaths;
// reexported from `clean` so it can be easily updated with the mod itself
pub use clean::SCHEMA_VERSION;
#[macro_use]
pub mod externalfiles;
pub mod clean;
pub mod core;
pub mod doctree;
pub mod fold;
pub mod html {
pub mod highlight;
pub mod escape;
pub mod item_type;
pub mod format;
pub mod layout;
pub mod markdown;
pub mod render;
pub mod toc;
}
pub mod markdown;
pub mod passes;
pub mod plugins;
pub mod visit_ast;
pub mod test;
mod flock;
type Pass = (&'static str, // name
fn(clean::Crate) -> plugins::PluginResult, // fn
&'static str); // description
const PASSES: &'static [Pass] = &[
("strip-hidden", passes::strip_hidden,
"strips all doc(hidden) items from the output"),
("unindent-comments", passes::unindent_comments,
"removes excess indentation on comments in order for markdown to like it"),
("collapse-docs", passes::collapse_docs,
"concatenates all document attributes into one document attribute"),
("strip-private", passes::strip_private,
"strips all private items from a crate which cannot be seen externally"),
];
const DEFAULT_PASSES: &'static [&'static str] = &[
"strip-hidden",
"strip-private",
"collapse-docs",
"unindent-comments",
];
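// Editor's note (added): illustrative command lines showing how the pass tables
// above surface through the options defined in `opts()` and handled in
// `main_args()` below; the input file name is hypothetical.
//
//     rustdoc --passes list
//     rustdoc --no-defaults --passes strip-hidden --passes collapse-docs src/lib.rs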
thread_local!(pub static ANALYSISKEY: Rc<RefCell<Option<core::CrateAnalysis>>> = {
Rc::new(RefCell::new(None))
});
struct Output {
krate: clean::Crate,
json_plugins: Vec<plugins::PluginJson>,
passes: Vec<String>,
}
pub fn main() {
const STACK_SIZE: usize = 32000000; // 32MB
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
let s = env::args().collect::<Vec<_>>();
main_args(&s)
}).unwrap().join().unwrap();
env::set_exit_status(res as i32);
}
pub fn opts() -> Vec<getopts::OptGroup> {
use getopts::*;
vec!(
optflag("h", "help", "show this help message"),
optflag("V", "version", "print rustdoc's version"),
optflag("v", "verbose", "use verbose output"),
optopt("r", "input-format", "the input type of the specified file",
"[rust|json]"),
optopt("w", "output-format", "the output type to write",
"[html|json]"),
optopt("o", "output", "where to place the output", "PATH"),
optopt("", "crate-name", "specify the name of this crate", "NAME"),
optmulti("L", "library-path", "directory to add to crate search path",
"DIR"),
optmulti("", "cfg", "pass a --cfg to rustc", ""),
optmulti("", "extern", "pass an --extern to rustc", "NAME=PATH"),
optmulti("", "plugin-path", "directory to load plugins from", "DIR"),
optmulti("", "passes", "list of passes to also run, you might want \
to pass it multiple times; a value of `list` \
will print available passes",
"PASSES"),
optmulti("", "plugins", "space separated list of plugins to also load",
"PLUGINS"),
optflag("", "no-defaults", "don't run the default passes"),
optflag("", "test", "run code examples as tests"),
optmulti("", "test-args", "arguments to pass to the test runner",
"ARGS"),
optopt("", "target", "target triple to document", "TRIPLE"),
optmulti("", "markdown-css", "CSS files to include via <link> in a rendered Markdown file",
"FILES"),
optmulti("", "html-in-header",
"files to include inline in the <head> section of a rendered Markdown file \
or generated documentation",
"FILES"),
optmulti("", "html-before-content",
"files to include inline between <body> and the content of a rendered \
Markdown file or generated documentation",
"FILES"),
optmulti("", "html-after-content",
"files to include inline between the content and </body> of a rendered \
Markdown file or generated documentation",
"FILES"),
optopt("", "markdown-playground-url",
"URL to send code snippets to", "URL"),
optflag("", "markdown-no-toc", "don't include table of contents")
)
}
pub fn usage(argv0: &str) {
println!("{}",
getopts::usage(&format!("{} [options] <input>", argv0),
&opts()));
}
pub fn main_args(args: &[String]) -> isize {
let matches = match getopts::getopts(args.tail(), &opts()) {
Ok(m) => m,
Err(err) => {
println!("{}", err);
return 1;
}
};
if matches.opt_present("h") || matches.opt_present("help") {
usage(&args[0]);
return 0;
} else if matches.opt_present("version") {
rustc_driver::version("rustdoc", &matches);
return 0;
}
if matches.opt_strs("passes") == ["list"] {
println!("Available passes for running rustdoc:");
for &(name, _, description) in PASSES {
println!("{:>20} - {}", name, description);
}
println!("{}", "\nDefault passes for rustdoc:"); // FIXME: #9970
for &name in DEFAULT_PASSES {
println!("{:>20}", name);
}
return 0;
}
if matches.free.is_empty() {
println!("expected an input file to act on");
return 1;
}
if matches.free.len() > 1 {
println!("only one input file may be specified");
return 1;
}
let input = &matches.free[0];
let mut libs = SearchPaths::new();
for s in &matches.opt_strs("L") {
libs.add_path(s);
}
let externs = match parse_externs(&matches) {
Ok(ex) => ex,
Err(err) => {
println!("{}", err);
return 1;
}
};
let test_args = matches.opt_strs("test-args");
let test_args: Vec<String> = test_args.iter()
.flat_map(|s| s.split_whitespace())
.map(|s| s.to_string())
.collect();
let should_test = matches.opt_present("test");
let markdown_input = input.ends_with(".md") || input.ends_with(".markdown");
let output = matches.opt_str("o").map(|s| PathBuf::from(&s));
let cfgs = matches.opt_strs("cfg");
let external_html = match ExternalHtml::load(
&matches.opt_strs("html-in-header"),
&matches.opt_strs("html-before-content"),
&matches.opt_strs("html-after-content")) {
Some(eh) => eh,
None => return 3
};
let crate_name = matches.opt_str("crate-name");
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, libs, externs, test_args)
}
(true, false) => {
return test::run(input, cfgs, libs, externs, test_args, crate_name)
}
(false, true) => return markdown::render(input,
output.unwrap_or(PathBuf::from("doc")),
&matches, &external_html,
!matches.opt_present("markdown-no-toc")),
(false, false) => {}
}
let out = match acquire_input(input, externs, &matches) {
Ok(out) => out,
Err(s) => {
println!("input error: {}", s);
return 1;
}
};
let Output { krate, json_plugins, passes, } = out;
info!("going to format");
match matches.opt_str("w").as_ref().map(|s| &**s) {
Some("html") | None => {
match html::render::run(krate, &external_html,
output.unwrap_or(PathBuf::from("doc")),
passes.into_iter().collect()) {
Ok(()) => {}
Err(e) => panic!("failed to generate documentation: {}", e),
}
}
Some("json") => {
match json_output(krate, json_plugins,
output.unwrap_or(PathBuf::from("doc.json"))) {
Ok(()) => {}
Err(e) => panic!("failed to write json: {}", e),
}
}
Some(s) => {
println!("unknown output format: {}", s);
return 1;
}
}
return 0;
}
/// Looks inside the command line arguments to extract the relevant input format
/// and files and then generates the necessary rustdoc output for formatting.
fn acquire_input(input: &str,
externs: core::Externs,
matches: &getopts::Matches) -> Result<Output, String> {
match matches.opt_str("r").as_ref().map(|s| &**s) {
Some("rust") => Ok(rust_input(input, externs, matches)),
Some("json") => json_input(input),
Some(s) => Err(format!("unknown input format: {}", s)),
None => {
if input.ends_with(".json") {
json_input(input)
} else {
Ok(rust_input(input, externs, matches))
}
}
}
}
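// Illustrative sketch of the dispatch rule implemented by `acquire_input`
// above: an explicit `-r`/`--input-format` choice wins; otherwise the file
// extension decides, defaulting to Rust source.
#[allow(dead_code)]
fn pick_input_format(explicit: Option<&str>, input: &str) -> Result<&'static str, String> {
    match explicit {
        Some("rust") => Ok("rust"),
        Some("json") => Ok("json"),
        Some(other) => Err(format!("unknown input format: {}", other)),
        None if input.ends_with(".json") => Ok("json"),
        None => Ok("rust"),
    }
}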
/// Extracts `--extern CRATE=PATH` arguments from `matches` and
/// returns a `HashMap` mapping crate names to their paths or else an
/// error message.
fn parse_externs(matches: &getopts::Matches) -> Result<core::Externs, String> {
let mut externs = HashMap::new();
for arg in &matches.opt_strs("extern") {
let mut parts = arg.splitn(2, '=');
let name = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must not be empty".to_string());
}
};
let location = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must be of the format `foo=bar`".to_string());
}
};
let name = name.to_string();
externs.entry(name).or_insert(vec![]).push(location.to_string());
}
Ok(externs)
}
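// Illustrative sketch of what `parse_externs` above builds, using only std
// types (the real `core::Externs` is assumed to be a compatible map from crate
// name to candidate paths): repeated `--extern foo=...` flags accumulate under
// the same key.
#[allow(dead_code)]
fn collect_externs_sketch(args: &[&str]) -> HashMap<String, Vec<String>> {
    let mut map: HashMap<String, Vec<String>> = HashMap::new();
    for arg in args {
        // `splitn(2, '=')` keeps any later '=' characters inside the path.
        let mut parts = arg.splitn(2, '=');
        if let (Some(name), Some(path)) = (parts.next(), parts.next()) {
            map.entry(name.to_string()).or_insert(vec![]).push(path.to_string());
        }
    }
    map
}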
/// Interprets the input file as a rust source file, passing it through the
/// compiler all the way through the analysis passes. The rustdoc output is then
/// generated from the cleaned AST of the crate.
///
/// This form of input will run all of the plug/cleaning passes
#[allow(deprecated)] // for old Path in plugin manager
fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matches) -> Output {
let mut default_passes = !matches.opt_present("no-defaults");
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins");
// First, parse the crate and extract all relevant information.
let mut paths = SearchPaths::new();
for s in &matches.opt_strs("L") {
paths.add_path(s);
}
let cfgs = matches.opt_strs("cfg");
let triple = matches.opt_str("target");
let cr = PathBuf::from(cratefile);
info!("starting to run rustc");
let (tx, rx) = channel();
std::thread::spawn(move || {
use rustc::session::config::Input;
tx.send(core::run_core(paths, cfgs, externs, Input::File(cr),
triple)).unwrap();
}).join().map_err(|_| "rustc failed").unwrap();
let (mut krate, analysis) = rx.recv().unwrap();
info!("finished with rustc");
let mut analysis = Some(analysis);
ANALYSISKEY.with(|s| {
*s.borrow_mut() = analysis.take();
});
match matches.opt_str("crate-name") {
Some(name) => krate.name = name,
None => {}
}
// Process all of the crate attributes, extracting plugin metadata along
// with the passes which we are supposed to run.
match krate.module.as_ref().unwrap().doc_list() {
Some(nested) => {
for inner in nested {
match *inner {
clean::Word(ref x)
if "no_default_passes" == *x => {
default_passes = false;
}
clean::NameValue(ref x, ref value)
if "passes" == *x => {
for pass in value.split_whitespace() {
passes.push(pass.to_string());
}
}
clean::NameValue(ref x, ref value)
if "plugins" == *x => {
for p in value.split_whitespace() {
plugins.push(p.to_string());
}
}
_ => {}
}
}
}
None => {}
}
if default_passes {
for name in DEFAULT_PASSES.iter().rev() {
passes.insert(0, name.to_string());
}
}
// Load all plugins/passes into a PluginManager
let path = matches.opt_str("plugin-path")
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(PathBuf::from(path));
for pass in &passes {
let plugin = match PASSES.iter()
.position(|&(p, _, _)| {
p == *pass
}) {
Some(i) => PASSES[i].1,
None => {
error!("unknown pass {}, skipping", *pass);
continue
},
};
pm.add_plugin(plugin);
}
info!("loading plugins...");
for pname in plugins {
pm.load_plugin(pname);
}
// Run everything!
info!("Executing passes/plugins");
let (krate, json) = pm.run_plugins(krate);
return Output { krate: krate, json_plugins: json, passes: passes, };
}
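// Illustrative sketch of the pass-name resolution done inside `rust_input`:
// look the name up in PASSES and return its function pointer, or None for an
// unknown pass (which the real code logs and skips).
#[allow(dead_code)]
fn find_pass_fn(name: &str) -> Option<fn(clean::Crate) -> plugins::PluginResult> {
    PASSES.iter()
          .find(|&&(p, _, _)| p == name)
          .map(|&(_, f, _)| f)
}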
/// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output.
fn json_input(input: &str) -> Result<Output, String> {
let mut bytes = Vec::new();
match File::open(input).and_then(|mut f| f.read_to_end(&mut bytes)) {
Ok(_) => {}
Err(e) => return Err(format!("couldn't open {}: {}", input, e)),
};
match json::from_reader(&mut &bytes[..]) {
Err(s) => Err(format!("{:?}", s)),
Ok(Json::Object(obj)) => {
let mut obj = obj;
// Make sure the schema is what we expect
match obj.remove(&"schema".to_string()) {
Some(Json::String(version)) => {
if version != SCHEMA_VERSION {
return Err(format!(
"sorry, but I only understand version {}",
SCHEMA_VERSION))
}
}
Some(..) => return Err("malformed json".to_string()),
None => return Err("expected a schema version".to_string()),
}
let krate = match obj.remove(&"crate".to_string()) {
Some(json) => {
let mut d = json::Decoder::new(json);
Decodable::decode(&mut d).unwrap()
}
None => return Err("malformed json".to_string()),
};
// FIXME: this should read from the "plugins" field, but currently
// Json doesn't implement decodable...
let plugin_output = Vec::new();
Ok(Output { krate: krate, json_plugins: plugin_output, passes: Vec::new(), })
}
Ok(..) => {
Err("malformed json input: expected an object at the \
top".to_string())
}
}
}
/// Outputs the crate/plugin json as a giant json blob at the specified
/// destination.
fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson>,
dst: PathBuf) -> io::Result<()>
|
{
// {
// "schema": version,
// "crate": { parsed crate ... },
// "plugins": { output of plugins ... }
// }
let mut json = std::collections::BTreeMap::new();
json.insert("schema".to_string(), Json::String(SCHEMA_VERSION.to_string()));
let plugins_json = res.into_iter()
.filter_map(|opt| {
match opt {
None => None,
Some((string, json)) => {
Some((string.to_string(), json))
}
}
}).collect();
// FIXME #8335: yuck, Rust -> str -> JSON round trip! No way to .encode
// straight to the Rust JSON representation.
|
identifier_body
|
|
lib.rs
|
terms.
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "rustdoc"]
#![unstable(feature = "rustdoc")]
#![staged_api]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(exit_status)]
#![feature(set_stdio)]
#![feature(libc)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(std_misc)]
#![feature(test)]
#![feature(unicode)]
#![feature(path_ext)]
#![feature(path_relative_from)]
#![feature(slice_patterns)]
extern crate arena;
extern crate getopts;
extern crate libc;
extern crate rustc;
extern crate rustc_trans;
extern crate rustc_driver;
extern crate rustc_resolve;
extern crate rustc_lint;
extern crate rustc_back;
extern crate serialize;
extern crate syntax;
extern crate test as testing;
extern crate rustc_unicode;
#[macro_use] extern crate log;
extern crate serialize as rustc_serialize; // used by deriving
use std::cell::RefCell;
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{self, Read, Write};
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::mpsc::channel;
use externalfiles::ExternalHtml;
use serialize::Decodable;
use serialize::json::{self, Json};
use rustc::session::search_paths::SearchPaths;
// reexported from `clean` so it can be easily updated with the mod itself
pub use clean::SCHEMA_VERSION;
#[macro_use]
pub mod externalfiles;
pub mod clean;
pub mod core;
pub mod doctree;
pub mod fold;
pub mod html {
pub mod highlight;
pub mod escape;
pub mod item_type;
pub mod format;
pub mod layout;
pub mod markdown;
pub mod render;
pub mod toc;
}
pub mod markdown;
pub mod passes;
pub mod plugins;
pub mod visit_ast;
pub mod test;
mod flock;
type Pass = (&'static str, // name
fn(clean::Crate) -> plugins::PluginResult, // fn
&'static str); // description
const PASSES: &'static [Pass] = &[
("strip-hidden", passes::strip_hidden,
"strips all doc(hidden) items from the output"),
("unindent-comments", passes::unindent_comments,
"removes excess indentation on comments in order for markdown to like it"),
("collapse-docs", passes::collapse_docs,
"concatenates all document attributes into one document attribute"),
("strip-private", passes::strip_private,
"strips all private items from a crate which cannot be seen externally"),
];
const DEFAULT_PASSES: &'static [&'static str] = &[
"strip-hidden",
"strip-private",
"collapse-docs",
"unindent-comments",
];
thread_local!(pub static ANALYSISKEY: Rc<RefCell<Option<core::CrateAnalysis>>> = {
Rc::new(RefCell::new(None))
});
struct Output {
krate: clean::Crate,
json_plugins: Vec<plugins::PluginJson>,
passes: Vec<String>,
}
pub fn main() {
const STACK_SIZE: usize = 32000000; // 32MB
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
let s = env::args().collect::<Vec<_>>();
main_args(&s)
}).unwrap().join().unwrap();
env::set_exit_status(res as i32);
}
pub fn opts() -> Vec<getopts::OptGroup> {
use getopts::*;
vec!(
optflag("h", "help", "show this help message"),
optflag("V", "version", "print rustdoc's version"),
optflag("v", "verbose", "use verbose output"),
optopt("r", "input-format", "the input type of the specified file",
"[rust|json]"),
optopt("w", "output-format", "the output type to write",
"[html|json]"),
optopt("o", "output", "where to place the output", "PATH"),
optopt("", "crate-name", "specify the name of this crate", "NAME"),
optmulti("L", "library-path", "directory to add to crate search path",
"DIR"),
optmulti("", "cfg", "pass a --cfg to rustc", ""),
optmulti("", "extern", "pass an --extern to rustc", "NAME=PATH"),
optmulti("", "plugin-path", "directory to load plugins from", "DIR"),
optmulti("", "passes", "list of passes to also run, you might want \
to pass it multiple times; a value of `list` \
will print available passes",
"PASSES"),
optmulti("", "plugins", "space separated list of plugins to also load",
"PLUGINS"),
optflag("", "no-defaults", "don't run the default passes"),
optflag("", "test", "run code examples as tests"),
optmulti("", "test-args", "arguments to pass to the test runner",
"ARGS"),
optopt("", "target", "target triple to document", "TRIPLE"),
optmulti("", "markdown-css", "CSS files to include via <link> in a rendered Markdown file",
"FILES"),
optmulti("", "html-in-header",
"files to include inline in the <head> section of a rendered Markdown file \
or generated documentation",
"FILES"),
optmulti("", "html-before-content",
|
"FILES"),
optmulti("", "html-after-content",
"files to include inline between the content and </body> of a rendered \
Markdown file or generated documentation",
"FILES"),
optopt("", "markdown-playground-url",
"URL to send code snippets to", "URL"),
optflag("", "markdown-no-toc", "don't include table of contents")
)
}
pub fn usage(argv0: &str) {
println!("{}",
getopts::usage(&format!("{} [options] <input>", argv0),
&opts()));
}
pub fn main_args(args: &[String]) -> isize {
let matches = match getopts::getopts(args.tail(), &opts()) {
Ok(m) => m,
Err(err) => {
println!("{}", err);
return 1;
}
};
if matches.opt_present("h") || matches.opt_present("help") {
usage(&args[0]);
return 0;
} else if matches.opt_present("version") {
rustc_driver::version("rustdoc", &matches);
return 0;
}
if matches.opt_strs("passes") == ["list"] {
println!("Available passes for running rustdoc:");
for &(name, _, description) in PASSES {
println!("{:>20} - {}", name, description);
}
println!("{}", "\nDefault passes for rustdoc:"); // FIXME: #9970
for &name in DEFAULT_PASSES {
println!("{:>20}", name);
}
return 0;
}
if matches.free.is_empty() {
println!("expected an input file to act on");
return 1;
}
if matches.free.len() > 1 {
println!("only one input file may be specified");
return 1;
}
let input = &matches.free[0];
let mut libs = SearchPaths::new();
for s in &matches.opt_strs("L") {
libs.add_path(s);
}
let externs = match parse_externs(&matches) {
Ok(ex) => ex,
Err(err) => {
println!("{}", err);
return 1;
}
};
let test_args = matches.opt_strs("test-args");
let test_args: Vec<String> = test_args.iter()
.flat_map(|s| s.split_whitespace())
.map(|s| s.to_string())
.collect();
let should_test = matches.opt_present("test");
let markdown_input = input.ends_with(".md") || input.ends_with(".markdown");
let output = matches.opt_str("o").map(|s| PathBuf::from(&s));
let cfgs = matches.opt_strs("cfg");
let external_html = match ExternalHtml::load(
&matches.opt_strs("html-in-header"),
&matches.opt_strs("html-before-content"),
&matches.opt_strs("html-after-content")) {
Some(eh) => eh,
None => return 3
};
let crate_name = matches.opt_str("crate-name");
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, libs, externs, test_args)
}
(true, false) => {
return test::run(input, cfgs, libs, externs, test_args, crate_name)
}
(false, true) => return markdown::render(input,
output.unwrap_or(PathBuf::from("doc")),
&matches, &external_html,
!matches.opt_present("markdown-no-toc")),
(false, false) => {}
}
let out = match acquire_input(input, externs, &matches) {
Ok(out) => out,
Err(s) => {
println!("input error: {}", s);
return 1;
}
};
let Output { krate, json_plugins, passes, } = out;
info!("going to format");
match matches.opt_str("w").as_ref().map(|s| &**s) {
Some("html") | None => {
match html::render::run(krate, &external_html,
output.unwrap_or(PathBuf::from("doc")),
passes.into_iter().collect()) {
Ok(()) => {}
Err(e) => panic!("failed to generate documentation: {}", e),
}
}
Some("json") => {
match json_output(krate, json_plugins,
output.unwrap_or(PathBuf::from("doc.json"))) {
Ok(()) => {}
Err(e) => panic!("failed to write json: {}", e),
}
}
Some(s) => {
println!("unknown output format: {}", s);
return 1;
}
}
return 0;
}
/// Looks inside the command line arguments to extract the relevant input format
/// and files and then generates the necessary rustdoc output for formatting.
fn acquire_input(input: &str,
externs: core::Externs,
matches: &getopts::Matches) -> Result<Output, String> {
match matches.opt_str("r").as_ref().map(|s| &**s) {
Some("rust") => Ok(rust_input(input, externs, matches)),
Some("json") => json_input(input),
Some(s) => Err(format!("unknown input format: {}", s)),
None => {
if input.ends_with(".json") {
json_input(input)
} else {
Ok(rust_input(input, externs, matches))
}
}
}
}
/// Extracts `--extern CRATE=PATH` arguments from `matches` and
/// returns a `HashMap` mapping crate names to their paths or else an
/// error message.
fn parse_externs(matches: &getopts::Matches) -> Result<core::Externs, String> {
let mut externs = HashMap::new();
for arg in &matches.opt_strs("extern") {
let mut parts = arg.splitn(2, '=');
let name = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must not be empty".to_string());
}
};
let location = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must be of the format `foo=bar`".to_string());
}
};
let name = name.to_string();
externs.entry(name).or_insert(vec![]).push(location.to_string());
}
Ok(externs)
}
/// Interprets the input file as a rust source file, passing it through the
/// compiler all the way through the analysis passes. The rustdoc output is then
/// generated from the cleaned AST of the crate.
///
/// This form of input will run all of the plug/cleaning passes
#[allow(deprecated)] // for old Path in plugin manager
fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matches) -> Output {
let mut default_passes = !matches.opt_present("no-defaults");
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins");
// First, parse the crate and extract all relevant information.
let mut paths = SearchPaths::new();
for s in &matches.opt_strs("L") {
paths.add_path(s);
}
let cfgs = matches.opt_strs("cfg");
let triple = matches.opt_str("target");
let cr = PathBuf::from(cratefile);
info!("starting to run rustc");
let (tx, rx) = channel();
std::thread::spawn(move || {
use rustc::session::config::Input;
tx.send(core::run_core(paths, cfgs, externs, Input::File(cr),
triple)).unwrap();
}).join().map_err(|_| "rustc failed").unwrap();
let (mut krate, analysis) = rx.recv().unwrap();
info!("finished with rustc");
let mut analysis = Some(analysis);
ANALYSISKEY.with(|s| {
*s.borrow_mut() = analysis.take();
});
match matches.opt_str("crate-name") {
Some(name) => krate.name = name,
None => {}
}
// Process all of the crate attributes, extracting plugin metadata along
// with the passes which we are supposed to run.
match krate.module.as_ref().unwrap().doc_list() {
Some(nested) => {
for inner in nested {
match *inner {
clean::Word(ref x)
if "no_default_passes" == *x => {
default_passes = false;
}
clean::NameValue(ref x, ref value)
if "passes" == *x => {
for pass in value.split_whitespace() {
passes.push(pass.to_string());
}
}
clean::NameValue(ref x, ref value)
if "plugins" == *x => {
for p in value.split_whitespace() {
plugins.push(p.to_string());
}
}
_ => {}
}
}
}
None => {}
}
if default_passes {
for name in DEFAULT_PASSES.iter().rev() {
passes.insert(0, name.to_string());
}
}
// Load all plugins/passes into a PluginManager
let path = matches.opt_str("plugin-path")
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(PathBuf::from(path));
for pass in &passes {
let plugin = match PASSES.iter()
.position(|&(p, _, _)| {
p == *pass
}) {
Some(i) => PASSES[i].1,
None => {
error!("unknown pass {}, skipping", *pass);
continue
},
};
pm.add_plugin(plugin);
}
info!("loading plugins...");
for pname in plugins {
pm.load_plugin(pname);
}
// Run everything!
info!("Executing passes/plugins");
let (krate, json) = pm.run_plugins(krate);
return Output { krate: krate, json_plugins: json, passes: passes, };
}
/// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output.
fn json_input(input: &str) -> Result<Output, String> {
let mut bytes = Vec::new();
match File::open(input).and_then(|mut f| f.read_to_end(&mut bytes)) {
Ok(_) => {}
Err(e) => return Err(format!("couldn't open {}: {}", input, e)),
};
match json::from_reader(&mut &bytes[..]) {
Err(s) => Err(format!("{:?}", s)),
Ok(Json::Object(obj)) => {
let mut obj = obj;
// Make sure the schema is what we expect
match obj.remove(&"schema".to_string()) {
Some(Json::String(version)) => {
if version != SCHEMA_VERSION {
return Err(format!(
"sorry, but I only understand version {}",
SCHEMA_VERSION))
}
}
Some(..) => return Err("malformed json".to_string()),
None => return Err("expected a schema version".to_string()),
}
let krate = match obj.remove(&"crate".to_string()) {
Some(json) => {
let mut d = json::Decoder::new(json);
Decodable::decode(&mut d).unwrap()
}
None => return Err("malformed json".to_string()),
};
// FIXME: this should read from the "plugins" field, but currently
// Json doesn't implement decodable...
let plugin_output = Vec::new();
Ok(Output { krate: krate, json_plugins: plugin_output, passes: Vec::new(), })
}
Ok(..) => {
Err("malformed json input: expected an object at the \
top".to_string())
}
}
}
/// Outputs the crate/plugin json as a giant json blob at the specified
/// destination.
fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson>,
dst: PathBuf) -> io::Result<()> {
// {
// "schema": version,
// "crate": { parsed crate... },
// "plugins": { output of plugins... }
// }
let mut json = std::collections::BTreeMap::new();
json.insert("schema".to_string(), Json::String(SCHEMA_VERSION.to_string()));
let plugins_json = res.into_iter()
.filter_map(|opt| {
match opt {
None => None,
Some((string, json)) => {
Some((string.to_string(), json))
|
"files to include inline between <body> and the content of a rendered \
Markdown file or generated documentation",
|
random_line_split
|
lib.rs
|
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "rustdoc"]
#![unstable(feature = "rustdoc")]
#![staged_api]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(exit_status)]
#![feature(set_stdio)]
#![feature(libc)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(std_misc)]
#![feature(test)]
#![feature(unicode)]
#![feature(path_ext)]
#![feature(path_relative_from)]
#![feature(slice_patterns)]
extern crate arena;
extern crate getopts;
extern crate libc;
extern crate rustc;
extern crate rustc_trans;
extern crate rustc_driver;
extern crate rustc_resolve;
extern crate rustc_lint;
extern crate rustc_back;
extern crate serialize;
extern crate syntax;
extern crate test as testing;
extern crate rustc_unicode;
#[macro_use] extern crate log;
extern crate serialize as rustc_serialize; // used by deriving
use std::cell::RefCell;
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{self, Read, Write};
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::mpsc::channel;
use externalfiles::ExternalHtml;
use serialize::Decodable;
use serialize::json::{self, Json};
use rustc::session::search_paths::SearchPaths;
// reexported from `clean` so it can be easily updated with the mod itself
pub use clean::SCHEMA_VERSION;
#[macro_use]
pub mod externalfiles;
pub mod clean;
pub mod core;
pub mod doctree;
pub mod fold;
pub mod html {
pub mod highlight;
pub mod escape;
pub mod item_type;
pub mod format;
pub mod layout;
pub mod markdown;
pub mod render;
pub mod toc;
}
pub mod markdown;
pub mod passes;
pub mod plugins;
pub mod visit_ast;
pub mod test;
mod flock;
type Pass = (&'static str, // name
fn(clean::Crate) -> plugins::PluginResult, // fn
&'static str); // description
const PASSES: &'static [Pass] = &[
("strip-hidden", passes::strip_hidden,
"strips all doc(hidden) items from the output"),
("unindent-comments", passes::unindent_comments,
"removes excess indentation on comments in order for markdown to like it"),
("collapse-docs", passes::collapse_docs,
"concatenates all document attributes into one document attribute"),
("strip-private", passes::strip_private,
"strips all private items from a crate which cannot be seen externally"),
];
const DEFAULT_PASSES: &'static [&'static str] = &[
"strip-hidden",
"strip-private",
"collapse-docs",
"unindent-comments",
];
thread_local!(pub static ANALYSISKEY: Rc<RefCell<Option<core::CrateAnalysis>>> = {
Rc::new(RefCell::new(None))
});
struct
|
{
krate: clean::Crate,
json_plugins: Vec<plugins::PluginJson>,
passes: Vec<String>,
}
pub fn main() {
const STACK_SIZE: usize = 32000000; // 32MB
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
let s = env::args().collect::<Vec<_>>();
main_args(&s)
}).unwrap().join().unwrap();
env::set_exit_status(res as i32);
}
pub fn opts() -> Vec<getopts::OptGroup> {
use getopts::*;
vec!(
optflag("h", "help", "show this help message"),
optflag("V", "version", "print rustdoc's version"),
optflag("v", "verbose", "use verbose output"),
optopt("r", "input-format", "the input type of the specified file",
"[rust|json]"),
optopt("w", "output-format", "the output type to write",
"[html|json]"),
optopt("o", "output", "where to place the output", "PATH"),
optopt("", "crate-name", "specify the name of this crate", "NAME"),
optmulti("L", "library-path", "directory to add to crate search path",
"DIR"),
optmulti("", "cfg", "pass a --cfg to rustc", ""),
optmulti("", "extern", "pass an --extern to rustc", "NAME=PATH"),
optmulti("", "plugin-path", "directory to load plugins from", "DIR"),
optmulti("", "passes", "list of passes to also run, you might want \
to pass it multiple times; a value of `list` \
will print available passes",
"PASSES"),
optmulti("", "plugins", "space separated list of plugins to also load",
"PLUGINS"),
optflag("", "no-defaults", "don't run the default passes"),
optflag("", "test", "run code examples as tests"),
optmulti("", "test-args", "arguments to pass to the test runner",
"ARGS"),
optopt("", "target", "target triple to document", "TRIPLE"),
optmulti("", "markdown-css", "CSS files to include via <link> in a rendered Markdown file",
"FILES"),
optmulti("", "html-in-header",
"files to include inline in the <head> section of a rendered Markdown file \
or generated documentation",
"FILES"),
optmulti("", "html-before-content",
"files to include inline between <body> and the content of a rendered \
Markdown file or generated documentation",
"FILES"),
optmulti("", "html-after-content",
"files to include inline between the content and </body> of a rendered \
Markdown file or generated documentation",
"FILES"),
optopt("", "markdown-playground-url",
"URL to send code snippets to", "URL"),
optflag("", "markdown-no-toc", "don't include table of contents")
)
}
pub fn usage(argv0: &str) {
println!("{}",
getopts::usage(&format!("{} [options] <input>", argv0),
&opts()));
}
pub fn main_args(args: &[String]) -> isize {
let matches = match getopts::getopts(args.tail(), &opts()) {
Ok(m) => m,
Err(err) => {
println!("{}", err);
return 1;
}
};
if matches.opt_present("h") || matches.opt_present("help") {
usage(&args[0]);
return 0;
} else if matches.opt_present("version") {
rustc_driver::version("rustdoc", &matches);
return 0;
}
if matches.opt_strs("passes") == ["list"] {
println!("Available passes for running rustdoc:");
for &(name, _, description) in PASSES {
println!("{:>20} - {}", name, description);
}
println!("{}", "\nDefault passes for rustdoc:"); // FIXME: #9970
for &name in DEFAULT_PASSES {
println!("{:>20}", name);
}
return 0;
}
if matches.free.is_empty() {
println!("expected an input file to act on");
return 1;
}
if matches.free.len() > 1 {
println!("only one input file may be specified");
return 1;
}
let input = &matches.free[0];
let mut libs = SearchPaths::new();
for s in &matches.opt_strs("L") {
libs.add_path(s);
}
let externs = match parse_externs(&matches) {
Ok(ex) => ex,
Err(err) => {
println!("{}", err);
return 1;
}
};
let test_args = matches.opt_strs("test-args");
let test_args: Vec<String> = test_args.iter()
.flat_map(|s| s.split_whitespace())
.map(|s| s.to_string())
.collect();
let should_test = matches.opt_present("test");
let markdown_input = input.ends_with(".md") || input.ends_with(".markdown");
let output = matches.opt_str("o").map(|s| PathBuf::from(&s));
let cfgs = matches.opt_strs("cfg");
let external_html = match ExternalHtml::load(
&matches.opt_strs("html-in-header"),
&matches.opt_strs("html-before-content"),
&matches.opt_strs("html-after-content")) {
Some(eh) => eh,
None => return 3
};
let crate_name = matches.opt_str("crate-name");
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, libs, externs, test_args)
}
(true, false) => {
return test::run(input, cfgs, libs, externs, test_args, crate_name)
}
(false, true) => return markdown::render(input,
output.unwrap_or(PathBuf::from("doc")),
&matches, &external_html,
!matches.opt_present("markdown-no-toc")),
(false, false) => {}
}
let out = match acquire_input(input, externs, &matches) {
Ok(out) => out,
Err(s) => {
println!("input error: {}", s);
return 1;
}
};
let Output { krate, json_plugins, passes, } = out;
info!("going to format");
match matches.opt_str("w").as_ref().map(|s| &**s) {
Some("html") | None => {
match html::render::run(krate, &external_html,
output.unwrap_or(PathBuf::from("doc")),
passes.into_iter().collect()) {
Ok(()) => {}
Err(e) => panic!("failed to generate documentation: {}", e),
}
}
Some("json") => {
match json_output(krate, json_plugins,
output.unwrap_or(PathBuf::from("doc.json"))) {
Ok(()) => {}
Err(e) => panic!("failed to write json: {}", e),
}
}
Some(s) => {
println!("unknown output format: {}", s);
return 1;
}
}
return 0;
}
/// Looks inside the command line arguments to extract the relevant input format
/// and files and then generates the necessary rustdoc output for formatting.
fn acquire_input(input: &str,
externs: core::Externs,
matches: &getopts::Matches) -> Result<Output, String> {
match matches.opt_str("r").as_ref().map(|s| &**s) {
Some("rust") => Ok(rust_input(input, externs, matches)),
Some("json") => json_input(input),
Some(s) => Err(format!("unknown input format: {}", s)),
None => {
if input.ends_with(".json") {
json_input(input)
} else {
Ok(rust_input(input, externs, matches))
}
}
}
}
/// Extracts `--extern CRATE=PATH` arguments from `matches` and
/// returns a `HashMap` mapping crate names to their paths or else an
/// error message.
fn parse_externs(matches: &getopts::Matches) -> Result<core::Externs, String> {
let mut externs = HashMap::new();
for arg in &matches.opt_strs("extern") {
let mut parts = arg.splitn(2, '=');
let name = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must not be empty".to_string());
}
};
let location = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must be of the format `foo=bar`".to_string());
}
};
let name = name.to_string();
externs.entry(name).or_insert(vec![]).push(location.to_string());
}
Ok(externs)
}
/// Interprets the input file as a rust source file, passing it through the
/// compiler all the way through the analysis passes. The rustdoc output is then
/// generated from the cleaned AST of the crate.
///
/// This form of input will run all of the plug/cleaning passes
#[allow(deprecated)] // for old Path in plugin manager
fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matches) -> Output {
let mut default_passes = !matches.opt_present("no-defaults");
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins");
// First, parse the crate and extract all relevant information.
let mut paths = SearchPaths::new();
for s in &matches.opt_strs("L") {
paths.add_path(s);
}
let cfgs = matches.opt_strs("cfg");
let triple = matches.opt_str("target");
let cr = PathBuf::from(cratefile);
info!("starting to run rustc");
let (tx, rx) = channel();
std::thread::spawn(move || {
use rustc::session::config::Input;
tx.send(core::run_core(paths, cfgs, externs, Input::File(cr),
triple)).unwrap();
}).join().map_err(|_| "rustc failed").unwrap();
let (mut krate, analysis) = rx.recv().unwrap();
info!("finished with rustc");
let mut analysis = Some(analysis);
ANALYSISKEY.with(|s| {
*s.borrow_mut() = analysis.take();
});
match matches.opt_str("crate-name") {
Some(name) => krate.name = name,
None => {}
}
// Process all of the crate attributes, extracting plugin metadata along
// with the passes which we are supposed to run.
match krate.module.as_ref().unwrap().doc_list() {
Some(nested) => {
for inner in nested {
match *inner {
clean::Word(ref x)
if "no_default_passes" == *x => {
default_passes = false;
}
clean::NameValue(ref x, ref value)
if "passes" == *x => {
for pass in value.split_whitespace() {
passes.push(pass.to_string());
}
}
clean::NameValue(ref x, ref value)
if "plugins" == *x => {
for p in value.split_whitespace() {
plugins.push(p.to_string());
}
}
_ => {}
}
}
}
None => {}
}
if default_passes {
for name in DEFAULT_PASSES.iter().rev() {
passes.insert(0, name.to_string());
}
}
// Load all plugins/passes into a PluginManager
let path = matches.opt_str("plugin-path")
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(PathBuf::from(path));
for pass in &passes {
let plugin = match PASSES.iter()
.position(|&(p, _, _)| {
p == *pass
}) {
Some(i) => PASSES[i].1,
None => {
error!("unknown pass {}, skipping", *pass);
continue
},
};
pm.add_plugin(plugin);
}
info!("loading plugins...");
for pname in plugins {
pm.load_plugin(pname);
}
// Run everything!
info!("Executing passes/plugins");
let (krate, json) = pm.run_plugins(krate);
return Output { krate: krate, json_plugins: json, passes: passes, };
}
/// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output.
fn json_input(input: &str) -> Result<Output, String> {
let mut bytes = Vec::new();
match File::open(input).and_then(|mut f| f.read_to_end(&mut bytes)) {
Ok(_) => {}
Err(e) => return Err(format!("couldn't open {}: {}", input, e)),
};
match json::from_reader(&mut &bytes[..]) {
Err(s) => Err(format!("{:?}", s)),
Ok(Json::Object(obj)) => {
let mut obj = obj;
// Make sure the schema is what we expect
match obj.remove(&"schema".to_string()) {
Some(Json::String(version)) => {
if version != SCHEMA_VERSION {
return Err(format!(
"sorry, but I only understand version {}",
SCHEMA_VERSION))
}
}
Some(..) => return Err("malformed json".to_string()),
None => return Err("expected a schema version".to_string()),
}
let krate = match obj.remove(&"crate".to_string()) {
Some(json) => {
let mut d = json::Decoder::new(json);
Decodable::decode(&mut d).unwrap()
}
None => return Err("malformed json".to_string()),
};
// FIXME: this should read from the "plugins" field, but currently
// Json doesn't implement decodable...
let plugin_output = Vec::new();
Ok(Output { krate: krate, json_plugins: plugin_output, passes: Vec::new(), })
}
Ok(..) => {
Err("malformed json input: expected an object at the \
top".to_string())
}
}
}
/// Outputs the crate/plugin json as a giant json blob at the specified
/// destination.
fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson>,
dst: PathBuf) -> io::Result<()> {
// {
// "schema": version,
// "crate": { parsed crate... },
// "plugins": { output of plugins... }
// }
let mut json = std::collections::BTreeMap::new();
json.insert("schema".to_string(), Json::String(SCHEMA_VERSION.to_string()));
let plugins_json = res.into_iter()
.filter_map(|opt| {
match opt {
None => None,
Some((string, json)) => {
Some((string.to_string(), json))
|
Output
|
identifier_name
|
lib.rs
|
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "rustdoc"]
#![unstable(feature = "rustdoc")]
#![staged_api]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(exit_status)]
#![feature(set_stdio)]
#![feature(libc)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(std_misc)]
#![feature(test)]
#![feature(unicode)]
#![feature(path_ext)]
#![feature(path_relative_from)]
#![feature(slice_patterns)]
extern crate arena;
extern crate getopts;
extern crate libc;
extern crate rustc;
extern crate rustc_trans;
extern crate rustc_driver;
extern crate rustc_resolve;
extern crate rustc_lint;
extern crate rustc_back;
extern crate serialize;
extern crate syntax;
extern crate test as testing;
extern crate rustc_unicode;
#[macro_use] extern crate log;
extern crate serialize as rustc_serialize; // used by deriving
use std::cell::RefCell;
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{self, Read, Write};
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::mpsc::channel;
use externalfiles::ExternalHtml;
use serialize::Decodable;
use serialize::json::{self, Json};
use rustc::session::search_paths::SearchPaths;
// reexported from `clean` so it can be easily updated with the mod itself
pub use clean::SCHEMA_VERSION;
#[macro_use]
pub mod externalfiles;
pub mod clean;
pub mod core;
pub mod doctree;
pub mod fold;
pub mod html {
pub mod highlight;
pub mod escape;
pub mod item_type;
pub mod format;
pub mod layout;
pub mod markdown;
pub mod render;
pub mod toc;
}
pub mod markdown;
pub mod passes;
pub mod plugins;
pub mod visit_ast;
pub mod test;
mod flock;
type Pass = (&'static str, // name
fn(clean::Crate) -> plugins::PluginResult, // fn
&'static str); // description
const PASSES: &'static [Pass] = &[
("strip-hidden", passes::strip_hidden,
"strips all doc(hidden) items from the output"),
("unindent-comments", passes::unindent_comments,
"removes excess indentation on comments in order for markdown to like it"),
("collapse-docs", passes::collapse_docs,
"concatenates all document attributes into one document attribute"),
("strip-private", passes::strip_private,
"strips all private items from a crate which cannot be seen externally"),
];
const DEFAULT_PASSES: &'static [&'static str] = &[
"strip-hidden",
"strip-private",
"collapse-docs",
"unindent-comments",
];
thread_local!(pub static ANALYSISKEY: Rc<RefCell<Option<core::CrateAnalysis>>> = {
Rc::new(RefCell::new(None))
});
struct Output {
krate: clean::Crate,
json_plugins: Vec<plugins::PluginJson>,
passes: Vec<String>,
}
pub fn main() {
const STACK_SIZE: usize = 32000000; // 32MB
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
let s = env::args().collect::<Vec<_>>();
main_args(&s)
}).unwrap().join().unwrap();
env::set_exit_status(res as i32);
}
pub fn opts() -> Vec<getopts::OptGroup> {
use getopts::*;
vec!(
optflag("h", "help", "show this help message"),
optflag("V", "version", "print rustdoc's version"),
optflag("v", "verbose", "use verbose output"),
optopt("r", "input-format", "the input type of the specified file",
"[rust|json]"),
optopt("w", "output-format", "the output type to write",
"[html|json]"),
optopt("o", "output", "where to place the output", "PATH"),
optopt("", "crate-name", "specify the name of this crate", "NAME"),
optmulti("L", "library-path", "directory to add to crate search path",
"DIR"),
optmulti("", "cfg", "pass a --cfg to rustc", ""),
optmulti("", "extern", "pass an --extern to rustc", "NAME=PATH"),
optmulti("", "plugin-path", "directory to load plugins from", "DIR"),
optmulti("", "passes", "list of passes to also run, you might want \
to pass it multiple times; a value of `list` \
will print available passes",
"PASSES"),
optmulti("", "plugins", "space separated list of plugins to also load",
"PLUGINS"),
optflag("", "no-defaults", "don't run the default passes"),
optflag("", "test", "run code examples as tests"),
optmulti("", "test-args", "arguments to pass to the test runner",
"ARGS"),
optopt("", "target", "target triple to document", "TRIPLE"),
optmulti("", "markdown-css", "CSS files to include via <link> in a rendered Markdown file",
"FILES"),
optmulti("", "html-in-header",
"files to include inline in the <head> section of a rendered Markdown file \
or generated documentation",
"FILES"),
optmulti("", "html-before-content",
"files to include inline between <body> and the content of a rendered \
Markdown file or generated documentation",
"FILES"),
optmulti("", "html-after-content",
"files to include inline between the content and </body> of a rendered \
Markdown file or generated documentation",
"FILES"),
optopt("", "markdown-playground-url",
"URL to send code snippets to", "URL"),
optflag("", "markdown-no-toc", "don't include table of contents")
)
}
pub fn usage(argv0: &str) {
println!("{}",
getopts::usage(&format!("{} [options] <input>", argv0),
&opts()));
}
pub fn main_args(args: &[String]) -> isize {
let matches = match getopts::getopts(args.tail(), &opts()) {
Ok(m) => m,
Err(err) => {
println!("{}", err);
return 1;
}
};
if matches.opt_present("h") || matches.opt_present("help") {
usage(&args[0]);
return 0;
} else if matches.opt_present("version") {
rustc_driver::version("rustdoc", &matches);
return 0;
}
if matches.opt_strs("passes") == ["list"] {
println!("Available passes for running rustdoc:");
for &(name, _, description) in PASSES {
println!("{:>20} - {}", name, description);
}
println!("{}", "\nDefault passes for rustdoc:"); // FIXME: #9970
for &name in DEFAULT_PASSES {
println!("{:>20}", name);
}
return 0;
}
if matches.free.is_empty() {
println!("expected an input file to act on");
return 1;
}
if matches.free.len() > 1 {
println!("only one input file may be specified");
return 1;
}
let input = &matches.free[0];
let mut libs = SearchPaths::new();
for s in &matches.opt_strs("L") {
libs.add_path(s);
}
let externs = match parse_externs(&matches) {
Ok(ex) => ex,
Err(err) => {
println!("{}", err);
return 1;
}
};
let test_args = matches.opt_strs("test-args");
let test_args: Vec<String> = test_args.iter()
.flat_map(|s| s.split_whitespace())
.map(|s| s.to_string())
.collect();
let should_test = matches.opt_present("test");
let markdown_input = input.ends_with(".md") || input.ends_with(".markdown");
let output = matches.opt_str("o").map(|s| PathBuf::from(&s));
let cfgs = matches.opt_strs("cfg");
let external_html = match ExternalHtml::load(
&matches.opt_strs("html-in-header"),
&matches.opt_strs("html-before-content"),
&matches.opt_strs("html-after-content")) {
Some(eh) => eh,
None => return 3
};
let crate_name = matches.opt_str("crate-name");
match (should_test, markdown_input) {
(true, true) => {
return markdown::test(input, libs, externs, test_args)
}
(true, false) => {
return test::run(input, cfgs, libs, externs, test_args, crate_name)
}
(false, true) => return markdown::render(input,
output.unwrap_or(PathBuf::from("doc")),
&matches, &external_html,
!matches.opt_present("markdown-no-toc")),
(false, false) => {}
}
let out = match acquire_input(input, externs, &matches) {
Ok(out) => out,
Err(s) => {
println!("input error: {}", s);
return 1;
}
};
let Output { krate, json_plugins, passes, } = out;
info!("going to format");
match matches.opt_str("w").as_ref().map(|s| &**s) {
Some("html") | None => {
match html::render::run(krate, &external_html,
output.unwrap_or(PathBuf::from("doc")),
passes.into_iter().collect()) {
Ok(()) => {}
Err(e) => panic!("failed to generate documentation: {}", e),
}
}
Some("json") => {
match json_output(krate, json_plugins,
output.unwrap_or(PathBuf::from("doc.json"))) {
Ok(()) =>
|
Err(e) => panic!("failed to write json: {}", e),
}
}
Some(s) => {
println!("unknown output format: {}", s);
return 1;
}
}
return 0;
}
/// Looks inside the command line arguments to extract the relevant input format
/// and files and then generates the necessary rustdoc output for formatting.
fn acquire_input(input: &str,
externs: core::Externs,
matches: &getopts::Matches) -> Result<Output, String> {
match matches.opt_str("r").as_ref().map(|s| &**s) {
Some("rust") => Ok(rust_input(input, externs, matches)),
Some("json") => json_input(input),
Some(s) => Err(format!("unknown input format: {}", s)),
None => {
if input.ends_with(".json") {
json_input(input)
} else {
Ok(rust_input(input, externs, matches))
}
}
}
}
/// Extracts `--extern CRATE=PATH` arguments from `matches` and
/// returns a `HashMap` mapping crate names to their paths or else an
/// error message.
fn parse_externs(matches: &getopts::Matches) -> Result<core::Externs, String> {
let mut externs = HashMap::new();
for arg in &matches.opt_strs("extern") {
let mut parts = arg.splitn(2, '=');
let name = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must not be empty".to_string());
}
};
let location = match parts.next() {
Some(s) => s,
None => {
return Err("--extern value must be of the format `foo=bar`".to_string());
}
};
let name = name.to_string();
externs.entry(name).or_insert(vec![]).push(location.to_string());
}
Ok(externs)
}
/// Interprets the input file as a rust source file, passing it through the
/// compiler all the way through the analysis passes. The rustdoc output is then
/// generated from the cleaned AST of the crate.
///
/// This form of input will run all of the plug/cleaning passes
#[allow(deprecated)] // for old Path in plugin manager
fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matches) -> Output {
let mut default_passes = !matches.opt_present("no-defaults");
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins");
// First, parse the crate and extract all relevant information.
let mut paths = SearchPaths::new();
for s in &matches.opt_strs("L") {
paths.add_path(s);
}
let cfgs = matches.opt_strs("cfg");
let triple = matches.opt_str("target");
let cr = PathBuf::from(cratefile);
info!("starting to run rustc");
let (tx, rx) = channel();
std::thread::spawn(move || {
use rustc::session::config::Input;
tx.send(core::run_core(paths, cfgs, externs, Input::File(cr),
triple)).unwrap();
}).join().map_err(|_| "rustc failed").unwrap();
let (mut krate, analysis) = rx.recv().unwrap();
info!("finished with rustc");
let mut analysis = Some(analysis);
ANALYSISKEY.with(|s| {
*s.borrow_mut() = analysis.take();
});
match matches.opt_str("crate-name") {
Some(name) => krate.name = name,
None => {}
}
// Process all of the crate attributes, extracting plugin metadata along
// with the passes which we are supposed to run.
match krate.module.as_ref().unwrap().doc_list() {
Some(nested) => {
for inner in nested {
match *inner {
clean::Word(ref x)
if "no_default_passes" == *x => {
default_passes = false;
}
clean::NameValue(ref x, ref value)
if "passes" == *x => {
for pass in value.split_whitespace() {
passes.push(pass.to_string());
}
}
clean::NameValue(ref x, ref value)
if "plugins" == *x => {
for p in value.split_whitespace() {
plugins.push(p.to_string());
}
}
_ => {}
}
}
}
None => {}
}
if default_passes {
for name in DEFAULT_PASSES.iter().rev() {
passes.insert(0, name.to_string());
}
}
// Load all plugins/passes into a PluginManager
let path = matches.opt_str("plugin-path")
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(PathBuf::from(path));
for pass in &passes {
let plugin = match PASSES.iter()
.position(|&(p, _, _)| {
p == *pass
}) {
Some(i) => PASSES[i].1,
None => {
error!("unknown pass {}, skipping", *pass);
continue
},
};
pm.add_plugin(plugin);
}
info!("loading plugins...");
for pname in plugins {
pm.load_plugin(pname);
}
// Run everything!
info!("Executing passes/plugins");
let (krate, json) = pm.run_plugins(krate);
return Output { krate: krate, json_plugins: json, passes: passes, };
}
/// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output.
fn json_input(input: &str) -> Result<Output, String> {
let mut bytes = Vec::new();
match File::open(input).and_then(|mut f| f.read_to_end(&mut bytes)) {
Ok(_) => {}
Err(e) => return Err(format!("couldn't open {}: {}", input, e)),
};
match json::from_reader(&mut &bytes[..]) {
Err(s) => Err(format!("{:?}", s)),
Ok(Json::Object(obj)) => {
let mut obj = obj;
// Make sure the schema is what we expect
match obj.remove(&"schema".to_string()) {
Some(Json::String(version)) => {
if version != SCHEMA_VERSION {
return Err(format!(
"sorry, but I only understand version {}",
SCHEMA_VERSION))
}
}
Some(..) => return Err("malformed json".to_string()),
None => return Err("expected a schema version".to_string()),
}
let krate = match obj.remove(&"crate".to_string()) {
Some(json) => {
let mut d = json::Decoder::new(json);
Decodable::decode(&mut d).unwrap()
}
None => return Err("malformed json".to_string()),
};
// FIXME: this should read from the "plugins" field, but currently
// Json doesn't implement decodable...
let plugin_output = Vec::new();
Ok(Output { krate: krate, json_plugins: plugin_output, passes: Vec::new(), })
}
Ok(..) => {
Err("malformed json input: expected an object at the \
top".to_string())
}
}
}
/// Outputs the crate/plugin json as a giant json blob at the specified
/// destination.
fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson>,
dst: PathBuf) -> io::Result<()> {
// {
// "schema": version,
// "crate": { parsed crate... },
// "plugins": { output of plugins... }
// }
let mut json = std::collections::BTreeMap::new();
json.insert("schema".to_string(), Json::String(SCHEMA_VERSION.to_string()));
let plugins_json = res.into_iter()
.filter_map(|opt| {
match opt {
None => None,
Some((string, json)) => {
Some((string.to_string(), json))
|
{}
|
conditional_block
|
subclass.rs
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use syn::{parse_quote, FnArg, PatType, Type, TypePtr};
use crate::conversion::analysis::fun::{FnKind, MethodKind, ReceiverMutability};
use crate::conversion::analysis::pod::PodPhase;
use crate::conversion::api::{
CppVisibility, FuncToConvert, RustSubclassFnDetails, SubclassName, Synthesis, Virtualness,
};
use crate::{
conversion::{
analysis::fun::function_wrapper::{
CppFunction, CppFunctionBody, CppFunctionKind, TypeConversionPolicy,
},
api::{Api, ApiName},
},
types::{make_ident, Namespace, QualifiedName},
};
use super::{FnAnalysis, FnPhase};
pub(super) fn subclasses_by_superclass(
apis: &[Api<PodPhase>],
) -> HashMap<QualifiedName, Vec<SubclassName>>
|
pub(super) fn create_subclass_fn_wrapper(
sub: SubclassName,
super_fn_name: &QualifiedName,
fun: &FuncToConvert,
) -> Box<FuncToConvert> {
let self_ty = Some(sub.cpp());
Box::new(FuncToConvert {
synthesized_this_type: self_ty.clone(),
self_ty,
ident: super_fn_name.get_final_ident(),
doc_attr: fun.doc_attr.clone(),
inputs: fun.inputs.clone(),
output: fun.output.clone(),
vis: fun.vis.clone(),
virtualness: Virtualness::None,
cpp_vis: CppVisibility::Public,
special_member: None,
unused_template_param: fun.unused_template_param,
original_name: None,
references: fun.references.clone(),
synthesis: fun.synthesis.clone(),
is_deleted: fun.is_deleted,
})
}
pub(super) fn create_subclass_function(
sub: &SubclassName,
analysis: &super::FnAnalysis,
name: &ApiName,
receiver_mutability: &ReceiverMutability,
superclass: &QualifiedName,
dependency: Option<&QualifiedName>,
) -> Api<FnPhase> {
let cpp = sub.cpp();
let holder_name = sub.holder();
let rust_call_name = make_ident(format!(
"{}_{}",
sub.0.name.get_final_item(),
name.name.get_final_item()
));
let params = std::iter::once(parse_quote! {
me: & #holder_name
})
.chain(analysis.params.iter().skip(1).cloned())
.collect();
let kind = if matches!(receiver_mutability, ReceiverMutability::Mutable) {
CppFunctionKind::Method
} else {
CppFunctionKind::ConstMethod
};
let subclass_function: Api<FnPhase> = Api::RustSubclassFn {
name: ApiName::new_in_root_namespace(rust_call_name.clone()),
subclass: sub.clone(),
details: Box::new(RustSubclassFnDetails {
params,
ret: analysis.ret_type.clone(),
method_name: make_ident(&analysis.rust_name),
cpp_impl: CppFunction {
payload: CppFunctionBody::FunctionCall(Namespace::new(), rust_call_name),
wrapper_function_name: name.name.get_final_ident(),
original_cpp_name: name.cpp_name(),
return_conversion: analysis.ret_conversion.clone(),
argument_conversion: analysis
.param_details
.iter()
.skip(1)
.map(|p| p.conversion.clone())
.collect(),
kind,
pass_obs_field: true,
qualification: Some(cpp),
},
superclass: superclass.clone(),
receiver_mutability: receiver_mutability.clone(),
dependency: dependency.cloned(),
requires_unsafe: analysis.param_details.iter().any(|pd| pd.requires_unsafe),
is_pure_virtual: matches!(
analysis.kind,
FnKind::Method(_, MethodKind::PureVirtual(..))
),
}),
};
subclass_function
}
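// Illustrative sketch of the parameter-assembly pattern used above: a
// synthesized first argument (the holder/peer) is prepended with
// `std::iter::once`, and the original parameters minus the receiver are
// chained behind it. Shown here with plain strings rather than `syn` types;
// "HolderType" is a hypothetical placeholder.
#[allow(dead_code)]
fn prepend_synthesized_param(original: Vec<String>) -> Vec<String> {
    std::iter::once("me: &HolderType".to_string()) // synthesized holder arg
        .chain(original.into_iter().skip(1)) // skip the original receiver
        .collect()
}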
pub(super) fn create_subclass_constructor(
sub: SubclassName,
analysis: &FnAnalysis,
sup: &QualifiedName,
fun: &FuncToConvert,
) -> (Box<FuncToConvert>, ApiName) {
let holder = sub.holder();
let cpp = sub.cpp();
let synthesis = Some({
let wrapper_function_name = cpp.get_final_ident();
let initial_arg = TypeConversionPolicy::new_unconverted(parse_quote! {
rust::Box< #holder >
});
let args = std::iter::once(initial_arg).chain(
analysis
.param_details
.iter()
.skip(1) // skip placement new destination
.map(|aa| aa.conversion.clone()),
);
let cpp_impl = CppFunction {
payload: CppFunctionBody::ConstructSuperclass(sup.to_cpp_name()),
wrapper_function_name,
return_conversion: None,
argument_conversion: args.collect(),
kind: CppFunctionKind::SynthesizedConstructor,
pass_obs_field: false,
qualification: Some(cpp.clone()),
original_cpp_name: cpp.to_cpp_name(),
};
Synthesis::SubclassConstructor {
subclass: sub.clone(),
cpp_impl: Box::new(cpp_impl),
is_trivial: analysis.param_details.len() == 1, // just placement new
// destination, no other parameters
}
});
let subclass_constructor_name =
make_ident(format!("{}_{}", cpp.get_final_item(), cpp.get_final_item()));
let mut existing_params = fun.inputs.clone();
if let Some(FnArg::Typed(PatType { ty, .. })) = existing_params.first_mut() {
if let Type::Ptr(TypePtr { elem, .. }) = &mut **ty {
*elem = Box::new(Type::Path(sub.cpp().to_type_path()));
} else {
panic!("Unexpected self type parameter when creating subclass constructor");
}
} else {
panic!("Unexpected self type parameter when creating subclass constructor");
}
let mut existing_params = existing_params.into_iter();
let self_param = existing_params.next();
let boxed_holder_param: FnArg = parse_quote! {
peer: rust::Box<#holder>
};
let inputs = self_param
.into_iter()
.chain(std::iter::once(boxed_holder_param))
.chain(existing_params)
.collect();
let maybe_wrap = Box::new(FuncToConvert {
ident: subclass_constructor_name.clone(),
doc_attr: fun.doc_attr.clone(),
inputs,
output: fun.output.clone(),
vis: fun.vis.clone(),
virtualness: Virtualness::None,
cpp_vis: CppVisibility::Public,
special_member: fun.special_member.clone(),
original_name: None,
unused_template_param: fun.unused_template_param,
references: fun.references.clone(),
synthesized_this_type: Some(cpp.clone()),
self_ty: Some(cpp),
synthesis,
is_deleted: fun.is_deleted,
});
let subclass_constructor_name = ApiName::new_with_cpp_name(
&Namespace::new(),
subclass_constructor_name,
Some(sub.cpp().get_final_item().to_string()),
);
(maybe_wrap, subclass_constructor_name)
}
|
{
let mut subclasses_per_superclass: HashMap<QualifiedName, Vec<SubclassName>> = HashMap::new();
for api in apis.iter() {
if let Api::Subclass { name, superclass } = api {
subclasses_per_superclass
.entry(superclass.clone())
.or_default()
.push(name.clone());
}
}
subclasses_per_superclass
}
|
identifier_body
|
subclass.rs
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use syn::{parse_quote, FnArg, PatType, Type, TypePtr};
use crate::conversion::analysis::fun::{FnKind, MethodKind, ReceiverMutability};
use crate::conversion::analysis::pod::PodPhase;
use crate::conversion::api::{
CppVisibility, FuncToConvert, RustSubclassFnDetails, SubclassName, Synthesis, Virtualness,
};
use crate::{
conversion::{
analysis::fun::function_wrapper::{
CppFunction, CppFunctionBody, CppFunctionKind, TypeConversionPolicy,
},
api::{Api, ApiName},
},
types::{make_ident, Namespace, QualifiedName},
};
use super::{FnAnalysis, FnPhase};
pub(super) fn subclasses_by_superclass(
apis: &[Api<PodPhase>],
) -> HashMap<QualifiedName, Vec<SubclassName>> {
let mut subclasses_per_superclass: HashMap<QualifiedName, Vec<SubclassName>> = HashMap::new();
for api in apis.iter() {
if let Api::Subclass { name, superclass } = api {
subclasses_per_superclass
.entry(superclass.clone())
.or_default()
.push(name.clone());
}
}
subclasses_per_superclass
}
pub(super) fn
|
(
sub: SubclassName,
super_fn_name: &QualifiedName,
fun: &FuncToConvert,
) -> Box<FuncToConvert> {
let self_ty = Some(sub.cpp());
Box::new(FuncToConvert {
synthesized_this_type: self_ty.clone(),
self_ty,
ident: super_fn_name.get_final_ident(),
doc_attr: fun.doc_attr.clone(),
inputs: fun.inputs.clone(),
output: fun.output.clone(),
vis: fun.vis.clone(),
virtualness: Virtualness::None,
cpp_vis: CppVisibility::Public,
special_member: None,
unused_template_param: fun.unused_template_param,
original_name: None,
references: fun.references.clone(),
synthesis: fun.synthesis.clone(),
is_deleted: fun.is_deleted,
})
}
pub(super) fn create_subclass_function(
sub: &SubclassName,
analysis: &super::FnAnalysis,
name: &ApiName,
receiver_mutability: &ReceiverMutability,
superclass: &QualifiedName,
dependency: Option<&QualifiedName>,
) -> Api<FnPhase> {
let cpp = sub.cpp();
let holder_name = sub.holder();
let rust_call_name = make_ident(format!(
"{}_{}",
sub.0.name.get_final_item(),
name.name.get_final_item()
));
let params = std::iter::once(parse_quote! {
me: & #holder_name
})
.chain(analysis.params.iter().skip(1).cloned())
.collect();
let kind = if matches!(receiver_mutability, ReceiverMutability::Mutable) {
CppFunctionKind::Method
} else {
CppFunctionKind::ConstMethod
};
let subclass_function: Api<FnPhase> = Api::RustSubclassFn {
name: ApiName::new_in_root_namespace(rust_call_name.clone()),
subclass: sub.clone(),
details: Box::new(RustSubclassFnDetails {
params,
ret: analysis.ret_type.clone(),
method_name: make_ident(&analysis.rust_name),
cpp_impl: CppFunction {
payload: CppFunctionBody::FunctionCall(Namespace::new(), rust_call_name),
wrapper_function_name: name.name.get_final_ident(),
original_cpp_name: name.cpp_name(),
return_conversion: analysis.ret_conversion.clone(),
argument_conversion: analysis
.param_details
.iter()
.skip(1)
.map(|p| p.conversion.clone())
.collect(),
kind,
pass_obs_field: true,
qualification: Some(cpp),
},
superclass: superclass.clone(),
receiver_mutability: receiver_mutability.clone(),
dependency: dependency.cloned(),
requires_unsafe: analysis.param_details.iter().any(|pd| pd.requires_unsafe),
is_pure_virtual: matches!(
analysis.kind,
FnKind::Method(_, MethodKind::PureVirtual(..))
),
}),
};
subclass_function
}
pub(super) fn create_subclass_constructor(
sub: SubclassName,
analysis: &FnAnalysis,
sup: &QualifiedName,
fun: &FuncToConvert,
) -> (Box<FuncToConvert>, ApiName) {
let holder = sub.holder();
let cpp = sub.cpp();
let synthesis = Some({
let wrapper_function_name = cpp.get_final_ident();
let initial_arg = TypeConversionPolicy::new_unconverted(parse_quote! {
rust::Box< #holder >
});
let args = std::iter::once(initial_arg).chain(
analysis
.param_details
.iter()
.skip(1) // skip placement new destination
.map(|aa| aa.conversion.clone()),
);
let cpp_impl = CppFunction {
payload: CppFunctionBody::ConstructSuperclass(sup.to_cpp_name()),
wrapper_function_name,
return_conversion: None,
argument_conversion: args.collect(),
kind: CppFunctionKind::SynthesizedConstructor,
pass_obs_field: false,
qualification: Some(cpp.clone()),
original_cpp_name: cpp.to_cpp_name(),
};
Synthesis::SubclassConstructor {
subclass: sub.clone(),
cpp_impl: Box::new(cpp_impl),
is_trivial: analysis.param_details.len() == 1, // just placement new
// destination, no other parameters
}
});
let subclass_constructor_name =
make_ident(format!("{}_{}", cpp.get_final_item(), cpp.get_final_item()));
let mut existing_params = fun.inputs.clone();
    if let Some(FnArg::Typed(PatType { ty, .. })) = existing_params.first_mut() {
        if let Type::Ptr(TypePtr { elem, .. })) = &mut **ty {
*elem = Box::new(Type::Path(sub.cpp().to_type_path()));
} else {
panic!("Unexpected self type parameter when creating subclass constructor");
}
} else {
panic!("Unexpected self type parameter when creating subclass constructor");
}
let mut existing_params = existing_params.into_iter();
let self_param = existing_params.next();
let boxed_holder_param: FnArg = parse_quote! {
peer: rust::Box<#holder>
};
let inputs = self_param
.into_iter()
.chain(std::iter::once(boxed_holder_param))
.chain(existing_params)
.collect();
let maybe_wrap = Box::new(FuncToConvert {
ident: subclass_constructor_name.clone(),
doc_attr: fun.doc_attr.clone(),
inputs,
output: fun.output.clone(),
vis: fun.vis.clone(),
virtualness: Virtualness::None,
cpp_vis: CppVisibility::Public,
special_member: fun.special_member.clone(),
original_name: None,
unused_template_param: fun.unused_template_param,
references: fun.references.clone(),
synthesized_this_type: Some(cpp.clone()),
self_ty: Some(cpp),
synthesis,
is_deleted: fun.is_deleted,
});
let subclass_constructor_name = ApiName::new_with_cpp_name(
&Namespace::new(),
subclass_constructor_name,
Some(sub.cpp().get_final_item().to_string()),
);
(maybe_wrap, subclass_constructor_name)
}
|
create_subclass_fn_wrapper
|
identifier_name
|
subclass.rs
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use syn::{parse_quote, FnArg, PatType, Type, TypePtr};
use crate::conversion::analysis::fun::{FnKind, MethodKind, ReceiverMutability};
use crate::conversion::analysis::pod::PodPhase;
use crate::conversion::api::{
CppVisibility, FuncToConvert, RustSubclassFnDetails, SubclassName, Synthesis, Virtualness,
};
use crate::{
conversion::{
analysis::fun::function_wrapper::{
CppFunction, CppFunctionBody, CppFunctionKind, TypeConversionPolicy,
},
api::{Api, ApiName},
},
types::{make_ident, Namespace, QualifiedName},
};
use super::{FnAnalysis, FnPhase};
pub(super) fn subclasses_by_superclass(
apis: &[Api<PodPhase>],
) -> HashMap<QualifiedName, Vec<SubclassName>> {
let mut subclasses_per_superclass: HashMap<QualifiedName, Vec<SubclassName>> = HashMap::new();
for api in apis.iter() {
if let Api::Subclass { name, superclass } = api {
subclasses_per_superclass
.entry(superclass.clone())
.or_default()
.push(name.clone());
}
}
subclasses_per_superclass
}
pub(super) fn create_subclass_fn_wrapper(
sub: SubclassName,
super_fn_name: &QualifiedName,
fun: &FuncToConvert,
) -> Box<FuncToConvert> {
let self_ty = Some(sub.cpp());
Box::new(FuncToConvert {
synthesized_this_type: self_ty.clone(),
self_ty,
ident: super_fn_name.get_final_ident(),
doc_attr: fun.doc_attr.clone(),
inputs: fun.inputs.clone(),
output: fun.output.clone(),
vis: fun.vis.clone(),
virtualness: Virtualness::None,
cpp_vis: CppVisibility::Public,
special_member: None,
unused_template_param: fun.unused_template_param,
original_name: None,
references: fun.references.clone(),
synthesis: fun.synthesis.clone(),
is_deleted: fun.is_deleted,
})
}
pub(super) fn create_subclass_function(
sub: &SubclassName,
analysis: &super::FnAnalysis,
name: &ApiName,
receiver_mutability: &ReceiverMutability,
superclass: &QualifiedName,
dependency: Option<&QualifiedName>,
) -> Api<FnPhase> {
let cpp = sub.cpp();
let holder_name = sub.holder();
let rust_call_name = make_ident(format!(
"{}_{}",
sub.0.name.get_final_item(),
name.name.get_final_item()
));
let params = std::iter::once(parse_quote! {
me: & #holder_name
})
.chain(analysis.params.iter().skip(1).cloned())
.collect();
let kind = if matches!(receiver_mutability, ReceiverMutability::Mutable) {
CppFunctionKind::Method
} else {
CppFunctionKind::ConstMethod
};
let subclass_function: Api<FnPhase> = Api::RustSubclassFn {
name: ApiName::new_in_root_namespace(rust_call_name.clone()),
subclass: sub.clone(),
details: Box::new(RustSubclassFnDetails {
params,
ret: analysis.ret_type.clone(),
method_name: make_ident(&analysis.rust_name),
cpp_impl: CppFunction {
payload: CppFunctionBody::FunctionCall(Namespace::new(), rust_call_name),
wrapper_function_name: name.name.get_final_ident(),
original_cpp_name: name.cpp_name(),
return_conversion: analysis.ret_conversion.clone(),
argument_conversion: analysis
.param_details
.iter()
.skip(1)
.map(|p| p.conversion.clone())
.collect(),
kind,
pass_obs_field: true,
qualification: Some(cpp),
},
superclass: superclass.clone(),
receiver_mutability: receiver_mutability.clone(),
dependency: dependency.cloned(),
requires_unsafe: analysis.param_details.iter().any(|pd| pd.requires_unsafe),
is_pure_virtual: matches!(
analysis.kind,
FnKind::Method(_, MethodKind::PureVirtual(..))
),
}),
};
subclass_function
}
pub(super) fn create_subclass_constructor(
sub: SubclassName,
analysis: &FnAnalysis,
sup: &QualifiedName,
fun: &FuncToConvert,
) -> (Box<FuncToConvert>, ApiName) {
let holder = sub.holder();
let cpp = sub.cpp();
let synthesis = Some({
let wrapper_function_name = cpp.get_final_ident();
let initial_arg = TypeConversionPolicy::new_unconverted(parse_quote! {
rust::Box< #holder >
});
let args = std::iter::once(initial_arg).chain(
analysis
.param_details
.iter()
.skip(1) // skip placement new destination
.map(|aa| aa.conversion.clone()),
);
let cpp_impl = CppFunction {
payload: CppFunctionBody::ConstructSuperclass(sup.to_cpp_name()),
wrapper_function_name,
return_conversion: None,
argument_conversion: args.collect(),
kind: CppFunctionKind::SynthesizedConstructor,
pass_obs_field: false,
qualification: Some(cpp.clone()),
original_cpp_name: cpp.to_cpp_name(),
};
Synthesis::SubclassConstructor {
subclass: sub.clone(),
cpp_impl: Box::new(cpp_impl),
is_trivial: analysis.param_details.len() == 1, // just placement new
// destination, no other parameters
}
});
let subclass_constructor_name =
make_ident(format!("{}_{}", cpp.get_final_item(), cpp.get_final_item()));
let mut existing_params = fun.inputs.clone();
    if let Some(FnArg::Typed(PatType { ty, .. })) = existing_params.first_mut() {
        if let Type::Ptr(TypePtr { elem, .. }) = &mut **ty {
*elem = Box::new(Type::Path(sub.cpp().to_type_path()));
} else
|
} else {
panic!("Unexpected self type parameter when creating subclass constructor");
}
let mut existing_params = existing_params.into_iter();
let self_param = existing_params.next();
let boxed_holder_param: FnArg = parse_quote! {
peer: rust::Box<#holder>
};
let inputs = self_param
.into_iter()
.chain(std::iter::once(boxed_holder_param))
.chain(existing_params)
.collect();
let maybe_wrap = Box::new(FuncToConvert {
ident: subclass_constructor_name.clone(),
doc_attr: fun.doc_attr.clone(),
inputs,
output: fun.output.clone(),
vis: fun.vis.clone(),
virtualness: Virtualness::None,
cpp_vis: CppVisibility::Public,
special_member: fun.special_member.clone(),
original_name: None,
unused_template_param: fun.unused_template_param,
references: fun.references.clone(),
synthesized_this_type: Some(cpp.clone()),
self_ty: Some(cpp),
synthesis,
is_deleted: fun.is_deleted,
});
let subclass_constructor_name = ApiName::new_with_cpp_name(
&Namespace::new(),
subclass_constructor_name,
Some(sub.cpp().get_final_item().to_string()),
);
(maybe_wrap, subclass_constructor_name)
}
|
{
panic!("Unexpected self type parameter when creating subclass constructor");
}
|
conditional_block
|
subclass.rs
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use syn::{parse_quote, FnArg, PatType, Type, TypePtr};
use crate::conversion::analysis::fun::{FnKind, MethodKind, ReceiverMutability};
use crate::conversion::analysis::pod::PodPhase;
use crate::conversion::api::{
CppVisibility, FuncToConvert, RustSubclassFnDetails, SubclassName, Synthesis, Virtualness,
};
use crate::{
conversion::{
analysis::fun::function_wrapper::{
CppFunction, CppFunctionBody, CppFunctionKind, TypeConversionPolicy,
},
api::{Api, ApiName},
},
types::{make_ident, Namespace, QualifiedName},
};
use super::{FnAnalysis, FnPhase};
pub(super) fn subclasses_by_superclass(
apis: &[Api<PodPhase>],
) -> HashMap<QualifiedName, Vec<SubclassName>> {
let mut subclasses_per_superclass: HashMap<QualifiedName, Vec<SubclassName>> = HashMap::new();
for api in apis.iter() {
if let Api::Subclass { name, superclass } = api {
subclasses_per_superclass
.entry(superclass.clone())
.or_default()
.push(name.clone());
}
}
subclasses_per_superclass
}
pub(super) fn create_subclass_fn_wrapper(
sub: SubclassName,
super_fn_name: &QualifiedName,
fun: &FuncToConvert,
) -> Box<FuncToConvert> {
let self_ty = Some(sub.cpp());
Box::new(FuncToConvert {
synthesized_this_type: self_ty.clone(),
self_ty,
ident: super_fn_name.get_final_ident(),
doc_attr: fun.doc_attr.clone(),
inputs: fun.inputs.clone(),
output: fun.output.clone(),
vis: fun.vis.clone(),
virtualness: Virtualness::None,
cpp_vis: CppVisibility::Public,
special_member: None,
unused_template_param: fun.unused_template_param,
original_name: None,
references: fun.references.clone(),
synthesis: fun.synthesis.clone(),
is_deleted: fun.is_deleted,
})
}
pub(super) fn create_subclass_function(
sub: &SubclassName,
analysis: &super::FnAnalysis,
name: &ApiName,
receiver_mutability: &ReceiverMutability,
superclass: &QualifiedName,
dependency: Option<&QualifiedName>,
) -> Api<FnPhase> {
let cpp = sub.cpp();
let holder_name = sub.holder();
let rust_call_name = make_ident(format!(
"{}_{}",
sub.0.name.get_final_item(),
name.name.get_final_item()
));
let params = std::iter::once(parse_quote! {
me: & #holder_name
})
.chain(analysis.params.iter().skip(1).cloned())
.collect();
let kind = if matches!(receiver_mutability, ReceiverMutability::Mutable) {
CppFunctionKind::Method
} else {
CppFunctionKind::ConstMethod
};
let subclass_function: Api<FnPhase> = Api::RustSubclassFn {
name: ApiName::new_in_root_namespace(rust_call_name.clone()),
subclass: sub.clone(),
details: Box::new(RustSubclassFnDetails {
params,
ret: analysis.ret_type.clone(),
method_name: make_ident(&analysis.rust_name),
cpp_impl: CppFunction {
payload: CppFunctionBody::FunctionCall(Namespace::new(), rust_call_name),
wrapper_function_name: name.name.get_final_ident(),
original_cpp_name: name.cpp_name(),
return_conversion: analysis.ret_conversion.clone(),
argument_conversion: analysis
.param_details
.iter()
.skip(1)
.map(|p| p.conversion.clone())
.collect(),
kind,
pass_obs_field: true,
qualification: Some(cpp),
},
superclass: superclass.clone(),
receiver_mutability: receiver_mutability.clone(),
dependency: dependency.cloned(),
requires_unsafe: analysis.param_details.iter().any(|pd| pd.requires_unsafe),
is_pure_virtual: matches!(
analysis.kind,
FnKind::Method(_, MethodKind::PureVirtual(..))
),
}),
};
subclass_function
}
pub(super) fn create_subclass_constructor(
sub: SubclassName,
analysis: &FnAnalysis,
sup: &QualifiedName,
fun: &FuncToConvert,
) -> (Box<FuncToConvert>, ApiName) {
let holder = sub.holder();
let cpp = sub.cpp();
let synthesis = Some({
let wrapper_function_name = cpp.get_final_ident();
let initial_arg = TypeConversionPolicy::new_unconverted(parse_quote! {
rust::Box< #holder >
});
let args = std::iter::once(initial_arg).chain(
analysis
.param_details
.iter()
.skip(1) // skip placement new destination
.map(|aa| aa.conversion.clone()),
);
let cpp_impl = CppFunction {
payload: CppFunctionBody::ConstructSuperclass(sup.to_cpp_name()),
wrapper_function_name,
return_conversion: None,
argument_conversion: args.collect(),
kind: CppFunctionKind::SynthesizedConstructor,
pass_obs_field: false,
qualification: Some(cpp.clone()),
original_cpp_name: cpp.to_cpp_name(),
};
Synthesis::SubclassConstructor {
subclass: sub.clone(),
cpp_impl: Box::new(cpp_impl),
is_trivial: analysis.param_details.len() == 1, // just placement new
// destination, no other parameters
}
});
let subclass_constructor_name =
make_ident(format!("{}_{}", cpp.get_final_item(), cpp.get_final_item()));
let mut existing_params = fun.inputs.clone();
    if let Some(FnArg::Typed(PatType { ty, .. })) = existing_params.first_mut() {
        if let Type::Ptr(TypePtr { elem, .. }) = &mut **ty {
*elem = Box::new(Type::Path(sub.cpp().to_type_path()));
} else {
panic!("Unexpected self type parameter when creating subclass constructor");
}
} else {
panic!("Unexpected self type parameter when creating subclass constructor");
}
let mut existing_params = existing_params.into_iter();
let self_param = existing_params.next();
let boxed_holder_param: FnArg = parse_quote! {
peer: rust::Box<#holder>
};
let inputs = self_param
.into_iter()
.chain(std::iter::once(boxed_holder_param))
.chain(existing_params)
.collect();
let maybe_wrap = Box::new(FuncToConvert {
ident: subclass_constructor_name.clone(),
doc_attr: fun.doc_attr.clone(),
inputs,
output: fun.output.clone(),
vis: fun.vis.clone(),
virtualness: Virtualness::None,
cpp_vis: CppVisibility::Public,
special_member: fun.special_member.clone(),
|
self_ty: Some(cpp),
synthesis,
is_deleted: fun.is_deleted,
});
let subclass_constructor_name = ApiName::new_with_cpp_name(
&Namespace::new(),
subclass_constructor_name,
Some(sub.cpp().get_final_item().to_string()),
);
(maybe_wrap, subclass_constructor_name)
}
|
original_name: None,
unused_template_param: fun.unused_template_param,
references: fun.references.clone(),
synthesized_this_type: Some(cpp.clone()),
|
random_line_split
|
pptypedef.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn let_in<T>(x: T, f: |T|) {}
fn main()
|
{
let_in(3u, |i| { assert!(i == 3i); });
//~^ ERROR expected `uint`, found `int`
let_in(3i, |i| { assert!(i == 3u); });
//~^ ERROR expected `int`, found `uint`
}
|
identifier_body
|
|
pptypedef.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn let_in<T>(x: T, f: |T|) {}
fn
|
() {
let_in(3u, |i| { assert!(i == 3i); });
//~^ ERROR expected `uint`, found `int`
let_in(3i, |i| { assert!(i == 3u); });
//~^ ERROR expected `int`, found `uint`
}
|
main
|
identifier_name
|
pptypedef.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn let_in<T>(x: T, f: |T|) {}
|
let_in(3i, |i| { assert!(i == 3u); });
//~^ ERROR expected `int`, found `uint`
}
|
fn main() {
let_in(3u, |i| { assert!(i == 3i); });
//~^ ERROR expected `uint`, found `int`
|
random_line_split
|
gecko.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Specified types for legacy Gecko-only properties.
use crate::parser::{Parse, ParserContext};
use crate::values::computed::length::CSSPixelLength;
use crate::values::computed::{self, LengthPercentage};
use crate::values::generics::rect::Rect;
use cssparser::{Parser, Token};
use std::fmt;
use style_traits::values::SequenceWriter;
use style_traits::{CssWriter, ParseError, StyleParseErrorKind, ToCss};
fn parse_pixel_or_percent<'i, 't>(
_context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<LengthPercentage, ParseError<'i>> {
let location = input.current_source_location();
let token = input.next()?;
let value = match *token {
Token::Dimension {
            value, ref unit, ..
} => {
match_ignore_ascii_case! { unit,
"px" => Ok(LengthPercentage::new(CSSPixelLength::new(value), None)),
_ => Err(()),
}
},
        Token::Percentage { unit_value, .. } => Ok(LengthPercentage::new_percent(
computed::Percentage(unit_value),
)),
_ => Err(()),
};
value.map_err(|()| location.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
/// The value of an IntersectionObserver's rootMargin property.
///
/// Only bare px or percentage values are allowed. Other length units and
/// calc() values are not allowed.
///
/// <https://w3c.github.io/IntersectionObserver/#parse-a-root-margin>
#[repr(transparent)]
pub struct
|
(pub Rect<LengthPercentage>);
impl Parse for IntersectionObserverRootMargin {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let rect = Rect::parse_with(context, input, parse_pixel_or_percent)?;
Ok(IntersectionObserverRootMargin(rect))
}
}
// Strictly speaking this is not ToCss. It's serializing for DOM. But
// we can just reuse the infrastructure of this.
//
// <https://w3c.github.io/IntersectionObserver/#dom-intersectionobserver-rootmargin>
impl ToCss for IntersectionObserverRootMargin {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: fmt::Write,
{
// We cannot use the ToCss impl of Rect, because that would
// merge items when they are equal. We want to list them all.
let mut writer = SequenceWriter::new(dest, " ");
let rect = &self.0;
writer.item(&rect.0)?;
writer.item(&rect.1)?;
writer.item(&rect.2)?;
writer.item(&rect.3)
}
}
|
IntersectionObserverRootMargin
|
identifier_name
|
gecko.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Specified types for legacy Gecko-only properties.
use crate::parser::{Parse, ParserContext};
use crate::values::computed::length::CSSPixelLength;
use crate::values::computed::{self, LengthPercentage};
use crate::values::generics::rect::Rect;
use cssparser::{Parser, Token};
use std::fmt;
use style_traits::values::SequenceWriter;
use style_traits::{CssWriter, ParseError, StyleParseErrorKind, ToCss};
fn parse_pixel_or_percent<'i, 't>(
_context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<LengthPercentage, ParseError<'i>> {
let location = input.current_source_location();
let token = input.next()?;
let value = match *token {
Token::Dimension {
            value, ref unit, ..
} => {
match_ignore_ascii_case! { unit,
"px" => Ok(LengthPercentage::new(CSSPixelLength::new(value), None)),
_ => Err(()),
}
},
        Token::Percentage { unit_value, .. } => Ok(LengthPercentage::new_percent(
computed::Percentage(unit_value),
)),
_ => Err(()),
};
value.map_err(|()| location.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
/// The value of an IntersectionObserver's rootMargin property.
///
/// Only bare px or percentage values are allowed. Other length units and
/// calc() values are not allowed.
///
/// <https://w3c.github.io/IntersectionObserver/#parse-a-root-margin>
|
pub struct IntersectionObserverRootMargin(pub Rect<LengthPercentage>);
impl Parse for IntersectionObserverRootMargin {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let rect = Rect::parse_with(context, input, parse_pixel_or_percent)?;
Ok(IntersectionObserverRootMargin(rect))
}
}
// Strictly speaking this is not ToCss. It's serializing for DOM. But
// we can just reuse the infrastructure of this.
//
// <https://w3c.github.io/IntersectionObserver/#dom-intersectionobserver-rootmargin>
impl ToCss for IntersectionObserverRootMargin {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: fmt::Write,
{
// We cannot use the ToCss impl of Rect, because that would
// merge items when they are equal. We want to list them all.
let mut writer = SequenceWriter::new(dest, " ");
let rect = &self.0;
writer.item(&rect.0)?;
writer.item(&rect.1)?;
writer.item(&rect.2)?;
writer.item(&rect.3)
}
}
|
#[repr(transparent)]
|
random_line_split
|
doc_comment.rs
|
use crate::comment::CommentStyle;
use std::fmt::{self, Display};
/// Formats a string as a doc comment using the given [`CommentStyle`].
#[derive(new)]
pub(super) struct DocCommentFormatter<'a> {
literal: &'a str,
style: CommentStyle<'a>,
}
impl Display for DocCommentFormatter<'_> {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
let opener = self.style.opener().trim_end();
let mut lines = self.literal.lines().peekable();
// Handle `#[doc = ""]`.
if lines.peek().is_none() {
return write!(formatter, "{}", opener);
}
while let Some(line) = lines.next() {
let is_last_line = lines.peek().is_none();
if is_last_line {
write!(formatter, "{}{}", opener, line)?;
} else {
writeln!(formatter, "{}{}", opener, line)?;
}
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn
|
() {
test_doc_comment_is_formatted_correctly(
" Lorem ipsum",
"/// Lorem ipsum",
CommentStyle::TripleSlash,
);
}
#[test]
fn single_line_doc_comment_is_formatted_correctly() {
test_doc_comment_is_formatted_correctly(
"Lorem ipsum",
"///Lorem ipsum",
CommentStyle::TripleSlash,
);
}
#[test]
fn multi_line_doc_comment_is_formatted_correctly() {
test_doc_comment_is_formatted_correctly(
"Lorem ipsum\nDolor sit amet",
"///Lorem ipsum\n///Dolor sit amet",
CommentStyle::TripleSlash,
);
}
#[test]
fn whitespace_within_lines_is_preserved() {
test_doc_comment_is_formatted_correctly(
" Lorem ipsum \n Dolor sit amet ",
"/// Lorem ipsum \n/// Dolor sit amet ",
CommentStyle::TripleSlash,
);
}
fn test_doc_comment_is_formatted_correctly(
literal: &str,
expected_comment: &str,
style: CommentStyle<'_>,
) {
assert_eq!(
expected_comment,
format!("{}", DocCommentFormatter::new(&literal, style))
);
}
}
|
literal_controls_leading_spaces
|
identifier_name
|
doc_comment.rs
|
use crate::comment::CommentStyle;
use std::fmt::{self, Display};
/// Formats a string as a doc comment using the given [`CommentStyle`].
#[derive(new)]
pub(super) struct DocCommentFormatter<'a> {
literal: &'a str,
style: CommentStyle<'a>,
}
impl Display for DocCommentFormatter<'_> {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
let opener = self.style.opener().trim_end();
let mut lines = self.literal.lines().peekable();
// Handle `#[doc = ""]`.
if lines.peek().is_none() {
return write!(formatter, "{}", opener);
}
while let Some(line) = lines.next() {
let is_last_line = lines.peek().is_none();
if is_last_line
|
else {
writeln!(formatter, "{}{}", opener, line)?;
}
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn literal_controls_leading_spaces() {
test_doc_comment_is_formatted_correctly(
" Lorem ipsum",
"/// Lorem ipsum",
CommentStyle::TripleSlash,
);
}
#[test]
fn single_line_doc_comment_is_formatted_correctly() {
test_doc_comment_is_formatted_correctly(
"Lorem ipsum",
"///Lorem ipsum",
CommentStyle::TripleSlash,
);
}
#[test]
fn multi_line_doc_comment_is_formatted_correctly() {
test_doc_comment_is_formatted_correctly(
"Lorem ipsum\nDolor sit amet",
"///Lorem ipsum\n///Dolor sit amet",
CommentStyle::TripleSlash,
);
}
#[test]
fn whitespace_within_lines_is_preserved() {
test_doc_comment_is_formatted_correctly(
" Lorem ipsum \n Dolor sit amet ",
"/// Lorem ipsum \n/// Dolor sit amet ",
CommentStyle::TripleSlash,
);
}
fn test_doc_comment_is_formatted_correctly(
literal: &str,
expected_comment: &str,
style: CommentStyle<'_>,
) {
assert_eq!(
expected_comment,
format!("{}", DocCommentFormatter::new(&literal, style))
);
}
}
|
{
write!(formatter, "{}{}", opener, line)?;
}
|
conditional_block
|
doc_comment.rs
|
use crate::comment::CommentStyle;
use std::fmt::{self, Display};
/// Formats a string as a doc comment using the given [`CommentStyle`].
#[derive(new)]
pub(super) struct DocCommentFormatter<'a> {
literal: &'a str,
style: CommentStyle<'a>,
}
impl Display for DocCommentFormatter<'_> {
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
let opener = self.style.opener().trim_end();
let mut lines = self.literal.lines().peekable();
// Handle `#[doc = ""]`.
if lines.peek().is_none() {
return write!(formatter, "{}", opener);
}
|
write!(formatter, "{}{}", opener, line)?;
} else {
writeln!(formatter, "{}{}", opener, line)?;
}
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn literal_controls_leading_spaces() {
test_doc_comment_is_formatted_correctly(
" Lorem ipsum",
"/// Lorem ipsum",
CommentStyle::TripleSlash,
);
}
#[test]
fn single_line_doc_comment_is_formatted_correctly() {
test_doc_comment_is_formatted_correctly(
"Lorem ipsum",
"///Lorem ipsum",
CommentStyle::TripleSlash,
);
}
#[test]
fn multi_line_doc_comment_is_formatted_correctly() {
test_doc_comment_is_formatted_correctly(
"Lorem ipsum\nDolor sit amet",
"///Lorem ipsum\n///Dolor sit amet",
CommentStyle::TripleSlash,
);
}
#[test]
fn whitespace_within_lines_is_preserved() {
test_doc_comment_is_formatted_correctly(
" Lorem ipsum \n Dolor sit amet ",
"/// Lorem ipsum \n/// Dolor sit amet ",
CommentStyle::TripleSlash,
);
}
fn test_doc_comment_is_formatted_correctly(
literal: &str,
expected_comment: &str,
style: CommentStyle<'_>,
) {
assert_eq!(
expected_comment,
format!("{}", DocCommentFormatter::new(&literal, style))
);
}
}
|
while let Some(line) = lines.next() {
let is_last_line = lines.peek().is_none();
if is_last_line {
|
random_line_split
|
sha1.rs
|
use std::num::Wrapping;
use byteorder::ByteOrder;
use byteorder::ReadBytesExt;
use byteorder::WriteBytesExt;
use byteorder::BigEndian as BE;
static K1: u32 = 0x5A827999u32;
static K2: u32 = 0x6ED9EBA1u32;
static K3: u32 = 0x8F1BBCDCu32;
static K4: u32 = 0xCA62C1D6u32;
#[inline]
fn circular_shift(bits: u32, Wrapping(word): Wrapping<u32>) -> u32 {
word << (bits as usize) | word >> ((32u32 - bits) as usize)
}
#[allow(unused_must_use)]
pub fn sha1(message: &[u8]) -> Vec<u8> {
let mut hash: [u32; 5] = [0x67452301,
0xEFCDAB89,
0x98BADCFE,
0x10325476,
0xC3D2E1F0];
let mut msg = message.to_vec();
let msg_bit_len = msg.len() * 8;
let offset = (msg.len() * 8) % 512;
if offset < 448 {
msg.push(128u8);
for _ in 0..(448 - (offset + 8)) / 8 {
msg.push(0u8);
}
} else if offset >= 448 {
|
for _ in 0..(512 - (offset + 8)) / 8 + 56 {
msg.push(0u8);
}
}
msg.write_u64::<BE>(msg_bit_len as u64);
for i in 0..(msg.len() * 8 / 512) {
let mut w = [0u32; 80];
let part = &msg[i * 64..(i+1) * 64];
{
let mut reader = &part[..];
for j in 0usize..16 {
w[j] = reader.read_u32::<BE>().unwrap();
}
}
for j in 16usize..80 {
let val = w[j - 3] ^ w[j - 8] ^ w[j - 14] ^ w[j - 16];
w[j] = circular_shift(1, Wrapping(val));
}
let mut a = Wrapping(hash[0]);
let mut b = Wrapping(hash[1]);
let mut c = Wrapping(hash[2]);
let mut d = Wrapping(hash[3]);
let mut e = Wrapping(hash[4]);
let mut temp: Wrapping<u32>;
for t in 0usize..20 {
temp = Wrapping(circular_shift(5, a))
                + (b & c | !b & d)
+ e
+ Wrapping(w[t])
+ Wrapping(K1);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
for t in 20usize..40 {
temp = Wrapping(circular_shift(5, a))
+ (b ^ c ^ d)
+ e
+ Wrapping(w[t])
+ Wrapping(K2);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
for t in 40usize..60 {
temp = Wrapping(circular_shift(5, a))
+ (b & c | b & d | c & d)
+ e
+ Wrapping(w[t])
+ Wrapping(K3);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
for t in 60usize..80 {
temp = Wrapping(circular_shift(5, a))
+ (b ^ c ^ d)
+ e
+ Wrapping(w[t])
+ Wrapping(K4);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
hash[0] = { let Wrapping(x) = Wrapping(hash[0]) + a; x};
hash[1] = { let Wrapping(x) = Wrapping(hash[1]) + b; x};
hash[2] = { let Wrapping(x) = Wrapping(hash[2]) + c; x};
hash[3] = { let Wrapping(x) = Wrapping(hash[3]) + d; x};
hash[4] = { let Wrapping(x) = Wrapping(hash[4]) + e; x};
}
let mut output = Vec::with_capacity(20);
output.write_u32::<BE>(hash[0]);
output.write_u32::<BE>(hash[1]);
output.write_u32::<BE>(hash[2]);
output.write_u32::<BE>(hash[3]);
output.write_u32::<BE>(hash[4]);
output
}
#[cfg(test)]
mod test {
pub use super::sha1;
#[test]
fn should_compute_sha1_hash() {
assert_eq!(sha1(&[115u8, 104u8, 97u8]),
vec![0xd8u8, 0xf4u8, 0x59u8, 0x03u8, 0x20u8, 0xe1u8,
0x34u8, 0x3au8, 0x91u8, 0x5bu8, 0x63u8, 0x94u8,
0x17u8, 0x06u8, 0x50u8, 0xa8u8, 0xf3u8, 0x5du8,
0x69u8, 0x26u8]);
assert_eq!(sha1(&[65u8; 57]),
vec![0xe8u8, 0xd6u8, 0xeau8, 0x5cu8, 0x62u8, 0x7fu8,
0xc8u8, 0x67u8, 0x6fu8, 0xa6u8, 0x62u8, 0x67u8,
0x7bu8, 0x02u8, 0x86u8, 0x40u8, 0x84u8, 0x4du8,
0xc3u8, 0x5cu8]);
assert_eq!(sha1(&[65u8; 56]),
vec![0x6bu8, 0x45u8, 0xe3u8, 0xcfu8, 0x1eu8, 0xb3u8,
0x32u8, 0x4bu8, 0x9fu8, 0xd4u8, 0xdfu8, 0x3bu8,
0x83u8, 0xd8u8, 0x9cu8, 0x4cu8, 0x2cu8, 0x4cu8,
0xa8u8, 0x96u8]);
assert_eq!(sha1(&[65u8; 64]),
vec![0x30u8, 0xb8u8, 0x6eu8, 0x44u8, 0xe6u8, 0x00u8,
0x14u8, 0x03u8, 0x82u8, 0x7au8, 0x62u8, 0xc5u8,
0x8bu8, 0x08u8, 0x89u8, 0x3eu8, 0x77u8, 0xcfu8,
0x12u8, 0x1fu8]);
assert_eq!(sha1(&[65u8; 65]),
vec![0x82u8, 0x6bu8, 0x7eu8, 0x7au8, 0x7au8, 0xf8u8,
0xa5u8, 0x29u8, 0xaeu8, 0x1cu8, 0x74u8, 0x43u8,
0xc2u8, 0x3bu8, 0xf1u8, 0x85u8, 0xc0u8, 0xadu8,
0x44u8, 0x0cu8]);
}
}
|
msg.push(128u8);
|
random_line_split
|
sha1.rs
|
use std::num::Wrapping;
use byteorder::ByteOrder;
use byteorder::ReadBytesExt;
use byteorder::WriteBytesExt;
use byteorder::BigEndian as BE;
static K1: u32 = 0x5A827999u32;
static K2: u32 = 0x6ED9EBA1u32;
static K3: u32 = 0x8F1BBCDCu32;
static K4: u32 = 0xCA62C1D6u32;
#[inline]
fn
|
(bits: u32, Wrapping(word): Wrapping<u32>) -> u32 {
word << (bits as usize) | word >> ((32u32 - bits) as usize)
}
#[allow(unused_must_use)]
pub fn sha1(message: &[u8]) -> Vec<u8> {
let mut hash: [u32; 5] = [0x67452301,
0xEFCDAB89,
0x98BADCFE,
0x10325476,
0xC3D2E1F0];
let mut msg = message.to_vec();
let msg_bit_len = msg.len() * 8;
let offset = (msg.len() * 8) % 512;
if offset < 448 {
msg.push(128u8);
for _ in 0..(448 - (offset + 8)) / 8 {
msg.push(0u8);
}
} else if offset >= 448 {
msg.push(128u8);
for _ in 0..(512 - (offset + 8)) / 8 + 56 {
msg.push(0u8);
}
}
msg.write_u64::<BE>(msg_bit_len as u64);
for i in 0..(msg.len() * 8 / 512) {
let mut w = [0u32; 80];
let part = &msg[i * 64..(i+1) * 64];
{
let mut reader = &part[..];
for j in 0usize..16 {
w[j] = reader.read_u32::<BE>().unwrap();
}
}
for j in 16usize..80 {
let val = w[j - 3] ^ w[j - 8] ^ w[j - 14] ^ w[j - 16];
w[j] = circular_shift(1, Wrapping(val));
}
let mut a = Wrapping(hash[0]);
let mut b = Wrapping(hash[1]);
let mut c = Wrapping(hash[2]);
let mut d = Wrapping(hash[3]);
let mut e = Wrapping(hash[4]);
let mut temp: Wrapping<u32>;
for t in 0usize..20 {
temp = Wrapping(circular_shift(5, a))
                + (b & c | !b & d)
+ e
+ Wrapping(w[t])
+ Wrapping(K1);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
for t in 20usize..40 {
temp = Wrapping(circular_shift(5, a))
+ (b ^ c ^ d)
+ e
+ Wrapping(w[t])
+ Wrapping(K2);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
for t in 40usize..60 {
temp = Wrapping(circular_shift(5, a))
+ (b & c | b & d | c & d)
+ e
+ Wrapping(w[t])
+ Wrapping(K3);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
for t in 60usize..80 {
temp = Wrapping(circular_shift(5, a))
+ (b ^ c ^ d)
+ e
+ Wrapping(w[t])
+ Wrapping(K4);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
hash[0] = { let Wrapping(x) = Wrapping(hash[0]) + a; x};
hash[1] = { let Wrapping(x) = Wrapping(hash[1]) + b; x};
hash[2] = { let Wrapping(x) = Wrapping(hash[2]) + c; x};
hash[3] = { let Wrapping(x) = Wrapping(hash[3]) + d; x};
hash[4] = { let Wrapping(x) = Wrapping(hash[4]) + e; x};
}
let mut output = Vec::with_capacity(20);
output.write_u32::<BE>(hash[0]);
output.write_u32::<BE>(hash[1]);
output.write_u32::<BE>(hash[2]);
output.write_u32::<BE>(hash[3]);
output.write_u32::<BE>(hash[4]);
output
}
#[cfg(test)]
mod test {
pub use super::sha1;
#[test]
fn should_compute_sha1_hash() {
assert_eq!(sha1(&[115u8, 104u8, 97u8]),
vec![0xd8u8, 0xf4u8, 0x59u8, 0x03u8, 0x20u8, 0xe1u8,
0x34u8, 0x3au8, 0x91u8, 0x5bu8, 0x63u8, 0x94u8,
0x17u8, 0x06u8, 0x50u8, 0xa8u8, 0xf3u8, 0x5du8,
0x69u8, 0x26u8]);
assert_eq!(sha1(&[65u8; 57]),
vec![0xe8u8, 0xd6u8, 0xeau8, 0x5cu8, 0x62u8, 0x7fu8,
0xc8u8, 0x67u8, 0x6fu8, 0xa6u8, 0x62u8, 0x67u8,
0x7bu8, 0x02u8, 0x86u8, 0x40u8, 0x84u8, 0x4du8,
0xc3u8, 0x5cu8]);
assert_eq!(sha1(&[65u8; 56]),
vec![0x6bu8, 0x45u8, 0xe3u8, 0xcfu8, 0x1eu8, 0xb3u8,
0x32u8, 0x4bu8, 0x9fu8, 0xd4u8, 0xdfu8, 0x3bu8,
0x83u8, 0xd8u8, 0x9cu8, 0x4cu8, 0x2cu8, 0x4cu8,
0xa8u8, 0x96u8]);
assert_eq!(sha1(&[65u8; 64]),
vec![0x30u8, 0xb8u8, 0x6eu8, 0x44u8, 0xe6u8, 0x00u8,
0x14u8, 0x03u8, 0x82u8, 0x7au8, 0x62u8, 0xc5u8,
0x8bu8, 0x08u8, 0x89u8, 0x3eu8, 0x77u8, 0xcfu8,
0x12u8, 0x1fu8]);
assert_eq!(sha1(&[65u8; 65]),
vec![0x82u8, 0x6bu8, 0x7eu8, 0x7au8, 0x7au8, 0xf8u8,
0xa5u8, 0x29u8, 0xaeu8, 0x1cu8, 0x74u8, 0x43u8,
0xc2u8, 0x3bu8, 0xf1u8, 0x85u8, 0xc0u8, 0xadu8,
0x44u8, 0x0cu8]);
}
}
|
circular_shift
|
identifier_name
|
sha1.rs
|
use std::num::Wrapping;
use byteorder::ByteOrder;
use byteorder::ReadBytesExt;
use byteorder::WriteBytesExt;
use byteorder::BigEndian as BE;
static K1: u32 = 0x5A827999u32;
static K2: u32 = 0x6ED9EBA1u32;
static K3: u32 = 0x8F1BBCDCu32;
static K4: u32 = 0xCA62C1D6u32;
#[inline]
fn circular_shift(bits: u32, Wrapping(word): Wrapping<u32>) -> u32
|
#[allow(unused_must_use)]
pub fn sha1(message: &[u8]) -> Vec<u8> {
let mut hash: [u32; 5] = [0x67452301,
0xEFCDAB89,
0x98BADCFE,
0x10325476,
0xC3D2E1F0];
let mut msg = message.to_vec();
let msg_bit_len = msg.len() * 8;
let offset = (msg.len() * 8) % 512;
if offset < 448 {
msg.push(128u8);
for _ in 0..(448 - (offset + 8)) / 8 {
msg.push(0u8);
}
} else if offset >= 448 {
msg.push(128u8);
for _ in 0..(512 - (offset + 8)) / 8 + 56 {
msg.push(0u8);
}
}
msg.write_u64::<BE>(msg_bit_len as u64);
for i in 0..(msg.len() * 8 / 512) {
let mut w = [0u32; 80];
let part = &msg[i * 64..(i+1) * 64];
{
let mut reader = &part[..];
for j in 0usize..16 {
w[j] = reader.read_u32::<BE>().unwrap();
}
}
for j in 16usize..80 {
let val = w[j - 3] ^ w[j - 8] ^ w[j - 14] ^ w[j - 16];
w[j] = circular_shift(1, Wrapping(val));
}
let mut a = Wrapping(hash[0]);
let mut b = Wrapping(hash[1]);
let mut c = Wrapping(hash[2]);
let mut d = Wrapping(hash[3]);
let mut e = Wrapping(hash[4]);
let mut temp: Wrapping<u32>;
for t in 0usize..20 {
temp = Wrapping(circular_shift(5, a))
                + (b & c | !b & d)
+ e
+ Wrapping(w[t])
+ Wrapping(K1);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
for t in 20usize..40 {
temp = Wrapping(circular_shift(5, a))
+ (b ^ c ^ d)
+ e
+ Wrapping(w[t])
+ Wrapping(K2);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
for t in 40usize..60 {
temp = Wrapping(circular_shift(5, a))
+ (b & c | b & d | c & d)
+ e
+ Wrapping(w[t])
+ Wrapping(K3);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
for t in 60usize..80 {
temp = Wrapping(circular_shift(5, a))
+ (b ^ c ^ d)
+ e
+ Wrapping(w[t])
+ Wrapping(K4);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
hash[0] = { let Wrapping(x) = Wrapping(hash[0]) + a; x};
hash[1] = { let Wrapping(x) = Wrapping(hash[1]) + b; x};
hash[2] = { let Wrapping(x) = Wrapping(hash[2]) + c; x};
hash[3] = { let Wrapping(x) = Wrapping(hash[3]) + d; x};
hash[4] = { let Wrapping(x) = Wrapping(hash[4]) + e; x};
}
let mut output = Vec::with_capacity(20);
output.write_u32::<BE>(hash[0]);
output.write_u32::<BE>(hash[1]);
output.write_u32::<BE>(hash[2]);
output.write_u32::<BE>(hash[3]);
output.write_u32::<BE>(hash[4]);
output
}
#[cfg(test)]
mod test {
pub use super::sha1;
#[test]
fn should_compute_sha1_hash() {
assert_eq!(sha1(&[115u8, 104u8, 97u8]),
vec![0xd8u8, 0xf4u8, 0x59u8, 0x03u8, 0x20u8, 0xe1u8,
0x34u8, 0x3au8, 0x91u8, 0x5bu8, 0x63u8, 0x94u8,
0x17u8, 0x06u8, 0x50u8, 0xa8u8, 0xf3u8, 0x5du8,
0x69u8, 0x26u8]);
assert_eq!(sha1(&[65u8; 57]),
vec![0xe8u8, 0xd6u8, 0xeau8, 0x5cu8, 0x62u8, 0x7fu8,
0xc8u8, 0x67u8, 0x6fu8, 0xa6u8, 0x62u8, 0x67u8,
0x7bu8, 0x02u8, 0x86u8, 0x40u8, 0x84u8, 0x4du8,
0xc3u8, 0x5cu8]);
assert_eq!(sha1(&[65u8; 56]),
vec![0x6bu8, 0x45u8, 0xe3u8, 0xcfu8, 0x1eu8, 0xb3u8,
0x32u8, 0x4bu8, 0x9fu8, 0xd4u8, 0xdfu8, 0x3bu8,
0x83u8, 0xd8u8, 0x9cu8, 0x4cu8, 0x2cu8, 0x4cu8,
0xa8u8, 0x96u8]);
assert_eq!(sha1(&[65u8; 64]),
vec![0x30u8, 0xb8u8, 0x6eu8, 0x44u8, 0xe6u8, 0x00u8,
0x14u8, 0x03u8, 0x82u8, 0x7au8, 0x62u8, 0xc5u8,
0x8bu8, 0x08u8, 0x89u8, 0x3eu8, 0x77u8, 0xcfu8,
0x12u8, 0x1fu8]);
assert_eq!(sha1(&[65u8; 65]),
vec![0x82u8, 0x6bu8, 0x7eu8, 0x7au8, 0x7au8, 0xf8u8,
0xa5u8, 0x29u8, 0xaeu8, 0x1cu8, 0x74u8, 0x43u8,
0xc2u8, 0x3bu8, 0xf1u8, 0x85u8, 0xc0u8, 0xadu8,
0x44u8, 0x0cu8]);
}
}
|
{
word << (bits as usize) | word >> ((32u32 - bits) as usize)
}
|
identifier_body
|
sha1.rs
|
use std::num::Wrapping;
use byteorder::ByteOrder;
use byteorder::ReadBytesExt;
use byteorder::WriteBytesExt;
use byteorder::BigEndian as BE;
static K1: u32 = 0x5A827999u32;
static K2: u32 = 0x6ED9EBA1u32;
static K3: u32 = 0x8F1BBCDCu32;
static K4: u32 = 0xCA62C1D6u32;
#[inline]
fn circular_shift(bits: u32, Wrapping(word): Wrapping<u32>) -> u32 {
word << (bits as usize) | word >> ((32u32 - bits) as usize)
}
#[allow(unused_must_use)]
pub fn sha1(message: &[u8]) -> Vec<u8> {
let mut hash: [u32; 5] = [0x67452301,
0xEFCDAB89,
0x98BADCFE,
0x10325476,
0xC3D2E1F0];
let mut msg = message.to_vec();
let msg_bit_len = msg.len() * 8;
let offset = (msg.len() * 8) % 512;
if offset < 448
|
else if offset >= 448 {
msg.push(128u8);
for _ in 0..(512 - (offset + 8)) / 8 + 56 {
msg.push(0u8);
}
}
msg.write_u64::<BE>(msg_bit_len as u64);
for i in 0..(msg.len() * 8 / 512) {
let mut w = [0u32; 80];
let part = &msg[i * 64..(i+1) * 64];
{
let mut reader = &part[..];
for j in 0usize..16 {
w[j] = reader.read_u32::<BE>().unwrap();
}
}
for j in 16usize..80 {
let val = w[j - 3] ^ w[j - 8] ^ w[j - 14] ^ w[j - 16];
w[j] = circular_shift(1, Wrapping(val));
}
let mut a = Wrapping(hash[0]);
let mut b = Wrapping(hash[1]);
let mut c = Wrapping(hash[2]);
let mut d = Wrapping(hash[3]);
let mut e = Wrapping(hash[4]);
let mut temp: Wrapping<u32>;
for t in 0usize..20 {
temp = Wrapping(circular_shift(5, a))
                + (b & c | !b & d)
+ e
+ Wrapping(w[t])
+ Wrapping(K1);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
for t in 20usize..40 {
temp = Wrapping(circular_shift(5, a))
+ (b ^ c ^ d)
+ e
+ Wrapping(w[t])
+ Wrapping(K2);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
for t in 40usize..60 {
temp = Wrapping(circular_shift(5, a))
+ (b & c | b & d | c & d)
+ e
+ Wrapping(w[t])
+ Wrapping(K3);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
for t in 60usize..80 {
temp = Wrapping(circular_shift(5, a))
+ (b ^ c ^ d)
+ e
+ Wrapping(w[t])
+ Wrapping(K4);
e = d;
d = c;
c = Wrapping(circular_shift(30, b));
b = a;
a = temp;
}
hash[0] = { let Wrapping(x) = Wrapping(hash[0]) + a; x};
hash[1] = { let Wrapping(x) = Wrapping(hash[1]) + b; x};
hash[2] = { let Wrapping(x) = Wrapping(hash[2]) + c; x};
hash[3] = { let Wrapping(x) = Wrapping(hash[3]) + d; x};
hash[4] = { let Wrapping(x) = Wrapping(hash[4]) + e; x};
}
let mut output = Vec::with_capacity(20);
output.write_u32::<BE>(hash[0]);
output.write_u32::<BE>(hash[1]);
output.write_u32::<BE>(hash[2]);
output.write_u32::<BE>(hash[3]);
output.write_u32::<BE>(hash[4]);
output
}
#[cfg(test)]
mod test {
pub use super::sha1;
#[test]
fn should_compute_sha1_hash() {
assert_eq!(sha1(&[115u8, 104u8, 97u8]),
vec![0xd8u8, 0xf4u8, 0x59u8, 0x03u8, 0x20u8, 0xe1u8,
0x34u8, 0x3au8, 0x91u8, 0x5bu8, 0x63u8, 0x94u8,
0x17u8, 0x06u8, 0x50u8, 0xa8u8, 0xf3u8, 0x5du8,
0x69u8, 0x26u8]);
assert_eq!(sha1(&[65u8; 57]),
vec![0xe8u8, 0xd6u8, 0xeau8, 0x5cu8, 0x62u8, 0x7fu8,
0xc8u8, 0x67u8, 0x6fu8, 0xa6u8, 0x62u8, 0x67u8,
0x7bu8, 0x02u8, 0x86u8, 0x40u8, 0x84u8, 0x4du8,
0xc3u8, 0x5cu8]);
assert_eq!(sha1(&[65u8; 56]),
vec![0x6bu8, 0x45u8, 0xe3u8, 0xcfu8, 0x1eu8, 0xb3u8,
0x32u8, 0x4bu8, 0x9fu8, 0xd4u8, 0xdfu8, 0x3bu8,
0x83u8, 0xd8u8, 0x9cu8, 0x4cu8, 0x2cu8, 0x4cu8,
0xa8u8, 0x96u8]);
assert_eq!(sha1(&[65u8; 64]),
vec![0x30u8, 0xb8u8, 0x6eu8, 0x44u8, 0xe6u8, 0x00u8,
0x14u8, 0x03u8, 0x82u8, 0x7au8, 0x62u8, 0xc5u8,
0x8bu8, 0x08u8, 0x89u8, 0x3eu8, 0x77u8, 0xcfu8,
0x12u8, 0x1fu8]);
assert_eq!(sha1(&[65u8; 65]),
vec![0x82u8, 0x6bu8, 0x7eu8, 0x7au8, 0x7au8, 0xf8u8,
0xa5u8, 0x29u8, 0xaeu8, 0x1cu8, 0x74u8, 0x43u8,
0xc2u8, 0x3bu8, 0xf1u8, 0x85u8, 0xc0u8, 0xadu8,
0x44u8, 0x0cu8]);
}
}
|
{
msg.push(128u8);
for _ in 0..(448 - (offset + 8)) / 8 {
msg.push(0u8);
}
}
|
conditional_block
|
main.rs
|
use std::default::Default;
extern crate time;
extern crate ansi_term;
use ansi_term::Colour::{Red, Green, Yellow, Blue, Purple, Cyan};
extern crate mysql;
use mysql::conn::MyOpts;
use mysql::conn::pool::MyPool;
use mysql::value::from_row;
use std::collections::BTreeMap;
pub mod error;
use self::error::Error;
pub mod model;
pub mod api_result;
extern crate rustc_serialize;
use rustc_serialize::json::{self, ToJson, Json};
fn main()
|
println!("--- END ---");
println!("\n--- {} ---", Yellow.bold().paint("JSON encode decode").to_string());
let json_str = "{\"code\":0,\"message\":\"OK\",\"data\":{\"keyword\":\"Rust, api, php, nginx\"},\"ip\":[\"192.168.0.168\",\"127.0.0.1\"]}";
println!("json_str: {}", Blue.paint(json_str.to_string()));
//let json: api_result::Body = json::decode(&json_str).unwrap();
let mut obj: BTreeMap<String, json::Json> = BTreeMap::new();
obj.insert("os".to_string(), json::Json::from_str("{\"page\": 11, \"os\": \"Linux, Unix, MacOS\"}").unwrap_or_else(|e| { panic!("failed to execute process: {}", e) }));
obj.insert("test".to_string(), json::Json::from_str("[\"just test\"]").unwrap_or_else(|e| { panic!("failed to execute process: {}", e) }));
let json_obj = api_result::Body {
code: code,
message: message.to_string(),
data: obj,
};
let encode_str = json_obj.to_json();
println!("encode_str: {}", encode_str.to_string());
println!("--- END ---");
println!("\n--- {} ---", Purple.bold().paint("BTreeMap".to_string()));
    // BTreeMap keys must all be of the same type
let mut map = BTreeMap::new();
let mut n_map = BTreeMap::new();
n_map.insert(0, "Hi, world");
n_map.insert(1, "a");
n_map.insert(2, "b");
map.insert("key", "value");
map.insert("os", "linux terminal");
let n_keys: Vec<_> = n_map.keys().cloned().collect();
let n_values: Vec<&str> = n_map.values().cloned().collect();
let keys: Vec<&str> = map.keys().cloned().collect();
let values: Vec<&str> = map.values().cloned().collect();
println!("n_map->keys: {:?}", n_keys);
println!("n_map->values: {:?}", n_values);
println!("map->keys: {:?}", keys);
println!("map->values: {:?}", values);
for (k, v) in map.iter() {
println!("map.{} = {}", k, v);
}
println!("--- END ---");
println!("\n--- {} ---", Cyan.bold().paint("UserModel"));
let empty_user = model::user::UserModel::create_empty();
// error: cannot assign to immutable field
//empty_user.id = 110;
println!("empty_user -> {}", empty_user);
let mut user = model::user::UserModel::create(1, "Admin".to_string(), "15801398759".to_string(), "[email protected]".to_string());
println!("debug for user: {}", user);
user.id = 100;
user.nickname = "OP".to_string();
println!("debug for user after modify: {}", user);
let user_json = json::encode(&user).unwrap();
println!("user_json: {}", user_json);
// CREATE TABLE `test`.`user` (
    // `id` INT( 11 ) UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY COMMENT 'primary key',
// `nickname` VARCHAR( 128 ) NOT NULL,
// `mobile` VARCHAR( 32 ) NOT NULL,
// `email` VARCHAR( 64 ) NOT NULL
// ) ENGINE = INNODB;
println!("\n--- {} ---", Red.bold().paint("MySQL example".to_string()));
let opts = MyOpts {
user: Some("dev".to_string()),
pass: Some("dev".to_string()),
        //init: vec!["SET NAMES utf8; use test;".to_owned()], // adding this line prevents data from being written.
..Default::default()
};
let pool = MyPool::new(opts).unwrap();
//let method = "insert";
let method = "select";
if "insert" == method {
println!("{}", Yellow.blink().bold().paint("INSERT"));
let mut users = vec![
model::user::UserModel{id : 1, nickname: "admin".to_string(), mobile: "15811119890".to_string(), email: "[email protected]".to_string()},
];
users.push(user);
for mut stmt in pool.prepare(r"INSERT INTO test.user
(id, nickname, mobile, email)
VALUES
(NULL,?,?,?)").into_iter() {
for p in users.iter() {
                // `execute` takes ownership of `params`, so we pass each field by reference.
                // Unwrap each result just to make sure no errors happened.
let _ig = stmt.execute((&p.nickname, &p.mobile, &p.email));
}
}
} else {
println!("{}", Yellow.blink().bold().paint("SELECT"));
let selected: Vec<model::user::UserModel> = pool.prep_exec("SELECT id, nickname, mobile, email FROM test.user", ())
            .map(|result| { // In this closure we will map `QueryResult` to `Vec<T>`
// `QueryResult` is iterator over `MyResult<row, err>` so first call to `map`
// will map each `MyResult` to contained `row` (no proper error handling)
// and second call to `map` will map each `row` to `struct`
result.map(|x| x.unwrap()).map(|row| {
let (id, nickname, mobile, email) = from_row(row);
model::user::UserModel {
id: id,
nickname: nickname,
mobile: mobile,
email: email,
}
            }).collect() // Collect the rows so now `QueryResult` is mapped to `Vec<T>`
}).unwrap(); // Unwrap `Vec<T>`
        for i in 0..selected.len() {
println!("selected[{}]: {}", i, selected[i]);
}
}
}
|
{
println!("--- {} ---", Red
//.blink()
.bold().paint("Test error Model").to_string());
let code = Error::get_code(&Error::Success);
let message = Error::get_message(&Error::Success);
println!("{{\"code\": {},\"message\":{:?}}}", code, message);
println!("--- END ---");
println!("\n--- {} ---", Green.bold().paint("Time").to_string());
let time_now = time::now();
println!("{:?}", time_now);
let time = time::get_time();
println!("{:?}", time);
let time_str = time::strftime("%Y-%m-%d %H:%M:%S", &time_now);
println!("{:?}", time_str);
|
identifier_body
|
main.rs
|
use std::default::Default;
extern crate time;
extern crate ansi_term;
use ansi_term::Colour::{Red, Green, Yellow, Blue, Purple, Cyan};
extern crate mysql;
use mysql::conn::MyOpts;
use mysql::conn::pool::MyPool;
use mysql::value::from_row;
use std::collections::BTreeMap;
pub mod error;
use self::error::Error;
pub mod model;
pub mod api_result;
extern crate rustc_serialize;
use rustc_serialize::json::{self, ToJson, Json};
fn main() {
println!("--- {} ---", Red
//.blink()
.bold().paint("Test error Model").to_string());
let code = Error::get_code(&Error::Success);
let message = Error::get_message(&Error::Success);
println!("{{\"code\": {},\"message\":{:?}}}", code, message);
println!("--- END ---");
println!("\n--- {} ---", Green.bold().paint("Time").to_string());
let time_now = time::now();
println!("{:?}", time_now);
let time = time::get_time();
println!("{:?}", time);
let time_str = time::strftime("%Y-%m-%d %H:%M:%S", &time_now);
println!("{:?}", time_str);
println!("--- END ---");
println!("\n--- {} ---", Yellow.bold().paint("JSON encode decode").to_string());
let json_str = "{\"code\":0,\"message\":\"OK\",\"data\":{\"keyword\":\"Rust, api, php, nginx\"},\"ip\":[\"192.168.0.168\",\"127.0.0.1\"]}";
println!("json_str: {}", Blue.paint(json_str.to_string()));
//let json: api_result::Body = json::decode(&json_str).unwrap();
let mut obj: BTreeMap<String, json::Json> = BTreeMap::new();
obj.insert("os".to_string(), json::Json::from_str("{\"page\": 11, \"os\": \"Linux, Unix, MacOS\"}").unwrap_or_else(|e| { panic!("failed to execute process: {}", e) }));
obj.insert("test".to_string(), json::Json::from_str("[\"just test\"]").unwrap_or_else(|e| { panic!("failed to execute process: {}", e) }));
let json_obj = api_result::Body {
code: code,
message: message.to_string(),
data: obj,
};
let encode_str = json_obj.to_json();
println!("encode_str: {}", encode_str.to_string());
println!("--- END ---");
println!("\n--- {} ---", Purple.bold().paint("BTreeMap".to_string()));
    // BTreeMap keys must all be of the same type
let mut map = BTreeMap::new();
let mut n_map = BTreeMap::new();
n_map.insert(0, "Hi, world");
n_map.insert(1, "a");
n_map.insert(2, "b");
map.insert("key", "value");
map.insert("os", "linux terminal");
let n_keys: Vec<_> = n_map.keys().cloned().collect();
let n_values: Vec<&str> = n_map.values().cloned().collect();
let keys: Vec<&str> = map.keys().cloned().collect();
let values: Vec<&str> = map.values().cloned().collect();
println!("n_map->keys: {:?}", n_keys);
println!("n_map->values: {:?}", n_values);
println!("map->keys: {:?}", keys);
println!("map->values: {:?}", values);
for (k, v) in map.iter() {
println!("map.{} = {}", k, v);
}
println!("--- END ---");
println!("\n--- {} ---", Cyan.bold().paint("UserModel"));
let empty_user = model::user::UserModel::create_empty();
// error: cannot assign to immutable field
//empty_user.id = 110;
println!("empty_user -> {}", empty_user);
let mut user = model::user::UserModel::create(1, "Admin".to_string(), "15801398759".to_string(), "[email protected]".to_string());
println!("debug for user: {}", user);
user.id = 100;
user.nickname = "OP".to_string();
println!("debug for user after modify: {}", user);
let user_json = json::encode(&user).unwrap();
println!("user_json: {}", user_json);
// CREATE TABLE `test`.`user` (
    // `id` INT( 11 ) UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY COMMENT 'primary key',
// `nickname` VARCHAR( 128 ) NOT NULL,
// `mobile` VARCHAR( 32 ) NOT NULL,
// `email` VARCHAR( 64 ) NOT NULL
// ) ENGINE = INNODB;
println!("\n--- {} ---", Red.bold().paint("MySQL example".to_string()));
let opts = MyOpts {
user: Some("dev".to_string()),
pass: Some("dev".to_string()),
        //init: vec!["SET NAMES utf8; use test;".to_owned()], // adding this line prevents data from being written.
..Default::default()
};
let pool = MyPool::new(opts).unwrap();
//let method = "insert";
let method = "select";
if "insert" == method {
println!("{}", Yellow.blink().bold().paint("INSERT"));
let mut users = vec![
model::user::UserModel{id : 1, nickname: "admin".to_string(), mobile: "15811119890".to_string(), email: "[email protected]".to_string()},
];
users.push(user);
for mut stmt in pool.prepare(r"INSERT INTO test.user
(id, nickname, mobile, email)
VALUES
(NULL,?,?,?)").into_iter() {
for p in users.iter() {
// `execute` takes ownership of `params`, so the field values are passed by reference.
// The result is bound to `_ig` and ignored; unwrap it instead if you want errors to surface.
let _ig = stmt.execute((&p.nickname, &p.mobile, &p.email));
}
}
} else {
println!("{}", Yellow.blink().bold().paint("SELECT"));
let selected: Vec<model::user::UserModel> = pool.prep_exec("SELECT id, nickname, mobile, email FROM test.user", ())
.map(|result| { // In this closure we will map `QueryResult` to `Vec<T>`.
// `QueryResult` is an iterator over `MyResult<row, err>`, so the first call to `map`
// maps each `MyResult` to the contained `row` (no proper error handling),
// and the second call to `map` maps each `row` to the `struct`.
result.map(|x| x.unwrap()).map(|row| {
let (id, nickname, mobile, email) = from_row(row);
model::user::UserModel {
|
id: id,
nickname: nickname,
mobile: mobile,
email: email,
}
}).collect() // Collect the rows, so `QueryResult` is now mapped to `Vec<T>`
}).unwrap(); // Unwrap `Vec<T>`
for i in 0.. selected.len() {
println!("selected[{}]: {}", i, selected[i]);
}
}
}
|
random_line_split
|
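A minimal, std-only sketch of the BTreeMap pattern used in the main.rs sample above: all keys share one type, and keys()/values() iterate in sorted key order. The names here are illustrative, not part of the sample.

use std::collections::BTreeMap;

fn main() {
    // One key type (&str) for the whole map, as the comment in the sample notes.
    let mut map: BTreeMap<&str, &str> = BTreeMap::new();
    map.insert("os", "linux terminal");
    map.insert("key", "value");

    // keys()/values() yield references to the stored &str items; cloned() copies them out.
    let keys: Vec<&str> = map.keys().cloned().collect();
    let values: Vec<&str> = map.values().cloned().collect();
    assert_eq!(keys, vec!["key", "os"]); // BTreeMap iterates in sorted key order
    println!("keys: {:?}, values: {:?}", keys, values);
}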
|
main.rs
|
use std::default::Default;
extern crate time;
extern crate ansi_term;
use ansi_term::Colour::{Red, Green, Yellow, Blue, Purple, Cyan};
extern crate mysql;
use mysql::conn::MyOpts;
use mysql::conn::pool::MyPool;
use mysql::value::from_row;
use std::collections::BTreeMap;
pub mod error;
use self::error::Error;
pub mod model;
pub mod api_result;
extern crate rustc_serialize;
use rustc_serialize::json::{self, ToJson, Json};
fn main() {
println!("--- {} ---", Red
//.blink()
.bold().paint("Test error Model").to_string());
let code = Error::get_code(&Error::Success);
let message = Error::get_message(&Error::Success);
println!("{{\"code\": {},\"message\":{:?}}}", code, message);
println!("--- END ---");
println!("\n--- {} ---", Green.bold().paint("Time").to_string());
let time_now = time::now();
println!("{:?}", time_now);
let time = time::get_time();
println!("{:?}", time);
let time_str = time::strftime("%Y-%m-%d %H:%M:%S", &time_now);
println!("{:?}", time_str);
println!("--- END ---");
println!("\n--- {} ---", Yellow.bold().paint("JSON encode decode").to_string());
let json_str = "{\"code\":0,\"message\":\"OK\",\"data\":{\"keyword\":\"Rust, api, php, nginx\"},\"ip\":[\"192.168.0.168\",\"127.0.0.1\"]}";
println!("json_str: {}", Blue.paint(json_str.to_string()));
//let json: api_result::Body = json::decode(&json_str).unwrap();
let mut obj: BTreeMap<String, json::Json> = BTreeMap::new();
obj.insert("os".to_string(), json::Json::from_str("{\"page\": 11, \"os\": \"Linux, Unix, MacOS\"}").unwrap_or_else(|e| { panic!("failed to execute process: {}", e) }));
obj.insert("test".to_string(), json::Json::from_str("[\"just test\"]").unwrap_or_else(|e| { panic!("failed to execute process: {}", e) }));
let json_obj = api_result::Body {
code: code,
message: message.to_string(),
data: obj,
};
let encode_str = json_obj.to_json();
println!("encode_str: {}", encode_str.to_string());
println!("--- END ---");
println!("\n--- {} ---", Purple.bold().paint("BTreeMap".to_string()));
// All keys in a BTreeMap must share the same type
let mut map = BTreeMap::new();
let mut n_map = BTreeMap::new();
n_map.insert(0, "Hi, world");
n_map.insert(1, "a");
n_map.insert(2, "b");
map.insert("key", "value");
map.insert("os", "linux terminal");
let n_keys: Vec<_> = n_map.keys().cloned().collect();
let n_values: Vec<&str> = n_map.values().cloned().collect();
let keys: Vec<&str> = map.keys().cloned().collect();
let values: Vec<&str> = map.values().cloned().collect();
println!("n_map->keys: {:?}", n_keys);
println!("n_map->values: {:?}", n_values);
println!("map->keys: {:?}", keys);
println!("map->values: {:?}", values);
for (k, v) in map.iter() {
println!("map.{} = {}", k, v);
}
println!("--- END ---");
println!("\n--- {} ---", Cyan.bold().paint("UserModel"));
let empty_user = model::user::UserModel::create_empty();
// error: cannot assign to immutable field
//empty_user.id = 110;
println!("empty_user -> {}", empty_user);
let mut user = model::user::UserModel::create(1, "Admin".to_string(), "15801398759".to_string(), "[email protected]".to_string());
println!("debug for user: {}", user);
user.id = 100;
user.nickname = "OP".to_string();
println!("debug for user after modify: {}", user);
let user_json = json::encode(&user).unwrap();
println!("user_json: {}", user_json);
// CREATE TABLE `test`.`user` (
// `id` INT( 11 ) UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY COMMENT 'primary key',
// `nickname` VARCHAR( 128 ) NOT NULL,
// `mobile` VARCHAR( 32 ) NOT NULL,
// `email` VARCHAR( 64 ) NOT NULL
// ) ENGINE = INNODB;
println!("\n--- {} ---", Red.bold().paint("MySQL example".to_string()));
let opts = MyOpts {
user: Some("dev".to_string()),
pass: Some("dev".to_string()),
//init: vec!["SET NAMES utf8; use test;".to_owned()], // adding this line prevents data from being written.
..Default::default()
};
let pool = MyPool::new(opts).unwrap();
//let method = "insert";
let method = "select";
if "insert" == method {
println!("{}", Yellow.blink().bold().paint("INSERT"));
let mut users = vec![
model::user::UserModel{id : 1, nickname: "admin".to_string(), mobile: "15811119890".to_string(), email: "[email protected]".to_string()},
];
users.push(user);
for mut stmt in pool.prepare(r"INSERT INTO test.user
(id, nickname, mobile, email)
VALUES
(NULL,?,?,?)").into_iter() {
for p in users.iter() {
// `execute` takes ownership of `params`, so the field values are passed by reference.
// The result is bound to `_ig` and ignored; unwrap it instead if you want errors to surface.
let _ig = stmt.execute((&p.nickname, &p.mobile, &p.email));
}
}
} else {
println!("{}", Yellow.blink().
|
}
|
bold().paint("SELECT"));
let selected: Vec<model::user::UserModel> = pool.prep_exec("SELECT id, nickname, mobile, email FROM test.user", ())
.map(|result| { // In this closure we will map `QueryResult` to `Vec<T>`.
// `QueryResult` is an iterator over `MyResult<row, err>`, so the first call to `map`
// maps each `MyResult` to the contained `row` (no proper error handling),
// and the second call to `map` maps each `row` to the `struct`.
result.map(|x| x.unwrap()).map(|row| {
let (id, nickname, mobile, email) = from_row(row);
model::user::UserModel {
id: id,
nickname: nickname,
mobile: mobile,
email: email,
}
}).collect() // Collect the rows, so `QueryResult` is now mapped to `Vec<T>`
}).unwrap(); // Unwrap `Vec<T>`
for i in 0 .. selected.len() {
println!("selected[{}]: {}", i, selected[i]);
}
}
|
conditional_block
|
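A hedged, std-only sketch of the shape used in the SELECT branch above: each row is mapped into a struct and collected into a Vec. The tuples and the UserRow type stand in for real driver rows and are not part of the sample.

#[derive(Debug)]
struct UserRow {
    id: u32,
    nickname: String,
}

fn main() {
    // Stand-in rows; a real query result would yield these from the database driver.
    let rows = vec![(1u32, "admin".to_string()), (2, "op".to_string())];
    let selected: Vec<UserRow> = rows
        .into_iter()
        .map(|(id, nickname)| UserRow { id, nickname })
        .collect();
    for (i, u) in selected.iter().enumerate() {
        println!("selected[{}]: {:?}", i, u);
    }
}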
main.rs
|
use std::default::Default;
extern crate time;
extern crate ansi_term;
use ansi_term::Colour::{Red, Green, Yellow, Blue, Purple, Cyan};
extern crate mysql;
use mysql::conn::MyOpts;
use mysql::conn::pool::MyPool;
use mysql::value::from_row;
use std::collections::BTreeMap;
pub mod error;
use self::error::Error;
pub mod model;
pub mod api_result;
extern crate rustc_serialize;
use rustc_serialize::json::{self, ToJson, Json};
fn
|
() {
println!("--- {} ---", Red
//.blink()
.bold().paint("Test error Model").to_string());
let code = Error::get_code(&Error::Success);
let message = Error::get_message(&Error::Success);
println!("{{\"code\": {},\"message\":{:?}}}", code, message);
println!("--- END ---");
println!("\n--- {} ---", Green.bold().paint("Time").to_string());
let time_now = time::now();
println!("{:?}", time_now);
let time = time::get_time();
println!("{:?}", time);
let time_str = time::strftime("%Y-%m-%d %H:%M:%S", &time_now);
println!("{:?}", time_str);
println!("--- END ---");
println!("\n--- {} ---", Yellow.bold().paint("JSON encode decode").to_string());
let json_str = "{\"code\":0,\"message\":\"OK\",\"data\":{\"keyword\":\"Rust, api, php, nginx\"},\"ip\":[\"192.168.0.168\",\"127.0.0.1\"]}";
println!("json_str: {}", Blue.paint(json_str.to_string()));
//let json: api_result::Body = json::decode(&json_str).unwrap();
let mut obj: BTreeMap<String, json::Json> = BTreeMap::new();
obj.insert("os".to_string(), json::Json::from_str("{\"page\": 11, \"os\": \"Linux, Unix, MacOS\"}").unwrap_or_else(|e| { panic!("failed to execute process: {}", e) }));
obj.insert("test".to_string(), json::Json::from_str("[\"just test\"]").unwrap_or_else(|e| { panic!("failed to execute process: {}", e) }));
let json_obj = api_result::Body {
code: code,
message: message.to_string(),
data: obj,
};
let encode_str = json_obj.to_json();
println!("encode_str: {}", encode_str.to_string());
println!("--- END ---");
println!("\n--- {} ---", Purple.bold().paint("BTreeMap".to_string()));
// All keys in a BTreeMap must share the same type
let mut map = BTreeMap::new();
let mut n_map = BTreeMap::new();
n_map.insert(0, "Hi, world");
n_map.insert(1, "a");
n_map.insert(2, "b");
map.insert("key", "value");
map.insert("os", "linux terminal");
let n_keys: Vec<_> = n_map.keys().cloned().collect();
let n_values: Vec<&str> = n_map.values().cloned().collect();
let keys: Vec<&str> = map.keys().cloned().collect();
let values: Vec<&str> = map.values().cloned().collect();
println!("n_map->keys: {:?}", n_keys);
println!("n_map->values: {:?}", n_values);
println!("map->keys: {:?}", keys);
println!("map->values: {:?}", values);
for (k, v) in map.iter() {
println!("map.{} = {}", k, v);
}
println!("--- END ---");
println!("\n--- {} ---", Cyan.bold().paint("UserModel"));
let empty_user = model::user::UserModel::create_empty();
// error: cannot assign to immutable field
//empty_user.id = 110;
println!("empty_user -> {}", empty_user);
let mut user = model::user::UserModel::create(1, "Admin".to_string(), "15801398759".to_string(), "[email protected]".to_string());
println!("debug for user: {}", user);
user.id = 100;
user.nickname = "OP".to_string();
println!("debug for user after modify: {}", user);
let user_json = json::encode(&user).unwrap();
println!("user_json: {}", user_json);
// CREATE TABLE `test`.`user` (
// `id` INT( 11 ) UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY COMMENT 'primary key',
// `nickname` VARCHAR( 128 ) NOT NULL,
// `mobile` VARCHAR( 32 ) NOT NULL,
// `email` VARCHAR( 64 ) NOT NULL
// ) ENGINE = INNODB;
println!("\n--- {} ---", Red.bold().paint("MySQL example".to_string()));
let opts = MyOpts {
user: Some("dev".to_string()),
pass: Some("dev".to_string()),
//init: vec!["SET NAMES utf8; use test;".to_owned()], // adding this line prevents data from being written.
..Default::default()
};
let pool = MyPool::new(opts).unwrap();
//let method = "insert";
let method = "select";
if "insert" == method {
println!("{}", Yellow.blink().bold().paint("INSERT"));
let mut users = vec![
model::user::UserModel{id : 1, nickname: "admin".to_string(), mobile: "15811119890".to_string(), email: "[email protected]".to_string()},
];
users.push(user);
for mut stmt in pool.prepare(r"INSERT INTO test.user
(id, nickname, mobile, email)
VALUES
(NULL,?,?,?)").into_iter() {
for p in users.iter() {
// `execute` takes ownership of `params`, so the field values are passed by reference.
// The result is bound to `_ig` and ignored; unwrap it instead if you want errors to surface.
let _ig = stmt.execute((&p.nickname, &p.mobile, &p.email));
}
}
} else {
println!("{}", Yellow.blink().bold().paint("SELECT"));
let selected: Vec<model::user::UserModel> = pool.prep_exec("SELECT id, nickname, mobile, email FROM test.user", ())
.map(|result| { // In this closure we will map `QueryResult` to `Vec<T>`.
// `QueryResult` is an iterator over `MyResult<row, err>`, so the first call to `map`
// maps each `MyResult` to the contained `row` (no proper error handling),
// and the second call to `map` maps each `row` to the `struct`.
result.map(|x| x.unwrap()).map(|row| {
let (id, nickname, mobile, email) = from_row(row);
model::user::UserModel {
id: id,
nickname: nickname,
mobile: mobile,
email: email,
}
}).collect() // Collect the rows, so `QueryResult` is now mapped to `Vec<T>`
}).unwrap(); // Unwrap `Vec<T>`
for i in 0.. selected.len() {
println!("selected[{}]: {}", i, selected[i]);
}
}
}
|
main
|
identifier_name
|
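A small illustrative sketch, not the sample's UserModel, of a struct with a Display impl so it can be printed with println!("{}", user) and mutated through a mut binding, as in the sample above.

use std::fmt;

struct User {
    id: u32,
    nickname: String,
}

impl fmt::Display for User {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "User {{ id: {}, nickname: {} }}", self.id, self.nickname)
    }
}

fn main() {
    let mut user = User { id: 1, nickname: "Admin".to_string() };
    user.id = 100; // fields can be reassigned because `user` is bound with `mut`
    println!("{}", user);
}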
storageevent.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::StorageEventBinding;
use dom::bindings::codegen::Bindings::StorageEventBinding::StorageEventMethods;
use dom::bindings::error::Fallible;
use dom::bindings::inheritance::Castable;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::{DomRoot, MutNullableDom, RootedReference};
use dom::bindings::str::DOMString;
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::storage::Storage;
use dom::window::Window;
use dom_struct::dom_struct;
use servo_atoms::Atom;
#[dom_struct]
pub struct StorageEvent {
event: Event,
key: Option<DOMString>,
old_value: Option<DOMString>,
new_value: Option<DOMString>,
url: DOMString,
storage_area: MutNullableDom<Storage>
}
impl StorageEvent {
pub fn new_inherited(key: Option<DOMString>,
old_value: Option<DOMString>,
new_value: Option<DOMString>,
url: DOMString,
storage_area: Option<&Storage>) -> StorageEvent {
StorageEvent {
event: Event::new_inherited(),
key: key,
old_value: old_value,
new_value: new_value,
url: url,
storage_area: MutNullableDom::new(storage_area)
}
}
pub fn new_uninitialized(window: &Window,
url: DOMString) -> DomRoot<StorageEvent> {
reflect_dom_object(Box::new(StorageEvent::new_inherited(None, None, None, url, None)),
window,
StorageEventBinding::Wrap)
}
pub fn new(global: &Window,
type_: Atom,
bubbles: EventBubbles,
cancelable: EventCancelable,
key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: Option<&Storage>) -> DomRoot<StorageEvent> {
let ev = reflect_dom_object(
Box::new(StorageEvent::new_inherited(key, oldValue, newValue, url, storageArea)),
global,
StorageEventBinding::Wrap
);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bool::from(bubbles), bool::from(cancelable));
}
ev
}
pub fn Constructor(global: &Window,
type_: DOMString,
init: &StorageEventBinding::StorageEventInit) -> Fallible<DomRoot<StorageEvent>> {
let key = init.key.clone();
let oldValue = init.oldValue.clone();
let newValue = init.newValue.clone();
let url = init.url.clone();
let storageArea = init.storageArea.r();
let bubbles = EventBubbles::from(init.parent.bubbles);
let cancelable = EventCancelable::from(init.parent.cancelable);
let event = StorageEvent::new(global, Atom::from(type_),
bubbles, cancelable,
key, oldValue, newValue,
url, storageArea);
Ok(event)
}
}
impl StorageEventMethods for StorageEvent {
// https://html.spec.whatwg.org/multipage/#dom-storageevent-key
fn GetKey(&self) -> Option<DOMString> {
self.key.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-oldvalue
fn GetOldValue(&self) -> Option<DOMString> {
self.old_value.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-newvalue
fn GetNewValue(&self) -> Option<DOMString>
|
// https://html.spec.whatwg.org/multipage/#dom-storageevent-url
fn Url(&self) -> DOMString {
self.url.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-storagearea
fn GetStorageArea(&self) -> Option<DomRoot<Storage>> {
self.storage_area.get()
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}
|
{
self.new_value.clone()
}
|
identifier_body
|
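A plain-Rust sketch (no Servo DOM types) of the getter pattern the StorageEvent methods above follow: each accessor clones a field, optional or not, and returns the owned copy.

struct EventLike {
    key: Option<String>,
    url: String,
}

impl EventLike {
    // Mirrors GetKey: clone the Option so the caller gets an owned copy.
    fn get_key(&self) -> Option<String> {
        self.key.clone()
    }

    // Mirrors Url: clone the owned string field.
    fn url(&self) -> String {
        self.url.clone()
    }
}

fn main() {
    let e = EventLike {
        key: Some("theme".to_string()),
        url: "https://example.com".to_string(),
    };
    assert_eq!(e.get_key(), Some("theme".to_string()));
    println!("{}", e.url());
}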
storageevent.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::StorageEventBinding;
use dom::bindings::codegen::Bindings::StorageEventBinding::StorageEventMethods;
use dom::bindings::error::Fallible;
use dom::bindings::inheritance::Castable;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::{DomRoot, MutNullableDom, RootedReference};
use dom::bindings::str::DOMString;
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::storage::Storage;
use dom::window::Window;
use dom_struct::dom_struct;
use servo_atoms::Atom;
#[dom_struct]
pub struct StorageEvent {
event: Event,
key: Option<DOMString>,
old_value: Option<DOMString>,
new_value: Option<DOMString>,
url: DOMString,
storage_area: MutNullableDom<Storage>
}
impl StorageEvent {
pub fn new_inherited(key: Option<DOMString>,
old_value: Option<DOMString>,
new_value: Option<DOMString>,
url: DOMString,
storage_area: Option<&Storage>) -> StorageEvent {
StorageEvent {
event: Event::new_inherited(),
key: key,
old_value: old_value,
new_value: new_value,
url: url,
storage_area: MutNullableDom::new(storage_area)
}
}
pub fn new_uninitialized(window: &Window,
url: DOMString) -> DomRoot<StorageEvent> {
reflect_dom_object(Box::new(StorageEvent::new_inherited(None, None, None, url, None)),
window,
StorageEventBinding::Wrap)
}
pub fn new(global: &Window,
type_: Atom,
bubbles: EventBubbles,
cancelable: EventCancelable,
key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: Option<&Storage>) -> DomRoot<StorageEvent> {
let ev = reflect_dom_object(
Box::new(StorageEvent::new_inherited(key, oldValue, newValue, url, storageArea)),
global,
StorageEventBinding::Wrap
);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bool::from(bubbles), bool::from(cancelable));
}
ev
}
pub fn Constructor(global: &Window,
type_: DOMString,
init: &StorageEventBinding::StorageEventInit) -> Fallible<DomRoot<StorageEvent>> {
let key = init.key.clone();
let oldValue = init.oldValue.clone();
let newValue = init.newValue.clone();
let url = init.url.clone();
let storageArea = init.storageArea.r();
let bubbles = EventBubbles::from(init.parent.bubbles);
let cancelable = EventCancelable::from(init.parent.cancelable);
let event = StorageEvent::new(global, Atom::from(type_),
bubbles, cancelable,
key, oldValue, newValue,
url, storageArea);
Ok(event)
}
}
impl StorageEventMethods for StorageEvent {
// https://html.spec.whatwg.org/multipage/#dom-storageevent-key
fn
|
(&self) -> Option<DOMString> {
self.key.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-oldvalue
fn GetOldValue(&self) -> Option<DOMString> {
self.old_value.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-newvalue
fn GetNewValue(&self) -> Option<DOMString> {
self.new_value.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-url
fn Url(&self) -> DOMString {
self.url.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-storagearea
fn GetStorageArea(&self) -> Option<DomRoot<Storage>> {
self.storage_area.get()
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}
|
GetKey
|
identifier_name
|
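An illustrative sketch, using made-up enum names rather than Servo's, of the bool-to-enum conversions that EventBubbles::from(...) and bool::from(bubbles) perform in the sample above.

#[derive(Debug, PartialEq)]
enum Bubbles {
    DoesNotBubble,
    Bubbles,
}

impl From<bool> for Bubbles {
    fn from(b: bool) -> Self {
        if b { Bubbles::Bubbles } else { Bubbles::DoesNotBubble }
    }
}

impl From<Bubbles> for bool {
    fn from(b: Bubbles) -> Self {
        b == Bubbles::Bubbles
    }
}

fn main() {
    let bubbles = Bubbles::from(true);
    assert!(bool::from(bubbles));
    println!("bool <-> enum round-trip works");
}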
storageevent.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::StorageEventBinding;
use dom::bindings::codegen::Bindings::StorageEventBinding::StorageEventMethods;
use dom::bindings::error::Fallible;
use dom::bindings::inheritance::Castable;
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::root::{DomRoot, MutNullableDom, RootedReference};
use dom::bindings::str::DOMString;
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::storage::Storage;
use dom::window::Window;
use dom_struct::dom_struct;
use servo_atoms::Atom;
|
old_value: Option<DOMString>,
new_value: Option<DOMString>,
url: DOMString,
storage_area: MutNullableDom<Storage>
}
impl StorageEvent {
pub fn new_inherited(key: Option<DOMString>,
old_value: Option<DOMString>,
new_value: Option<DOMString>,
url: DOMString,
storage_area: Option<&Storage>) -> StorageEvent {
StorageEvent {
event: Event::new_inherited(),
key: key,
old_value: old_value,
new_value: new_value,
url: url,
storage_area: MutNullableDom::new(storage_area)
}
}
pub fn new_uninitialized(window: &Window,
url: DOMString) -> DomRoot<StorageEvent> {
reflect_dom_object(Box::new(StorageEvent::new_inherited(None, None, None, url, None)),
window,
StorageEventBinding::Wrap)
}
pub fn new(global: &Window,
type_: Atom,
bubbles: EventBubbles,
cancelable: EventCancelable,
key: Option<DOMString>,
oldValue: Option<DOMString>,
newValue: Option<DOMString>,
url: DOMString,
storageArea: Option<&Storage>) -> DomRoot<StorageEvent> {
let ev = reflect_dom_object(
Box::new(StorageEvent::new_inherited(key, oldValue, newValue, url, storageArea)),
global,
StorageEventBinding::Wrap
);
{
let event = ev.upcast::<Event>();
event.init_event(type_, bool::from(bubbles), bool::from(cancelable));
}
ev
}
pub fn Constructor(global: &Window,
type_: DOMString,
init: &StorageEventBinding::StorageEventInit) -> Fallible<DomRoot<StorageEvent>> {
let key = init.key.clone();
let oldValue = init.oldValue.clone();
let newValue = init.newValue.clone();
let url = init.url.clone();
let storageArea = init.storageArea.r();
let bubbles = EventBubbles::from(init.parent.bubbles);
let cancelable = EventCancelable::from(init.parent.cancelable);
let event = StorageEvent::new(global, Atom::from(type_),
bubbles, cancelable,
key, oldValue, newValue,
url, storageArea);
Ok(event)
}
}
impl StorageEventMethods for StorageEvent {
// https://html.spec.whatwg.org/multipage/#dom-storageevent-key
fn GetKey(&self) -> Option<DOMString> {
self.key.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-oldvalue
fn GetOldValue(&self) -> Option<DOMString> {
self.old_value.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-newvalue
fn GetNewValue(&self) -> Option<DOMString> {
self.new_value.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-url
fn Url(&self) -> DOMString {
self.url.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-storageevent-storagearea
fn GetStorageArea(&self) -> Option<DomRoot<Storage>> {
self.storage_area.get()
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}
|
#[dom_struct]
pub struct StorageEvent {
event: Event,
key: Option<DOMString>,
|
random_line_split
|
complexobject.rs
|
use libc::{c_double, c_int};
use pyport::Py_ssize_t;
use object::*;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct Py_complex {
pub real: c_double,
pub imag: c_double
}
extern "C" {
pub fn _Py_c_sum(left: Py_complex, right: Py_complex) -> Py_complex;
pub fn _Py_c_diff(left: Py_complex, right: Py_complex) -> Py_complex;
pub fn _Py_c_neg(complex: Py_complex) -> Py_complex;
pub fn _Py_c_prod(left: Py_complex, right: Py_complex) -> Py_complex;
pub fn _Py_c_quot(dividend: Py_complex, divisor: Py_complex) -> Py_complex;
pub fn _Py_c_pow(num: Py_complex, exp: Py_complex) -> Py_complex;
pub fn _Py_c_abs(arg: Py_complex) -> c_double;
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct PyComplexObject {
#[cfg(py_sys_config="Py_TRACE_REFS")]
pub _ob_next: *mut PyObject,
#[cfg(py_sys_config="Py_TRACE_REFS")]
pub _ob_prev: *mut PyObject,
pub ob_refcnt: Py_ssize_t,
pub ob_type: *mut PyTypeObject,
pub cval: Py_complex
}
extern "C" {
pub static mut PyComplex_Type: PyTypeObject;
}
#[inline(always)]
pub unsafe fn PyComplex_Check(op : *mut PyObject) -> c_int {
PyObject_TypeCheck(op, &mut PyComplex_Type)
}
#[inline(always)]
pub unsafe fn PyComplex_CheckExact(op : *mut PyObject) -> c_int {
let u : *mut PyTypeObject = &mut PyComplex_Type;
(Py_TYPE(op) == u) as c_int
}
extern "C" {
pub fn PyComplex_FromCComplex(v: Py_complex) -> *mut PyObject;
pub fn PyComplex_FromDoubles(real: c_double,
imag: c_double) -> *mut PyObject;
pub fn PyComplex_RealAsDouble(op: *mut PyObject) -> c_double;
pub fn PyComplex_ImagAsDouble(op: *mut PyObject) -> c_double;
pub fn PyComplex_AsCComplex(op: *mut PyObject) -> Py_complex;
|
// format_spec_len: Py_ssize_t)
// -> *mut PyObject;
}
|
//fn _PyComplex_FormatAdvanced(obj: *mut PyObject,
// format_spec: *mut c_char,
|
random_line_split
|
complexobject.rs
|
use libc::{c_double, c_int};
use pyport::Py_ssize_t;
use object::*;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct Py_complex {
pub real: c_double,
pub imag: c_double
}
extern "C" {
pub fn _Py_c_sum(left: Py_complex, right: Py_complex) -> Py_complex;
pub fn _Py_c_diff(left: Py_complex, right: Py_complex) -> Py_complex;
pub fn _Py_c_neg(complex: Py_complex) -> Py_complex;
pub fn _Py_c_prod(left: Py_complex, right: Py_complex) -> Py_complex;
pub fn _Py_c_quot(dividend: Py_complex, divisor: Py_complex) -> Py_complex;
pub fn _Py_c_pow(num: Py_complex, exp: Py_complex) -> Py_complex;
pub fn _Py_c_abs(arg: Py_complex) -> c_double;
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct PyComplexObject {
#[cfg(py_sys_config="Py_TRACE_REFS")]
pub _ob_next: *mut PyObject,
#[cfg(py_sys_config="Py_TRACE_REFS")]
pub _ob_prev: *mut PyObject,
pub ob_refcnt: Py_ssize_t,
pub ob_type: *mut PyTypeObject,
pub cval: Py_complex
}
extern "C" {
pub static mut PyComplex_Type: PyTypeObject;
}
#[inline(always)]
pub unsafe fn PyComplex_Check(op : *mut PyObject) -> c_int {
PyObject_TypeCheck(op, &mut PyComplex_Type)
}
#[inline(always)]
pub unsafe fn
|
(op : *mut PyObject) -> c_int {
let u : *mut PyTypeObject = &mut PyComplex_Type;
(Py_TYPE(op) == u) as c_int
}
extern "C" {
pub fn PyComplex_FromCComplex(v: Py_complex) -> *mut PyObject;
pub fn PyComplex_FromDoubles(real: c_double,
imag: c_double) -> *mut PyObject;
pub fn PyComplex_RealAsDouble(op: *mut PyObject) -> c_double;
pub fn PyComplex_ImagAsDouble(op: *mut PyObject) -> c_double;
pub fn PyComplex_AsCComplex(op: *mut PyObject) -> Py_complex;
//fn _PyComplex_FormatAdvanced(obj: *mut PyObject,
// format_spec: *mut c_char,
// format_spec_len: Py_ssize_t)
// -> *mut PyObject;
}
|
PyComplex_CheckExact
|
identifier_name
|
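A minimal safe-Rust sketch of what PyComplex_CheckExact does above: compare two type-object pointers for identity and cast the bool to a C-style integer. TypeObject and check_exact are illustrative stand-ins, not CPython FFI items.

// Stand-in for a type object; only its address matters for the check.
struct TypeObject;

fn check_exact(actual: *const TypeObject, expected: *const TypeObject) -> i32 {
    // Mirrors `(Py_TYPE(op) == u) as c_int`: pointer equality, then bool -> int.
    (actual == expected) as i32
}

fn main() {
    let ty = TypeObject;
    let p: *const TypeObject = &ty;
    assert_eq!(check_exact(p, p), 1);
    assert_eq!(check_exact(p, std::ptr::null()), 0);
}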
complexobject.rs
|
use libc::{c_double, c_int};
use pyport::Py_ssize_t;
use object::*;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct Py_complex {
pub real: c_double,
pub imag: c_double
}
extern "C" {
pub fn _Py_c_sum(left: Py_complex, right: Py_complex) -> Py_complex;
pub fn _Py_c_diff(left: Py_complex, right: Py_complex) -> Py_complex;
pub fn _Py_c_neg(complex: Py_complex) -> Py_complex;
pub fn _Py_c_prod(left: Py_complex, right: Py_complex) -> Py_complex;
pub fn _Py_c_quot(dividend: Py_complex, divisor: Py_complex) -> Py_complex;
pub fn _Py_c_pow(num: Py_complex, exp: Py_complex) -> Py_complex;
pub fn _Py_c_abs(arg: Py_complex) -> c_double;
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct PyComplexObject {
#[cfg(py_sys_config="Py_TRACE_REFS")]
pub _ob_next: *mut PyObject,
#[cfg(py_sys_config="Py_TRACE_REFS")]
pub _ob_prev: *mut PyObject,
pub ob_refcnt: Py_ssize_t,
pub ob_type: *mut PyTypeObject,
pub cval: Py_complex
}
extern "C" {
pub static mut PyComplex_Type: PyTypeObject;
}
#[inline(always)]
pub unsafe fn PyComplex_Check(op : *mut PyObject) -> c_int
|
#[inline(always)]
pub unsafe fn PyComplex_CheckExact(op : *mut PyObject) -> c_int {
let u : *mut PyTypeObject = &mut PyComplex_Type;
(Py_TYPE(op) == u) as c_int
}
extern "C" {
pub fn PyComplex_FromCComplex(v: Py_complex) -> *mut PyObject;
pub fn PyComplex_FromDoubles(real: c_double,
imag: c_double) -> *mut PyObject;
pub fn PyComplex_RealAsDouble(op: *mut PyObject) -> c_double;
pub fn PyComplex_ImagAsDouble(op: *mut PyObject) -> c_double;
pub fn PyComplex_AsCComplex(op: *mut PyObject) -> Py_complex;
//fn _PyComplex_FormatAdvanced(obj: *mut PyObject,
// format_spec: *mut c_char,
// format_spec_len: Py_ssize_t)
// -> *mut PyObject;
}
|
{
PyObject_TypeCheck(op, &mut PyComplex_Type)
}
|
identifier_body
|
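A hedged sketch of the #[repr(C)] pattern used for Py_complex above: a Copy/Clone struct with a C-compatible layout, plus a local sum function standing in for _Py_c_sum.

#[repr(C)]
#[derive(Copy, Clone, Debug)]
struct Complex {
    real: f64,
    imag: f64,
}

// Same shape as _Py_c_sum, implemented locally for illustration only.
fn c_sum(left: Complex, right: Complex) -> Complex {
    Complex {
        real: left.real + right.real,
        imag: left.imag + right.imag,
    }
}

fn main() {
    let a = Complex { real: 1.0, imag: 2.0 };
    let b = Complex { real: 3.0, imag: -1.0 };
    println!("{:?}", c_sum(a, b));
}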
app.rs
|
// Robigo Luculenta -- Proof of concept spectral path tracer in Rust
// Copyright (C) 2014-2015 Ruud van Asseldonk
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
|
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
extern crate num_cpus;
use std::sync::mpsc::{Sender, Receiver, channel};
use std::f32::consts::PI;
use std::sync::{Arc, Mutex};
use std::thread;
use std::time;
use camera::Camera;
use constants::GOLDEN_RATIO;
use gather_unit::GatherUnit;
use geometry::{Circle, Paraboloid, Plane, Sphere, Surface, new_hexagonal_prism};
use material::{BlackBodyMaterial,
DiffuseGreyMaterial,
DiffuseColouredMaterial,
GlossyMirrorMaterial,
Sf10GlassMaterial,
SoapBubbleMaterial};
use object::Object;
use plot_unit::PlotUnit;
use quaternion::Quaternion;
use ray::Ray;
use scene::Scene;
use task_scheduler::{Task, TaskScheduler};
use tonemap_unit::TonemapUnit;
use trace_unit::TraceUnit;
use vector3::Vector3;
pub type Image = Vec<u8>;
pub struct App {
/// Channel that produces a rendered image periodically.
pub images: Receiver<Image>
}
impl App {
/// Constructs and starts a new path tracer that renders to a canvas of
/// the specified size.
pub fn new(image_width: u32, image_height: u32) -> App {
let concurrency = num_cpus::get();
let ts = TaskScheduler::new(concurrency, image_width, image_height);
let task_scheduler = Arc::new(Mutex::new(ts));
// Channel for communicating back to the main task.
let (img_tx, img_rx) = channel();
// Set up the scene that will be rendered.
let scene = Arc::new(App::set_up_scene());
// Spawn as many workers as cores.
for _ in 0.. concurrency {
App::start_worker(task_scheduler.clone(),
scene.clone(),
img_tx.clone());
}
App { images: img_rx }
}
#[cfg(test)]
pub fn new_test(image_width: u32, image_height: u32) -> App {
// Set up a task scheduler and scene with no concurrency.
let mut ts = TaskScheduler::new(1, image_width, image_height);
let (mut img_tx, img_rx) = channel();
let scene = Arc::new(App::set_up_scene());
// Run 5 tasks serially, on this thread.
let mut task = Task::Sleep;
for _ in 0u8.. 5 {
task = ts.get_new_task(task);
App::execute_task(&mut task, &scene, &mut img_tx);
}
App { images: img_rx }
}
fn start_worker(task_scheduler: Arc<Mutex<TaskScheduler>>,
scene: Arc<Scene>,
img_tx: Sender<Image>) {
thread::spawn(move || {
// Move img_tx into the spawned closure.
let mut owned_img_tx = img_tx;
// There is no task yet, but the task scheduler expects
// a completed task. Therefore, this worker is done sleeping.
let mut task = Task::Sleep;
// Continue rendering forever, unless the application is terminated.
loop {
// Ask the task scheduler for a new task, complete the old one.
// Then execute it.
task = task_scheduler.lock().unwrap().get_new_task(task);
App::execute_task(&mut task, &scene, &mut owned_img_tx);
}
});
}
fn execute_task(task: &mut Task, scene: &Scene, img_tx: &mut Sender<Image>) {
match *task {
Task::Sleep =>
App::execute_sleep_task(),
Task::Trace(ref mut trace_unit) =>
App::execute_trace_task(scene, trace_unit),
Task::Plot(ref mut plot_unit, ref mut units) =>
App::execute_plot_task(plot_unit, &mut units[..]),
Task::Gather(ref mut gather_unit, ref mut units) =>
App::execute_gather_task(gather_unit, &mut units[..]),
Task::Tonemap(ref mut tonemap_unit, ref mut gather_unit) =>
App::execute_tonemap_task(img_tx, tonemap_unit, gather_unit)
}
}
fn execute_sleep_task() {
thread::sleep(time::Duration::from_millis(100));
}
fn execute_trace_task(scene: &Scene, trace_unit: &mut TraceUnit) {
trace_unit.render(scene);
}
fn execute_plot_task(plot_unit: &mut PlotUnit,
units: &mut[Box<TraceUnit>]) {
for unit in units {
plot_unit.plot(&unit.mapped_photons);
}
}
fn execute_gather_task(gather_unit: &mut GatherUnit,
units: &mut[Box<PlotUnit>]) {
for unit in units {
gather_unit.accumulate(&unit.tristimulus_buffer);
unit.clear();
}
// Save the gather state, so that rendering can be continued later.
gather_unit.save();
}
fn execute_tonemap_task(img_tx: &mut Sender<Image>,
tonemap_unit: &mut TonemapUnit,
gather_unit: &mut GatherUnit) {
tonemap_unit.tonemap(&gather_unit.tristimulus_buffer);
// Copy the rendered image.
let img = tonemap_unit.rgb_buffer.clone();
// And send it to the UI / main task.
img_tx.send(img).unwrap();
}
fn set_up_scene() -> Scene {
use object::MaterialBox::{Emissive, Reflective};
let mut objects = Vec::new();
// Sphere in the centre.
let sun_radius: f32 = 5.0;
let sun_position = Vector3::zero();
let sun_sphere = Box::new(Sphere::new(sun_position, sun_radius));
let sun_emissive = Box::new(BlackBodyMaterial::new(6504.0, 1.0));
let sun = Object::new(sun_sphere, Emissive(sun_emissive));
objects.push(sun);
// Floor paraboloid.
let floor_normal = Vector3::new(0.0, 0.0, -1.0);
let floor_position = Vector3::new(0.0, 0.0, -sun_radius);
let floor_paraboloid = Paraboloid::new(floor_normal, floor_position,
sun_radius.powi(2));
let grey = Box::new(DiffuseGreyMaterial::new(0.8));
let floor = Object::new(Box::new(floor_paraboloid.clone()), Reflective(grey));
objects.push(floor);
// Floorwall paraboloid (left).
let wall_left_normal = Vector3::new(0.0, 0.0, 1.0);
let wall_left_position = Vector3::new(1.0, 0.0, -sun_radius.powi(2));
let wall_left_paraboloid = Box::new(Paraboloid::new(wall_left_normal,
wall_left_position,
sun_radius.powi(2)));
let green = Box::new(DiffuseColouredMaterial::new(0.9, 550.0, 40.0));
let wall_left = Object::new(wall_left_paraboloid, Reflective(green));
objects.push(wall_left);
// Floorwall paraboloid (right).
let wall_right_normal = Vector3::new(0.0, 0.0, 1.0);
let wall_right_position = Vector3::new(-1.0, 0.0, -sun_radius.powi(2));
let wall_right_paraboloid = Box::new(Paraboloid::new(wall_right_normal,
wall_right_position,
sun_radius.powi(2)));
let red = Box::new(DiffuseColouredMaterial::new(0.9, 660.0, 60.0));
let wall_right = Object::new(wall_right_paraboloid, Reflective(red));
objects.push(wall_right);
// Sky light 1.
let sky_height: f32 = 30.0;
let sky1_radius: f32 = 5.0;
let sky1_position = Vector3::new(-sun_radius, 0.0, sky_height);
let sky1_circle = Box::new(Circle::new(floor_normal, sky1_position, sky1_radius));
let sky1_emissive = Box::new(BlackBodyMaterial::new(7600.0, 0.6));
let sky1 = Object::new(sky1_circle, Emissive(sky1_emissive));
objects.push(sky1);
let sky2_radius: f32 = 15.0;
let sky2_position = Vector3 {
x: -sun_radius * 0.5, y: sun_radius * 2.0 + sky2_radius, z: sky_height
};
let sky2_circle = Box::new(Circle::new(floor_normal, sky2_position, sky2_radius));
let sky2_emissive = Box::new(BlackBodyMaterial::new(5000.0, 0.6));
let sky2 = Object::new(sky2_circle, Emissive(sky2_emissive));
objects.push(sky2);
// Ceiling plane (for more interesting light).
let ceiling_position = Vector3::new(0.0, 0.0, sky_height * 2.0);
let ceiling_plane = Box::new(Plane::new(floor_normal, ceiling_position));
let blue = Box::new(DiffuseColouredMaterial::new(0.5, 470.0, 25.0));
let ceiling = Object::new(ceiling_plane, Reflective(blue));
objects.push(ceiling);
// Spiral sunflower seeds.
let gamma: f32 = PI * 2.0 * (1.0 - 1.0 / GOLDEN_RATIO as f32);
let seed_size: f32 = 0.8;
let seed_scale: f32 = 1.5;
let first_seed = ((sun_radius / seed_scale + 1.0).powi(2) + 0.5) as isize;
let seeds = 100;
for i in first_seed.. first_seed + seeds {
let phi = i as f32 * gamma;
let r = (i as f32).sqrt() * seed_scale;
let position = Vector3 {
x: phi.cos() * r,
y: phi.sin() * r,
z: (r - sun_radius) * -0.5
} + sun_position;
let sphere = Box::new(Sphere::new(position, seed_size));
let mat = Box::new(DiffuseColouredMaterial::new(0.9,
(i - first_seed) as f32 / seeds as f32
* 130.0 + 600.0, 60.0));
let object = Object::new(sphere, Reflective(mat));
objects.push(object);
}
// Seeds in between.
for i in first_seed.. first_seed + seeds {
let phi = (i as f32 + 0.5) * gamma;
let r = (i as f32 + 0.5).sqrt() * seed_scale;
let position = Vector3 {
x: phi.cos() * r,
y: phi.sin() * r,
z: (r - sun_radius) * -0.25
} + sun_position;
let sphere = Box::new(Sphere::new(position, seed_size * 0.5));
let mat = Box::new(GlossyMirrorMaterial::new(0.1));
let object = Object::new(sphere, Reflective(mat));
objects.push(object);
}
// Soap bubbles above.
for i in first_seed / 2.. first_seed + seeds {
let phi = -i as f32 * gamma;
let r = (i as f32).sqrt() * seed_scale * 1.5;
let position = Vector3 {
x: phi.cos() * r,
y: phi.sin() * r,
z: (r - sun_radius) * 1.5 + sun_radius * 2.0
} + sun_position;
let sphere = Box::new(Sphere::new(position, seed_size
* (0.5 + (i as f32).sqrt() * 0.2)));
let mat = Box::new(SoapBubbleMaterial);
let object = Object::new(sphere, Reflective(mat));
objects.push(object);
}
// Prisms along the walls.
let prisms: isize = 11;
let prism_angle: f32 = PI * 2.0 / prisms as f32;
let prism_radius: f32 = 17.0;
let prism_height: f32 = 8.0;
for i in 0.. prisms {
for &(ofs, radius, phi_ofs, h) in vec!(
(0.0f32, 1.0f32, 0.0f32, 1.0f32),
(0.5 * prism_angle, 1.2, PI * 0.5, 1.5)
).iter() {
let phi = i as f32 * prism_angle + ofs;
// Get an initial position.
let mut position = Vector3 {
x: phi.cos() * prism_radius * radius,
y: phi.sin() * prism_radius * radius,
z: 0.0
};
let mut normal = Vector3::new(0.0, 0.0, -1.0);
// Intersect with the floor to get the normal.
let ray = Ray {
origin: position,
direction: normal,
wavelength: 0.0,
probability: 1.0
};
if let Some(intersection) = floor_paraboloid.intersect(&ray) {
// The parabola focus is on the other side of the paraboloid.
normal = -intersection.normal;
position = intersection.position + normal * 2.0 * h;
}
let prism = Box::new(new_hexagonal_prism(normal, position, 3.0, 1.0,
phi + phi_ofs, prism_height * h));
let glass = Box::new(Sf10GlassMaterial);
let object = Object::new(prism, Reflective(glass));
objects.push(object);
}
}
fn make_camera(t: f32) -> Camera {
// Orbit around (0, 0, 0) based on the time.
let phi = PI * (1.0 + 0.01 * t);
let alpha = PI * (0.3 - 0.01 * t);
// Also zoom in a bit. (Or actually, it is a dolly roll.)
let distance = 50.0 - 0.5 * t;
let position = Vector3 {
x: alpha.cos() * phi.sin() * distance,
y: alpha.cos() * phi.cos() * distance,
z: alpha.sin() * distance
};
// Compensate for the displacement of the camera by rotating
// such that (0, 0, 0) remains fixed. The camera is aimed
// downward with angle alpha.
let orientation = Quaternion::rotation(0.0, 0.0, -1.0, phi + PI)
* Quaternion::rotation(1.0, 0.0, 0.0, -alpha);
Camera {
position: position,
field_of_view: PI * 0.35,
focal_distance: distance * 0.9,
// A slight blur, not too much, but enough to demonstrate the effect.
depth_of_field: 2.0,
// A subtle amount of chromatic aberration.
chromatic_abberation: 0.012,
orientation: orientation
}
}
Scene {
objects: objects,
get_camera_at_time: make_camera
}
}
}
|
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
|
random_line_split
|
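A std-only sketch of the channel hand-off App uses above: a worker thread sends rendered images (byte vectors here) and the owner reads them from the stored Receiver. The frame contents are made up.

use std::sync::mpsc::{channel, Receiver};
use std::thread;

struct App {
    images: Receiver<Vec<u8>>,
}

fn main() {
    let (tx, rx) = channel();
    thread::spawn(move || {
        // Pretend to render one frame and send it back to the main task.
        tx.send(vec![0u8; 4]).unwrap();
    });
    let app = App { images: rx };
    let frame = app.images.recv().unwrap();
    println!("received a frame of {} bytes", frame.len());
}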
app.rs
|
// Robigo Luculenta -- Proof of concept spectral path tracer in Rust
// Copyright (C) 2014-2015 Ruud van Asseldonk
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
extern crate num_cpus;
use std::sync::mpsc::{Sender, Receiver, channel};
use std::f32::consts::PI;
use std::sync::{Arc, Mutex};
use std::thread;
use std::time;
use camera::Camera;
use constants::GOLDEN_RATIO;
use gather_unit::GatherUnit;
use geometry::{Circle, Paraboloid, Plane, Sphere, Surface, new_hexagonal_prism};
use material::{BlackBodyMaterial,
DiffuseGreyMaterial,
DiffuseColouredMaterial,
GlossyMirrorMaterial,
Sf10GlassMaterial,
SoapBubbleMaterial};
use object::Object;
use plot_unit::PlotUnit;
use quaternion::Quaternion;
use ray::Ray;
use scene::Scene;
use task_scheduler::{Task, TaskScheduler};
use tonemap_unit::TonemapUnit;
use trace_unit::TraceUnit;
use vector3::Vector3;
pub type Image = Vec<u8>;
pub struct App {
/// Channel that produces a rendered image periodically.
pub images: Receiver<Image>
}
impl App {
/// Constructs and starts a new path tracer that renders to a canvas of
/// the specified size.
pub fn new(image_width: u32, image_height: u32) -> App {
let concurrency = num_cpus::get();
let ts = TaskScheduler::new(concurrency, image_width, image_height);
let task_scheduler = Arc::new(Mutex::new(ts));
// Channel for communicating back to the main task.
let (img_tx, img_rx) = channel();
// Set up the scene that will be rendered.
let scene = Arc::new(App::set_up_scene());
// Spawn as many workers as cores.
for _ in 0.. concurrency {
App::start_worker(task_scheduler.clone(),
scene.clone(),
img_tx.clone());
}
App { images: img_rx }
}
#[cfg(test)]
pub fn new_test(image_width: u32, image_height: u32) -> App {
// Set up a task scheduler and scene with no concurrency.
let mut ts = TaskScheduler::new(1, image_width, image_height);
let (mut img_tx, img_rx) = channel();
let scene = Arc::new(App::set_up_scene());
// Run 5 tasks serially, on this thread.
let mut task = Task::Sleep;
for _ in 0u8.. 5 {
task = ts.get_new_task(task);
App::execute_task(&mut task, &scene, &mut img_tx);
}
App { images: img_rx }
}
fn start_worker(task_scheduler: Arc<Mutex<TaskScheduler>>,
scene: Arc<Scene>,
img_tx: Sender<Image>)
|
fn execute_task(task: &mut Task, scene: &Scene, img_tx: &mut Sender<Image>) {
match *task {
Task::Sleep =>
App::execute_sleep_task(),
Task::Trace(ref mut trace_unit) =>
App::execute_trace_task(scene, trace_unit),
Task::Plot(ref mut plot_unit, ref mut units) =>
App::execute_plot_task(plot_unit, &mut units[..]),
Task::Gather(ref mut gather_unit, ref mut units) =>
App::execute_gather_task(gather_unit, &mut units[..]),
Task::Tonemap(ref mut tonemap_unit, ref mut gather_unit) =>
App::execute_tonemap_task(img_tx, tonemap_unit, gather_unit)
}
}
fn execute_sleep_task() {
thread::sleep(time::Duration::from_millis(100));
}
fn execute_trace_task(scene: &Scene, trace_unit: &mut TraceUnit) {
trace_unit.render(scene);
}
fn execute_plot_task(plot_unit: &mut PlotUnit,
units: &mut[Box<TraceUnit>]) {
for unit in units {
plot_unit.plot(&unit.mapped_photons);
}
}
fn execute_gather_task(gather_unit: &mut GatherUnit,
units: &mut[Box<PlotUnit>]) {
for unit in units {
gather_unit.accumulate(&unit.tristimulus_buffer);
unit.clear();
}
// Save the gather state, so that rendering can be continued later.
gather_unit.save();
}
fn execute_tonemap_task(img_tx: &mut Sender<Image>,
tonemap_unit: &mut TonemapUnit,
gather_unit: &mut GatherUnit) {
tonemap_unit.tonemap(&gather_unit.tristimulus_buffer);
// Copy the rendered image.
let img = tonemap_unit.rgb_buffer.clone();
// And send it to the UI / main task.
img_tx.send(img).unwrap();
}
fn set_up_scene() -> Scene {
use object::MaterialBox::{Emissive, Reflective};
let mut objects = Vec::new();
// Sphere in the centre.
let sun_radius: f32 = 5.0;
let sun_position = Vector3::zero();
let sun_sphere = Box::new(Sphere::new(sun_position, sun_radius));
let sun_emissive = Box::new(BlackBodyMaterial::new(6504.0, 1.0));
let sun = Object::new(sun_sphere, Emissive(sun_emissive));
objects.push(sun);
// Floor paraboloid.
let floor_normal = Vector3::new(0.0, 0.0, -1.0);
let floor_position = Vector3::new(0.0, 0.0, -sun_radius);
let floor_paraboloid = Paraboloid::new(floor_normal, floor_position,
sun_radius.powi(2));
let grey = Box::new(DiffuseGreyMaterial::new(0.8));
let floor = Object::new(Box::new(floor_paraboloid.clone()), Reflective(grey));
objects.push(floor);
// Floorwall paraboloid (left).
let wall_left_normal = Vector3::new(0.0, 0.0, 1.0);
let wall_left_position = Vector3::new(1.0, 0.0, -sun_radius.powi(2));
let wall_left_paraboloid = Box::new(Paraboloid::new(wall_left_normal,
wall_left_position,
sun_radius.powi(2)));
let green = Box::new(DiffuseColouredMaterial::new(0.9, 550.0, 40.0));
let wall_left = Object::new(wall_left_paraboloid, Reflective(green));
objects.push(wall_left);
// Floorwall paraboloid (right).
let wall_right_normal = Vector3::new(0.0, 0.0, 1.0);
let wall_right_position = Vector3::new(-1.0, 0.0, -sun_radius.powi(2));
let wall_right_paraboloid = Box::new(Paraboloid::new(wall_right_normal,
wall_right_position,
sun_radius.powi(2)));
let red = Box::new(DiffuseColouredMaterial::new(0.9, 660.0, 60.0));
let wall_right = Object::new(wall_right_paraboloid, Reflective(red));
objects.push(wall_right);
// Sky light 1.
let sky_height: f32 = 30.0;
let sky1_radius: f32 = 5.0;
let sky1_position = Vector3::new(-sun_radius, 0.0, sky_height);
let sky1_circle = Box::new(Circle::new(floor_normal, sky1_position, sky1_radius));
let sky1_emissive = Box::new(BlackBodyMaterial::new(7600.0, 0.6));
let sky1 = Object::new(sky1_circle, Emissive(sky1_emissive));
objects.push(sky1);
let sky2_radius: f32 = 15.0;
let sky2_position = Vector3 {
x: -sun_radius * 0.5, y: sun_radius * 2.0 + sky2_radius, z: sky_height
};
let sky2_circle = Box::new(Circle::new(floor_normal, sky2_position, sky2_radius));
let sky2_emissive = Box::new(BlackBodyMaterial::new(5000.0, 0.6));
let sky2 = Object::new(sky2_circle, Emissive(sky2_emissive));
objects.push(sky2);
// Ceiling plane (for more interesting light).
let ceiling_position = Vector3::new(0.0, 0.0, sky_height * 2.0);
let ceiling_plane = Box::new(Plane::new(floor_normal, ceiling_position));
let blue = Box::new(DiffuseColouredMaterial::new(0.5, 470.0, 25.0));
let ceiling = Object::new(ceiling_plane, Reflective(blue));
objects.push(ceiling);
// Spiral sunflower seeds.
let gamma: f32 = PI * 2.0 * (1.0 - 1.0 / GOLDEN_RATIO as f32);
let seed_size: f32 = 0.8;
let seed_scale: f32 = 1.5;
let first_seed = ((sun_radius / seed_scale + 1.0).powi(2) + 0.5) as isize;
let seeds = 100;
for i in first_seed.. first_seed + seeds {
let phi = i as f32 * gamma;
let r = (i as f32).sqrt() * seed_scale;
let position = Vector3 {
x: phi.cos() * r,
y: phi.sin() * r,
z: (r - sun_radius) * -0.5
} + sun_position;
let sphere = Box::new(Sphere::new(position, seed_size));
let mat = Box::new(DiffuseColouredMaterial::new(0.9,
(i - first_seed) as f32 / seeds as f32
* 130.0 + 600.0, 60.0));
let object = Object::new(sphere, Reflective(mat));
objects.push(object);
}
// Seeds in between.
for i in first_seed.. first_seed + seeds {
let phi = (i as f32 + 0.5) * gamma;
let r = (i as f32 + 0.5).sqrt() * seed_scale;
let position = Vector3 {
x: phi.cos() * r,
y: phi.sin() * r,
z: (r - sun_radius) * -0.25
} + sun_position;
let sphere = Box::new(Sphere::new(position, seed_size * 0.5));
let mat = Box::new(GlossyMirrorMaterial::new(0.1));
let object = Object::new(sphere, Reflective(mat));
objects.push(object);
}
// Soap bubbles above.
for i in first_seed / 2.. first_seed + seeds {
let phi = -i as f32 * gamma;
let r = (i as f32).sqrt() * seed_scale * 1.5;
let position = Vector3 {
x: phi.cos() * r,
y: phi.sin() * r,
z: (r - sun_radius) * 1.5 + sun_radius * 2.0
} + sun_position;
let sphere = Box::new(Sphere::new(position, seed_size
* (0.5 + (i as f32).sqrt() * 0.2)));
let mat = Box::new(SoapBubbleMaterial);
let object = Object::new(sphere, Reflective(mat));
objects.push(object);
}
// Prisms along the walls.
let prisms: isize = 11;
let prism_angle: f32 = PI * 2.0 / prisms as f32;
let prism_radius: f32 = 17.0;
let prism_height: f32 = 8.0;
for i in 0.. prisms {
for &(ofs, radius, phi_ofs, h) in vec!(
(0.0f32, 1.0f32, 0.0f32, 1.0f32),
(0.5 * prism_angle, 1.2, PI * 0.5, 1.5)
).iter() {
let phi = i as f32 * prism_angle + ofs;
// Get an initial position.
let mut position = Vector3 {
x: phi.cos() * prism_radius * radius,
y: phi.sin() * prism_radius * radius,
z: 0.0
};
let mut normal = Vector3::new(0.0, 0.0, -1.0);
// Intersect with the floor to get the normal.
let ray = Ray {
origin: position,
direction: normal,
wavelength: 0.0,
probability: 1.0
};
if let Some(intersection) = floor_paraboloid.intersect(&ray) {
// The parabola focus is on the other side of the paraboloid.
normal = -intersection.normal;
position = intersection.position + normal * 2.0 * h;
}
let prism = Box::new(new_hexagonal_prism(normal, position, 3.0, 1.0,
phi + phi_ofs, prism_height * h));
let glass = Box::new(Sf10GlassMaterial);
let object = Object::new(prism, Reflective(glass));
objects.push(object);
}
}
fn make_camera(t: f32) -> Camera {
// Orbit around (0, 0, 0) based on the time.
let phi = PI * (1.0 + 0.01 * t);
let alpha = PI * (0.3 - 0.01 * t);
// Also zoom in a bit. (Or actually, it is a dolly roll.)
let distance = 50.0 - 0.5 * t;
let position = Vector3 {
x: alpha.cos() * phi.sin() * distance,
y: alpha.cos() * phi.cos() * distance,
z: alpha.sin() * distance
};
// Compensate for the displacement of the camera by rotating
// such that (0, 0, 0) remains fixed. The camera is aimed
// downward with angle alpha.
let orientation = Quaternion::rotation(0.0, 0.0, -1.0, phi + PI)
* Quaternion::rotation(1.0, 0.0, 0.0, -alpha);
Camera {
position: position,
field_of_view: PI * 0.35,
focal_distance: distance * 0.9,
// A slight blur, not too much, but enough to demonstrate the effect.
depth_of_field: 2.0,
// A subtle amount of chromatic aberration.
chromatic_abberation: 0.012,
orientation: orientation
}
}
Scene {
objects: objects,
get_camera_at_time: make_camera
}
}
}
|
{
thread::spawn(move || {
// Move img_tx into the spawned closure.
let mut owned_img_tx = img_tx;
// There is no task yet, but the task scheduler expects
// a completed task. Therefore, this worker is done sleeping.
let mut task = Task::Sleep;
// Continue rendering forever, unless the application is terminated.
loop {
// Ask the task scheduler for a new task, complete the old one.
// Then execute it.
task = task_scheduler.lock().unwrap().get_new_task(task);
App::execute_task(&mut task, &scene, &mut owned_img_tx);
}
});
}
|
identifier_body
|
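A std-only sketch of the worker loop in start_worker above: threads share one work source behind Arc<Mutex<..>> and repeatedly ask it for the next task. The Vec here is a toy stand-in for the TaskScheduler.

use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    // Toy "scheduler": a shared stack of task names.
    let queue = Arc::new(Mutex::new(vec!["trace", "plot", "gather", "tonemap"]));
    let mut handles = Vec::new();
    for _ in 0..2 {
        let queue = Arc::clone(&queue);
        handles.push(thread::spawn(move || loop {
            // Lock just long enough to take the next task; the guard is
            // dropped at the end of this statement.
            let task = queue.lock().unwrap().pop();
            match task {
                Some(t) => println!("executing {}", t),
                None => break,
            }
        }));
    }
    for h in handles {
        h.join().unwrap();
    }
}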
app.rs
|
// Robigo Luculenta -- Proof of concept spectral path tracer in Rust
// Copyright (C) 2014-2015 Ruud van Asseldonk
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
extern crate num_cpus;
use std::sync::mpsc::{Sender, Receiver, channel};
use std::f32::consts::PI;
use std::sync::{Arc, Mutex};
use std::thread;
use std::time;
use camera::Camera;
use constants::GOLDEN_RATIO;
use gather_unit::GatherUnit;
use geometry::{Circle, Paraboloid, Plane, Sphere, Surface, new_hexagonal_prism};
use material::{BlackBodyMaterial,
DiffuseGreyMaterial,
DiffuseColouredMaterial,
GlossyMirrorMaterial,
Sf10GlassMaterial,
SoapBubbleMaterial};
use object::Object;
use plot_unit::PlotUnit;
use quaternion::Quaternion;
use ray::Ray;
use scene::Scene;
use task_scheduler::{Task, TaskScheduler};
use tonemap_unit::TonemapUnit;
use trace_unit::TraceUnit;
use vector3::Vector3;
pub type Image = Vec<u8>;
pub struct App {
/// Channel that produces a rendered image periodically.
pub images: Receiver<Image>
}
impl App {
/// Constructs and starts a new path tracer that renders to a canvas of
/// the specified size.
pub fn new(image_width: u32, image_height: u32) -> App {
let concurrency = num_cpus::get();
let ts = TaskScheduler::new(concurrency, image_width, image_height);
let task_scheduler = Arc::new(Mutex::new(ts));
// Channel for communicating back to the main task.
let (img_tx, img_rx) = channel();
// Set up the scene that will be rendered.
let scene = Arc::new(App::set_up_scene());
// Spawn as many workers as cores.
for _ in 0.. concurrency {
App::start_worker(task_scheduler.clone(),
scene.clone(),
img_tx.clone());
}
App { images: img_rx }
}
#[cfg(test)]
pub fn new_test(image_width: u32, image_height: u32) -> App {
// Set up a task scheduler and scene with no concurrency.
let mut ts = TaskScheduler::new(1, image_width, image_height);
let (mut img_tx, img_rx) = channel();
let scene = Arc::new(App::set_up_scene());
// Run 5 tasks serially, on this thread.
let mut task = Task::Sleep;
for _ in 0u8.. 5 {
task = ts.get_new_task(task);
App::execute_task(&mut task, &scene, &mut img_tx);
}
App { images: img_rx }
}
fn start_worker(task_scheduler: Arc<Mutex<TaskScheduler>>,
scene: Arc<Scene>,
img_tx: Sender<Image>) {
thread::spawn(move || {
// Move img_tx into the spawned closure.
let mut owned_img_tx = img_tx;
// There is no task yet, but the task scheduler expects
// a completed task. Therefore, this worker is done sleeping.
let mut task = Task::Sleep;
// Continue rendering forever, unless the application is terminated.
loop {
// Ask the task scheduler for a new task, complete the old one.
// Then execute it.
task = task_scheduler.lock().unwrap().get_new_task(task);
App::execute_task(&mut task, &scene, &mut owned_img_tx);
}
});
}
fn execute_task(task: &mut Task, scene: &Scene, img_tx: &mut Sender<Image>) {
match *task {
Task::Sleep =>
App::execute_sleep_task(),
Task::Trace(ref mut trace_unit) =>
App::execute_trace_task(scene, trace_unit),
Task::Plot(ref mut plot_unit, ref mut units) =>
App::execute_plot_task(plot_unit, &mut units[..]),
Task::Gather(ref mut gather_unit, ref mut units) =>
App::execute_gather_task(gather_unit, &mut units[..]),
Task::Tonemap(ref mut tonemap_unit, ref mut gather_unit) =>
App::execute_tonemap_task(img_tx, tonemap_unit, gather_unit)
}
}
fn execute_sleep_task() {
thread::sleep(time::Duration::from_millis(100));
}
fn execute_trace_task(scene: &Scene, trace_unit: &mut TraceUnit) {
trace_unit.render(scene);
}
fn
|
(plot_unit: &mut PlotUnit,
units: &mut[Box<TraceUnit>]) {
for unit in units {
plot_unit.plot(&unit.mapped_photons);
}
}
fn execute_gather_task(gather_unit: &mut GatherUnit,
units: &mut[Box<PlotUnit>]) {
for unit in units {
gather_unit.accumulate(&unit.tristimulus_buffer);
unit.clear();
}
// Save the gather state, so that rendering can be continued later.
gather_unit.save();
}
fn execute_tonemap_task(img_tx: &mut Sender<Image>,
tonemap_unit: &mut TonemapUnit,
gather_unit: &mut GatherUnit) {
tonemap_unit.tonemap(&gather_unit.tristimulus_buffer);
// Copy the rendered image.
let img = tonemap_unit.rgb_buffer.clone();
// And send it to the UI / main task.
img_tx.send(img).unwrap();
}
fn set_up_scene() -> Scene {
use object::MaterialBox::{Emissive, Reflective};
let mut objects = Vec::new();
// Sphere in the centre.
let sun_radius: f32 = 5.0;
let sun_position = Vector3::zero();
let sun_sphere = Box::new(Sphere::new(sun_position, sun_radius));
let sun_emissive = Box::new(BlackBodyMaterial::new(6504.0, 1.0));
let sun = Object::new(sun_sphere, Emissive(sun_emissive));
objects.push(sun);
// Floor paraboloid.
let floor_normal = Vector3::new(0.0, 0.0, -1.0);
let floor_position = Vector3::new(0.0, 0.0, -sun_radius);
let floor_paraboloid = Paraboloid::new(floor_normal, floor_position,
sun_radius.powi(2));
let grey = Box::new(DiffuseGreyMaterial::new(0.8));
let floor = Object::new(Box::new(floor_paraboloid.clone()), Reflective(grey));
objects.push(floor);
// Floorwall paraboloid (left).
let wall_left_normal = Vector3::new(0.0, 0.0, 1.0);
let wall_left_position = Vector3::new(1.0, 0.0, -sun_radius.powi(2));
let wall_left_paraboloid = Box::new(Paraboloid::new(wall_left_normal,
wall_left_position,
sun_radius.powi(2)));
let green = Box::new(DiffuseColouredMaterial::new(0.9, 550.0, 40.0));
let wall_left = Object::new(wall_left_paraboloid, Reflective(green));
objects.push(wall_left);
// Floorwall paraboloid (right).
let wall_right_normal = Vector3::new(0.0, 0.0, 1.0);
let wall_right_position = Vector3::new(-1.0, 0.0, -sun_radius.powi(2));
let wall_right_paraboloid = Box::new(Paraboloid::new(wall_right_normal,
wall_right_position,
sun_radius.powi(2)));
let red = Box::new(DiffuseColouredMaterial::new(0.9, 660.0, 60.0));
let wall_right = Object::new(wall_right_paraboloid, Reflective(red));
objects.push(wall_right);
// Sky light 1.
let sky_height: f32 = 30.0;
let sky1_radius: f32 = 5.0;
let sky1_position = Vector3::new(-sun_radius, 0.0, sky_height);
let sky1_circle = Box::new(Circle::new(floor_normal, sky1_position, sky1_radius));
let sky1_emissive = Box::new(BlackBodyMaterial::new(7600.0, 0.6));
let sky1 = Object::new(sky1_circle, Emissive(sky1_emissive));
objects.push(sky1);
let sky2_radius: f32 = 15.0;
let sky2_position = Vector3 {
x: -sun_radius * 0.5, y: sun_radius * 2.0 + sky2_radius, z: sky_height
};
let sky2_circle = Box::new(Circle::new(floor_normal, sky2_position, sky2_radius));
let sky2_emissive = Box::new(BlackBodyMaterial::new(5000.0, 0.6));
let sky2 = Object::new(sky2_circle, Emissive(sky2_emissive));
objects.push(sky2);
// Ceiling plane (for more interesting light).
let ceiling_position = Vector3::new(0.0, 0.0, sky_height * 2.0);
let ceiling_plane = Box::new(Plane::new(floor_normal, ceiling_position));
let blue = Box::new(DiffuseColouredMaterial::new(0.5, 470.0, 25.0));
let ceiling = Object::new(ceiling_plane, Reflective(blue));
objects.push(ceiling);
// Spiral sunflower seeds.
let gamma: f32 = PI * 2.0 * (1.0 - 1.0 / GOLDEN_RATIO as f32);
let seed_size: f32 = 0.8;
let seed_scale: f32 = 1.5;
let first_seed = ((sun_radius / seed_scale + 1.0).powi(2) + 0.5) as isize;
let seeds = 100;
for i in first_seed.. first_seed + seeds {
let phi = i as f32 * gamma;
let r = (i as f32).sqrt() * seed_scale;
let position = Vector3 {
x: phi.cos() * r,
y: phi.sin() * r,
z: (r - sun_radius) * -0.5
} + sun_position;
let sphere = Box::new(Sphere::new(position, seed_size));
let mat = Box::new(DiffuseColouredMaterial::new(0.9,
(i - first_seed) as f32 / seeds as f32
* 130.0 + 600.0, 60.0));
let object = Object::new(sphere, Reflective(mat));
objects.push(object);
}
// Seeds in between.
for i in first_seed.. first_seed + seeds {
let phi = (i as f32 + 0.5) * gamma;
let r = (i as f32 + 0.5).sqrt() * seed_scale;
let position = Vector3 {
x: phi.cos() * r,
y: phi.sin() * r,
z: (r - sun_radius) * -0.25
} + sun_position;
let sphere = Box::new(Sphere::new(position, seed_size * 0.5));
let mat = Box::new(GlossyMirrorMaterial::new(0.1));
let object = Object::new(sphere, Reflective(mat));
objects.push(object);
}
// Soap bubbles above.
for i in first_seed / 2.. first_seed + seeds {
let phi = -i as f32 * gamma;
let r = (i as f32).sqrt() * seed_scale * 1.5;
let position = Vector3 {
x: phi.cos() * r,
y: phi.sin() * r,
z: (r - sun_radius) * 1.5 + sun_radius * 2.0
} + sun_position;
let sphere = Box::new(Sphere::new(position, seed_size
* (0.5 + (i as f32).sqrt() * 0.2)));
let mat = Box::new(SoapBubbleMaterial);
let object = Object::new(sphere, Reflective(mat));
objects.push(object);
}
// Prisms along the walls.
let prisms: isize = 11;
let prism_angle: f32 = PI * 2.0 / prisms as f32;
let prism_radius: f32 = 17.0;
let prism_height: f32 = 8.0;
for i in 0.. prisms {
for &(ofs, radius, phi_ofs, h) in vec!(
(0.0f32, 1.0f32, 0.0f32, 1.0f32),
(0.5 * prism_angle, 1.2, PI * 0.5, 1.5)
).iter() {
let phi = i as f32 * prism_angle + ofs;
// Get an initial position.
let mut position = Vector3 {
x: phi.cos() * prism_radius * radius,
y: phi.sin() * prism_radius * radius,
z: 0.0
};
let mut normal = Vector3::new(0.0, 0.0, -1.0);
// Intersect with the floor to get the normal.
let ray = Ray {
origin: position,
direction: normal,
wavelength: 0.0,
probability: 1.0
};
if let Some(intersection) = floor_paraboloid.intersect(&ray) {
// The parabola focus is on the other side of the paraboloid.
normal = -intersection.normal;
position = intersection.position + normal * 2.0 * h;
}
let prism = Box::new(new_hexagonal_prism(normal, position, 3.0, 1.0,
phi + phi_ofs, prism_height * h));
let glass = Box::new(Sf10GlassMaterial);
let object = Object::new(prism, Reflective(glass));
objects.push(object);
}
}
fn make_camera(t: f32) -> Camera {
// Orbit around (0, 0, 0) based on the time.
let phi = PI * (1.0 + 0.01 * t);
let alpha = PI * (0.3 - 0.01 * t);
// Also move in a bit. (Strictly speaking, it is a dolly move, not a zoom.)
let distance = 50.0 - 0.5 * t;
let position = Vector3 {
x: alpha.cos() * phi.sin() * distance,
y: alpha.cos() * phi.cos() * distance,
z: alpha.sin() * distance
};
// Compensate for the displacement of the camera by rotating
// such that (0, 0, 0) remains fixed. The camera is aimed
// downward with angle alpha.
let orientation = Quaternion::rotation(0.0, 0.0, -1.0, phi + PI)
* Quaternion::rotation(1.0, 0.0, 0.0, -alpha);
Camera {
position: position,
field_of_view: PI * 0.35,
focal_distance: distance * 0.9,
// A slight blur, not too much, but enough to demonstrate the effect.
depth_of_field: 2.0,
// A subtle amount of chromatic aberration.
chromatic_abberation: 0.012,
orientation: orientation
}
}
Scene {
objects: objects,
get_camera_at_time: make_camera
}
}
}
|
execute_plot_task
|
identifier_name
|
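The seed layout in set_up_scene above is a Vogel spiral driven by the golden angle. A minimal standalone sketch of that idea, assuming only the standard library (the function name and the flat (x, y) output are illustrative, not part of the renderer):
// Sketch: points on a sunflower (Vogel) spiral using the golden angle.
// The angle advances by gamma = 2*pi*(1 - 1/phi) per seed and the radius
// grows with sqrt(i), mirroring the seed loop in set_up_scene.
fn spiral_positions(count: usize, seed_scale: f32) -> Vec<(f32, f32)> {
    let golden_ratio: f32 = (1.0 + 5.0_f32.sqrt()) / 2.0;
    let gamma = std::f32::consts::PI * 2.0 * (1.0 - 1.0 / golden_ratio);
    (0..count)
        .map(|i| {
            let phi = i as f32 * gamma;
            let r = (i as f32).sqrt() * seed_scale;
            (phi.cos() * r, phi.sin() * r)
        })
        .collect()
}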
helical_joint.rs
|
use na::{self, DVectorSliceMut, Isometry3, RealField, Translation3, Unit, Vector3};
use crate::joint::{Joint, JointMotor, RevoluteJoint, UnitJoint};
use crate::math::{JacobianSliceMut, Velocity};
use crate::object::{BodyPartHandle, Multibody, MultibodyLink};
use crate::solver::{ConstraintSet, GenericNonlinearConstraint, IntegrationParameters};
/// A joint that allows one degree of freedom between two multibody links.
///
/// The degree of freedom is the combination of a rotation and a translation along the same axis.
/// Both rotational and translational motions are coupled to generate a screw motion.
#[derive(Copy, Clone, Debug)]
pub struct HelicalJoint<N: RealField> {
revo: RevoluteJoint<N>,
pitch: N,
}
impl<N: RealField> HelicalJoint<N> {
/// Create a helical joint with the given axis and initial angle.
///
/// The `pitch` controls how much translation is generated for how much rotation.
/// In particular, the translational displacement along `axis` is given by `angle * pitch`.
pub fn new(axis: Unit<Vector3<N>>, pitch: N, angle: N) -> Self {
HelicalJoint {
revo: RevoluteJoint::new(axis, angle),
pitch: pitch,
}
}
/// The translational displacement along the joint axis.
pub fn offset(&self) -> N {
self.revo.angle() * self.pitch
}
|
/// The rotational displacement along the joint axis.
pub fn angle(&self) -> N {
self.revo.angle()
}
}
impl<N: RealField> Joint<N> for HelicalJoint<N> {
#[inline]
fn ndofs(&self) -> usize {
1
}
fn body_to_parent(&self, parent_shift: &Vector3<N>, body_shift: &Vector3<N>) -> Isometry3<N> {
Translation3::from(self.revo.axis().as_ref() * self.revo.angle())
* self.revo.body_to_parent(parent_shift, body_shift)
}
fn update_jacobians(&mut self, body_shift: &Vector3<N>, vels: &[N]) {
self.revo.update_jacobians(body_shift, vels)
}
fn jacobian(&self, transform: &Isometry3<N>, out: &mut JacobianSliceMut<N>) {
let mut jac = *self.revo.local_jacobian();
jac.linear += self.revo.axis().as_ref() * self.pitch;
out.copy_from(jac.transformed(transform).as_vector())
}
fn jacobian_dot(&self, transform: &Isometry3<N>, out: &mut JacobianSliceMut<N>) {
self.revo.jacobian_dot(transform, out)
}
fn jacobian_dot_veldiff_mul_coordinates(
&self,
transform: &Isometry3<N>,
acc: &[N],
out: &mut JacobianSliceMut<N>,
) {
self.revo
.jacobian_dot_veldiff_mul_coordinates(transform, acc, out)
}
fn jacobian_mul_coordinates(&self, vels: &[N]) -> Velocity<N> {
let mut jac = *self.revo.local_jacobian();
jac.linear += self.revo.axis().as_ref() * self.pitch;
jac * vels[0]
}
fn jacobian_dot_mul_coordinates(&self, vels: &[N]) -> Velocity<N> {
self.revo.jacobian_dot_mul_coordinates(vels)
}
fn default_damping(&self, out: &mut DVectorSliceMut<N>) {
out.fill(na::convert(0.1f64))
}
fn integrate(&mut self, parameters: &IntegrationParameters<N>, vels: &[N]) {
self.revo.integrate(parameters, vels)
}
fn apply_displacement(&mut self, disp: &[N]) {
self.revo.apply_displacement(disp)
}
#[inline]
fn clone(&self) -> Box<dyn Joint<N>> {
Box::new(*self)
}
fn num_velocity_constraints(&self) -> usize {
self.revo.num_velocity_constraints()
}
fn velocity_constraints(
&self,
parameters: &IntegrationParameters<N>,
multibody: &Multibody<N>,
link: &MultibodyLink<N>,
assembly_id: usize,
dof_id: usize,
ext_vels: &[N],
ground_j_id: &mut usize,
jacobians: &mut [N],
constraints: &mut ConstraintSet<N, (), (), usize>,
) {
// XXX: is this correct even though we don't have the same jacobian?
self.revo.velocity_constraints(
parameters,
multibody,
link,
assembly_id,
dof_id,
ext_vels,
ground_j_id,
jacobians,
constraints,
);
}
fn num_position_constraints(&self) -> usize {
// NOTE: we don't test if constraints exist to simplify indexing.
1
}
fn position_constraint(
&self,
_: usize,
multibody: &Multibody<N>,
link: &MultibodyLink<N>,
handle: BodyPartHandle<()>,
dof_id: usize,
jacobians: &mut [N],
) -> Option<GenericNonlinearConstraint<N, ()>> {
// XXX: is this correct even though we don't have the same jacobian?
self.revo
.position_constraint(0, multibody, link, handle, dof_id, jacobians)
}
}
impl<N: RealField> UnitJoint<N> for HelicalJoint<N> {
fn position(&self) -> N {
self.revo.angle()
}
fn motor(&self) -> &JointMotor<N, N> {
self.revo.motor()
}
fn min_position(&self) -> Option<N> {
self.revo.min_angle()
}
fn max_position(&self) -> Option<N> {
self.revo.max_angle()
}
}
revolute_motor_limit_methods!(HelicalJoint, revo);
|
random_line_split
|
|
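Before the next split of helical_joint.rs, a hedged usage sketch of the angle/pitch coupling documented above; it assumes the constructor and offset() shown in the record plus nalgebra's Unit/Vector3, and it stands apart from any multibody setup:
use na::{Unit, Vector3};
// One full turn (2*pi) of a joint with pitch 0.5 advances it by
// angle * pitch = pi along its axis, which is exactly what offset() returns.
fn helical_offset_example() -> f32 {
    let axis = Unit::new_normalize(Vector3::new(0.0f32, 0.0, 1.0));
    let joint = HelicalJoint::new(axis, 0.5, std::f32::consts::PI * 2.0);
    joint.offset()
}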
helical_joint.rs
|
use na::{self, DVectorSliceMut, Isometry3, RealField, Translation3, Unit, Vector3};
use crate::joint::{Joint, JointMotor, RevoluteJoint, UnitJoint};
use crate::math::{JacobianSliceMut, Velocity};
use crate::object::{BodyPartHandle, Multibody, MultibodyLink};
use crate::solver::{ConstraintSet, GenericNonlinearConstraint, IntegrationParameters};
/// A joint that allows one degree of freedom between two multibody links.
///
/// The degree of freedom is the combination of a rotation and a translation along the same axis.
/// Both rotational and translational motions are coupled to generate a screw motion.
#[derive(Copy, Clone, Debug)]
pub struct HelicalJoint<N: RealField> {
revo: RevoluteJoint<N>,
pitch: N,
}
impl<N: RealField> HelicalJoint<N> {
/// Create a helical joint with the given axis and initial angle.
///
/// The `pitch` controls how much translation is generated for how much rotation.
/// In particular, the translational displacement along `axis` is given by `angle * pitch`.
pub fn new(axis: Unit<Vector3<N>>, pitch: N, angle: N) -> Self {
HelicalJoint {
revo: RevoluteJoint::new(axis, angle),
pitch: pitch,
}
}
/// The translational displacement along the joint axis.
pub fn offset(&self) -> N {
self.revo.angle() * self.pitch
}
/// The rotational displacement along the joint axis.
pub fn angle(&self) -> N {
self.revo.angle()
}
}
impl<N: RealField> Joint<N> for HelicalJoint<N> {
#[inline]
fn ndofs(&self) -> usize {
1
}
fn body_to_parent(&self, parent_shift: &Vector3<N>, body_shift: &Vector3<N>) -> Isometry3<N> {
Translation3::from(self.revo.axis().as_ref() * self.revo.angle())
* self.revo.body_to_parent(parent_shift, body_shift)
}
fn update_jacobians(&mut self, body_shift: &Vector3<N>, vels: &[N]) {
self.revo.update_jacobians(body_shift, vels)
}
fn jacobian(&self, transform: &Isometry3<N>, out: &mut JacobianSliceMut<N>) {
let mut jac = *self.revo.local_jacobian();
jac.linear += self.revo.axis().as_ref() * self.pitch;
out.copy_from(jac.transformed(transform).as_vector())
}
fn jacobian_dot(&self, transform: &Isometry3<N>, out: &mut JacobianSliceMut<N>) {
self.revo.jacobian_dot(transform, out)
}
fn jacobian_dot_veldiff_mul_coordinates(
&self,
transform: &Isometry3<N>,
acc: &[N],
out: &mut JacobianSliceMut<N>,
) {
self.revo
.jacobian_dot_veldiff_mul_coordinates(transform, acc, out)
}
fn jacobian_mul_coordinates(&self, vels: &[N]) -> Velocity<N> {
let mut jac = *self.revo.local_jacobian();
jac.linear += self.revo.axis().as_ref() * self.pitch;
jac * vels[0]
}
fn jacobian_dot_mul_coordinates(&self, vels: &[N]) -> Velocity<N> {
self.revo.jacobian_dot_mul_coordinates(vels)
}
fn default_damping(&self, out: &mut DVectorSliceMut<N>) {
out.fill(na::convert(0.1f64))
}
fn integrate(&mut self, parameters: &IntegrationParameters<N>, vels: &[N]) {
self.revo.integrate(parameters, vels)
}
fn apply_displacement(&mut self, disp: &[N]) {
self.revo.apply_displacement(disp)
}
#[inline]
fn clone(&self) -> Box<dyn Joint<N>> {
Box::new(*self)
}
fn num_velocity_constraints(&self) -> usize {
self.revo.num_velocity_constraints()
}
fn velocity_constraints(
&self,
parameters: &IntegrationParameters<N>,
multibody: &Multibody<N>,
link: &MultibodyLink<N>,
assembly_id: usize,
dof_id: usize,
ext_vels: &[N],
ground_j_id: &mut usize,
jacobians: &mut [N],
constraints: &mut ConstraintSet<N, (), (), usize>,
) {
// XXX: is this correct even though we don't have the same jacobian?
self.revo.velocity_constraints(
parameters,
multibody,
link,
assembly_id,
dof_id,
ext_vels,
ground_j_id,
jacobians,
constraints,
);
}
fn
|
(&self) -> usize {
// NOTE: we don't test if constraints exist to simplify indexing.
1
}
fn position_constraint(
&self,
_: usize,
multibody: &Multibody<N>,
link: &MultibodyLink<N>,
handle: BodyPartHandle<()>,
dof_id: usize,
jacobians: &mut [N],
) -> Option<GenericNonlinearConstraint<N, ()>> {
// XXX: is this correct even though we don't have the same jacobian?
self.revo
.position_constraint(0, multibody, link, handle, dof_id, jacobians)
}
}
impl<N: RealField> UnitJoint<N> for HelicalJoint<N> {
fn position(&self) -> N {
self.revo.angle()
}
fn motor(&self) -> &JointMotor<N, N> {
self.revo.motor()
}
fn min_position(&self) -> Option<N> {
self.revo.min_angle()
}
fn max_position(&self) -> Option<N> {
self.revo.max_angle()
}
}
revolute_motor_limit_methods!(HelicalJoint, revo);
|
num_position_constraints
|
identifier_name
|
helical_joint.rs
|
use na::{self, DVectorSliceMut, Isometry3, RealField, Translation3, Unit, Vector3};
use crate::joint::{Joint, JointMotor, RevoluteJoint, UnitJoint};
use crate::math::{JacobianSliceMut, Velocity};
use crate::object::{BodyPartHandle, Multibody, MultibodyLink};
use crate::solver::{ConstraintSet, GenericNonlinearConstraint, IntegrationParameters};
/// A joint that allows one degree of freedom between two multibody links.
///
/// The degree of freedom is the combination of a rotation and a translation along the same axis.
/// Both rotational and translational motions are coupled to generate a screw motion.
#[derive(Copy, Clone, Debug)]
pub struct HelicalJoint<N: RealField> {
revo: RevoluteJoint<N>,
pitch: N,
}
impl<N: RealField> HelicalJoint<N> {
/// Create a helical joint with the given axis and initial angle.
///
/// The `pitch` controls how much translation is generated for how much rotation.
/// In particular, the translational displacement along `axis` is given by `angle * pitch`.
pub fn new(axis: Unit<Vector3<N>>, pitch: N, angle: N) -> Self {
HelicalJoint {
revo: RevoluteJoint::new(axis, angle),
pitch: pitch,
}
}
/// The translational displacement along the joint axis.
pub fn offset(&self) -> N {
self.revo.angle() * self.pitch
}
/// The rotational displacement along the joint axis.
pub fn angle(&self) -> N {
self.revo.angle()
}
}
impl<N: RealField> Joint<N> for HelicalJoint<N> {
#[inline]
fn ndofs(&self) -> usize {
1
}
fn body_to_parent(&self, parent_shift: &Vector3<N>, body_shift: &Vector3<N>) -> Isometry3<N> {
Translation3::from(self.revo.axis().as_ref() * self.revo.angle())
* self.revo.body_to_parent(parent_shift, body_shift)
}
fn update_jacobians(&mut self, body_shift: &Vector3<N>, vels: &[N]) {
self.revo.update_jacobians(body_shift, vels)
}
fn jacobian(&self, transform: &Isometry3<N>, out: &mut JacobianSliceMut<N>) {
let mut jac = *self.revo.local_jacobian();
jac.linear += self.revo.axis().as_ref() * self.pitch;
out.copy_from(jac.transformed(transform).as_vector())
}
fn jacobian_dot(&self, transform: &Isometry3<N>, out: &mut JacobianSliceMut<N>) {
self.revo.jacobian_dot(transform, out)
}
fn jacobian_dot_veldiff_mul_coordinates(
&self,
transform: &Isometry3<N>,
acc: &[N],
out: &mut JacobianSliceMut<N>,
) {
self.revo
.jacobian_dot_veldiff_mul_coordinates(transform, acc, out)
}
fn jacobian_mul_coordinates(&self, vels: &[N]) -> Velocity<N> {
let mut jac = *self.revo.local_jacobian();
jac.linear += self.revo.axis().as_ref() * self.pitch;
jac * vels[0]
}
fn jacobian_dot_mul_coordinates(&self, vels: &[N]) -> Velocity<N>
|
fn default_damping(&self, out: &mut DVectorSliceMut<N>) {
out.fill(na::convert(0.1f64))
}
fn integrate(&mut self, parameters: &IntegrationParameters<N>, vels: &[N]) {
self.revo.integrate(parameters, vels)
}
fn apply_displacement(&mut self, disp: &[N]) {
self.revo.apply_displacement(disp)
}
#[inline]
fn clone(&self) -> Box<dyn Joint<N>> {
Box::new(*self)
}
fn num_velocity_constraints(&self) -> usize {
self.revo.num_velocity_constraints()
}
fn velocity_constraints(
&self,
parameters: &IntegrationParameters<N>,
multibody: &Multibody<N>,
link: &MultibodyLink<N>,
assembly_id: usize,
dof_id: usize,
ext_vels: &[N],
ground_j_id: &mut usize,
jacobians: &mut [N],
constraints: &mut ConstraintSet<N, (), (), usize>,
) {
// XXX: is this correct even though we don't have the same jacobian?
self.revo.velocity_constraints(
parameters,
multibody,
link,
assembly_id,
dof_id,
ext_vels,
ground_j_id,
jacobians,
constraints,
);
}
fn num_position_constraints(&self) -> usize {
// NOTE: we don't test if constraints exist to simplify indexing.
1
}
fn position_constraint(
&self,
_: usize,
multibody: &Multibody<N>,
link: &MultibodyLink<N>,
handle: BodyPartHandle<()>,
dof_id: usize,
jacobians: &mut [N],
) -> Option<GenericNonlinearConstraint<N, ()>> {
// XXX: is this correct even though we don't have the same jacobian?
self.revo
.position_constraint(0, multibody, link, handle, dof_id, jacobians)
}
}
impl<N: RealField> UnitJoint<N> for HelicalJoint<N> {
fn position(&self) -> N {
self.revo.angle()
}
fn motor(&self) -> &JointMotor<N, N> {
self.revo.motor()
}
fn min_position(&self) -> Option<N> {
self.revo.min_angle()
}
fn max_position(&self) -> Option<N> {
self.revo.max_angle()
}
}
revolute_motor_limit_methods!(HelicalJoint, revo);
|
{
self.revo.jacobian_dot_mul_coordinates(vels)
}
|
identifier_body
|
command.rs
|
use std::path::Path;
pub struct Command {
path: String,
origin: String,
destination: String,
}
#[derive(Debug)]
pub enum CommandParseError {
InvalidArgCount,
InvalidPath,
}
impl Command {
pub fn from_args<I: IntoIterator<Item = String>>(args: I) -> Result<Command, CommandParseError> {
let mut args = args.into_iter();
Ok(Command {
path: try!(read_path(args.next())),
origin: try!(args.next().ok_or(CommandParseError::InvalidArgCount)),
destination: try!(args.next().ok_or(CommandParseError::InvalidArgCount))
})
}
pub fn path(&self) -> &Path {
Path::new(&self.path)
}
pub fn origin(&self) -> &str {
&self.origin
}
pub fn
|
(&self) -> &str {
&self.destination
}
}
fn read_path<T>(path: Option<T>) -> Result<String, CommandParseError>
where T: AsRef<Path> + Into<String>
{
match path {
None => Err(CommandParseError::InvalidArgCount),
Some(path) => if path.as_ref().exists() {
Ok(path.into())
} else {
Err(CommandParseError::InvalidPath)
}
}
}
|
destination
|
identifier_name
|
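A small usage sketch for the Command parser in the record above, assuming it is invoked from a binary's main; only std::env is added, everything else comes from the record:
use std::env;
fn main() {
    // Skip argv[0]: from_args expects exactly <path> <origin> <destination>.
    match Command::from_args(env::args().skip(1)) {
        Ok(cmd) => println!("{} -> {} ({})",
                            cmd.origin(), cmd.destination(), cmd.path().display()),
        Err(e) => eprintln!("usage error: {:?}", e),
    }
}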
command.rs
|
use std::path::Path;
pub struct Command {
path: String,
|
#[derive(Debug)]
pub enum CommandParseError {
InvalidArgCount,
InvalidPath,
}
impl Command {
pub fn from_args<I: IntoIterator<Item = String>>(args: I) -> Result<Command, CommandParseError> {
let mut args = args.into_iter();
Ok(Command {
path: try!(read_path(args.next())),
origin: try!(args.next().ok_or(CommandParseError::InvalidArgCount)),
destination: try!(args.next().ok_or(CommandParseError::InvalidArgCount))
})
}
pub fn path(&self) -> &Path {
Path::new(&self.path)
}
pub fn origin(&self) -> &str {
&self.origin
}
pub fn destination(&self) -> &str {
&self.destination
}
}
fn read_path<T>(path: Option<T>) -> Result<String, CommandParseError>
where T: AsRef<Path> + Into<String>
{
match path {
None => Err(CommandParseError::InvalidArgCount),
Some(path) => if path.as_ref().exists() {
Ok(path.into())
} else {
Err(CommandParseError::InvalidPath)
}
}
}
|
origin: String,
destination: String,
}
|
random_line_split
|
command.rs
|
use std::path::Path;
pub struct Command {
path: String,
origin: String,
destination: String,
}
#[derive(Debug)]
pub enum CommandParseError {
InvalidArgCount,
InvalidPath,
}
impl Command {
pub fn from_args<I: IntoIterator<Item = String>>(args: I) -> Result<Command, CommandParseError> {
let mut args = args.into_iter();
Ok(Command {
path: try!(read_path(args.next())),
origin: try!(args.next().ok_or(CommandParseError::InvalidArgCount)),
destination: try!(args.next().ok_or(CommandParseError::InvalidArgCount))
})
}
pub fn path(&self) -> &Path {
Path::new(&self.path)
}
pub fn origin(&self) -> &str
|
pub fn destination(&self) -> &str {
&self.destination
}
}
fn read_path<T>(path: Option<T>) -> Result<String, CommandParseError>
where T: AsRef<Path> + Into<String>
{
match path {
None => Err(CommandParseError::InvalidArgCount),
Some(path) => if path.as_ref().exists() {
Ok(path.into())
} else {
Err(CommandParseError::InvalidPath)
}
}
}
|
{
&self.origin
}
|
identifier_body
|
issue-15149.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(phase)]
extern crate native;
#[phase(plugin)]
extern crate green;
use native::NativeTaskBuilder;
use std::io::{TempDir, Command, fs};
use std::os;
use std::task::TaskBuilder;
// FIXME(#15149) libgreen still needs to be updated. There is an open PR for it
// but it is not yet merged.
// green_start!(main)
fn main() {
// If we're the child, make sure we were invoked correctly
let args = os::args();
if args.len() > 1 && args.get(1).as_slice() == "child" {
return assert_eq!(args.get(0).as_slice(), "mytest");
}
test();
let (tx, rx) = channel();
TaskBuilder::new().native().spawn(proc() {
tx.send(test());
});
rx.recv();
}
fn test()
|
{
// If we're the parent, copy our own binary to a temp directory, and then
// make it executable.
let dir = TempDir::new("mytest").unwrap();
let me = os::self_exe_name().unwrap();
let dest = dir.path().join(format!("mytest{}", os::consts::EXE_SUFFIX));
fs::copy(&me, &dest).unwrap();
// Append the temp directory to our own PATH.
let mut path = os::split_paths(os::getenv("PATH").unwrap_or(String::new()));
path.push(dir.path().clone());
let path = os::join_paths(path.as_slice()).unwrap();
Command::new("mytest").env("PATH", path.as_slice())
.arg("child")
.spawn().unwrap();
}
|
identifier_body
|
|
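The test above targets a pre-1.0 standard library; a rough modern-Rust equivalent of its PATH manipulation, shown only as a sketch (std::env and std::process replace the old os/native APIs, and the binary name "mytest" is carried over from the record):
use std::env;
use std::path::Path;
use std::process::{Child, Command};
// Prepend `dir` to PATH so a binary copied there can be spawned by name.
fn spawn_child_via_path(dir: &Path) -> std::io::Result<Child> {
    let mut paths: Vec<_> =
        env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect();
    paths.push(dir.to_path_buf());
    let new_path = env::join_paths(paths).expect("PATH entry contained an invalid character");
    Command::new("mytest").env("PATH", new_path).arg("child").spawn()
}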
issue-15149.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(phase)]
extern crate native;
#[phase(plugin)]
extern crate green;
use native::NativeTaskBuilder;
use std::io::{TempDir, Command, fs};
use std::os;
use std::task::TaskBuilder;
// FIXME(#15149) libgreen still needs to be updated. There is an open PR for it
// but it is not yet merged.
// green_start!(main)
fn main() {
// If we're the child, make sure we were invoked correctly
let args = os::args();
if args.len() > 1 && args.get(1).as_slice() == "child"
|
test();
let (tx, rx) = channel();
TaskBuilder::new().native().spawn(proc() {
tx.send(test());
});
rx.recv();
}
fn test() {
// If we're the parent, copy our own binary to a temp directory, and then
// make it executable.
let dir = TempDir::new("mytest").unwrap();
let me = os::self_exe_name().unwrap();
let dest = dir.path().join(format!("mytest{}", os::consts::EXE_SUFFIX));
fs::copy(&me, &dest).unwrap();
// Append the temp directory to our own PATH.
let mut path = os::split_paths(os::getenv("PATH").unwrap_or(String::new()));
path.push(dir.path().clone());
let path = os::join_paths(path.as_slice()).unwrap();
Command::new("mytest").env("PATH", path.as_slice())
.arg("child")
.spawn().unwrap();
}
|
{
return assert_eq!(args.get(0).as_slice(), "mytest");
}
|
conditional_block
|
issue-15149.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(phase)]
extern crate native;
#[phase(plugin)]
extern crate green;
use native::NativeTaskBuilder;
use std::io::{TempDir, Command, fs};
use std::os;
use std::task::TaskBuilder;
// FIXME(#15149) libgreen still needs to be updated. There is an open PR for it
// but it is not yet merged.
// green_start!(main)
fn main() {
// If we're the child, make sure we were invoked correctly
let args = os::args();
if args.len() > 1 && args.get(1).as_slice() == "child" {
return assert_eq!(args.get(0).as_slice(), "mytest");
}
test();
let (tx, rx) = channel();
TaskBuilder::new().native().spawn(proc() {
tx.send(test());
});
rx.recv();
}
fn
|
() {
// If we're the parent, copy our own binary to a temp directory, and then
// make it executable.
let dir = TempDir::new("mytest").unwrap();
let me = os::self_exe_name().unwrap();
let dest = dir.path().join(format!("mytest{}", os::consts::EXE_SUFFIX));
fs::copy(&me, &dest).unwrap();
// Append the temp directory to our own PATH.
let mut path = os::split_paths(os::getenv("PATH").unwrap_or(String::new()));
path.push(dir.path().clone());
let path = os::join_paths(path.as_slice()).unwrap();
Command::new("mytest").env("PATH", path.as_slice())
.arg("child")
.spawn().unwrap();
}
|
test
|
identifier_name
|
issue-15149.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(phase)]
extern crate native;
#[phase(plugin)]
extern crate green;
use native::NativeTaskBuilder;
use std::io::{TempDir, Command, fs};
use std::os;
use std::task::TaskBuilder;
// FIXME(#15149) libgreen still needs to be updated. There is an open PR for it
// but it is not yet merged.
// green_start!(main)
fn main() {
// If we're the child, make sure we were invoked correctly
let args = os::args();
if args.len() > 1 && args.get(1).as_slice() == "child" {
return assert_eq!(args.get(0).as_slice(), "mytest");
}
test();
let (tx, rx) = channel();
TaskBuilder::new().native().spawn(proc() {
tx.send(test());
});
|
}
fn test() {
// If we're the parent, copy our own binary to a temp directory, and then
// make it executable.
let dir = TempDir::new("mytest").unwrap();
let me = os::self_exe_name().unwrap();
let dest = dir.path().join(format!("mytest{}", os::consts::EXE_SUFFIX));
fs::copy(&me, &dest).unwrap();
// Append the temp directory to our own PATH.
let mut path = os::split_paths(os::getenv("PATH").unwrap_or(String::new()));
path.push(dir.path().clone());
let path = os::join_paths(path.as_slice()).unwrap();
Command::new("mytest").env("PATH", path.as_slice())
.arg("child")
.spawn().unwrap();
}
|
rx.recv();
|
random_line_split
|
validitystate.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::ValidityStateBinding;
use dom::bindings::global::Window;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::window::Window;
#[deriving(Encodable)]
#[must_root]
pub struct ValidityState {
reflector_: Reflector,
state: u8,
}
impl ValidityState {
pub fn new_inherited() -> ValidityState {
ValidityState {
reflector_: Reflector::new(),
state: 0,
}
}
pub fn new(window: JSRef<Window>) -> Temporary<ValidityState> {
reflect_dom_object(box ValidityState::new_inherited(),
&Window(window),
ValidityStateBinding::Wrap)
}
}
impl Reflectable for ValidityState {
fn
|
<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
}
|
reflector
|
identifier_name
|
validitystate.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::ValidityStateBinding;
use dom::bindings::global::Window;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::window::Window;
#[deriving(Encodable)]
#[must_root]
pub struct ValidityState {
|
state: u8,
}
impl ValidityState {
pub fn new_inherited() -> ValidityState {
ValidityState {
reflector_: Reflector::new(),
state: 0,
}
}
pub fn new(window: JSRef<Window>) -> Temporary<ValidityState> {
reflect_dom_object(box ValidityState::new_inherited(),
&Window(window),
ValidityStateBinding::Wrap)
}
}
impl Reflectable for ValidityState {
fn reflector<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
}
|
reflector_: Reflector,
|
random_line_split
|
validitystate.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::ValidityStateBinding;
use dom::bindings::global::Window;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::window::Window;
#[deriving(Encodable)]
#[must_root]
pub struct ValidityState {
reflector_: Reflector,
state: u8,
}
impl ValidityState {
pub fn new_inherited() -> ValidityState
|
pub fn new(window: JSRef<Window>) -> Temporary<ValidityState> {
reflect_dom_object(box ValidityState::new_inherited(),
&Window(window),
ValidityStateBinding::Wrap)
}
}
impl Reflectable for ValidityState {
fn reflector<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
}
|
{
ValidityState {
reflector_: Reflector::new(),
state: 0,
}
}
|
identifier_body
|
pull.rs
|
// Copyright (c) 2016-2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! The pull thread.
//!
//! This module handles pulling all the pushed rumors from every member off a ZMQ socket.
use std::sync::atomic::Ordering;
use std::thread;
use std::time::Duration;
use protobuf;
use zmq;
use ZMQ_CONTEXT;
use server::Server;
use message::swim::{Rumor, Rumor_Type};
use trace::TraceKind;
/// Takes a reference to the server itself
pub struct
|
{
pub server: Server,
}
impl Pull {
/// Create a new Pull
pub fn new(server: Server) -> Pull {
Pull { server: server }
}
/// Run this thread. Creates a socket, binds to the `gossip_addr`, then processes messages as
/// they are received. Uses a ZMQ pull socket, so inbound messages are fair-queued.
pub fn run(&mut self) {
let socket = (**ZMQ_CONTEXT)
.as_mut()
.socket(zmq::PULL)
.expect("Failure to create the ZMQ pull socket");
socket
.set_linger(0)
.expect("Failure to set the ZMQ Pull socket to not linger");
socket
.set_tcp_keepalive(0)
.expect("Failure to set the ZMQ Pull socket to not use keepalive");
socket
.bind(&format!("tcp://{}", self.server.gossip_addr()))
.expect("Failure to bind the ZMQ Pull socket to the port");
'recv: loop {
if self.server.pause.load(Ordering::Relaxed) {
thread::sleep(Duration::from_millis(100));
continue;
}
let msg = match socket.recv_msg(0) {
Ok(msg) => msg,
Err(e) => {
error!("Error receiving message: {:?}", e);
continue 'recv;
}
};
let payload = match self.server.unwrap_wire(&msg) {
Ok(payload) => payload,
Err(e) => {
// NOTE: In the future, we might want to blacklist people who send us
// garbage all the time.
error!("Error parsing protobuf: {:?}", e);
continue;
}
};
let mut proto: Rumor = match protobuf::parse_from_bytes(&payload) {
Ok(proto) => proto,
Err(e) => {
error!("Error parsing protobuf: {:?}", e);
continue 'recv;
}
};
if self.server.check_blacklist(proto.get_from_id()) {
warn!(
"Not processing message from {} - it is blacklisted",
proto.get_from_id()
);
continue 'recv;
}
trace_it!(GOSSIP: &self.server, TraceKind::RecvRumor, proto.get_from_id(), &proto);
match proto.get_field_type() {
Rumor_Type::Member => {
let member = proto.mut_member().take_member().into();
let health = proto.mut_member().get_health().into();
self.server.insert_member_from_rumor(member, health);
}
Rumor_Type::Service => {
self.server.insert_service(proto.into());
}
Rumor_Type::ServiceConfig => {
self.server.insert_service_config(proto.into());
}
Rumor_Type::ServiceFile => {
self.server.insert_service_file(proto.into());
}
Rumor_Type::Election => {
self.server.insert_election(proto.into());
}
Rumor_Type::ElectionUpdate => {
self.server.insert_update_election(proto.into());
}
Rumor_Type::Departure => {
self.server.insert_departure(proto.into());
}
Rumor_Type::Fake | Rumor_Type::Fake2 => {
debug!("Nothing to do for fake rumor types")
}
}
}
}
}
|
Pull
|
identifier_name
|
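Pull::run() above leans on ZMQ's PULL socket semantics (fair-queuing across connected pushers). A minimal, hedged sketch of just that receive loop, assuming the rust-zmq crate; the real server additionally handles pausing, payload unwrapping, and blacklisting:
fn pull_loop(bind_addr: &str) {
    let ctx = zmq::Context::new();
    let socket = ctx.socket(zmq::PULL).expect("failed to create PULL socket");
    socket
        .bind(&format!("tcp://{}", bind_addr))
        .expect("failed to bind PULL socket");
    loop {
        // recv_msg(0) blocks until some connected PUSH peer delivers a message;
        // ZMQ fair-queues across peers, which is why a PULL socket is used here.
        match socket.recv_msg(0) {
            Ok(msg) => println!("received {} bytes", msg.len()),
            Err(e) => eprintln!("recv error: {:?}", e),
        }
    }
}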
pull.rs
|
// Copyright (c) 2016-2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! The pull thread.
//!
//! This module handles pulling all the pushed rumors from every member off a ZMQ socket.
use std::sync::atomic::Ordering;
use std::thread;
use std::time::Duration;
use protobuf;
use zmq;
use ZMQ_CONTEXT;
use server::Server;
use message::swim::{Rumor, Rumor_Type};
use trace::TraceKind;
/// Takes a reference to the server itself
pub struct Pull {
pub server: Server,
}
impl Pull {
/// Create a new Pull
pub fn new(server: Server) -> Pull {
Pull { server: server }
}
/// Run this thread. Creates a socket, binds to the `gossip_addr`, then processes messages as
/// they are received. Uses a ZMQ pull socket, so inbound messages are fair-queued.
pub fn run(&mut self) {
let socket = (**ZMQ_CONTEXT)
.as_mut()
.socket(zmq::PULL)
.expect("Failure to create the ZMQ pull socket");
socket
.set_linger(0)
.expect("Failure to set the ZMQ Pull socket to not linger");
socket
.set_tcp_keepalive(0)
.expect("Failure to set the ZMQ Pull socket to not use keepalive");
socket
.bind(&format!("tcp://{}", self.server.gossip_addr()))
.expect("Failure to bind the ZMQ Pull socket to the port");
'recv: loop {
if self.server.pause.load(Ordering::Relaxed) {
thread::sleep(Duration::from_millis(100));
continue;
}
let msg = match socket.recv_msg(0) {
Ok(msg) => msg,
Err(e) => {
error!("Error receiving message: {:?}", e);
continue 'recv;
}
};
let payload = match self.server.unwrap_wire(&msg) {
Ok(payload) => payload,
Err(e) => {
// NOTE: In the future, we might want to blacklist people who send us
// garbage all the time.
error!("Error parsing protobuf: {:?}", e);
continue;
}
};
let mut proto: Rumor = match protobuf::parse_from_bytes(&payload) {
Ok(proto) => proto,
Err(e) => {
error!("Error parsing protobuf: {:?}", e);
continue 'recv;
}
};
if self.server.check_blacklist(proto.get_from_id()) {
warn!(
"Not processing message from {} - it is blacklisted",
proto.get_from_id()
);
continue 'recv;
}
trace_it!(GOSSIP: &self.server, TraceKind::RecvRumor, proto.get_from_id(), &proto);
match proto.get_field_type() {
Rumor_Type::Member => {
let member = proto.mut_member().take_member().into();
|
}
Rumor_Type::ServiceConfig => {
self.server.insert_service_config(proto.into());
}
Rumor_Type::ServiceFile => {
self.server.insert_service_file(proto.into());
}
Rumor_Type::Election => {
self.server.insert_election(proto.into());
}
Rumor_Type::ElectionUpdate => {
self.server.insert_update_election(proto.into());
}
Rumor_Type::Departure => {
self.server.insert_departure(proto.into());
}
Rumor_Type::Fake | Rumor_Type::Fake2 => {
debug!("Nothing to do for fake rumor types")
}
}
}
}
}
|
let health = proto.mut_member().get_health().into();
self.server.insert_member_from_rumor(member, health);
}
Rumor_Type::Service => {
self.server.insert_service(proto.into());
|
random_line_split
|
pull.rs
|
// Copyright (c) 2016-2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! The pull thread.
//!
//! This module handles pulling all the pushed rumors from every member off a ZMQ socket.
use std::sync::atomic::Ordering;
use std::thread;
use std::time::Duration;
use protobuf;
use zmq;
use ZMQ_CONTEXT;
use server::Server;
use message::swim::{Rumor, Rumor_Type};
use trace::TraceKind;
/// Takes a reference to the server itself
pub struct Pull {
pub server: Server,
}
impl Pull {
/// Create a new Pull
pub fn new(server: Server) -> Pull {
Pull { server: server }
}
/// Run this thread. Creates a socket, binds to the `gossip_addr`, then processes messages as
/// they are received. Uses a ZMQ pull socket, so inbound messages are fair-queued.
pub fn run(&mut self) {
let socket = (**ZMQ_CONTEXT)
.as_mut()
.socket(zmq::PULL)
.expect("Failure to create the ZMQ pull socket");
socket
.set_linger(0)
.expect("Failure to set the ZMQ Pull socket to not linger");
socket
.set_tcp_keepalive(0)
.expect("Failure to set the ZMQ Pull socket to not use keepalive");
socket
.bind(&format!("tcp://{}", self.server.gossip_addr()))
.expect("Failure to bind the ZMQ Pull socket to the port");
'recv: loop {
if self.server.pause.load(Ordering::Relaxed) {
thread::sleep(Duration::from_millis(100));
continue;
}
let msg = match socket.recv_msg(0) {
Ok(msg) => msg,
Err(e) => {
error!("Error receiving message: {:?}", e);
continue 'recv;
}
};
let payload = match self.server.unwrap_wire(&msg) {
Ok(payload) => payload,
Err(e) => {
// NOTE: In the future, we might want to blacklist people who send us
// garbage all the time.
error!("Error parsing protobuf: {:?}", e);
continue;
}
};
let mut proto: Rumor = match protobuf::parse_from_bytes(&payload) {
Ok(proto) => proto,
Err(e) => {
error!("Error parsing protobuf: {:?}", e);
continue 'recv;
}
};
if self.server.check_blacklist(proto.get_from_id())
|
trace_it!(GOSSIP: &self.server, TraceKind::RecvRumor, proto.get_from_id(), &proto);
match proto.get_field_type() {
Rumor_Type::Member => {
let member = proto.mut_member().take_member().into();
let health = proto.mut_member().get_health().into();
self.server.insert_member_from_rumor(member, health);
}
Rumor_Type::Service => {
self.server.insert_service(proto.into());
}
Rumor_Type::ServiceConfig => {
self.server.insert_service_config(proto.into());
}
Rumor_Type::ServiceFile => {
self.server.insert_service_file(proto.into());
}
Rumor_Type::Election => {
self.server.insert_election(proto.into());
}
Rumor_Type::ElectionUpdate => {
self.server.insert_update_election(proto.into());
}
Rumor_Type::Departure => {
self.server.insert_departure(proto.into());
}
Rumor_Type::Fake | Rumor_Type::Fake2 => {
debug!("Nothing to do for fake rumor types")
}
}
}
}
}
|
{
warn!(
"Not processing message from {} - it is blacklisted",
proto.get_from_id()
);
continue 'recv;
}
|
conditional_block
|
mod.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*
typeck.rs, an introduction
The type checker is responsible for:
1. Determining the type of each expression
2. Resolving methods and traits
3. Guaranteeing that most type rules are met ("most?", you say, "why most?"
Well, dear reader, read on)
The main entry point is `check_crate()`. Type checking operates in two major
phases: collect and check. The collect phase passes over all items and
determines their type, without examining their "innards". The check phase
then checks function bodies and so forth.
Within the check phase, we check each function body one at a time (bodies of
function expressions are checked as part of the containing function).
Inference is used to supply types wherever they are unknown. The actual
checking of a function itself has several phases (check, regionck, writeback),
as discussed in the documentation for the `check` module.
The type checker is defined into various submodules which are documented
independently:
- astconv: converts the AST representation of types
into the `ty` representation
- collect: computes the types of each top-level item and enters them into
the `cx.tcache` table for later use
- check: walks over function bodies and type checks them, inferring types for
local variables, type parameters, etc as necessary.
- infer: finds the types to use for each type variable such that
all subtyping and assignment constraints are met. In essence, the check
module specifies the constraints, and the infer module solves them.
*/
use driver::session;
use middle::resolve;
use middle::ty;
use util::common::time;
use util::ppaux::Repr;
use util::ppaux;
use std::hashmap::HashMap;
use std::result;
use extra::list::List;
use extra::list;
use syntax::codemap::span;
use syntax::print::pprust::*;
use syntax::{ast, ast_map, abi};
use syntax::opt_vec;
#[path = "check/mod.rs"]
pub mod check;
pub mod rscope;
pub mod astconv;
#[path = "infer/mod.rs"]
pub mod infer;
pub mod collect;
pub mod coherence;
#[deriving(Encodable, Decodable)]
pub enum method_origin {
// supertrait method invoked on "self" inside a default method
// first field is supertrait ID;
// second field is method index (relative to the *supertrait*
// method list)
method_super(ast::def_id, uint),
// fully statically resolved method
method_static(ast::def_id),
// method invoked on a type parameter with a bounded trait
method_param(method_param),
// method invoked on a trait instance
method_trait(ast::def_id, uint, ty::TraitStore),
// method invoked on "self" inside a default method
method_self(ast::def_id, uint)
|
#[deriving(Encodable, Decodable)]
pub struct method_param {
// the trait containing the method to be invoked
trait_id: ast::def_id,
// index of the method to be invoked amongst the trait's methods
method_num: uint,
// index of the type parameter (from those that are in scope) that is
// the type of the receiver
param_num: uint,
// index of the bound for this type parameter which specifies the trait
bound_num: uint,
}
pub struct method_map_entry {
// the type of the self parameter, which is not reflected in the fn type
// (FIXME #3446)
self_ty: ty::t,
// the mode of `self`
self_mode: ty::SelfMode,
// the type of explicit self on the method
explicit_self: ast::explicit_self_,
// method details being invoked
origin: method_origin,
}
// maps from an expression id that corresponds to a method call to the details
// of the method to be invoked
pub type method_map = @mut HashMap<ast::node_id, method_map_entry>;
pub type vtable_param_res = @~[vtable_origin];
// Resolutions for bounds of all parameters, left to right, for a given path.
pub type vtable_res = @~[vtable_param_res];
pub enum vtable_origin {
/*
Statically known vtable. def_id gives the class or impl item
from whence comes the vtable, and tys are the type substs.
vtable_res is the vtable itself
*/
vtable_static(ast::def_id, ~[ty::t], vtable_res),
/*
Dynamic vtable, comes from a parameter that has a bound on it:
fn foo<T:quux,baz,bar>(a: T) -- a's vtable would have a
vtable_param origin
The first uint is the param number (identifying T in the example),
and the second is the bound number (identifying baz)
*/
vtable_param(uint, uint),
/*
Dynamic vtable, comes from self.
*/
vtable_self(ast::def_id)
}
impl Repr for vtable_origin {
fn repr(&self, tcx: ty::ctxt) -> ~str {
match *self {
vtable_static(def_id, ref tys, ref vtable_res) => {
fmt!("vtable_static(%?:%s, %s, %s)",
def_id,
ty::item_path_str(tcx, def_id),
tys.repr(tcx),
vtable_res.repr(tcx))
}
vtable_param(x, y) => {
fmt!("vtable_param(%?, %?)", x, y)
}
vtable_self(def_id) => {
fmt!("vtable_self(%?)", def_id)
}
}
}
}
pub type vtable_map = @mut HashMap<ast::node_id, vtable_res>;
pub struct CrateCtxt {
// A mapping from method call sites to traits that have that method.
trait_map: resolve::TraitMap,
method_map: method_map,
vtable_map: vtable_map,
coherence_info: coherence::CoherenceInfo,
tcx: ty::ctxt
}
// Functions that write types into the node type table
pub fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::node_id, ty: ty::t) {
debug!("write_ty_to_tcx(%d, %s)", node_id, ppaux::ty_to_str(tcx, ty));
assert!(!ty::type_needs_infer(ty));
tcx.node_types.insert(node_id as uint, ty);
}
pub fn write_substs_to_tcx(tcx: ty::ctxt,
node_id: ast::node_id,
substs: ~[ty::t]) {
if substs.len() > 0u {
debug!("write_substs_to_tcx(%d, %?)", node_id,
substs.map(|t| ppaux::ty_to_str(tcx, *t)));
assert!(substs.iter().all(|t| !ty::type_needs_infer(*t)));
tcx.node_type_substs.insert(node_id, substs);
}
}
pub fn write_tpt_to_tcx(tcx: ty::ctxt,
node_id: ast::node_id,
tpt: &ty::ty_param_substs_and_ty) {
write_ty_to_tcx(tcx, node_id, tpt.ty);
if !tpt.substs.tps.is_empty() {
write_substs_to_tcx(tcx, node_id, copy tpt.substs.tps);
}
}
pub fn lookup_def_tcx(tcx: ty::ctxt, sp: span, id: ast::node_id) -> ast::def {
match tcx.def_map.find(&id) {
Some(&x) => x,
_ => {
tcx.sess.span_fatal(sp, "internal error looking up a definition")
}
}
}
pub fn lookup_def_ccx(ccx: &CrateCtxt, sp: span, id: ast::node_id)
-> ast::def {
lookup_def_tcx(ccx.tcx, sp, id)
}
pub fn no_params(t: ty::t) -> ty::ty_param_bounds_and_ty {
ty::ty_param_bounds_and_ty {
generics: ty::Generics {type_param_defs: @~[],
region_param: None},
ty: t
}
}
pub fn require_same_types(
tcx: ty::ctxt,
maybe_infcx: Option<@mut infer::InferCtxt>,
t1_is_expected: bool,
span: span,
t1: ty::t,
t2: ty::t,
msg: &fn() -> ~str) -> bool {
let l_tcx;
let l_infcx;
match maybe_infcx {
None => {
l_tcx = tcx;
l_infcx = infer::new_infer_ctxt(tcx);
}
Some(i) => {
l_tcx = i.tcx;
l_infcx = i;
}
}
match infer::mk_eqty(l_infcx, t1_is_expected, infer::Misc(span), t1, t2) {
result::Ok(()) => true,
result::Err(ref terr) => {
l_tcx.sess.span_err(span, msg() + ": " +
ty::type_err_to_str(l_tcx, terr));
ty::note_and_explain_type_err(l_tcx, terr);
false
}
}
}
// a list of mapping from in-scope-region-names ("isr") to the
// corresponding ty::Region
pub type isr_alist = @List<(ty::bound_region, ty::Region)>;
trait get_and_find_region {
fn get(&self, br: ty::bound_region) -> ty::Region;
fn find(&self, br: ty::bound_region) -> Option<ty::Region>;
}
impl get_and_find_region for isr_alist {
pub fn get(&self, br: ty::bound_region) -> ty::Region {
self.find(br).get()
}
pub fn find(&self, br: ty::bound_region) -> Option<ty::Region> {
for list::each(*self) |isr| {
let (isr_br, isr_r) = *isr;
if isr_br == br { return Some(isr_r); }
}
return None;
}
}
fn check_main_fn_ty(ccx: &CrateCtxt,
main_id: ast::node_id,
main_span: span) {
let tcx = ccx.tcx;
let main_t = ty::node_id_to_type(tcx, main_id);
match ty::get(main_t).sty {
ty::ty_bare_fn(ref fn_ty) => {
match tcx.items.find(&main_id) {
Some(&ast_map::node_item(it,_)) => {
match it.node {
ast::item_fn(_, _, _, ref ps, _)
if ps.is_parameterized() => {
tcx.sess.span_err(
main_span,
"main function is not allowed to have type parameters");
return;
}
_ => ()
}
}
_ => ()
}
let mut ok = ty::type_is_nil(fn_ty.sig.output);
let num_args = fn_ty.sig.inputs.len();
ok &= num_args == 0u;
if !ok {
tcx.sess.span_err(
main_span,
fmt!("Wrong type in main function: found `%s`, \
expected `fn() -> ()`",
ppaux::ty_to_str(tcx, main_t)));
}
}
_ => {
tcx.sess.span_bug(main_span,
fmt!("main has a non-function type: found `%s`",
ppaux::ty_to_str(tcx, main_t)));
}
}
}
fn check_start_fn_ty(ccx: &CrateCtxt,
start_id: ast::node_id,
start_span: span) {
let tcx = ccx.tcx;
let start_t = ty::node_id_to_type(tcx, start_id);
match ty::get(start_t).sty {
ty::ty_bare_fn(_) => {
match tcx.items.find(&start_id) {
Some(&ast_map::node_item(it,_)) => {
match it.node {
ast::item_fn(_,_,_,ref ps,_)
if ps.is_parameterized() => {
tcx.sess.span_err(
start_span,
"start function is not allowed to have type parameters");
return;
}
_ => ()
}
}
_ => ()
}
let se_ty = ty::mk_bare_fn(tcx, ty::BareFnTy {
purity: ast::impure_fn,
abis: abi::AbiSet::Rust(),
sig: ty::FnSig {
bound_lifetime_names: opt_vec::Empty,
inputs: ~[
ty::mk_int(),
ty::mk_imm_ptr(tcx, ty::mk_imm_ptr(tcx, ty::mk_u8())),
ty::mk_imm_ptr(tcx, ty::mk_u8())
],
output: ty::mk_int()
}
});
require_same_types(tcx, None, false, start_span, start_t, se_ty,
|| fmt!("start function expects type: `%s`", ppaux::ty_to_str(ccx.tcx, se_ty)));
}
_ => {
tcx.sess.span_bug(start_span,
fmt!("start has a non-function type: found `%s`",
ppaux::ty_to_str(tcx, start_t)));
}
}
}
fn check_for_entry_fn(ccx: &CrateCtxt) {
let tcx = ccx.tcx;
if !*tcx.sess.building_library {
match *tcx.sess.entry_fn {
Some((id, sp)) => match *tcx.sess.entry_type {
Some(session::EntryMain) => check_main_fn_ty(ccx, id, sp),
Some(session::EntryStart) => check_start_fn_ty(ccx, id, sp),
None => tcx.sess.bug("entry function without a type")
},
None => tcx.sess.bug("type checking without entry function")
}
}
}
pub fn check_crate(tcx: ty::ctxt,
trait_map: resolve::TraitMap,
crate: &ast::crate)
-> (method_map, vtable_map) {
let time_passes = tcx.sess.time_passes();
let ccx = @mut CrateCtxt {
trait_map: trait_map,
method_map: @mut HashMap::new(),
vtable_map: @mut HashMap::new(),
coherence_info: coherence::CoherenceInfo(),
tcx: tcx
};
time(time_passes, ~"type collecting", ||
collect::collect_item_types(ccx, crate));
// this ensures that later parts of type checking can assume that items
// have valid types and not error
tcx.sess.abort_if_errors();
time(time_passes, ~"coherence checking", ||
coherence::check_coherence(ccx, crate));
time(time_passes, ~"type checking", ||
check::check_item_types(ccx, crate));
check_for_entry_fn(ccx);
tcx.sess.abort_if_errors();
(ccx.method_map, ccx.vtable_map)
}
|
}
// details for a method invoked with a receiver whose type is a type parameter
// with a bounded trait.
|
random_line_split
|
mod.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*
typeck.rs, an introduction
The type checker is responsible for:
1. Determining the type of each expression
2. Resolving methods and traits
3. Guaranteeing that most type rules are met ("most?", you say, "why most?"
Well, dear reader, read on)
The main entry point is `check_crate()`. Type checking operates in two major
phases: collect and check. The collect phase passes over all items and
determines their type, without examining their "innards". The check phase
then checks function bodies and so forth.
Within the check phase, we check each function body one at a time (bodies of
function expressions are checked as part of the containing function).
Inference is used to supply types wherever they are unknown. The actual
checking of a function itself has several phases (check, regionck, writeback),
as discussed in the documentation for the `check` module.
The type checker is defined into various submodules which are documented
independently:
- astconv: converts the AST representation of types
into the `ty` representation
- collect: computes the types of each top-level item and enters them into
the `cx.tcache` table for later use
- check: walks over function bodies and type checks them, inferring types for
local variables, type parameters, etc as necessary.
- infer: finds the types to use for each type variable such that
all subtyping and assignment constraints are met. In essence, the check
module specifies the constraints, and the infer module solves them.
*/
use driver::session;
use middle::resolve;
use middle::ty;
use util::common::time;
use util::ppaux::Repr;
use util::ppaux;
use std::hashmap::HashMap;
use std::result;
use extra::list::List;
use extra::list;
use syntax::codemap::span;
use syntax::print::pprust::*;
use syntax::{ast, ast_map, abi};
use syntax::opt_vec;
#[path = "check/mod.rs"]
pub mod check;
pub mod rscope;
pub mod astconv;
#[path = "infer/mod.rs"]
pub mod infer;
pub mod collect;
pub mod coherence;
#[deriving(Encodable, Decodable)]
pub enum method_origin {
// supertrait method invoked on "self" inside a default method
// first field is supertrait ID;
// second field is method index (relative to the *supertrait*
// method list)
method_super(ast::def_id, uint),
// fully statically resolved method
method_static(ast::def_id),
// method invoked on a type parameter with a bounded trait
method_param(method_param),
// method invoked on a trait instance
method_trait(ast::def_id, uint, ty::TraitStore),
// method invoked on "self" inside a default method
method_self(ast::def_id, uint)
}
// details for a method invoked with a receiver whose type is a type parameter
// with a bounded trait.
#[deriving(Encodable, Decodable)]
pub struct method_param {
// the trait containing the method to be invoked
trait_id: ast::def_id,
// index of the method to be invoked amongst the trait's methods
method_num: uint,
// index of the type parameter (from those that are in scope) that is
// the type of the receiver
param_num: uint,
// index of the bound for this type parameter which specifies the trait
bound_num: uint,
}
pub struct method_map_entry {
// the type of the self parameter, which is not reflected in the fn type
// (FIXME #3446)
self_ty: ty::t,
// the mode of `self`
self_mode: ty::SelfMode,
// the type of explicit self on the method
explicit_self: ast::explicit_self_,
// method details being invoked
origin: method_origin,
}
// maps from an expression id that corresponds to a method call to the details
// of the method to be invoked
pub type method_map = @mut HashMap<ast::node_id, method_map_entry>;
pub type vtable_param_res = @~[vtable_origin];
// Resolutions for bounds of all parameters, left to right, for a given path.
pub type vtable_res = @~[vtable_param_res];
pub enum vtable_origin {
/*
Statically known vtable. def_id gives the class or impl item
from whence comes the vtable, and tys are the type substs.
vtable_res is the vtable itself
*/
vtable_static(ast::def_id, ~[ty::t], vtable_res),
/*
Dynamic vtable, comes from a parameter that has a bound on it:
fn foo<T:quux,baz,bar>(a: T) -- a's vtable would have a
vtable_param origin
The first uint is the param number (identifying T in the example),
and the second is the bound number (identifying baz)
*/
vtable_param(uint, uint),
/*
Dynamic vtable, comes from self.
*/
vtable_self(ast::def_id)
}
impl Repr for vtable_origin {
fn repr(&self, tcx: ty::ctxt) -> ~str {
match *self {
vtable_static(def_id, ref tys, ref vtable_res) => {
fmt!("vtable_static(%?:%s, %s, %s)",
def_id,
ty::item_path_str(tcx, def_id),
tys.repr(tcx),
vtable_res.repr(tcx))
}
vtable_param(x, y) => {
fmt!("vtable_param(%?, %?)", x, y)
}
vtable_self(def_id) => {
fmt!("vtable_self(%?)", def_id)
}
}
}
}
pub type vtable_map = @mut HashMap<ast::node_id, vtable_res>;
pub struct CrateCtxt {
// A mapping from method call sites to traits that have that method.
trait_map: resolve::TraitMap,
method_map: method_map,
vtable_map: vtable_map,
coherence_info: coherence::CoherenceInfo,
tcx: ty::ctxt
}
// Functions that write types into the node type table
pub fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::node_id, ty: ty::t) {
debug!("write_ty_to_tcx(%d, %s)", node_id, ppaux::ty_to_str(tcx, ty));
assert!(!ty::type_needs_infer(ty));
tcx.node_types.insert(node_id as uint, ty);
}
pub fn write_substs_to_tcx(tcx: ty::ctxt,
node_id: ast::node_id,
substs: ~[ty::t]) {
if substs.len() > 0u {
debug!("write_substs_to_tcx(%d, %?)", node_id,
substs.map(|t| ppaux::ty_to_str(tcx, *t)));
        assert!(substs.iter().all(|t| !ty::type_needs_infer(*t)));
tcx.node_type_substs.insert(node_id, substs);
}
}
pub fn write_tpt_to_tcx(tcx: ty::ctxt,
node_id: ast::node_id,
tpt: &ty::ty_param_substs_and_ty) {
write_ty_to_tcx(tcx, node_id, tpt.ty);
    if !tpt.substs.tps.is_empty() {
write_substs_to_tcx(tcx, node_id, copy tpt.substs.tps);
}
}
pub fn lookup_def_tcx(tcx: ty::ctxt, sp: span, id: ast::node_id) -> ast::def {
match tcx.def_map.find(&id) {
Some(&x) => x,
_ => {
tcx.sess.span_fatal(sp, "internal error looking up a definition")
}
}
}
pub fn lookup_def_ccx(ccx: &CrateCtxt, sp: span, id: ast::node_id)
-> ast::def {
lookup_def_tcx(ccx.tcx, sp, id)
}
pub fn no_params(t: ty::t) -> ty::ty_param_bounds_and_ty {
ty::ty_param_bounds_and_ty {
generics: ty::Generics {type_param_defs: @~[],
region_param: None},
ty: t
}
}
pub fn require_same_types(
tcx: ty::ctxt,
maybe_infcx: Option<@mut infer::InferCtxt>,
t1_is_expected: bool,
span: span,
t1: ty::t,
t2: ty::t,
msg: &fn() -> ~str) -> bool {
let l_tcx;
let l_infcx;
match maybe_infcx {
None =>
|
Some(i) => {
l_tcx = i.tcx;
l_infcx = i;
}
}
match infer::mk_eqty(l_infcx, t1_is_expected, infer::Misc(span), t1, t2) {
result::Ok(()) => true,
result::Err(ref terr) => {
l_tcx.sess.span_err(span, msg() + ": " +
ty::type_err_to_str(l_tcx, terr));
ty::note_and_explain_type_err(l_tcx, terr);
false
}
}
}
// a list of mappings from in-scope-region-names ("isr") to the
// corresponding ty::Region
pub type isr_alist = @List<(ty::bound_region, ty::Region)>;
trait get_and_find_region {
fn get(&self, br: ty::bound_region) -> ty::Region;
fn find(&self, br: ty::bound_region) -> Option<ty::Region>;
}
impl get_and_find_region for isr_alist {
pub fn get(&self, br: ty::bound_region) -> ty::Region {
self.find(br).get()
}
pub fn find(&self, br: ty::bound_region) -> Option<ty::Region> {
for list::each(*self) |isr| {
let (isr_br, isr_r) = *isr;
if isr_br == br { return Some(isr_r); }
}
return None;
}
}
fn check_main_fn_ty(ccx: &CrateCtxt,
main_id: ast::node_id,
main_span: span) {
let tcx = ccx.tcx;
let main_t = ty::node_id_to_type(tcx, main_id);
match ty::get(main_t).sty {
ty::ty_bare_fn(ref fn_ty) => {
match tcx.items.find(&main_id) {
Some(&ast_map::node_item(it,_)) => {
match it.node {
ast::item_fn(_, _, _, ref ps, _)
if ps.is_parameterized() => {
tcx.sess.span_err(
main_span,
"main function is not allowed to have type parameters");
return;
}
_ => ()
}
}
_ => ()
}
let mut ok = ty::type_is_nil(fn_ty.sig.output);
let num_args = fn_ty.sig.inputs.len();
ok &= num_args == 0u;
            if !ok {
tcx.sess.span_err(
main_span,
fmt!("Wrong type in main function: found `%s`, \
expected `fn() -> ()`",
ppaux::ty_to_str(tcx, main_t)));
}
}
_ => {
tcx.sess.span_bug(main_span,
fmt!("main has a non-function type: found `%s`",
ppaux::ty_to_str(tcx, main_t)));
}
}
}
fn check_start_fn_ty(ccx: &CrateCtxt,
start_id: ast::node_id,
start_span: span) {
let tcx = ccx.tcx;
let start_t = ty::node_id_to_type(tcx, start_id);
match ty::get(start_t).sty {
ty::ty_bare_fn(_) => {
match tcx.items.find(&start_id) {
Some(&ast_map::node_item(it,_)) => {
match it.node {
ast::item_fn(_,_,_,ref ps,_)
if ps.is_parameterized() => {
tcx.sess.span_err(
start_span,
"start function is not allowed to have type parameters");
return;
}
_ => ()
}
}
_ => ()
}
let se_ty = ty::mk_bare_fn(tcx, ty::BareFnTy {
purity: ast::impure_fn,
abis: abi::AbiSet::Rust(),
sig: ty::FnSig {
bound_lifetime_names: opt_vec::Empty,
inputs: ~[
ty::mk_int(),
ty::mk_imm_ptr(tcx, ty::mk_imm_ptr(tcx, ty::mk_u8())),
ty::mk_imm_ptr(tcx, ty::mk_u8())
],
output: ty::mk_int()
}
});
require_same_types(tcx, None, false, start_span, start_t, se_ty,
|| fmt!("start function expects type: `%s`", ppaux::ty_to_str(ccx.tcx, se_ty)));
}
_ => {
tcx.sess.span_bug(start_span,
fmt!("start has a non-function type: found `%s`",
ppaux::ty_to_str(tcx, start_t)));
}
}
}
fn check_for_entry_fn(ccx: &CrateCtxt) {
let tcx = ccx.tcx;
    if !*tcx.sess.building_library {
match *tcx.sess.entry_fn {
Some((id, sp)) => match *tcx.sess.entry_type {
Some(session::EntryMain) => check_main_fn_ty(ccx, id, sp),
Some(session::EntryStart) => check_start_fn_ty(ccx, id, sp),
None => tcx.sess.bug("entry function without a type")
},
None => tcx.sess.bug("type checking without entry function")
}
}
}
pub fn check_crate(tcx: ty::ctxt,
trait_map: resolve::TraitMap,
crate: &ast::crate)
-> (method_map, vtable_map) {
let time_passes = tcx.sess.time_passes();
let ccx = @mut CrateCtxt {
trait_map: trait_map,
method_map: @mut HashMap::new(),
vtable_map: @mut HashMap::new(),
coherence_info: coherence::CoherenceInfo(),
tcx: tcx
};
time(time_passes, ~"type collecting", ||
collect::collect_item_types(ccx, crate));
// this ensures that later parts of type checking can assume that items
// have valid types and not error
tcx.sess.abort_if_errors();
time(time_passes, ~"coherence checking", ||
coherence::check_coherence(ccx, crate));
time(time_passes, ~"type checking", ||
check::check_item_types(ccx, crate));
check_for_entry_fn(ccx);
tcx.sess.abort_if_errors();
(ccx.method_map, ccx.vtable_map)
}
|
{
l_tcx = tcx;
l_infcx = infer::new_infer_ctxt(tcx);
}
|
conditional_block
|
mod.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*
typeck.rs, an introduction
The type checker is responsible for:
1. Determining the type of each expression
2. Resolving methods and traits
3. Guaranteeing that most type rules are met ("most?", you say, "why most?"
Well, dear reader, read on)
The main entry point is `check_crate()`. Type checking operates in two major
phases: collect and check. The collect phase passes over all items and
determines their type, without examining their "innards". The check phase
then checks function bodies and so forth.
Within the check phase, we check each function body one at a time (bodies of
function expressions are checked as part of the containing function).
Inference is used to supply types wherever they are unknown. The actual
checking of a function itself has several phases (check, regionck, writeback),
as discussed in the documentation for the `check` module.
The type checker is divided into various submodules which are documented
independently:
- astconv: converts the AST representation of types
into the `ty` representation
- collect: computes the types of each top-level item and enters them into
the `cx.tcache` table for later use
- check: walks over function bodies and type checks them, inferring types for
local variables, type parameters, etc as necessary.
- infer: finds the types to use for each type variable such that
all subtyping and assignment constraints are met. In essence, the check
module specifies the constraints, and the infer module solves them.
*/
use driver::session;
use middle::resolve;
use middle::ty;
use util::common::time;
use util::ppaux::Repr;
use util::ppaux;
use std::hashmap::HashMap;
use std::result;
use extra::list::List;
use extra::list;
use syntax::codemap::span;
use syntax::print::pprust::*;
use syntax::{ast, ast_map, abi};
use syntax::opt_vec;
#[path = "check/mod.rs"]
pub mod check;
pub mod rscope;
pub mod astconv;
#[path = "infer/mod.rs"]
pub mod infer;
pub mod collect;
pub mod coherence;
#[deriving(Encodable, Decodable)]
pub enum method_origin {
// supertrait method invoked on "self" inside a default method
// first field is supertrait ID;
// second field is method index (relative to the *supertrait*
// method list)
method_super(ast::def_id, uint),
// fully statically resolved method
method_static(ast::def_id),
// method invoked on a type parameter with a bounded trait
method_param(method_param),
// method invoked on a trait instance
method_trait(ast::def_id, uint, ty::TraitStore),
// method invoked on "self" inside a default method
method_self(ast::def_id, uint)
}
// details for a method invoked with a receiver whose type is a type parameter
// with a bounded trait.
#[deriving(Encodable, Decodable)]
pub struct method_param {
// the trait containing the method to be invoked
trait_id: ast::def_id,
// index of the method to be invoked amongst the trait's methods
method_num: uint,
// index of the type parameter (from those that are in scope) that is
// the type of the receiver
param_num: uint,
// index of the bound for this type parameter which specifies the trait
bound_num: uint,
}
pub struct method_map_entry {
// the type of the self parameter, which is not reflected in the fn type
// (FIXME #3446)
self_ty: ty::t,
// the mode of `self`
self_mode: ty::SelfMode,
// the type of explicit self on the method
explicit_self: ast::explicit_self_,
// method details being invoked
origin: method_origin,
}
// maps from an expression id that corresponds to a method call to the details
// of the method to be invoked
pub type method_map = @mut HashMap<ast::node_id, method_map_entry>;
pub type vtable_param_res = @~[vtable_origin];
// Resolutions for bounds of all parameters, left to right, for a given path.
pub type vtable_res = @~[vtable_param_res];
pub enum vtable_origin {
/*
Statically known vtable. def_id gives the class or impl item
from whence comes the vtable, and tys are the type substs.
vtable_res is the vtable itself
*/
vtable_static(ast::def_id, ~[ty::t], vtable_res),
/*
Dynamic vtable, comes from a parameter that has a bound on it:
fn foo<T:quux,baz,bar>(a: T) -- a's vtable would have a
vtable_param origin
The first uint is the param number (identifying T in the example),
and the second is the bound number (identifying baz)
*/
vtable_param(uint, uint),
/*
Dynamic vtable, comes from self.
*/
vtable_self(ast::def_id)
}
impl Repr for vtable_origin {
fn repr(&self, tcx: ty::ctxt) -> ~str {
match *self {
vtable_static(def_id, ref tys, ref vtable_res) => {
fmt!("vtable_static(%?:%s, %s, %s)",
def_id,
ty::item_path_str(tcx, def_id),
tys.repr(tcx),
vtable_res.repr(tcx))
}
vtable_param(x, y) => {
fmt!("vtable_param(%?, %?)", x, y)
}
vtable_self(def_id) => {
fmt!("vtable_self(%?)", def_id)
}
}
}
}
pub type vtable_map = @mut HashMap<ast::node_id, vtable_res>;
pub struct CrateCtxt {
// A mapping from method call sites to traits that have that method.
trait_map: resolve::TraitMap,
method_map: method_map,
vtable_map: vtable_map,
coherence_info: coherence::CoherenceInfo,
tcx: ty::ctxt
}
// Functions that write types into the node type table
pub fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::node_id, ty: ty::t) {
debug!("write_ty_to_tcx(%d, %s)", node_id, ppaux::ty_to_str(tcx, ty));
assert!(!ty::type_needs_infer(ty));
tcx.node_types.insert(node_id as uint, ty);
}
pub fn write_substs_to_tcx(tcx: ty::ctxt,
node_id: ast::node_id,
substs: ~[ty::t]) {
if substs.len() > 0u {
debug!("write_substs_to_tcx(%d, %?)", node_id,
substs.map(|t| ppaux::ty_to_str(tcx, *t)));
        assert!(substs.iter().all(|t| !ty::type_needs_infer(*t)));
tcx.node_type_substs.insert(node_id, substs);
}
}
pub fn write_tpt_to_tcx(tcx: ty::ctxt,
node_id: ast::node_id,
tpt: &ty::ty_param_substs_and_ty) {
write_ty_to_tcx(tcx, node_id, tpt.ty);
    if !tpt.substs.tps.is_empty() {
write_substs_to_tcx(tcx, node_id, copy tpt.substs.tps);
}
}
pub fn lookup_def_tcx(tcx: ty::ctxt, sp: span, id: ast::node_id) -> ast::def {
match tcx.def_map.find(&id) {
Some(&x) => x,
_ => {
tcx.sess.span_fatal(sp, "internal error looking up a definition")
}
}
}
pub fn lookup_def_ccx(ccx: &CrateCtxt, sp: span, id: ast::node_id)
-> ast::def {
lookup_def_tcx(ccx.tcx, sp, id)
}
pub fn no_params(t: ty::t) -> ty::ty_param_bounds_and_ty {
ty::ty_param_bounds_and_ty {
generics: ty::Generics {type_param_defs: @~[],
region_param: None},
ty: t
}
}
pub fn require_same_types(
tcx: ty::ctxt,
maybe_infcx: Option<@mut infer::InferCtxt>,
t1_is_expected: bool,
span: span,
t1: ty::t,
t2: ty::t,
msg: &fn() -> ~str) -> bool
|
ty::note_and_explain_type_err(l_tcx, terr);
false
}
}
}
// a list of mappings from in-scope-region-names ("isr") to the
// corresponding ty::Region
pub type isr_alist = @List<(ty::bound_region, ty::Region)>;
trait get_and_find_region {
fn get(&self, br: ty::bound_region) -> ty::Region;
fn find(&self, br: ty::bound_region) -> Option<ty::Region>;
}
impl get_and_find_region for isr_alist {
pub fn get(&self, br: ty::bound_region) -> ty::Region {
self.find(br).get()
}
pub fn find(&self, br: ty::bound_region) -> Option<ty::Region> {
for list::each(*self) |isr| {
let (isr_br, isr_r) = *isr;
if isr_br == br { return Some(isr_r); }
}
return None;
}
}
fn check_main_fn_ty(ccx: &CrateCtxt,
main_id: ast::node_id,
main_span: span) {
let tcx = ccx.tcx;
let main_t = ty::node_id_to_type(tcx, main_id);
match ty::get(main_t).sty {
ty::ty_bare_fn(ref fn_ty) => {
match tcx.items.find(&main_id) {
Some(&ast_map::node_item(it,_)) => {
match it.node {
ast::item_fn(_, _, _, ref ps, _)
if ps.is_parameterized() => {
tcx.sess.span_err(
main_span,
"main function is not allowed to have type parameters");
return;
}
_ => ()
}
}
_ => ()
}
let mut ok = ty::type_is_nil(fn_ty.sig.output);
let num_args = fn_ty.sig.inputs.len();
ok &= num_args == 0u;
            if !ok {
tcx.sess.span_err(
main_span,
fmt!("Wrong type in main function: found `%s`, \
expected `fn() -> ()`",
ppaux::ty_to_str(tcx, main_t)));
}
}
_ => {
tcx.sess.span_bug(main_span,
fmt!("main has a non-function type: found `%s`",
ppaux::ty_to_str(tcx, main_t)));
}
}
}
fn check_start_fn_ty(ccx: &CrateCtxt,
start_id: ast::node_id,
start_span: span) {
let tcx = ccx.tcx;
let start_t = ty::node_id_to_type(tcx, start_id);
match ty::get(start_t).sty {
ty::ty_bare_fn(_) => {
match tcx.items.find(&start_id) {
Some(&ast_map::node_item(it,_)) => {
match it.node {
ast::item_fn(_,_,_,ref ps,_)
if ps.is_parameterized() => {
tcx.sess.span_err(
start_span,
"start function is not allowed to have type parameters");
return;
}
_ => ()
}
}
_ => ()
}
let se_ty = ty::mk_bare_fn(tcx, ty::BareFnTy {
purity: ast::impure_fn,
abis: abi::AbiSet::Rust(),
sig: ty::FnSig {
bound_lifetime_names: opt_vec::Empty,
inputs: ~[
ty::mk_int(),
ty::mk_imm_ptr(tcx, ty::mk_imm_ptr(tcx, ty::mk_u8())),
ty::mk_imm_ptr(tcx, ty::mk_u8())
],
output: ty::mk_int()
}
});
require_same_types(tcx, None, false, start_span, start_t, se_ty,
|| fmt!("start function expects type: `%s`", ppaux::ty_to_str(ccx.tcx, se_ty)));
}
_ => {
tcx.sess.span_bug(start_span,
fmt!("start has a non-function type: found `%s`",
ppaux::ty_to_str(tcx, start_t)));
}
}
}
fn check_for_entry_fn(ccx: &CrateCtxt) {
let tcx = ccx.tcx;
    if !*tcx.sess.building_library {
match *tcx.sess.entry_fn {
Some((id, sp)) => match *tcx.sess.entry_type {
Some(session::EntryMain) => check_main_fn_ty(ccx, id, sp),
Some(session::EntryStart) => check_start_fn_ty(ccx, id, sp),
None => tcx.sess.bug("entry function without a type")
},
None => tcx.sess.bug("type checking without entry function")
}
}
}
pub fn check_crate(tcx: ty::ctxt,
trait_map: resolve::TraitMap,
crate: &ast::crate)
-> (method_map, vtable_map) {
let time_passes = tcx.sess.time_passes();
let ccx = @mut CrateCtxt {
trait_map: trait_map,
method_map: @mut HashMap::new(),
vtable_map: @mut HashMap::new(),
coherence_info: coherence::CoherenceInfo(),
tcx: tcx
};
time(time_passes, ~"type collecting", ||
collect::collect_item_types(ccx, crate));
// this ensures that later parts of type checking can assume that items
// have valid types and not error
tcx.sess.abort_if_errors();
time(time_passes, ~"coherence checking", ||
coherence::check_coherence(ccx, crate));
time(time_passes, ~"type checking", ||
check::check_item_types(ccx, crate));
check_for_entry_fn(ccx);
tcx.sess.abort_if_errors();
(ccx.method_map, ccx.vtable_map)
}
|
{
let l_tcx;
let l_infcx;
match maybe_infcx {
None => {
l_tcx = tcx;
l_infcx = infer::new_infer_ctxt(tcx);
}
Some(i) => {
l_tcx = i.tcx;
l_infcx = i;
}
}
match infer::mk_eqty(l_infcx, t1_is_expected, infer::Misc(span), t1, t2) {
result::Ok(()) => true,
result::Err(ref terr) => {
l_tcx.sess.span_err(span, msg() + ": " +
ty::type_err_to_str(l_tcx, terr));
|
identifier_body
|
mod.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*
typeck.rs, an introduction
The type checker is responsible for:
1. Determining the type of each expression
2. Resolving methods and traits
3. Guaranteeing that most type rules are met ("most?", you say, "why most?"
Well, dear reader, read on)
The main entry point is `check_crate()`. Type checking operates in two major
phases: collect and check. The collect phase passes over all items and
determines their type, without examining their "innards". The check phase
then checks function bodies and so forth.
Within the check phase, we check each function body one at a time (bodies of
function expressions are checked as part of the containing function).
Inference is used to supply types wherever they are unknown. The actual
checking of a function itself has several phases (check, regionck, writeback),
as discussed in the documentation for the `check` module.
The type checker is divided into various submodules which are documented
independently:
- astconv: converts the AST representation of types
into the `ty` representation
- collect: computes the types of each top-level item and enters them into
the `cx.tcache` table for later use
- check: walks over function bodies and type checks them, inferring types for
local variables, type parameters, etc as necessary.
- infer: finds the types to use for each type variable such that
all subtyping and assignment constraints are met. In essence, the check
module specifies the constraints, and the infer module solves them.
*/
use driver::session;
use middle::resolve;
use middle::ty;
use util::common::time;
use util::ppaux::Repr;
use util::ppaux;
use std::hashmap::HashMap;
use std::result;
use extra::list::List;
use extra::list;
use syntax::codemap::span;
use syntax::print::pprust::*;
use syntax::{ast, ast_map, abi};
use syntax::opt_vec;
#[path = "check/mod.rs"]
pub mod check;
pub mod rscope;
pub mod astconv;
#[path = "infer/mod.rs"]
pub mod infer;
pub mod collect;
pub mod coherence;
#[deriving(Encodable, Decodable)]
pub enum
|
{
// supertrait method invoked on "self" inside a default method
// first field is supertrait ID;
// second field is method index (relative to the *supertrait*
// method list)
method_super(ast::def_id, uint),
// fully statically resolved method
method_static(ast::def_id),
// method invoked on a type parameter with a bounded trait
method_param(method_param),
// method invoked on a trait instance
method_trait(ast::def_id, uint, ty::TraitStore),
// method invoked on "self" inside a default method
method_self(ast::def_id, uint)
}
// details for a method invoked with a receiver whose type is a type parameter
// with a bounded trait.
#[deriving(Encodable, Decodable)]
pub struct method_param {
// the trait containing the method to be invoked
trait_id: ast::def_id,
// index of the method to be invoked amongst the trait's methods
method_num: uint,
// index of the type parameter (from those that are in scope) that is
// the type of the receiver
param_num: uint,
// index of the bound for this type parameter which specifies the trait
bound_num: uint,
}
pub struct method_map_entry {
// the type of the self parameter, which is not reflected in the fn type
// (FIXME #3446)
self_ty: ty::t,
// the mode of `self`
self_mode: ty::SelfMode,
// the type of explicit self on the method
explicit_self: ast::explicit_self_,
// method details being invoked
origin: method_origin,
}
// maps from an expression id that corresponds to a method call to the details
// of the method to be invoked
pub type method_map = @mut HashMap<ast::node_id, method_map_entry>;
pub type vtable_param_res = @~[vtable_origin];
// Resolutions for bounds of all parameters, left to right, for a given path.
pub type vtable_res = @~[vtable_param_res];
pub enum vtable_origin {
/*
Statically known vtable. def_id gives the class or impl item
from whence comes the vtable, and tys are the type substs.
vtable_res is the vtable itself
*/
vtable_static(ast::def_id, ~[ty::t], vtable_res),
/*
Dynamic vtable, comes from a parameter that has a bound on it:
fn foo<T:quux,baz,bar>(a: T) -- a's vtable would have a
vtable_param origin
The first uint is the param number (identifying T in the example),
and the second is the bound number (identifying baz)
*/
vtable_param(uint, uint),
/*
Dynamic vtable, comes from self.
*/
vtable_self(ast::def_id)
}
impl Repr for vtable_origin {
fn repr(&self, tcx: ty::ctxt) -> ~str {
match *self {
vtable_static(def_id, ref tys, ref vtable_res) => {
fmt!("vtable_static(%?:%s, %s, %s)",
def_id,
ty::item_path_str(tcx, def_id),
tys.repr(tcx),
vtable_res.repr(tcx))
}
vtable_param(x, y) => {
fmt!("vtable_param(%?, %?)", x, y)
}
vtable_self(def_id) => {
fmt!("vtable_self(%?)", def_id)
}
}
}
}
pub type vtable_map = @mut HashMap<ast::node_id, vtable_res>;
pub struct CrateCtxt {
// A mapping from method call sites to traits that have that method.
trait_map: resolve::TraitMap,
method_map: method_map,
vtable_map: vtable_map,
coherence_info: coherence::CoherenceInfo,
tcx: ty::ctxt
}
// Functions that write types into the node type table
pub fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::node_id, ty: ty::t) {
debug!("write_ty_to_tcx(%d, %s)", node_id, ppaux::ty_to_str(tcx, ty));
assert!(!ty::type_needs_infer(ty));
tcx.node_types.insert(node_id as uint, ty);
}
pub fn write_substs_to_tcx(tcx: ty::ctxt,
node_id: ast::node_id,
substs: ~[ty::t]) {
if substs.len() > 0u {
debug!("write_substs_to_tcx(%d, %?)", node_id,
substs.map(|t| ppaux::ty_to_str(tcx, *t)));
        assert!(substs.iter().all(|t| !ty::type_needs_infer(*t)));
tcx.node_type_substs.insert(node_id, substs);
}
}
pub fn write_tpt_to_tcx(tcx: ty::ctxt,
node_id: ast::node_id,
tpt: &ty::ty_param_substs_and_ty) {
write_ty_to_tcx(tcx, node_id, tpt.ty);
    if !tpt.substs.tps.is_empty() {
write_substs_to_tcx(tcx, node_id, copy tpt.substs.tps);
}
}
pub fn lookup_def_tcx(tcx: ty::ctxt, sp: span, id: ast::node_id) -> ast::def {
match tcx.def_map.find(&id) {
Some(&x) => x,
_ => {
tcx.sess.span_fatal(sp, "internal error looking up a definition")
}
}
}
pub fn lookup_def_ccx(ccx: &CrateCtxt, sp: span, id: ast::node_id)
-> ast::def {
lookup_def_tcx(ccx.tcx, sp, id)
}
pub fn no_params(t: ty::t) -> ty::ty_param_bounds_and_ty {
ty::ty_param_bounds_and_ty {
generics: ty::Generics {type_param_defs: @~[],
region_param: None},
ty: t
}
}
pub fn require_same_types(
tcx: ty::ctxt,
maybe_infcx: Option<@mut infer::InferCtxt>,
t1_is_expected: bool,
span: span,
t1: ty::t,
t2: ty::t,
msg: &fn() -> ~str) -> bool {
let l_tcx;
let l_infcx;
match maybe_infcx {
None => {
l_tcx = tcx;
l_infcx = infer::new_infer_ctxt(tcx);
}
Some(i) => {
l_tcx = i.tcx;
l_infcx = i;
}
}
match infer::mk_eqty(l_infcx, t1_is_expected, infer::Misc(span), t1, t2) {
result::Ok(()) => true,
result::Err(ref terr) => {
l_tcx.sess.span_err(span, msg() + ": " +
ty::type_err_to_str(l_tcx, terr));
ty::note_and_explain_type_err(l_tcx, terr);
false
}
}
}
// a list of mappings from in-scope-region-names ("isr") to the
// corresponding ty::Region
pub type isr_alist = @List<(ty::bound_region, ty::Region)>;
trait get_and_find_region {
fn get(&self, br: ty::bound_region) -> ty::Region;
fn find(&self, br: ty::bound_region) -> Option<ty::Region>;
}
impl get_and_find_region for isr_alist {
pub fn get(&self, br: ty::bound_region) -> ty::Region {
self.find(br).get()
}
pub fn find(&self, br: ty::bound_region) -> Option<ty::Region> {
for list::each(*self) |isr| {
let (isr_br, isr_r) = *isr;
if isr_br == br { return Some(isr_r); }
}
return None;
}
}
fn check_main_fn_ty(ccx: &CrateCtxt,
main_id: ast::node_id,
main_span: span) {
let tcx = ccx.tcx;
let main_t = ty::node_id_to_type(tcx, main_id);
match ty::get(main_t).sty {
ty::ty_bare_fn(ref fn_ty) => {
match tcx.items.find(&main_id) {
Some(&ast_map::node_item(it,_)) => {
match it.node {
ast::item_fn(_, _, _, ref ps, _)
if ps.is_parameterized() => {
tcx.sess.span_err(
main_span,
"main function is not allowed to have type parameters");
return;
}
_ => ()
}
}
_ => ()
}
let mut ok = ty::type_is_nil(fn_ty.sig.output);
let num_args = fn_ty.sig.inputs.len();
ok &= num_args == 0u;
            if !ok {
tcx.sess.span_err(
main_span,
fmt!("Wrong type in main function: found `%s`, \
expected `fn() -> ()`",
ppaux::ty_to_str(tcx, main_t)));
}
}
_ => {
tcx.sess.span_bug(main_span,
fmt!("main has a non-function type: found `%s`",
ppaux::ty_to_str(tcx, main_t)));
}
}
}
fn check_start_fn_ty(ccx: &CrateCtxt,
start_id: ast::node_id,
start_span: span) {
let tcx = ccx.tcx;
let start_t = ty::node_id_to_type(tcx, start_id);
match ty::get(start_t).sty {
ty::ty_bare_fn(_) => {
match tcx.items.find(&start_id) {
Some(&ast_map::node_item(it,_)) => {
match it.node {
ast::item_fn(_,_,_,ref ps,_)
if ps.is_parameterized() => {
tcx.sess.span_err(
start_span,
"start function is not allowed to have type parameters");
return;
}
_ => ()
}
}
_ => ()
}
let se_ty = ty::mk_bare_fn(tcx, ty::BareFnTy {
purity: ast::impure_fn,
abis: abi::AbiSet::Rust(),
sig: ty::FnSig {
bound_lifetime_names: opt_vec::Empty,
inputs: ~[
ty::mk_int(),
ty::mk_imm_ptr(tcx, ty::mk_imm_ptr(tcx, ty::mk_u8())),
ty::mk_imm_ptr(tcx, ty::mk_u8())
],
output: ty::mk_int()
}
});
require_same_types(tcx, None, false, start_span, start_t, se_ty,
|| fmt!("start function expects type: `%s`", ppaux::ty_to_str(ccx.tcx, se_ty)));
}
_ => {
tcx.sess.span_bug(start_span,
fmt!("start has a non-function type: found `%s`",
ppaux::ty_to_str(tcx, start_t)));
}
}
}
fn check_for_entry_fn(ccx: &CrateCtxt) {
let tcx = ccx.tcx;
    if !*tcx.sess.building_library {
match *tcx.sess.entry_fn {
Some((id, sp)) => match *tcx.sess.entry_type {
Some(session::EntryMain) => check_main_fn_ty(ccx, id, sp),
Some(session::EntryStart) => check_start_fn_ty(ccx, id, sp),
None => tcx.sess.bug("entry function without a type")
},
None => tcx.sess.bug("type checking without entry function")
}
}
}
pub fn check_crate(tcx: ty::ctxt,
trait_map: resolve::TraitMap,
crate: &ast::crate)
-> (method_map, vtable_map) {
let time_passes = tcx.sess.time_passes();
let ccx = @mut CrateCtxt {
trait_map: trait_map,
method_map: @mut HashMap::new(),
vtable_map: @mut HashMap::new(),
coherence_info: coherence::CoherenceInfo(),
tcx: tcx
};
time(time_passes, ~"type collecting", ||
collect::collect_item_types(ccx, crate));
// this ensures that later parts of type checking can assume that items
// have valid types and not error
tcx.sess.abort_if_errors();
time(time_passes, ~"coherence checking", ||
coherence::check_coherence(ccx, crate));
time(time_passes, ~"type checking", ||
check::check_item_types(ccx, crate));
check_for_entry_fn(ccx);
tcx.sess.abort_if_errors();
(ccx.method_map, ccx.vtable_map)
}
|
method_origin
|
identifier_name
|
tar.rs
|
use std::mem;
use misc::*;
#[ repr (C) ]
struct BinaryHeader {
name: [u8; 100],
mode: [u8; 8],
uid: [u8; 8],
gid: [u8; 8],
size: [u8; 12],
mtime: [u8; 12],
cksum: [u8; 8],
typeflag: [u8; 1],
linkname: [u8; 100],
magic: [u8; 6],
version: [u8; 2],
uname: [u8; 32],
gname: [u8; 32],
dev_major: [u8; 8],
dev_minor: [u8; 8],
atime: [u8; 12],
ctime: [u8; 12],
offset: [u8; 12],
longnames: [u8; 4],
unused: [u8; 1],
sparse: [BinarySparseHeader; 4],
isextended: [u8; 1],
realsize: [u8; 12],
pad: [u8; 17],
}
#[ repr (C) ]
struct BinarySparseHeader {
offset: [u8; 12],
numbytes: [u8; 12],
}
pub struct Header {
pub name: Vec <u8>,
pub mode: u32,
pub uid: u32,
pub gid: u32,
pub size: u64,
pub blocks: u64,
pub mtime: u64,
pub cksum: u32,
pub typeflag: Type,
pub linkname: Vec <u8>,
pub uname: Vec <u8>,
pub gname: Vec <u8>,
pub dev_major: u32,
pub dev_minor: u32,
pub atime: u64,
pub ctime: u64,
pub offset: u64,
}
#[ derive (Debug) ]
pub enum Type {
Regular,
Link,
SymbolicLink,
CharacterSpecial,
BlockSpecial,
Directory,
Fifo,
LongName,
LongLink,
}
impl Header {
pub fn read (
header_bytes: & [u8],
) -> Result <Header, TfError> {
        if header_bytes.len () != 512 {
panic! ();
}
let binary_header =
unsafe {
mem::transmute::<& u8, & BinaryHeader> (
& header_bytes [0])
};
        if binary_header.magic != * b"ustar " {
Err (TfError {
error_message: format! (
"Unrecognised tar format: {:?} {:?}",
binary_header.magic,
binary_header.version),
})
        } else if binary_header.version != * b" \0" {
Err (TfError {
error_message: format! (
"Unrecognised gnu tar version: {:?}",
binary_header.version),
})
} else {
let size =
try! (
tar_number_u64 (
& binary_header.size));
Ok (Header {
name: tar_string (
& binary_header.name),
mode: try! (
tar_number_u32 (
& binary_header.mode)),
uid: try! (
tar_number_u32 (
& binary_header.uid)),
gid: try! (
tar_number_u32 (
& binary_header.gid)),
size:
size,
blocks: 0
+ (size >> 9)
                    + (if (size & 0x1ff) != 0 { 1 } else { 0 }),
mtime: try! (
tar_number_u64 (
& binary_header.mtime)),
cksum: try! (
tar_number_u32 (
& binary_header.cksum)),
typeflag: tar_type (
& binary_header.typeflag),
linkname: tar_string (
& binary_header.linkname),
uname: tar_string (
& binary_header.uname),
gname: tar_string (
& binary_header.gname),
dev_major: try! (
tar_number_u32 (
& binary_header.dev_major)),
dev_minor: try! (
tar_number_u32 (
& binary_header.dev_minor)),
atime: try! (
tar_number_u64 (
& binary_header.atime)),
ctime: try! (
tar_number_u64 (
& binary_header.ctime)),
offset: try! (
tar_number_u64 (
& binary_header.offset)),
/*
longnames: [u8; 4],
unused: [u8; 1],
sparse: [BinarySparseHeader; 4],
isextended: [u8; 1],
realsize: [u8; 12],
pad: [u8; 17],
*/
})
}
}
}
fn tar_string (
slice: & [u8],
) -> Vec <u8> {
match slice.iter ().position (
|index| * index == 0,
) {
Some (index) =>
slice [.. index ].to_vec (),
None =>
slice.to_vec (),
}
}
fn tar_number_u64 (
slice: & [u8],
) -> Result <u64, TfError> {
if slice [0] == 0 {
Ok (0)
} else if slice [0] == 0x80 {
Ok (u64::from_be (
unsafe {
* mem::transmute::<& u8, & u64> (
& slice [4])
}
))
} else if slice [0] == 0xff {
panic! ()
} else {
let string =
try! (
String::from_utf8 (
tar_string (slice)));
let number =
try! (
u64::from_str_radix (
& string,
8));
Ok (number)
}
}
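// --- Illustrative sketch (not part of the original file) ---
// tar_number_u64 above handles the two numeric encodings found in ustar/GNU
// headers: NUL-terminated ASCII octal digits, and (when the first byte is
// 0x80) a big-endian binary value in the trailing bytes. The hypothetical
// test below exercises only the octal path; the field bytes are made up, and
// it assumes the crate's usual test setup with TfError coming from misc.
#[cfg(test)]
mod tar_number_u64_tests {
    use super::tar_number_u64;

    #[test]
    fn parses_ascii_octal_field () {
        // A 12-byte size field as it appears in a header: octal digits + NUL.
        let field: [u8; 12] = * b"00000000644\0";
        assert_eq! (tar_number_u64 (& field).ok (), Some (0o644));
    }
}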
fn tar_number_u32 (
slice: & [u8],
) -> Result <u32, TfError> {
if slice [0] == 0 {
Ok (0)
} else {
let string =
try! (
String::from_utf8 (
tar_string (slice)));
let number =
try! (
u32::from_str_radix (
& string,
8));
Ok (number)
}
|
) -> Type {
match typeflag [0] {
b'0' => Type::Regular,
b'1' => Type::Link,
b'2' => Type::SymbolicLink,
b'3' => Type::CharacterSpecial,
b'4' => Type::BlockSpecial,
b'5' => Type::Directory,
b'6' => Type::Fifo,
b'K' => Type::LongLink,
b'L' => Type::LongName,
_ => {
panic! (
"Unrecognised typeflag: {:?}",
typeflag [0])
}
}
}
|
}
fn tar_type (
typeflag: & [u8; 1],
|
random_line_split
|
tar.rs
|
use std::mem;
use misc::*;
#[ repr (C) ]
struct BinaryHeader {
name: [u8; 100],
mode: [u8; 8],
uid: [u8; 8],
gid: [u8; 8],
size: [u8; 12],
mtime: [u8; 12],
cksum: [u8; 8],
typeflag: [u8; 1],
linkname: [u8; 100],
magic: [u8; 6],
version: [u8; 2],
uname: [u8; 32],
gname: [u8; 32],
dev_major: [u8; 8],
dev_minor: [u8; 8],
atime: [u8; 12],
ctime: [u8; 12],
offset: [u8; 12],
longnames: [u8; 4],
unused: [u8; 1],
sparse: [BinarySparseHeader; 4],
isextended: [u8; 1],
realsize: [u8; 12],
pad: [u8; 17],
}
#[ repr (C) ]
struct BinarySparseHeader {
offset: [u8; 12],
numbytes: [u8; 12],
}
pub struct Header {
pub name: Vec <u8>,
pub mode: u32,
pub uid: u32,
pub gid: u32,
pub size: u64,
pub blocks: u64,
pub mtime: u64,
pub cksum: u32,
pub typeflag: Type,
pub linkname: Vec <u8>,
pub uname: Vec <u8>,
pub gname: Vec <u8>,
pub dev_major: u32,
pub dev_minor: u32,
pub atime: u64,
pub ctime: u64,
pub offset: u64,
}
#[ derive (Debug) ]
pub enum Type {
Regular,
Link,
SymbolicLink,
CharacterSpecial,
BlockSpecial,
Directory,
Fifo,
LongName,
LongLink,
}
impl Header {
pub fn read (
header_bytes: & [u8],
) -> Result <Header, TfError> {
        if header_bytes.len () != 512 {
panic! ();
}
let binary_header =
unsafe {
mem::transmute::<& u8, & BinaryHeader> (
& header_bytes [0])
};
        if binary_header.magic != * b"ustar " {
Err (TfError {
error_message: format! (
"Unrecognised tar format: {:?} {:?}",
binary_header.magic,
binary_header.version),
})
        } else if binary_header.version != * b" \0" {
Err (TfError {
error_message: format! (
"Unrecognised gnu tar version: {:?}",
binary_header.version),
})
} else {
let size =
try! (
tar_number_u64 (
& binary_header.size));
Ok (Header {
name: tar_string (
& binary_header.name),
mode: try! (
tar_number_u32 (
& binary_header.mode)),
uid: try! (
tar_number_u32 (
& binary_header.uid)),
gid: try! (
tar_number_u32 (
& binary_header.gid)),
size:
size,
blocks: 0
+ (size >> 9)
                    + (if (size & 0x1ff) != 0 { 1 } else { 0 }),
mtime: try! (
tar_number_u64 (
& binary_header.mtime)),
cksum: try! (
tar_number_u32 (
& binary_header.cksum)),
typeflag: tar_type (
& binary_header.typeflag),
linkname: tar_string (
& binary_header.linkname),
uname: tar_string (
& binary_header.uname),
gname: tar_string (
& binary_header.gname),
dev_major: try! (
tar_number_u32 (
& binary_header.dev_major)),
dev_minor: try! (
tar_number_u32 (
& binary_header.dev_minor)),
atime: try! (
tar_number_u64 (
& binary_header.atime)),
ctime: try! (
tar_number_u64 (
& binary_header.ctime)),
offset: try! (
tar_number_u64 (
& binary_header.offset)),
/*
longnames: [u8; 4],
unused: [u8; 1],
sparse: [BinarySparseHeader; 4],
isextended: [u8; 1],
realsize: [u8; 12],
pad: [u8; 17],
*/
})
}
}
}
fn tar_string (
slice: & [u8],
) -> Vec <u8> {
match slice.iter ().position (
|index| * index == 0,
) {
Some (index) =>
slice [.. index ].to_vec (),
None =>
slice.to_vec (),
}
}
fn
|
(
slice: & [u8],
) -> Result <u64, TfError> {
if slice [0] == 0 {
Ok (0)
} else if slice [0] == 0x80 {
Ok (u64::from_be (
unsafe {
* mem::transmute::<& u8, & u64> (
& slice [4])
}
))
} else if slice [0] == 0xff {
panic! ()
} else {
let string =
try! (
String::from_utf8 (
tar_string (slice)));
let number =
try! (
u64::from_str_radix (
& string,
8));
Ok (number)
}
}
fn tar_number_u32 (
slice: & [u8],
) -> Result <u32, TfError> {
if slice [0] == 0 {
Ok (0)
} else {
let string =
try! (
String::from_utf8 (
tar_string (slice)));
let number =
try! (
u32::from_str_radix (
& string,
8));
Ok (number)
}
}
fn tar_type (
typeflag: & [u8; 1],
) -> Type {
match typeflag [0] {
b'0' => Type::Regular,
b'1' => Type::Link,
b'2' => Type::SymbolicLink,
b'3' => Type::CharacterSpecial,
b'4' => Type::BlockSpecial,
b'5' => Type::Directory,
b'6' => Type::Fifo,
b'K' => Type::LongLink,
b'L' => Type::LongName,
_ => {
panic! (
"Unrecognised typeflag: {:?}",
typeflag [0])
}
}
}
|
tar_number_u64
|
identifier_name
|
tar.rs
|
use std::mem;
use misc::*;
#[ repr (C) ]
struct BinaryHeader {
name: [u8; 100],
mode: [u8; 8],
uid: [u8; 8],
gid: [u8; 8],
size: [u8; 12],
mtime: [u8; 12],
cksum: [u8; 8],
typeflag: [u8; 1],
linkname: [u8; 100],
magic: [u8; 6],
version: [u8; 2],
uname: [u8; 32],
gname: [u8; 32],
dev_major: [u8; 8],
dev_minor: [u8; 8],
atime: [u8; 12],
ctime: [u8; 12],
offset: [u8; 12],
longnames: [u8; 4],
unused: [u8; 1],
sparse: [BinarySparseHeader; 4],
isextended: [u8; 1],
realsize: [u8; 12],
pad: [u8; 17],
}
#[ repr (C) ]
struct BinarySparseHeader {
offset: [u8; 12],
numbytes: [u8; 12],
}
pub struct Header {
pub name: Vec <u8>,
pub mode: u32,
pub uid: u32,
pub gid: u32,
pub size: u64,
pub blocks: u64,
pub mtime: u64,
pub cksum: u32,
pub typeflag: Type,
pub linkname: Vec <u8>,
pub uname: Vec <u8>,
pub gname: Vec <u8>,
pub dev_major: u32,
pub dev_minor: u32,
pub atime: u64,
pub ctime: u64,
pub offset: u64,
}
#[ derive (Debug) ]
pub enum Type {
Regular,
Link,
SymbolicLink,
CharacterSpecial,
BlockSpecial,
Directory,
Fifo,
LongName,
LongLink,
}
impl Header {
pub fn read (
header_bytes: & [u8],
) -> Result <Header, TfError>
|
        } else if binary_header.version != * b" \0" {
Err (TfError {
error_message: format! (
"Unrecognised gnu tar version: {:?}",
binary_header.version),
})
} else {
let size =
try! (
tar_number_u64 (
& binary_header.size));
Ok (Header {
name: tar_string (
& binary_header.name),
mode: try! (
tar_number_u32 (
& binary_header.mode)),
uid: try! (
tar_number_u32 (
& binary_header.uid)),
gid: try! (
tar_number_u32 (
& binary_header.gid)),
size:
size,
blocks: 0
+ (size >> 9)
                    + (if (size & 0x1ff) != 0 { 1 } else { 0 }),
mtime: try! (
tar_number_u64 (
& binary_header.mtime)),
cksum: try! (
tar_number_u32 (
& binary_header.cksum)),
typeflag: tar_type (
& binary_header.typeflag),
linkname: tar_string (
& binary_header.linkname),
uname: tar_string (
& binary_header.uname),
gname: tar_string (
& binary_header.gname),
dev_major: try! (
tar_number_u32 (
& binary_header.dev_major)),
dev_minor: try! (
tar_number_u32 (
& binary_header.dev_minor)),
atime: try! (
tar_number_u64 (
& binary_header.atime)),
ctime: try! (
tar_number_u64 (
& binary_header.ctime)),
offset: try! (
tar_number_u64 (
& binary_header.offset)),
/*
longnames: [u8; 4],
unused: [u8; 1],
sparse: [BinarySparseHeader; 4],
isextended: [u8; 1],
realsize: [u8; 12],
pad: [u8; 17],
*/
})
}
}
}
fn tar_string (
slice: & [u8],
) -> Vec <u8> {
match slice.iter ().position (
|index| * index == 0,
) {
Some (index) =>
slice [.. index ].to_vec (),
None =>
slice.to_vec (),
}
}
fn tar_number_u64 (
slice: & [u8],
) -> Result <u64, TfError> {
if slice [0] == 0 {
Ok (0)
} else if slice [0] == 0x80 {
Ok (u64::from_be (
unsafe {
* mem::transmute::<& u8, & u64> (
& slice [4])
}
))
} else if slice [0] == 0xff {
panic! ()
} else {
let string =
try! (
String::from_utf8 (
tar_string (slice)));
let number =
try! (
u64::from_str_radix (
& string,
8));
Ok (number)
}
}
fn tar_number_u32 (
slice: & [u8],
) -> Result <u32, TfError> {
if slice [0] == 0 {
Ok (0)
} else {
let string =
try! (
String::from_utf8 (
tar_string (slice)));
let number =
try! (
u32::from_str_radix (
& string,
8));
Ok (number)
}
}
fn tar_type (
typeflag: & [u8; 1],
) -> Type {
match typeflag [0] {
b'0' => Type::Regular,
b'1' => Type::Link,
b'2' => Type::SymbolicLink,
b'3' => Type::CharacterSpecial,
b'4' => Type::BlockSpecial,
b'5' => Type::Directory,
b'6' => Type::Fifo,
b'K' => Type::LongLink,
b'L' => Type::LongName,
_ => {
panic! (
"Unrecognised typeflag: {:?}",
typeflag [0])
}
}
}
|
{
if header_bytes.len () != 512 {
panic! ();
}
let binary_header =
unsafe {
mem::transmute::<& u8, & BinaryHeader> (
& header_bytes [0])
};
if binary_header.magic != * b"ustar " {
Err (TfError {
error_message: format! (
"Unrecognised tar format: {:?} {:?}",
binary_header.magic,
binary_header.version),
})
|
identifier_body
|
repo_commit.rs
|
use rusqlite::{SqliteConnection,SqliteResult,SqliteRow};
use schemamama_rusqlite::{SqliteMigration};
use std::str::FromStr;
use result::*;
#[derive(Debug,Copy,Clone)]
pub enum CommitState {
Indexed,
NotIndexed,
}
impl FromStr for CommitState {
type Err = RepoError;
fn from_str(s: &str) -> Result<CommitState, Self::Err> {
match s {
"Indexed" => Ok(CommitState::Indexed),
"NotIndexed" => Ok(CommitState::NotIndexed),
_ => Err(RepoError::EnumParseError(s.to_string()))
}
}
}
impl ToString for CommitState {
fn to_string(&self) -> String {
match *self {
CommitState::Indexed => "Indexed".to_string(),
CommitState::NotIndexed => "NotIndexed".to_string(),
}
}
}
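// --- Illustrative sketch (not part of the original file) ---
// CommitState is meant to round-trip through its string form: to_string
// produces exactly the two labels that from_str accepts, and any other input
// is an EnumParseError. A hypothetical test of that contract:
#[cfg(test)]
mod commit_state_tests {
    use super::CommitState;
    use std::str::FromStr;

    #[test]
    fn round_trips_through_strings() {
        for state in [CommitState::Indexed, CommitState::NotIndexed].iter() {
            let text = state.to_string();
            match CommitState::from_str(&text) {
                Ok(parsed) => assert_eq!(parsed.to_string(), text),
                Err(_) => panic!("failed to re-parse {}", text),
            }
        }
        assert!(CommitState::from_str("bogus").is_err());
    }
}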
#[derive(Debug,Clone)]
pub struct RepoCommit {
pub id: String,
pub repo_id: String,
pub state: CommitState,
}
impl RepoCommit {
pub fn new(id: String, repo_id: String, state: CommitState) -> RepoCommit {
RepoCommit {
id: id,
repo_id: repo_id,
state: state,
}
}
|
id: row0.get(0),
repo_id: row0.get(1),
state: try!(CommitState::from_str(&commit_state)),
})
}
}
pub struct CreateCommitsTable;
migration!(CreateCommitsTable, 3, "create commits table");
impl SqliteMigration for CreateCommitsTable {
fn up(&self, conn: &SqliteConnection) -> SqliteResult<()> {
const CREATE_COMMITS: &'static str = "\
CREATE TABLE commits ( \
id TEXT, \
repo_id TEXT, \
state TEXT \
);";
const CREATE_COMMITS_PKEY: &'static str = "\
CREATE UNIQUE INDEX commits_repo_id_id_idx ON commits(repo_id,id)";
Ok(())
.and(conn.execute(CREATE_COMMITS, &[]))
.and(conn.execute(CREATE_COMMITS_PKEY, &[]))
.map(|_| (()))
}
fn down(&self, conn: &SqliteConnection) -> SqliteResult<()> {
conn.execute("DROP TABLE commits;", &[]).map(|_| ())
}
}
|
pub fn new_from_sql_row(row0: &SqliteRow) -> RepoResult<RepoCommit> {
let commit_state: String = row0.get(2);
Ok(RepoCommit {
|
random_line_split
|
repo_commit.rs
|
use rusqlite::{SqliteConnection,SqliteResult,SqliteRow};
use schemamama_rusqlite::{SqliteMigration};
use std::str::FromStr;
use result::*;
#[derive(Debug,Copy,Clone)]
pub enum CommitState {
Indexed,
NotIndexed,
}
impl FromStr for CommitState {
type Err = RepoError;
fn from_str(s: &str) -> Result<CommitState, Self::Err> {
match s {
"Indexed" => Ok(CommitState::Indexed),
"NotIndexed" => Ok(CommitState::NotIndexed),
_ => Err(RepoError::EnumParseError(s.to_string()))
}
}
}
impl ToString for CommitState {
fn
|
(&self) -> String {
match *self {
CommitState::Indexed => "Indexed".to_string(),
CommitState::NotIndexed => "NotIndexed".to_string(),
}
}
}
#[derive(Debug,Clone)]
pub struct RepoCommit {
pub id: String,
pub repo_id: String,
pub state: CommitState,
}
impl RepoCommit {
pub fn new(id: String, repo_id: String, state: CommitState) -> RepoCommit {
RepoCommit {
id: id,
repo_id: repo_id,
state: state,
}
}
pub fn new_from_sql_row(row0: &SqliteRow) -> RepoResult<RepoCommit> {
let commit_state: String = row0.get(2);
Ok(RepoCommit {
id: row0.get(0),
repo_id: row0.get(1),
state: try!(CommitState::from_str(&commit_state)),
})
}
}
pub struct CreateCommitsTable;
migration!(CreateCommitsTable, 3, "create commits table");
impl SqliteMigration for CreateCommitsTable {
fn up(&self, conn: &SqliteConnection) -> SqliteResult<()> {
const CREATE_COMMITS: &'static str = "\
CREATE TABLE commits ( \
id TEXT, \
repo_id TEXT, \
state TEXT \
);";
const CREATE_COMMITS_PKEY: &'static str = "\
CREATE UNIQUE INDEX commits_repo_id_id_idx ON commits(repo_id,id)";
Ok(())
.and(conn.execute(CREATE_COMMITS, &[]))
.and(conn.execute(CREATE_COMMITS_PKEY, &[]))
.map(|_| (()))
}
fn down(&self, conn: &SqliteConnection) -> SqliteResult<()> {
conn.execute("DROP TABLE commits;", &[]).map(|_| ())
}
}
|
to_string
|
identifier_name
|