file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
constant_offsets.rs | // SPDX-License-Identifier: MIT
// Copyright [email protected]
// Copyright iced contributors
use core::hash::{Hash, Hasher};
use pyo3::class::basic::CompareOp;
use pyo3::prelude::*;
use pyo3::PyObjectProtocol;
use std::collections::hash_map::DefaultHasher;
/// Contains the offsets of the displacement and immediate.
///
/// Call :class:`Decoder.get_constant_offsets` or :class:`Encoder.get_constant_offsets` to get the
/// offsets of the constants after the instruction has been decoded/encoded.
#[pyclass(module = "_iced_x86_py")]
#[text_signature = "(/)"]
#[derive(Copy, Clone)]
pub(crate) struct ConstantOffsets {
pub(crate) offsets: iced_x86::ConstantOffsets,
}
#[pymethods]
impl ConstantOffsets {
/// int: (``u32``) The offset of the displacement, if any
#[getter]
fn displacement_offset(&self) -> u32 |
/// int: (``u32``) Size in bytes of the displacement, or 0 if there's no displacement
#[getter]
fn displacement_size(&self) -> u32 {
self.offsets.displacement_size() as u32
}
/// int: (``u32``) The offset of the first immediate, if any.
///
/// This field can be invalid even if the operand has an immediate if it's an immediate that isn't part
/// of the instruction stream, eg. ``SHL AL,1``.
#[getter]
fn immediate_offset(&self) -> u32 {
self.offsets.immediate_offset() as u32
}
/// int: (``u32``) Size in bytes of the first immediate, or 0 if there's no immediate
#[getter]
fn immediate_size(&self) -> u32 {
self.offsets.immediate_size() as u32
}
/// int: (``u32``) The offset of the second immediate, if any.
#[getter]
fn immediate_offset2(&self) -> u32 {
self.offsets.immediate_offset2() as u32
}
/// int: (``u32``) Size in bytes of the second immediate, or 0 if there's no second immediate
#[getter]
fn immediate_size2(&self) -> u32 {
self.offsets.immediate_size2() as u32
}
/// bool: ``True`` if :class:`ConstantOffsets.displacement_offset` and :class:`ConstantOffsets.displacement_size` are valid
#[getter]
fn has_displacement(&self) -> bool {
self.offsets.has_displacement()
}
/// bool: ``True`` if :class:`ConstantOffsets.immediate_offset` and :class:`ConstantOffsets.immediate_size` are valid
#[getter]
fn has_immediate(&self) -> bool {
self.offsets.has_immediate()
}
/// bool: ``True`` if :class:`ConstantOffsets.immediate_offset2` and :class:`ConstantOffsets.immediate_size2` are valid
#[getter]
fn has_immediate2(&self) -> bool {
self.offsets.has_immediate2()
}
/// Returns a copy of this instance.
///
/// Returns:
/// ConstantOffsets: A copy of this instance
///
/// This is identical to :class:`ConstantOffsets.copy`
#[text_signature = "($self, /)"]
fn __copy__(&self) -> Self {
*self
}
/// Returns a copy of this instance.
///
/// Args:
/// memo (Any): memo dict
///
/// Returns:
/// ConstantOffsets: A copy of this instance
///
/// This is identical to :class:`ConstantOffsets.copy`
#[text_signature = "($self, memo, /)"]
fn __deepcopy__(&self, _memo: &PyAny) -> Self {
*self
}
/// Returns a copy of this instance.
///
/// Returns:
/// ConstantOffsets: A copy of this instance
#[text_signature = "($self, /)"]
fn copy(&self) -> Self {
*self
}
}
#[pyproto]
impl PyObjectProtocol for ConstantOffsets {
fn __richcmp__(&self, other: PyRef<ConstantOffsets>, op: CompareOp) -> PyObject {
match op {
CompareOp::Eq => (self.offsets == other.offsets).into_py(other.py()),
CompareOp::Ne => (self.offsets != other.offsets).into_py(other.py()),
_ => other.py().NotImplemented(),
}
}
fn __hash__(&self) -> u64 {
let mut hasher = DefaultHasher::new();
self.offsets.hash(&mut hasher);
hasher.finish()
}
}
| {
self.offsets.displacement_offset() as u32
} | identifier_body |
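
The rows above and below wrap `iced_x86::ConstantOffsets` for Python via PyO3; every getter simply forwards to the underlying Rust value obtained from a decoder or encoder. A minimal sketch of how that value is produced on the Rust side follows — the decoder setup and the example bytes are illustrative assumptions, not part of this file.

```rust
// Sketch only: assumes iced-x86's Rust API (Decoder::new, decode, get_constant_offsets).
use iced_x86::{Decoder, DecoderOptions};

fn main() {
    // `add dword ptr [rdi+12345678h], 9ABCDEF0h`: carries both a displacement and an immediate.
    let bytes = b"\x81\x87\x78\x56\x34\x12\xF0\xDE\xBC\x9A";
    let mut decoder = Decoder::new(64, bytes, DecoderOptions::NONE);
    let instr = decoder.decode();
    let offsets = decoder.get_constant_offsets(&instr);
    if offsets.has_displacement() {
        println!("disp at byte {}, {} bytes", offsets.displacement_offset(), offsets.displacement_size());
    }
    if offsets.has_immediate() {
        println!("imm at byte {}, {} bytes", offsets.immediate_offset(), offsets.immediate_size());
    }
}
```
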
constant_offsets.rs | // SPDX-License-Identifier: MIT
// Copyright [email protected]
// Copyright iced contributors
use core::hash::{Hash, Hasher};
use pyo3::class::basic::CompareOp;
use pyo3::prelude::*;
use pyo3::PyObjectProtocol;
use std::collections::hash_map::DefaultHasher;
/// Contains the offsets of the displacement and immediate.
///
/// Call :class:`Decoder.get_constant_offsets` or :class:`Encoder.get_constant_offsets` to get the
/// offsets of the constants after the instruction has been decoded/encoded.
#[pyclass(module = "_iced_x86_py")]
#[text_signature = "(/)"]
#[derive(Copy, Clone)]
pub(crate) struct ConstantOffsets {
pub(crate) offsets: iced_x86::ConstantOffsets,
}
#[pymethods]
impl ConstantOffsets {
/// int: (``u32``) The offset of the displacement, if any
#[getter]
fn displacement_offset(&self) -> u32 {
self.offsets.displacement_offset() as u32
}
/// int: (``u32``) Size in bytes of the displacement, or 0 if there's no displacement
#[getter]
fn displacement_size(&self) -> u32 {
self.offsets.displacement_size() as u32
}
/// int: (``u32``) The offset of the first immediate, if any.
///
/// This field can be invalid even if the operand has an immediate if it's an immediate that isn't part
/// of the instruction stream, eg. ``SHL AL,1``.
#[getter]
fn immediate_offset(&self) -> u32 {
self.offsets.immediate_offset() as u32
}
/// int: (``u32``) Size in bytes of the first immediate, or 0 if there's no immediate
#[getter]
fn immediate_size(&self) -> u32 {
self.offsets.immediate_size() as u32
}
/// int: (``u32``) The offset of the second immediate, if any.
#[getter]
fn immediate_offset2(&self) -> u32 {
self.offsets.immediate_offset2() as u32
}
/// int: (``u32``) Size in bytes of the second immediate, or 0 if there's no second immediate
#[getter]
fn immediate_size2(&self) -> u32 {
self.offsets.immediate_size2() as u32
}
/// bool: ``True`` if :class:`ConstantOffsets.displacement_offset` and :class:`ConstantOffsets.displacement_size` are valid
#[getter]
fn has_displacement(&self) -> bool {
self.offsets.has_displacement()
}
/// bool: ``True`` if :class:`ConstantOffsets.immediate_offset` and :class:`ConstantOffsets.immediate_size` are valid | /// bool: ``True`` if :class:`ConstantOffsets.immediate_offset2` and :class:`ConstantOffsets.immediate_size2` are valid
#[getter]
fn has_immediate2(&self) -> bool {
self.offsets.has_immediate2()
}
/// Returns a copy of this instance.
///
/// Returns:
/// ConstantOffsets: A copy of this instance
///
/// This is identical to :class:`ConstantOffsets.copy`
#[text_signature = "($self, /)"]
fn __copy__(&self) -> Self {
*self
}
/// Returns a copy of this instance.
///
/// Args:
/// memo (Any): memo dict
///
/// Returns:
/// ConstantOffsets: A copy of this instance
///
/// This is identical to :class:`ConstantOffsets.copy`
#[text_signature = "($self, memo, /)"]
fn __deepcopy__(&self, _memo: &PyAny) -> Self {
*self
}
/// Returns a copy of this instance.
///
/// Returns:
/// ConstantOffsets: A copy of this instance
#[text_signature = "($self, /)"]
fn copy(&self) -> Self {
*self
}
}
#[pyproto]
impl PyObjectProtocol for ConstantOffsets {
fn __richcmp__(&self, other: PyRef<ConstantOffsets>, op: CompareOp) -> PyObject {
match op {
CompareOp::Eq => (self.offsets == other.offsets).into_py(other.py()),
CompareOp::Ne => (self.offsets != other.offsets).into_py(other.py()),
_ => other.py().NotImplemented(),
}
}
fn __hash__(&self) -> u64 {
let mut hasher = DefaultHasher::new();
self.offsets.hash(&mut hasher);
hasher.finish()
}
} | #[getter]
fn has_immediate(&self) -> bool {
self.offsets.has_immediate()
}
| random_line_split |
visitor.rs | // Copyright 2016 Pierre Talbot (IRCAM)
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![macro_use]
use std::default::Default;
use ast::*;
use ast::Expression::*;
pub trait Visitor<R: Default> : ExprByIndex
{
fn visit_expr(&mut self, this: usize) -> R {
walk_expr(self, this)
}
fn visit_str_literal(&mut self, _this: usize, _lit: String) -> R { R::default() }
fn visit_non_terminal_symbol(&mut self, _this: usize, _rule: &Ident) -> R |
fn visit_external_non_terminal_symbol(&mut self, _this: usize, _rule: &syn::Path) -> R { R::default() }
fn visit_atom(&mut self, _this: usize) -> R { R::default() }
fn visit_any_single_char(&mut self, this: usize) -> R {
self.visit_atom(this)
}
fn visit_character_class(&mut self, this: usize, _char_class: CharacterClassExpr) -> R {
self.visit_atom(this)
}
fn visit_spanned_expr(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_range_expr(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_sequence(&mut self, _this: usize, children: Vec<usize>) -> R;
fn visit_choice(&mut self, _this: usize, children: Vec<usize>) -> R;
fn visit_repeat(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_zero_or_more(&mut self, this: usize, child: usize) -> R {
self.visit_repeat(this, child)
}
fn visit_one_or_more(&mut self, this: usize, child: usize) -> R {
self.visit_repeat(this, child)
}
fn visit_optional(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_syntactic_predicate(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_not_predicate(&mut self, this: usize, child: usize) -> R {
self.visit_syntactic_predicate(this, child)
}
fn visit_and_predicate(&mut self, this: usize, child: usize) -> R {
self.visit_syntactic_predicate(this, child)
}
fn visit_semantic_action(&mut self, _this: usize, child: usize, _boxed: bool, _action: syn::Expr) -> R {
self.visit_expr(child)
}
fn visit_type_ascription(&mut self, _this: usize, child: usize, _ty: IType) -> R {
self.visit_expr(child)
}
}
/// We need this macro for factorizing the code since we can not specialize a trait on specific type parameter (we would need to specialize on `()` here).
macro_rules! unit_visitor_impl {
(sequence) => (
fn visit_sequence(&mut self, _this: usize, children: Vec<usize>) -> () {
walk_exprs(self, children);
}
);
(choice) => (
fn visit_choice(&mut self, _this: usize, children: Vec<usize>) -> () {
walk_exprs(self, children);
}
);
}
pub fn walk_expr<R: Default, V: ?Sized>(visitor: &mut V, this: usize) -> R where
V: Visitor<R>
{
match visitor.expr_by_index(this) {
StrLiteral(lit) => {
visitor.visit_str_literal(this, lit)
}
AnySingleChar => {
visitor.visit_any_single_char(this)
}
NonTerminalSymbol(rule) => {
visitor.visit_non_terminal_symbol(this, &rule)
}
ExternalNonTerminalSymbol(rule) => {
visitor.visit_external_non_terminal_symbol(this, &rule)
}
Sequence(seq) => {
visitor.visit_sequence(this, seq)
}
Choice(choices) => {
visitor.visit_choice(this, choices)
}
ZeroOrMore(child) => {
visitor.visit_zero_or_more(this, child)
}
OneOrMore(child) => {
visitor.visit_one_or_more(this, child)
}
ZeroOrOne(child) => {
visitor.visit_optional(this, child)
}
NotPredicate(child) => {
visitor.visit_not_predicate(this, child)
}
AndPredicate(child) => {
visitor.visit_and_predicate(this, child)
}
CharacterClass(char_class) => {
visitor.visit_character_class(this, char_class)
}
SemanticAction(child, boxed, action) => {
visitor.visit_semantic_action(this, child, boxed, action)
}
TypeAscription(child, ty) => {
visitor.visit_type_ascription(this, child, ty)
}
SpannedExpr(child) => {
visitor.visit_spanned_expr(this, child)
}
RangeExpr(child) => {
visitor.visit_range_expr(this, child)
}
}
}
pub fn walk_exprs<R: Default, V: ?Sized>(visitor: &mut V, exprs: Vec<usize>) -> Vec<R> where
V: Visitor<R>
{
exprs.into_iter().map(|expr| visitor.visit_expr(expr)).collect()
}
| { R::default() } | identifier_body |
visitor.rs | // Copyright 2016 Pierre Talbot (IRCAM)
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![macro_use]
use std::default::Default;
use ast::*;
use ast::Expression::*;
pub trait Visitor<R: Default> : ExprByIndex
{
fn visit_expr(&mut self, this: usize) -> R {
walk_expr(self, this)
}
fn visit_str_literal(&mut self, _this: usize, _lit: String) -> R { R::default() }
fn visit_non_terminal_symbol(&mut self, _this: usize, _rule: &Ident) -> R { R::default() }
fn visit_external_non_terminal_symbol(&mut self, _this: usize, _rule: &syn::Path) -> R { R::default() }
fn visit_atom(&mut self, _this: usize) -> R { R::default() }
fn visit_any_single_char(&mut self, this: usize) -> R {
self.visit_atom(this)
}
fn visit_character_class(&mut self, this: usize, _char_class: CharacterClassExpr) -> R {
self.visit_atom(this)
}
fn visit_spanned_expr(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_range_expr(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_sequence(&mut self, _this: usize, children: Vec<usize>) -> R;
fn visit_choice(&mut self, _this: usize, children: Vec<usize>) -> R;
fn visit_repeat(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_zero_or_more(&mut self, this: usize, child: usize) -> R {
self.visit_repeat(this, child)
}
fn visit_one_or_more(&mut self, this: usize, child: usize) -> R {
self.visit_repeat(this, child)
}
fn visit_optional(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_syntactic_predicate(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_not_predicate(&mut self, this: usize, child: usize) -> R {
self.visit_syntactic_predicate(this, child)
}
fn visit_and_predicate(&mut self, this: usize, child: usize) -> R {
self.visit_syntactic_predicate(this, child)
}
fn visit_semantic_action(&mut self, _this: usize, child: usize, _boxed: bool, _action: syn::Expr) -> R {
self.visit_expr(child)
}
fn visit_type_ascription(&mut self, _this: usize, child: usize, _ty: IType) -> R {
self.visit_expr(child)
}
}
/// We need this macro for factorizing the code since we can not specialize a trait on specific type parameter (we would need to specialize on `()` here).
macro_rules! unit_visitor_impl {
(sequence) => (
fn visit_sequence(&mut self, _this: usize, children: Vec<usize>) -> () {
walk_exprs(self, children);
}
);
(choice) => (
fn visit_choice(&mut self, _this: usize, children: Vec<usize>) -> () {
walk_exprs(self, children);
}
);
}
pub fn walk_expr<R: Default, V: ?Sized>(visitor: &mut V, this: usize) -> R where
V: Visitor<R>
{
match visitor.expr_by_index(this) {
StrLiteral(lit) => {
visitor.visit_str_literal(this, lit)
}
AnySingleChar => {
visitor.visit_any_single_char(this)
}
NonTerminalSymbol(rule) => {
visitor.visit_non_terminal_symbol(this, &rule)
}
ExternalNonTerminalSymbol(rule) => {
visitor.visit_external_non_terminal_symbol(this, &rule)
}
Sequence(seq) => {
visitor.visit_sequence(this, seq)
}
Choice(choices) => {
visitor.visit_choice(this, choices)
}
ZeroOrMore(child) => {
visitor.visit_zero_or_more(this, child)
}
OneOrMore(child) => |
ZeroOrOne(child) => {
visitor.visit_optional(this, child)
}
NotPredicate(child) => {
visitor.visit_not_predicate(this, child)
}
AndPredicate(child) => {
visitor.visit_and_predicate(this, child)
}
CharacterClass(char_class) => {
visitor.visit_character_class(this, char_class)
}
SemanticAction(child, boxed, action) => {
visitor.visit_semantic_action(this, child, boxed, action)
}
TypeAscription(child, ty) => {
visitor.visit_type_ascription(this, child, ty)
}
SpannedExpr(child) => {
visitor.visit_spanned_expr(this, child)
}
RangeExpr(child) => {
visitor.visit_range_expr(this, child)
}
}
}
pub fn walk_exprs<R: Default, V: ?Sized>(visitor: &mut V, exprs: Vec<usize>) -> Vec<R> where
V: Visitor<R>
{
exprs.into_iter().map(|expr| visitor.visit_expr(expr)).collect()
}
| {
visitor.visit_one_or_more(this, child)
} | conditional_block |
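
The `Visitor<R>` trait in the visitor.rs rows only forces implementors to provide `visit_sequence` and `visit_choice`; every other hook defaults to walking its children. A hypothetical implementor that counts string literals is sketched below — the `expr_by_index` signature, the `Vec<Expression>` storage, and `Expression: Clone` are assumptions inferred from `walk_expr`, not taken from the crate's other files.

```rust
// Hypothetical visitor: counts StrLiteral nodes in an expression arena.
struct LiteralCounter {
    exprs: Vec<Expression>, // assumed storage; the trait only needs expr_by_index()
    count: usize,
}

impl ExprByIndex for LiteralCounter {
    // Assumed signature: walk_expr matches on an owned Expression.
    fn expr_by_index(&self, index: usize) -> Expression {
        self.exprs[index].clone()
    }
}

impl Visitor<()> for LiteralCounter {
    unit_visitor_impl!(sequence);
    unit_visitor_impl!(choice);

    fn visit_str_literal(&mut self, _this: usize, _lit: String) {
        self.count += 1;
    }
}
```
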
visitor.rs | // Copyright 2016 Pierre Talbot (IRCAM)
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![macro_use]
use std::default::Default;
use ast::*;
use ast::Expression::*;
pub trait Visitor<R: Default> : ExprByIndex
{
fn visit_expr(&mut self, this: usize) -> R {
walk_expr(self, this)
}
fn visit_str_literal(&mut self, _this: usize, _lit: String) -> R { R::default() }
fn visit_non_terminal_symbol(&mut self, _this: usize, _rule: &Ident) -> R { R::default() }
fn visit_external_non_terminal_symbol(&mut self, _this: usize, _rule: &syn::Path) -> R { R::default() }
fn visit_atom(&mut self, _this: usize) -> R { R::default() }
fn visit_any_single_char(&mut self, this: usize) -> R {
self.visit_atom(this)
}
fn visit_character_class(&mut self, this: usize, _char_class: CharacterClassExpr) -> R {
self.visit_atom(this)
}
fn visit_spanned_expr(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_range_expr(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_sequence(&mut self, _this: usize, children: Vec<usize>) -> R;
fn visit_choice(&mut self, _this: usize, children: Vec<usize>) -> R;
fn visit_repeat(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn | (&mut self, this: usize, child: usize) -> R {
self.visit_repeat(this, child)
}
fn visit_one_or_more(&mut self, this: usize, child: usize) -> R {
self.visit_repeat(this, child)
}
fn visit_optional(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_syntactic_predicate(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_not_predicate(&mut self, this: usize, child: usize) -> R {
self.visit_syntactic_predicate(this, child)
}
fn visit_and_predicate(&mut self, this: usize, child: usize) -> R {
self.visit_syntactic_predicate(this, child)
}
fn visit_semantic_action(&mut self, _this: usize, child: usize, _boxed: bool, _action: syn::Expr) -> R {
self.visit_expr(child)
}
fn visit_type_ascription(&mut self, _this: usize, child: usize, _ty: IType) -> R {
self.visit_expr(child)
}
}
/// We need this macro for factorizing the code since we can not specialize a trait on specific type parameter (we would need to specialize on `()` here).
macro_rules! unit_visitor_impl {
(sequence) => (
fn visit_sequence(&mut self, _this: usize, children: Vec<usize>) -> () {
walk_exprs(self, children);
}
);
(choice) => (
fn visit_choice(&mut self, _this: usize, children: Vec<usize>) -> () {
walk_exprs(self, children);
}
);
}
pub fn walk_expr<R: Default, V: ?Sized>(visitor: &mut V, this: usize) -> R where
V: Visitor<R>
{
match visitor.expr_by_index(this) {
StrLiteral(lit) => {
visitor.visit_str_literal(this, lit)
}
AnySingleChar => {
visitor.visit_any_single_char(this)
}
NonTerminalSymbol(rule) => {
visitor.visit_non_terminal_symbol(this, &rule)
}
ExternalNonTerminalSymbol(rule) => {
visitor.visit_external_non_terminal_symbol(this, &rule)
}
Sequence(seq) => {
visitor.visit_sequence(this, seq)
}
Choice(choices) => {
visitor.visit_choice(this, choices)
}
ZeroOrMore(child) => {
visitor.visit_zero_or_more(this, child)
}
OneOrMore(child) => {
visitor.visit_one_or_more(this, child)
}
ZeroOrOne(child) => {
visitor.visit_optional(this, child)
}
NotPredicate(child) => {
visitor.visit_not_predicate(this, child)
}
AndPredicate(child) => {
visitor.visit_and_predicate(this, child)
}
CharacterClass(char_class) => {
visitor.visit_character_class(this, char_class)
}
SemanticAction(child, boxed, action) => {
visitor.visit_semantic_action(this, child, boxed, action)
}
TypeAscription(child, ty) => {
visitor.visit_type_ascription(this, child, ty)
}
SpannedExpr(child) => {
visitor.visit_spanned_expr(this, child)
}
RangeExpr(child) => {
visitor.visit_range_expr(this, child)
}
}
}
pub fn walk_exprs<R: Default, V: ?Sized>(visitor: &mut V, exprs: Vec<usize>) -> Vec<R> where
V: Visitor<R>
{
exprs.into_iter().map(|expr| visitor.visit_expr(expr)).collect()
}
| visit_zero_or_more | identifier_name |
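
Because every hook returns an `R: Default` and `walk_exprs` collects the children's results into a `Vec<R>`, the same trait can also accumulate a value bottom-up. A hypothetical node counter, under the same assumptions as the previous sketch:

```rust
// Hypothetical visitor with a non-unit result: counts every expression node.
struct NodeCounter {
    exprs: Vec<Expression>, // assumed storage
}

impl ExprByIndex for NodeCounter {
    fn expr_by_index(&self, index: usize) -> Expression {
        self.exprs[index].clone()
    }
}

impl Visitor<usize> for NodeCounter {
    // Count this node, then everything reachable below it.
    fn visit_expr(&mut self, this: usize) -> usize {
        1 + walk_expr(self, this)
    }

    fn visit_sequence(&mut self, _this: usize, children: Vec<usize>) -> usize {
        walk_exprs(self, children).into_iter().sum()
    }

    fn visit_choice(&mut self, _this: usize, children: Vec<usize>) -> usize {
        walk_exprs(self, children).into_iter().sum()
    }
}
```
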
visitor.rs | // Copyright 2016 Pierre Talbot (IRCAM)
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![macro_use]
use std::default::Default;
use ast::*;
use ast::Expression::*;
pub trait Visitor<R: Default> : ExprByIndex
{ | fn visit_expr(&mut self, this: usize) -> R {
walk_expr(self, this)
}
fn visit_str_literal(&mut self, _this: usize, _lit: String) -> R { R::default() }
fn visit_non_terminal_symbol(&mut self, _this: usize, _rule: &Ident) -> R { R::default() }
fn visit_external_non_terminal_symbol(&mut self, _this: usize, _rule: &syn::Path) -> R { R::default() }
fn visit_atom(&mut self, _this: usize) -> R { R::default() }
fn visit_any_single_char(&mut self, this: usize) -> R {
self.visit_atom(this)
}
fn visit_character_class(&mut self, this: usize, _char_class: CharacterClassExpr) -> R {
self.visit_atom(this)
}
fn visit_spanned_expr(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_range_expr(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_sequence(&mut self, _this: usize, children: Vec<usize>) -> R;
fn visit_choice(&mut self, _this: usize, children: Vec<usize>) -> R;
fn visit_repeat(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_zero_or_more(&mut self, this: usize, child: usize) -> R {
self.visit_repeat(this, child)
}
fn visit_one_or_more(&mut self, this: usize, child: usize) -> R {
self.visit_repeat(this, child)
}
fn visit_optional(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_syntactic_predicate(&mut self, _this: usize, child: usize) -> R {
self.visit_expr(child)
}
fn visit_not_predicate(&mut self, this: usize, child: usize) -> R {
self.visit_syntactic_predicate(this, child)
}
fn visit_and_predicate(&mut self, this: usize, child: usize) -> R {
self.visit_syntactic_predicate(this, child)
}
fn visit_semantic_action(&mut self, _this: usize, child: usize, _boxed: bool, _action: syn::Expr) -> R {
self.visit_expr(child)
}
fn visit_type_ascription(&mut self, _this: usize, child: usize, _ty: IType) -> R {
self.visit_expr(child)
}
}
/// We need this macro for factorizing the code since we can not specialize a trait on specific type parameter (we would need to specialize on `()` here).
macro_rules! unit_visitor_impl {
(sequence) => (
fn visit_sequence(&mut self, _this: usize, children: Vec<usize>) -> () {
walk_exprs(self, children);
}
);
(choice) => (
fn visit_choice(&mut self, _this: usize, children: Vec<usize>) -> () {
walk_exprs(self, children);
}
);
}
pub fn walk_expr<R: Default, V: ?Sized>(visitor: &mut V, this: usize) -> R where
V: Visitor<R>
{
match visitor.expr_by_index(this) {
StrLiteral(lit) => {
visitor.visit_str_literal(this, lit)
}
AnySingleChar => {
visitor.visit_any_single_char(this)
}
NonTerminalSymbol(rule) => {
visitor.visit_non_terminal_symbol(this, &rule)
}
ExternalNonTerminalSymbol(rule) => {
visitor.visit_external_non_terminal_symbol(this, &rule)
}
Sequence(seq) => {
visitor.visit_sequence(this, seq)
}
Choice(choices) => {
visitor.visit_choice(this, choices)
}
ZeroOrMore(child) => {
visitor.visit_zero_or_more(this, child)
}
OneOrMore(child) => {
visitor.visit_one_or_more(this, child)
}
ZeroOrOne(child) => {
visitor.visit_optional(this, child)
}
NotPredicate(child) => {
visitor.visit_not_predicate(this, child)
}
AndPredicate(child) => {
visitor.visit_and_predicate(this, child)
}
CharacterClass(char_class) => {
visitor.visit_character_class(this, char_class)
}
SemanticAction(child, boxed, action) => {
visitor.visit_semantic_action(this, child, boxed, action)
}
TypeAscription(child, ty) => {
visitor.visit_type_ascription(this, child, ty)
}
SpannedExpr(child) => {
visitor.visit_spanned_expr(this, child)
}
RangeExpr(child) => {
visitor.visit_range_expr(this, child)
}
}
}
pub fn walk_exprs<R: Default, V: ?Sized>(visitor: &mut V, exprs: Vec<usize>) -> Vec<R> where
V: Visitor<R>
{
exprs.into_iter().map(|expr| visitor.visit_expr(expr)).collect()
} | random_line_split |
|
eulers_sum_of_powers_conjecture.rs | // http://rosettacode.org/wiki/Euler's_sum_of_powers_conjecture
const MAX_N: u64 = 250;
fn | () -> (usize, usize, usize, usize, usize) {
let pow5: Vec<u64> = (0..MAX_N).map(|i| i.pow(5)).collect();
let pow5_to_n = |pow| pow5.binary_search(&pow);
for x0 in 1..MAX_N as usize {
for x1 in 1..x0 {
for x2 in 1..x1 {
for x3 in 1..x2 {
let pow_sum = pow5[x0] + pow5[x1] + pow5[x2] + pow5[x3];
if let Ok(n) = pow5_to_n(pow_sum) {
return (x0, x1, x2, x3, n)
}
}
}
}
}
panic!();
}
fn main() {
let (x0, x1, x2, x3, y) = eulers_sum_of_powers();
println!("{}^5 + {}^5 + {}^5 + {}^5 == {}^5", x0, x1, x2, x3, y)
}
| eulers_sum_of_powers | identifier_name |
eulers_sum_of_powers_conjecture.rs | // http://rosettacode.org/wiki/Euler's_sum_of_powers_conjecture
const MAX_N: u64 = 250;
fn eulers_sum_of_powers() -> (usize, usize, usize, usize, usize) |
fn main() {
let (x0, x1, x2, x3, y) = eulers_sum_of_powers();
println!("{}^5 + {}^5 + {}^5 + {}^5 == {}^5", x0, x1, x2, x3, y)
}
| {
let pow5: Vec<u64> = (0..MAX_N).map(|i| i.pow(5)).collect();
let pow5_to_n = |pow| pow5.binary_search(&pow);
for x0 in 1..MAX_N as usize {
for x1 in 1..x0 {
for x2 in 1..x1 {
for x3 in 1..x2 {
let pow_sum = pow5[x0] + pow5[x1] + pow5[x2] + pow5[x3];
if let Ok(n) = pow5_to_n(pow_sum) {
return (x0, x1, x2, x3, n)
}
}
}
}
}
panic!();
} | identifier_body |
eulers_sum_of_powers_conjecture.rs | // http://rosettacode.org/wiki/Euler's_sum_of_powers_conjecture
const MAX_N: u64 = 250;
fn eulers_sum_of_powers() -> (usize, usize, usize, usize, usize) {
let pow5: Vec<u64> = (0..MAX_N).map(|i| i.pow(5)).collect();
let pow5_to_n = |pow| pow5.binary_search(&pow);
for x0 in 1..MAX_N as usize {
for x1 in 1..x0 {
for x2 in 1..x1 {
for x3 in 1..x2 {
let pow_sum = pow5[x0] + pow5[x1] + pow5[x2] + pow5[x3];
if let Ok(n) = pow5_to_n(pow_sum) {
return (x0, x1, x2, x3, n)
}
}
}
}
}
panic!();
}
fn main() {
let (x0, x1, x2, x3, y) = eulers_sum_of_powers(); | } | println!("{}^5 + {}^5 + {}^5 + {}^5 == {}^5", x0, x1, x2, x3, y) | random_line_split |
eulers_sum_of_powers_conjecture.rs | // http://rosettacode.org/wiki/Euler's_sum_of_powers_conjecture
const MAX_N: u64 = 250;
fn eulers_sum_of_powers() -> (usize, usize, usize, usize, usize) {
let pow5: Vec<u64> = (0..MAX_N).map(|i| i.pow(5)).collect();
let pow5_to_n = |pow| pow5.binary_search(&pow);
for x0 in 1..MAX_N as usize {
for x1 in 1..x0 {
for x2 in 1..x1 {
for x3 in 1..x2 {
let pow_sum = pow5[x0] + pow5[x1] + pow5[x2] + pow5[x3];
if let Ok(n) = pow5_to_n(pow_sum) |
}
}
}
}
panic!();
}
fn main() {
let (x0, x1, x2, x3, y) = eulers_sum_of_powers();
println!("{}^5 + {}^5 + {}^5 + {}^5 == {}^5", x0, x1, x2, x3, y)
}
| {
return (x0, x1, x2, x3, n)
} | conditional_block |
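
The four eulers_sum_of_powers_conjecture.rs splits come from one Rosetta Code program: because `i^5` is strictly increasing, `pow5` is sorted, so `binary_search` is a valid membership test for fifth powers. The search is expected to terminate at the well-known counterexample 27^5 + 84^5 + 110^5 + 133^5 = 144^5; a standalone sanity check of that identity:

```rust
// Independent check of the counterexample the program above is meant to find.
fn main() {
    let pow5 = |x: u64| x.pow(5);
    assert_eq!(pow5(27) + pow5(84) + pow5(110) + pow5(133), pow5(144));
    println!("27^5 + 84^5 + 110^5 + 133^5 == 144^5");
}
```
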
stylesheet.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use {Namespace, Prefix};
use context::QuirksMode;
use cssparser::{Parser, ParserInput, RuleListParser};
use error_reporting::{ContextualParseError, ParseErrorReporter};
use fallible::FallibleVec;
use fnv::FnvHashMap;
use invalidation::media_queries::{MediaListKey, ToMediaListKey};
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use media_queries::{Device, MediaList};
use parking_lot::RwLock;
use parser::{ParserContext, ParserErrorContext};
use servo_arc::Arc;
use shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked, SharedRwLock, SharedRwLockReadGuard};
use std::mem;
use std::sync::atomic::{AtomicBool, Ordering};
use style_traits::ParsingMode;
use stylesheets::{CssRule, CssRules, Origin, UrlExtraData};
use stylesheets::loader::StylesheetLoader;
use stylesheets::rule_parser::{State, TopLevelRuleParser};
use stylesheets::rules_iterator::{EffectiveRules, EffectiveRulesIterator};
use stylesheets::rules_iterator::{NestedRuleIterationCondition, RulesIterator};
/// This structure holds the user-agent and user stylesheets.
pub struct UserAgentStylesheets {
/// The lock used for user-agent stylesheets.
pub shared_lock: SharedRwLock,
/// The user or user agent stylesheets.
pub user_or_user_agent_stylesheets: Vec<DocumentStyleSheet>,
/// The quirks mode stylesheet.
pub quirks_mode_stylesheet: DocumentStyleSheet,
}
/// A set of namespaces applying to a given stylesheet.
///
/// The namespace id is used in gecko
#[derive(Clone, Debug, Default, MallocSizeOf)]
#[allow(missing_docs)]
pub struct Namespaces {
pub default: Option<Namespace>,
pub prefixes: FnvHashMap<Prefix, Namespace>,
}
/// The contents of a given stylesheet. This effectively maps to a
/// StyleSheetInner in Gecko.
#[derive(Debug)]
pub struct StylesheetContents {
/// List of rules in the order they were found (important for
/// cascading order)
pub rules: Arc<Locked<CssRules>>,
/// The origin of this stylesheet.
pub origin: Origin,
/// The url data this stylesheet should use.
pub url_data: RwLock<UrlExtraData>,
/// The namespaces that apply to this stylesheet.
pub namespaces: RwLock<Namespaces>,
/// The quirks mode of this stylesheet.
pub quirks_mode: QuirksMode,
/// This stylesheet's source map URL.
pub source_map_url: RwLock<Option<String>>,
/// This stylesheet's source URL.
pub source_url: RwLock<Option<String>>,
}
impl StylesheetContents {
/// Parse a given CSS string, with a given url-data, origin, and
/// quirks mode.
pub fn from_str<R: ParseErrorReporter>(
css: &str,
url_data: UrlExtraData,
origin: Origin,
shared_lock: &SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &R,
quirks_mode: QuirksMode,
line_number_offset: u32,
) -> Self {
let namespaces = RwLock::new(Namespaces::default());
let (rules, source_map_url, source_url) = Stylesheet::parse_rules(
css,
&url_data,
origin,
&mut *namespaces.write(),
&shared_lock,
stylesheet_loader,
error_reporter,
quirks_mode,
line_number_offset,
);
Self {
rules: CssRules::new(rules, &shared_lock),
origin: origin,
url_data: RwLock::new(url_data),
namespaces: namespaces,
quirks_mode: quirks_mode,
source_map_url: RwLock::new(source_map_url),
source_url: RwLock::new(source_url),
}
}
/// Returns a reference to the list of rules.
#[inline]
pub fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
&self.rules.read_with(guard).0
}
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
// Measurement of other fields may be added later.
self.rules.unconditional_shallow_size_of(ops) +
self.rules.read_with(guard).size_of(guard, ops)
}
}
impl DeepCloneWithLock for StylesheetContents {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
// Make a deep clone of the rules, using the new lock.
let rules = self.rules
.read_with(guard)
.deep_clone_with_lock(lock, guard, params);
Self {
rules: Arc::new(lock.wrap(rules)),
quirks_mode: self.quirks_mode,
origin: self.origin,
url_data: RwLock::new((*self.url_data.read()).clone()),
namespaces: RwLock::new((*self.namespaces.read()).clone()),
source_map_url: RwLock::new((*self.source_map_url.read()).clone()),
source_url: RwLock::new((*self.source_url.read()).clone()),
}
}
}
/// The structure servo uses to represent a stylesheet.
#[derive(Debug)]
pub struct Stylesheet {
/// The contents of this stylesheet.
pub contents: StylesheetContents,
/// The lock used for objects inside this stylesheet
pub shared_lock: SharedRwLock,
/// List of media associated with the Stylesheet.
pub media: Arc<Locked<MediaList>>,
/// Whether this stylesheet should be disabled.
pub disabled: AtomicBool,
}
macro_rules! rule_filter {
($( $method: ident($variant:ident => $rule_type: ident), )+) => {
$(
#[allow(missing_docs)]
fn $method<F>(&self, device: &Device, guard: &SharedRwLockReadGuard, mut f: F)
where F: FnMut(&::stylesheets::$rule_type),
{
use stylesheets::CssRule;
for rule in self.effective_rules(device, guard) {
if let CssRule::$variant(ref lock) = *rule {
let rule = lock.read_with(guard);
f(&rule)
}
}
}
)+
}
}
/// A trait to represent a given stylesheet in a document.
pub trait StylesheetInDocument {
/// Get the stylesheet origin.
fn origin(&self, guard: &SharedRwLockReadGuard) -> Origin;
/// Get the stylesheet quirks mode.
fn quirks_mode(&self, guard: &SharedRwLockReadGuard) -> QuirksMode;
/// Get whether this stylesheet is enabled.
fn enabled(&self) -> bool;
/// Get the media associated with this stylesheet.
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList>;
/// Returns a reference to the list of rules in this stylesheet.
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule];
/// Return an iterator using the condition `C`.
#[inline]
fn iter_rules<'a, 'b, C>(
&'a self,
device: &'a Device,
guard: &'a SharedRwLockReadGuard<'b>,
) -> RulesIterator<'a, 'b, C>
where
C: NestedRuleIterationCondition,
{
RulesIterator::new(device, self.quirks_mode(guard), guard, self.rules(guard))
}
/// Returns whether the style-sheet applies for the current device.
fn is_effective_for_device(&self, device: &Device, guard: &SharedRwLockReadGuard) -> bool {
match self.media(guard) {
Some(medialist) => medialist.evaluate(device, self.quirks_mode(guard)),
None => true,
}
}
/// Return an iterator over the effective rules within the style-sheet, as
/// according to the supplied `Device`.
#[inline]
fn effective_rules<'a, 'b>(
&'a self,
device: &'a Device,
guard: &'a SharedRwLockReadGuard<'b>,
) -> EffectiveRulesIterator<'a, 'b> {
self.iter_rules::<EffectiveRules>(device, guard)
}
rule_filter! {
effective_style_rules(Style => StyleRule),
effective_media_rules(Media => MediaRule),
effective_font_face_rules(FontFace => FontFaceRule),
effective_font_face_feature_values_rules(FontFeatureValues => FontFeatureValuesRule),
effective_counter_style_rules(CounterStyle => CounterStyleRule),
effective_viewport_rules(Viewport => ViewportRule),
effective_keyframes_rules(Keyframes => KeyframesRule),
effective_supports_rules(Supports => SupportsRule),
effective_page_rules(Page => PageRule),
effective_document_rules(Document => DocumentRule),
}
}
impl StylesheetInDocument for Stylesheet {
fn origin(&self, _guard: &SharedRwLockReadGuard) -> Origin {
self.contents.origin
}
fn quirks_mode(&self, _guard: &SharedRwLockReadGuard) -> QuirksMode {
self.contents.quirks_mode
}
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList> {
Some(self.media.read_with(guard))
}
fn enabled(&self) -> bool {
!self.disabled()
}
#[inline]
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
self.contents.rules(guard)
}
}
/// A simple wrapper over an `Arc<Stylesheet>`, with pointer comparison, and
/// suitable for its use in a `StylesheetSet`.
#[derive(Clone)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub struct DocumentStyleSheet(
#[cfg_attr(feature = "servo", ignore_malloc_size_of = "Arc")] pub Arc<Stylesheet>,
);
impl PartialEq for DocumentStyleSheet {
fn eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.0, &other.0)
}
}
impl ToMediaListKey for DocumentStyleSheet {
fn to_media_list_key(&self) -> MediaListKey {
self.0.to_media_list_key()
}
}
impl StylesheetInDocument for DocumentStyleSheet {
fn origin(&self, guard: &SharedRwLockReadGuard) -> Origin {
self.0.origin(guard)
}
fn quirks_mode(&self, guard: &SharedRwLockReadGuard) -> QuirksMode {
self.0.quirks_mode(guard)
}
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList> {
self.0.media(guard)
}
fn enabled(&self) -> bool {
self.0.enabled()
}
#[inline]
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
self.0.rules(guard)
}
}
impl Stylesheet {
/// Updates an empty stylesheet from a given string of text.
pub fn | <R>(
existing: &Stylesheet,
css: &str,
url_data: UrlExtraData,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &R,
line_number_offset: u32,
) where
R: ParseErrorReporter,
{
let namespaces = RwLock::new(Namespaces::default());
let (rules, source_map_url, source_url) = Stylesheet::parse_rules(
css,
&url_data,
existing.contents.origin,
&mut *namespaces.write(),
&existing.shared_lock,
stylesheet_loader,
error_reporter,
existing.contents.quirks_mode,
line_number_offset,
);
*existing.contents.url_data.write() = url_data;
mem::swap(
&mut *existing.contents.namespaces.write(),
&mut *namespaces.write(),
);
// Acquire the lock *after* parsing, to minimize the exclusive section.
let mut guard = existing.shared_lock.write();
*existing.contents.rules.write_with(&mut guard) = CssRules(rules);
*existing.contents.source_map_url.write() = source_map_url;
*existing.contents.source_url.write() = source_url;
}
fn parse_rules<R: ParseErrorReporter>(
css: &str,
url_data: &UrlExtraData,
origin: Origin,
namespaces: &mut Namespaces,
shared_lock: &SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &R,
quirks_mode: QuirksMode,
line_number_offset: u32,
) -> (Vec<CssRule>, Option<String>, Option<String>) {
let mut rules = Vec::new();
let mut input = ParserInput::new_with_line_number_offset(css, line_number_offset);
let mut input = Parser::new(&mut input);
let context = ParserContext::new(origin, url_data, None, ParsingMode::DEFAULT, quirks_mode);
let error_context = ParserErrorContext { error_reporter };
let rule_parser = TopLevelRuleParser {
stylesheet_origin: origin,
shared_lock: shared_lock,
loader: stylesheet_loader,
context: context,
error_context: error_context,
state: State::Start,
had_hierarchy_error: false,
namespaces: namespaces,
};
{
let mut iter = RuleListParser::new_for_stylesheet(&mut input, rule_parser);
while let Some(result) = iter.next() {
match result {
Ok(rule) => {
// Use a fallible push here, and if it fails, just
// fall out of the loop. This will cause the page to
// be shown incorrectly, but it's better than OOMing.
if rules.try_push(rule).is_err() {
break;
}
},
Err((error, slice)) => {
let location = error.location;
let error = ContextualParseError::InvalidRule(slice, error);
iter.parser.context.log_css_error(
&iter.parser.error_context,
location,
error,
);
},
}
}
}
let source_map_url = input.current_source_map_url().map(String::from);
let source_url = input.current_source_url().map(String::from);
(rules, source_map_url, source_url)
}
/// Creates an empty stylesheet and parses it with a given base url, origin
/// and media.
///
/// Effectively creates a new stylesheet and forwards the hard work to
/// `Stylesheet::update_from_str`.
pub fn from_str<R: ParseErrorReporter>(
css: &str,
url_data: UrlExtraData,
origin: Origin,
media: Arc<Locked<MediaList>>,
shared_lock: SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &R,
quirks_mode: QuirksMode,
line_number_offset: u32,
) -> Stylesheet {
let contents = StylesheetContents::from_str(
css,
url_data,
origin,
&shared_lock,
stylesheet_loader,
error_reporter,
quirks_mode,
line_number_offset,
);
Stylesheet {
contents,
shared_lock,
media,
disabled: AtomicBool::new(false),
}
}
/// Returns whether the stylesheet has been explicitly disabled through the
/// CSSOM.
pub fn disabled(&self) -> bool {
self.disabled.load(Ordering::SeqCst)
}
/// Records that the stylesheet has been explicitly disabled through the
/// CSSOM.
///
/// Returns whether the call resulted in a change in disabled state.
///
/// Disabled stylesheets remain in the document, but their rules are not
/// added to the Stylist.
pub fn set_disabled(&self, disabled: bool) -> bool {
self.disabled.swap(disabled, Ordering::SeqCst) != disabled
}
}
#[cfg(feature = "servo")]
impl Clone for Stylesheet {
fn clone(&self) -> Self {
// Create a new lock for our clone.
let lock = self.shared_lock.clone();
let guard = self.shared_lock.read();
// Make a deep clone of the media, using the new lock.
let media = self.media.read_with(&guard).clone();
let media = Arc::new(lock.wrap(media));
let contents = self.contents
.deep_clone_with_lock(&lock, &guard, &DeepCloneParams);
Stylesheet {
contents,
media: media,
shared_lock: lock,
disabled: AtomicBool::new(self.disabled.load(Ordering::SeqCst)),
}
}
}
| update_from_str | identifier_name |
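
In the stylesheet.rs rows, the `rule_filter!` macro expands to one method per rule kind on `StylesheetInDocument` (for example `effective_style_rules`), each walking the effective rules for a `Device` and handing matching rules to a callback. A hypothetical caller is sketched below; it assumes the `StylesheetInDocument` trait and the listed types are in scope, and the function itself is not part of the source.

```rust
// Hypothetical use of one generated filter method; all types come from the code above.
fn count_effective_style_rules(
    sheet: &Stylesheet,
    device: &Device,
    guard: &SharedRwLockReadGuard,
) -> usize {
    let mut n = 0;
    sheet.effective_style_rules(device, guard, |_rule| n += 1);
    n
}
```
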
stylesheet.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use {Namespace, Prefix};
use context::QuirksMode;
use cssparser::{Parser, ParserInput, RuleListParser};
use error_reporting::{ContextualParseError, ParseErrorReporter};
use fallible::FallibleVec;
use fnv::FnvHashMap;
use invalidation::media_queries::{MediaListKey, ToMediaListKey};
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use media_queries::{Device, MediaList};
use parking_lot::RwLock;
use parser::{ParserContext, ParserErrorContext};
use servo_arc::Arc;
use shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked, SharedRwLock, SharedRwLockReadGuard};
use std::mem;
use std::sync::atomic::{AtomicBool, Ordering};
use style_traits::ParsingMode;
use stylesheets::{CssRule, CssRules, Origin, UrlExtraData};
use stylesheets::loader::StylesheetLoader;
use stylesheets::rule_parser::{State, TopLevelRuleParser};
use stylesheets::rules_iterator::{EffectiveRules, EffectiveRulesIterator};
use stylesheets::rules_iterator::{NestedRuleIterationCondition, RulesIterator};
/// This structure holds the user-agent and user stylesheets.
pub struct UserAgentStylesheets {
/// The lock used for user-agent stylesheets.
pub shared_lock: SharedRwLock,
/// The user or user agent stylesheets.
pub user_or_user_agent_stylesheets: Vec<DocumentStyleSheet>,
/// The quirks mode stylesheet.
pub quirks_mode_stylesheet: DocumentStyleSheet,
}
/// A set of namespaces applying to a given stylesheet.
///
/// The namespace id is used in gecko
#[derive(Clone, Debug, Default, MallocSizeOf)]
#[allow(missing_docs)]
pub struct Namespaces {
pub default: Option<Namespace>,
pub prefixes: FnvHashMap<Prefix, Namespace>,
}
/// The contents of a given stylesheet. This effectively maps to a
/// StyleSheetInner in Gecko.
#[derive(Debug)]
pub struct StylesheetContents {
/// List of rules in the order they were found (important for
/// cascading order)
pub rules: Arc<Locked<CssRules>>,
/// The origin of this stylesheet.
pub origin: Origin,
/// The url data this stylesheet should use.
pub url_data: RwLock<UrlExtraData>,
/// The namespaces that apply to this stylesheet.
pub namespaces: RwLock<Namespaces>,
/// The quirks mode of this stylesheet.
pub quirks_mode: QuirksMode,
/// This stylesheet's source map URL.
pub source_map_url: RwLock<Option<String>>,
/// This stylesheet's source URL.
pub source_url: RwLock<Option<String>>,
}
impl StylesheetContents {
/// Parse a given CSS string, with a given url-data, origin, and
/// quirks mode.
pub fn from_str<R: ParseErrorReporter>(
css: &str,
url_data: UrlExtraData,
origin: Origin,
shared_lock: &SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &R,
quirks_mode: QuirksMode,
line_number_offset: u32,
) -> Self {
let namespaces = RwLock::new(Namespaces::default());
let (rules, source_map_url, source_url) = Stylesheet::parse_rules(
css,
&url_data,
origin,
&mut *namespaces.write(),
&shared_lock,
stylesheet_loader,
error_reporter,
quirks_mode,
line_number_offset,
);
Self {
rules: CssRules::new(rules, &shared_lock),
origin: origin,
url_data: RwLock::new(url_data),
namespaces: namespaces,
quirks_mode: quirks_mode,
source_map_url: RwLock::new(source_map_url),
source_url: RwLock::new(source_url),
}
}
/// Returns a reference to the list of rules.
#[inline]
pub fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
&self.rules.read_with(guard).0
}
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
// Measurement of other fields may be added later.
self.rules.unconditional_shallow_size_of(ops) +
self.rules.read_with(guard).size_of(guard, ops)
}
}
impl DeepCloneWithLock for StylesheetContents {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self {
// Make a deep clone of the rules, using the new lock.
let rules = self.rules
.read_with(guard)
.deep_clone_with_lock(lock, guard, params);
Self {
rules: Arc::new(lock.wrap(rules)),
quirks_mode: self.quirks_mode,
origin: self.origin,
url_data: RwLock::new((*self.url_data.read()).clone()),
namespaces: RwLock::new((*self.namespaces.read()).clone()),
source_map_url: RwLock::new((*self.source_map_url.read()).clone()),
source_url: RwLock::new((*self.source_url.read()).clone()),
}
}
}
/// The structure servo uses to represent a stylesheet.
#[derive(Debug)]
pub struct Stylesheet {
/// The contents of this stylesheet.
pub contents: StylesheetContents,
/// The lock used for objects inside this stylesheet
pub shared_lock: SharedRwLock,
/// List of media associated with the Stylesheet.
pub media: Arc<Locked<MediaList>>,
/// Whether this stylesheet should be disabled.
pub disabled: AtomicBool,
}
macro_rules! rule_filter {
($( $method: ident($variant:ident => $rule_type: ident), )+) => {
$(
#[allow(missing_docs)]
fn $method<F>(&self, device: &Device, guard: &SharedRwLockReadGuard, mut f: F)
where F: FnMut(&::stylesheets::$rule_type),
{
use stylesheets::CssRule;
for rule in self.effective_rules(device, guard) {
if let CssRule::$variant(ref lock) = *rule {
let rule = lock.read_with(guard);
f(&rule)
} | }
}
)+
}
}
/// A trait to represent a given stylesheet in a document.
pub trait StylesheetInDocument {
/// Get the stylesheet origin.
fn origin(&self, guard: &SharedRwLockReadGuard) -> Origin;
/// Get the stylesheet quirks mode.
fn quirks_mode(&self, guard: &SharedRwLockReadGuard) -> QuirksMode;
/// Get whether this stylesheet is enabled.
fn enabled(&self) -> bool;
/// Get the media associated with this stylesheet.
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList>;
/// Returns a reference to the list of rules in this stylesheet.
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule];
/// Return an iterator using the condition `C`.
#[inline]
fn iter_rules<'a, 'b, C>(
&'a self,
device: &'a Device,
guard: &'a SharedRwLockReadGuard<'b>,
) -> RulesIterator<'a, 'b, C>
where
C: NestedRuleIterationCondition,
{
RulesIterator::new(device, self.quirks_mode(guard), guard, self.rules(guard))
}
/// Returns whether the style-sheet applies for the current device.
fn is_effective_for_device(&self, device: &Device, guard: &SharedRwLockReadGuard) -> bool {
match self.media(guard) {
Some(medialist) => medialist.evaluate(device, self.quirks_mode(guard)),
None => true,
}
}
/// Return an iterator over the effective rules within the style-sheet, as
/// according to the supplied `Device`.
#[inline]
fn effective_rules<'a, 'b>(
&'a self,
device: &'a Device,
guard: &'a SharedRwLockReadGuard<'b>,
) -> EffectiveRulesIterator<'a, 'b> {
self.iter_rules::<EffectiveRules>(device, guard)
}
rule_filter! {
effective_style_rules(Style => StyleRule),
effective_media_rules(Media => MediaRule),
effective_font_face_rules(FontFace => FontFaceRule),
effective_font_face_feature_values_rules(FontFeatureValues => FontFeatureValuesRule),
effective_counter_style_rules(CounterStyle => CounterStyleRule),
effective_viewport_rules(Viewport => ViewportRule),
effective_keyframes_rules(Keyframes => KeyframesRule),
effective_supports_rules(Supports => SupportsRule),
effective_page_rules(Page => PageRule),
effective_document_rules(Document => DocumentRule),
}
}
impl StylesheetInDocument for Stylesheet {
fn origin(&self, _guard: &SharedRwLockReadGuard) -> Origin {
self.contents.origin
}
fn quirks_mode(&self, _guard: &SharedRwLockReadGuard) -> QuirksMode {
self.contents.quirks_mode
}
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList> {
Some(self.media.read_with(guard))
}
fn enabled(&self) -> bool {
!self.disabled()
}
#[inline]
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
self.contents.rules(guard)
}
}
/// A simple wrapper over an `Arc<Stylesheet>`, with pointer comparison, and
/// suitable for its use in a `StylesheetSet`.
#[derive(Clone)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub struct DocumentStyleSheet(
#[cfg_attr(feature = "servo", ignore_malloc_size_of = "Arc")] pub Arc<Stylesheet>,
);
impl PartialEq for DocumentStyleSheet {
fn eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.0, &other.0)
}
}
impl ToMediaListKey for DocumentStyleSheet {
fn to_media_list_key(&self) -> MediaListKey {
self.0.to_media_list_key()
}
}
impl StylesheetInDocument for DocumentStyleSheet {
fn origin(&self, guard: &SharedRwLockReadGuard) -> Origin {
self.0.origin(guard)
}
fn quirks_mode(&self, guard: &SharedRwLockReadGuard) -> QuirksMode {
self.0.quirks_mode(guard)
}
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList> {
self.0.media(guard)
}
fn enabled(&self) -> bool {
self.0.enabled()
}
#[inline]
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
self.0.rules(guard)
}
}
impl Stylesheet {
/// Updates an empty stylesheet from a given string of text.
pub fn update_from_str<R>(
existing: &Stylesheet,
css: &str,
url_data: UrlExtraData,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &R,
line_number_offset: u32,
) where
R: ParseErrorReporter,
{
let namespaces = RwLock::new(Namespaces::default());
let (rules, source_map_url, source_url) = Stylesheet::parse_rules(
css,
&url_data,
existing.contents.origin,
&mut *namespaces.write(),
&existing.shared_lock,
stylesheet_loader,
error_reporter,
existing.contents.quirks_mode,
line_number_offset,
);
*existing.contents.url_data.write() = url_data;
mem::swap(
&mut *existing.contents.namespaces.write(),
&mut *namespaces.write(),
);
// Acquire the lock *after* parsing, to minimize the exclusive section.
let mut guard = existing.shared_lock.write();
*existing.contents.rules.write_with(&mut guard) = CssRules(rules);
*existing.contents.source_map_url.write() = source_map_url;
*existing.contents.source_url.write() = source_url;
}
fn parse_rules<R: ParseErrorReporter>(
css: &str,
url_data: &UrlExtraData,
origin: Origin,
namespaces: &mut Namespaces,
shared_lock: &SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &R,
quirks_mode: QuirksMode,
line_number_offset: u32,
) -> (Vec<CssRule>, Option<String>, Option<String>) {
let mut rules = Vec::new();
let mut input = ParserInput::new_with_line_number_offset(css, line_number_offset);
let mut input = Parser::new(&mut input);
let context = ParserContext::new(origin, url_data, None, ParsingMode::DEFAULT, quirks_mode);
let error_context = ParserErrorContext { error_reporter };
let rule_parser = TopLevelRuleParser {
stylesheet_origin: origin,
shared_lock: shared_lock,
loader: stylesheet_loader,
context: context,
error_context: error_context,
state: State::Start,
had_hierarchy_error: false,
namespaces: namespaces,
};
{
let mut iter = RuleListParser::new_for_stylesheet(&mut input, rule_parser);
while let Some(result) = iter.next() {
match result {
Ok(rule) => {
// Use a fallible push here, and if it fails, just
// fall out of the loop. This will cause the page to
// be shown incorrectly, but it's better than OOMing.
if rules.try_push(rule).is_err() {
break;
}
},
Err((error, slice)) => {
let location = error.location;
let error = ContextualParseError::InvalidRule(slice, error);
iter.parser.context.log_css_error(
&iter.parser.error_context,
location,
error,
);
},
}
}
}
let source_map_url = input.current_source_map_url().map(String::from);
let source_url = input.current_source_url().map(String::from);
(rules, source_map_url, source_url)
}
/// Creates an empty stylesheet and parses it with a given base url, origin
/// and media.
///
/// Effectively creates a new stylesheet and forwards the hard work to
/// `Stylesheet::update_from_str`.
pub fn from_str<R: ParseErrorReporter>(
css: &str,
url_data: UrlExtraData,
origin: Origin,
media: Arc<Locked<MediaList>>,
shared_lock: SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &R,
quirks_mode: QuirksMode,
line_number_offset: u32,
) -> Stylesheet {
let contents = StylesheetContents::from_str(
css,
url_data,
origin,
&shared_lock,
stylesheet_loader,
error_reporter,
quirks_mode,
line_number_offset,
);
Stylesheet {
contents,
shared_lock,
media,
disabled: AtomicBool::new(false),
}
}
/// Returns whether the stylesheet has been explicitly disabled through the
/// CSSOM.
pub fn disabled(&self) -> bool {
self.disabled.load(Ordering::SeqCst)
}
/// Records that the stylesheet has been explicitly disabled through the
/// CSSOM.
///
/// Returns whether the call resulted in a change in disabled state.
///
/// Disabled stylesheets remain in the document, but their rules are not
/// added to the Stylist.
pub fn set_disabled(&self, disabled: bool) -> bool {
self.disabled.swap(disabled, Ordering::SeqCst) != disabled
}
}
#[cfg(feature = "servo")]
impl Clone for Stylesheet {
fn clone(&self) -> Self {
// Create a new lock for our clone.
let lock = self.shared_lock.clone();
let guard = self.shared_lock.read();
// Make a deep clone of the media, using the new lock.
let media = self.media.read_with(&guard).clone();
let media = Arc::new(lock.wrap(media));
let contents = self.contents
.deep_clone_with_lock(&lock, &guard, &DeepCloneParams);
Stylesheet {
contents,
media: media,
shared_lock: lock,
disabled: AtomicBool::new(self.disabled.load(Ordering::SeqCst)),
}
}
} | random_line_split |
|
stylesheet.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use {Namespace, Prefix};
use context::QuirksMode;
use cssparser::{Parser, ParserInput, RuleListParser};
use error_reporting::{ContextualParseError, ParseErrorReporter};
use fallible::FallibleVec;
use fnv::FnvHashMap;
use invalidation::media_queries::{MediaListKey, ToMediaListKey};
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOfOps, MallocUnconditionalShallowSizeOf};
use media_queries::{Device, MediaList};
use parking_lot::RwLock;
use parser::{ParserContext, ParserErrorContext};
use servo_arc::Arc;
use shared_lock::{DeepCloneParams, DeepCloneWithLock, Locked, SharedRwLock, SharedRwLockReadGuard};
use std::mem;
use std::sync::atomic::{AtomicBool, Ordering};
use style_traits::ParsingMode;
use stylesheets::{CssRule, CssRules, Origin, UrlExtraData};
use stylesheets::loader::StylesheetLoader;
use stylesheets::rule_parser::{State, TopLevelRuleParser};
use stylesheets::rules_iterator::{EffectiveRules, EffectiveRulesIterator};
use stylesheets::rules_iterator::{NestedRuleIterationCondition, RulesIterator};
/// This structure holds the user-agent and user stylesheets.
pub struct UserAgentStylesheets {
/// The lock used for user-agent stylesheets.
pub shared_lock: SharedRwLock,
/// The user or user agent stylesheets.
pub user_or_user_agent_stylesheets: Vec<DocumentStyleSheet>,
/// The quirks mode stylesheet.
pub quirks_mode_stylesheet: DocumentStyleSheet,
}
/// A set of namespaces applying to a given stylesheet.
///
/// The namespace id is used in gecko
#[derive(Clone, Debug, Default, MallocSizeOf)]
#[allow(missing_docs)]
pub struct Namespaces {
pub default: Option<Namespace>,
pub prefixes: FnvHashMap<Prefix, Namespace>,
}
/// The contents of a given stylesheet. This effectively maps to a
/// StyleSheetInner in Gecko.
#[derive(Debug)]
pub struct StylesheetContents {
/// List of rules in the order they were found (important for
/// cascading order)
pub rules: Arc<Locked<CssRules>>,
/// The origin of this stylesheet.
pub origin: Origin,
/// The url data this stylesheet should use.
pub url_data: RwLock<UrlExtraData>,
/// The namespaces that apply to this stylesheet.
pub namespaces: RwLock<Namespaces>,
/// The quirks mode of this stylesheet.
pub quirks_mode: QuirksMode,
/// This stylesheet's source map URL.
pub source_map_url: RwLock<Option<String>>,
/// This stylesheet's source URL.
pub source_url: RwLock<Option<String>>,
}
impl StylesheetContents {
/// Parse a given CSS string, with a given url-data, origin, and
/// quirks mode.
pub fn from_str<R: ParseErrorReporter>(
css: &str,
url_data: UrlExtraData,
origin: Origin,
shared_lock: &SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &R,
quirks_mode: QuirksMode,
line_number_offset: u32,
) -> Self {
let namespaces = RwLock::new(Namespaces::default());
let (rules, source_map_url, source_url) = Stylesheet::parse_rules(
css,
&url_data,
origin,
&mut *namespaces.write(),
&shared_lock,
stylesheet_loader,
error_reporter,
quirks_mode,
line_number_offset,
);
Self {
rules: CssRules::new(rules, &shared_lock),
origin: origin,
url_data: RwLock::new(url_data),
namespaces: namespaces,
quirks_mode: quirks_mode,
source_map_url: RwLock::new(source_map_url),
source_url: RwLock::new(source_url),
}
}
/// Returns a reference to the list of rules.
#[inline]
pub fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
&self.rules.read_with(guard).0
}
/// Measure heap usage.
#[cfg(feature = "gecko")]
pub fn size_of(&self, guard: &SharedRwLockReadGuard, ops: &mut MallocSizeOfOps) -> usize {
// Measurement of other fields may be added later.
self.rules.unconditional_shallow_size_of(ops) +
self.rules.read_with(guard).size_of(guard, ops)
}
}
impl DeepCloneWithLock for StylesheetContents {
fn deep_clone_with_lock(
&self,
lock: &SharedRwLock,
guard: &SharedRwLockReadGuard,
params: &DeepCloneParams,
) -> Self |
}
/// The structure servo uses to represent a stylesheet.
#[derive(Debug)]
pub struct Stylesheet {
/// The contents of this stylesheet.
pub contents: StylesheetContents,
/// The lock used for objects inside this stylesheet
pub shared_lock: SharedRwLock,
/// List of media associated with the Stylesheet.
pub media: Arc<Locked<MediaList>>,
/// Whether this stylesheet should be disabled.
pub disabled: AtomicBool,
}
macro_rules! rule_filter {
($( $method: ident($variant:ident => $rule_type: ident), )+) => {
$(
#[allow(missing_docs)]
fn $method<F>(&self, device: &Device, guard: &SharedRwLockReadGuard, mut f: F)
where F: FnMut(&::stylesheets::$rule_type),
{
use stylesheets::CssRule;
for rule in self.effective_rules(device, guard) {
if let CssRule::$variant(ref lock) = *rule {
let rule = lock.read_with(guard);
f(&rule)
}
}
}
)+
}
}
/// A trait to represent a given stylesheet in a document.
pub trait StylesheetInDocument {
/// Get the stylesheet origin.
fn origin(&self, guard: &SharedRwLockReadGuard) -> Origin;
/// Get the stylesheet quirks mode.
fn quirks_mode(&self, guard: &SharedRwLockReadGuard) -> QuirksMode;
/// Get whether this stylesheet is enabled.
fn enabled(&self) -> bool;
/// Get the media associated with this stylesheet.
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList>;
/// Returns a reference to the list of rules in this stylesheet.
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule];
/// Return an iterator using the condition `C`.
#[inline]
fn iter_rules<'a, 'b, C>(
&'a self,
device: &'a Device,
guard: &'a SharedRwLockReadGuard<'b>,
) -> RulesIterator<'a, 'b, C>
where
C: NestedRuleIterationCondition,
{
RulesIterator::new(device, self.quirks_mode(guard), guard, self.rules(guard))
}
/// Returns whether the style-sheet applies for the current device.
fn is_effective_for_device(&self, device: &Device, guard: &SharedRwLockReadGuard) -> bool {
match self.media(guard) {
Some(medialist) => medialist.evaluate(device, self.quirks_mode(guard)),
None => true,
}
}
/// Return an iterator over the effective rules within the style-sheet, as
/// according to the supplied `Device`.
#[inline]
fn effective_rules<'a, 'b>(
&'a self,
device: &'a Device,
guard: &'a SharedRwLockReadGuard<'b>,
) -> EffectiveRulesIterator<'a, 'b> {
self.iter_rules::<EffectiveRules>(device, guard)
}
rule_filter! {
effective_style_rules(Style => StyleRule),
effective_media_rules(Media => MediaRule),
effective_font_face_rules(FontFace => FontFaceRule),
effective_font_face_feature_values_rules(FontFeatureValues => FontFeatureValuesRule),
effective_counter_style_rules(CounterStyle => CounterStyleRule),
effective_viewport_rules(Viewport => ViewportRule),
effective_keyframes_rules(Keyframes => KeyframesRule),
effective_supports_rules(Supports => SupportsRule),
effective_page_rules(Page => PageRule),
effective_document_rules(Document => DocumentRule),
}
}
impl StylesheetInDocument for Stylesheet {
fn origin(&self, _guard: &SharedRwLockReadGuard) -> Origin {
self.contents.origin
}
fn quirks_mode(&self, _guard: &SharedRwLockReadGuard) -> QuirksMode {
self.contents.quirks_mode
}
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList> {
Some(self.media.read_with(guard))
}
fn enabled(&self) -> bool {
!self.disabled()
}
#[inline]
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
self.contents.rules(guard)
}
}
/// A simple wrapper over an `Arc<Stylesheet>`, with pointer comparison, and
/// suitable for its use in a `StylesheetSet`.
#[derive(Clone)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub struct DocumentStyleSheet(
#[cfg_attr(feature = "servo", ignore_malloc_size_of = "Arc")] pub Arc<Stylesheet>,
);
impl PartialEq for DocumentStyleSheet {
fn eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.0, &other.0)
}
}
impl ToMediaListKey for DocumentStyleSheet {
fn to_media_list_key(&self) -> MediaListKey {
self.0.to_media_list_key()
}
}
impl StylesheetInDocument for DocumentStyleSheet {
fn origin(&self, guard: &SharedRwLockReadGuard) -> Origin {
self.0.origin(guard)
}
fn quirks_mode(&self, guard: &SharedRwLockReadGuard) -> QuirksMode {
self.0.quirks_mode(guard)
}
fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList> {
self.0.media(guard)
}
fn enabled(&self) -> bool {
self.0.enabled()
}
#[inline]
fn rules<'a, 'b: 'a>(&'a self, guard: &'b SharedRwLockReadGuard) -> &'a [CssRule] {
self.0.rules(guard)
}
}
impl Stylesheet {
/// Updates an empty stylesheet from a given string of text.
pub fn update_from_str<R>(
existing: &Stylesheet,
css: &str,
url_data: UrlExtraData,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &R,
line_number_offset: u32,
) where
R: ParseErrorReporter,
{
let namespaces = RwLock::new(Namespaces::default());
let (rules, source_map_url, source_url) = Stylesheet::parse_rules(
css,
&url_data,
existing.contents.origin,
&mut *namespaces.write(),
&existing.shared_lock,
stylesheet_loader,
error_reporter,
existing.contents.quirks_mode,
line_number_offset,
);
*existing.contents.url_data.write() = url_data;
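        // Swap in the namespace map that was collected while parsing the new rules.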
mem::swap(
&mut *existing.contents.namespaces.write(),
&mut *namespaces.write(),
);
// Acquire the lock *after* parsing, to minimize the exclusive section.
let mut guard = existing.shared_lock.write();
*existing.contents.rules.write_with(&mut guard) = CssRules(rules);
*existing.contents.source_map_url.write() = source_map_url;
*existing.contents.source_url.write() = source_url;
}
fn parse_rules<R: ParseErrorReporter>(
css: &str,
url_data: &UrlExtraData,
origin: Origin,
namespaces: &mut Namespaces,
shared_lock: &SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &R,
quirks_mode: QuirksMode,
line_number_offset: u32,
) -> (Vec<CssRule>, Option<String>, Option<String>) {
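        // Returns the parsed rules plus the stylesheet's source map URL and source URL, if any.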
let mut rules = Vec::new();
let mut input = ParserInput::new_with_line_number_offset(css, line_number_offset);
let mut input = Parser::new(&mut input);
let context = ParserContext::new(origin, url_data, None, ParsingMode::DEFAULT, quirks_mode);
let error_context = ParserErrorContext { error_reporter };
let rule_parser = TopLevelRuleParser {
stylesheet_origin: origin,
shared_lock: shared_lock,
loader: stylesheet_loader,
context: context,
error_context: error_context,
state: State::Start,
had_hierarchy_error: false,
namespaces: namespaces,
};
{
let mut iter = RuleListParser::new_for_stylesheet(&mut input, rule_parser);
while let Some(result) = iter.next() {
match result {
Ok(rule) => {
// Use a fallible push here, and if it fails, just
// fall out of the loop. This will cause the page to
// be shown incorrectly, but it's better than OOMing.
if rules.try_push(rule).is_err() {
break;
}
},
Err((error, slice)) => {
let location = error.location;
let error = ContextualParseError::InvalidRule(slice, error);
iter.parser.context.log_css_error(
&iter.parser.error_context,
location,
error,
);
},
}
}
}
let source_map_url = input.current_source_map_url().map(String::from);
let source_url = input.current_source_url().map(String::from);
(rules, source_map_url, source_url)
}
/// Creates an empty stylesheet and parses it with a given base url, origin
/// and media.
///
/// Effectively creates a new stylesheet and forwards the hard work to
/// `Stylesheet::update_from_str`.
pub fn from_str<R: ParseErrorReporter>(
css: &str,
url_data: UrlExtraData,
origin: Origin,
media: Arc<Locked<MediaList>>,
shared_lock: SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &R,
quirks_mode: QuirksMode,
line_number_offset: u32,
) -> Stylesheet {
let contents = StylesheetContents::from_str(
css,
url_data,
origin,
&shared_lock,
stylesheet_loader,
error_reporter,
quirks_mode,
line_number_offset,
);
Stylesheet {
contents,
shared_lock,
media,
disabled: AtomicBool::new(false),
}
}
/// Returns whether the stylesheet has been explicitly disabled through the
/// CSSOM.
pub fn disabled(&self) -> bool {
self.disabled.load(Ordering::SeqCst)
}
/// Records that the stylesheet has been explicitly disabled through the
/// CSSOM.
///
    /// Returns whether the call resulted in a change in disabled state.
///
/// Disabled stylesheets remain in the document, but their rules are not
/// added to the Stylist.
pub fn set_disabled(&self, disabled: bool) -> bool {
        self.disabled.swap(disabled, Ordering::SeqCst) != disabled
}
}
#[cfg(feature = "servo")]
impl Clone for Stylesheet {
fn clone(&self) -> Self {
// Create a new lock for our clone.
let lock = self.shared_lock.clone();
let guard = self.shared_lock.read();
// Make a deep clone of the media, using the new lock.
let media = self.media.read_with(&guard).clone();
let media = Arc::new(lock.wrap(media));
let contents = self.contents
.deep_clone_with_lock(&lock, &guard, &DeepCloneParams);
Stylesheet {
contents,
media: media,
shared_lock: lock,
disabled: AtomicBool::new(self.disabled.load(Ordering::SeqCst)),
}
}
}
| {
// Make a deep clone of the rules, using the new lock.
let rules = self.rules
.read_with(guard)
.deep_clone_with_lock(lock, guard, params);
Self {
rules: Arc::new(lock.wrap(rules)),
quirks_mode: self.quirks_mode,
origin: self.origin,
url_data: RwLock::new((*self.url_data.read()).clone()),
namespaces: RwLock::new((*self.namespaces.read()).clone()),
source_map_url: RwLock::new((*self.source_map_url.read()).clone()),
            source_url: RwLock::new((*self.source_url.read()).clone()),
}
} | identifier_body |
main.rs | use docopt::Docopt;
use eyre::bail;
use serde::Deserialize;
use std::fs::File;
use std::io::{self, BufRead, BufReader};
use std::path::Path;
use std::process;
const USAGE: &str = "
Print or check BLAKE2 (512-bit) checksums.
With no FILE, or when FILE is -, read standard input.
Usage:
    b2sum [options] [<filename>]...
b2sum (-h | --help)
b2sum --version
Options:
-c, --check read BLAKE2 sums from the FILEs and check them
-l, --length=BITS digest length in bits; must not exceed the maximum for the
blake2 algorithm and must be a multiple of 8 [default: 512]
--tag create a BSD-style checksum
The following five options are useful only when verifying checksums:
--ignore-missing don't fail or report status for missing files
--quiet don't print OK for each successfully verified file
--status don't output anything, status code shows success
--strict exit non-zero for improperly formatted checksum lines
-w, --warn warn about improperly formatted checksum lines
-h, --help display this help and exit
--version output version information and exit
The sums are computed as described in RFC 7693. When checking, the input
should be a former output of this program. The default mode is to print
a line with checksum and name for each FILE.
";
#[derive(Debug, Deserialize)]
struct Args {
arg_filename: Vec<String>,
flag_check: bool,
flag_ignore_missing: bool,
flag_length: usize,
flag_quiet: bool,
flag_status: bool,
flag_strict: bool,
flag_tag: bool,
flag_version: bool,
flag_warn: bool,
}
fn print_version() -> ! {
let version = env!("CARGO_PKG_VERSION");
println!("b2sum-rs {}", version);
process::exit(0)
}
fn hash_reader<R>(length: usize, mut reader: R) -> eyre::Result<String>
where
R: BufRead,
{
let mut digest = blake2b_simd::Params::new().hash_length(length).to_state();
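    // Hash the input in streaming fashion: fill_buf() borrows the next chunk, update() feeds it to the digest, and consume() marks it as read.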
loop {
let count = {
let data = reader.fill_buf()?;
if data.is_empty() {
break;
}
digest.update(data);
data.len()
};
reader.consume(count);
}
let output = digest.finalize();
let result = output.to_hex().to_ascii_lowercase();
Ok(result)
}
fn hash_file<P>(length: usize, path: P) -> eyre::Result<String>
where
P: AsRef<Path>,
{
let file = File::open(path)?;
let reader = BufReader::new(file);
hash_reader(length, reader)
}
fn split_check_line(line: &str) -> eyre::Result<(&str, &str)> {
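    // A check line is "<hex digest><two separator characters><filename>", as printed by this program; any two separator characters are accepted.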
    let hash_length = line.chars().position(|c| !c.is_digit(16)).unwrap_or(0);
    if hash_length < 2 || hash_length % 2 != 0 || hash_length > 128 {
bail!("Invalid hash length: {}", hash_length);
}
let hash = &line[0..hash_length];
let line = &line[hash_length..];
if line.len() < 3 {
bail!("Malformed line");
}
let filename = &line[2..];
Ok((hash, filename))
}
fn check_input<R>(args: &Args, check_filename: &str, reader: R) -> eyre::Result<bool>
where
R: BufRead,
{
    let print_result = !(args.flag_quiet || args.flag_status);
let mut errors = false;
for (i, line) in reader.lines().enumerate() {
let line = line?;
let line = line.trim();
if line.starts_with('#') {
continue;
}
let (hash, filename) = match split_check_line(line) {
Ok((hash, filename)) => (hash, filename),
Err(e) => {
if args.flag_strict {
errors = true;
}
if args.flag_warn {
println!("{}:{}: {}", check_filename, i + 1, e)
}
continue;
}
};
let length = hash.len() / 2;
let calculated_hash = match hash_file(length, filename) {
Ok(h) => h,
Err(e) => {
if let Some(io_err) = e.downcast_ref::<io::Error>() {
if io_err.kind() == io::ErrorKind::NotFound && args.flag_ignore_missing {
continue;
}
}
errors = true;
                if !args.flag_status {
println!("{}: FAILED {}", filename, e);
}
continue;
}
};
let matched = hash == calculated_hash;
        if !matched {
errors = true;
}
if print_result {
print!("{}: ", filename);
if matched {
println!("OK");
} else {
println!("FAILED");
}
}
}
Ok(errors)
}
fn check_args(args: Args) -> eyre::Result<i32> {
let filename = args.arg_filename[0].as_str();
let errors = if filename == "-" {
let stdin = io::stdin();
check_input(&args, filename, stdin.lock())?
} else {
let file = File::open(filename)?;
let reader = BufReader::new(file);
check_input(&args, filename, reader)?
};
let code = if errors { 1 } else { 0 };
Ok(code)
}
fn hash_args(args: Args) -> eyre::Result<i32> {
let length = args.flag_length / 8;
for filename in args.arg_filename {
let hash = if filename == "-" {
let stdin = io::stdin();
hash_reader(length, stdin.lock())?
} else {
hash_file(length, &filename)?
};
if args.flag_tag {
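            // BSD-style line, e.g. "BLAKE2b-256 (file.txt) = <hex digest>"; the bit length is omitted for the default 512.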
print!("BLAKE2b");
if args.flag_length < 512 {
print!("-{}", args.flag_length);
}
println!(" ({}) = {}", filename, hash);
} else {
println!("{} {}", hash, filename);
}
}
Ok(0)
}
fn main() -> eyre::Result<()> {
stable_eyre::install()?;
let mut args: Args = Docopt::new(USAGE).and_then(|d| d.deserialize()).unwrap_or_else(|e| e.exit());
if args.flag_version {
print_version();
}
if args.arg_filename.is_empty() {
args.arg_filename.push("-".to_string());
}
let result = if args.flag_check { check_args(args)? } else { hash_args(args)? };
process::exit(result)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn split_check_line_with_valid_line() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534 test";
let (hash, filename) = split_check_line(line).unwrap();
assert_eq!(
"c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534",
hash
);
assert_eq!("test", filename);
}
#[test]
fn split_check_line_with_truncated_line() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534 ";
let result = split_check_line(line).unwrap_err();
assert_eq!("Malformed line", result.to_string());
}
#[test]
fn split_check_line_with_missing_filename() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534 ";
let result = split_check_line(line).unwrap_err();
assert_eq!("Malformed line", result.to_string());
}
#[test]
fn split_check_line_with_too_small_hash() {
let line = "c test";
let result = split_check_line(line).unwrap_err();
assert_eq!("Invalid hash length: 1", result.to_string());
}
#[test]
fn split_check_line_with_too_long_hash() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea2353400 test";
let result = split_check_line(line).unwrap_err();
assert_eq!("Invalid hash length: 130", result.to_string());
}
#[test]
fn split_check_line_with_non_even_hash() {
let line = "c0ae0 test";
let result = split_check_line(line).unwrap_err();
assert_eq!("Invalid hash length: 5", result.to_string());
}
#[test]
fn test_hash_formatting() |
}
| {
let expected = "7ea59e7a000ec003846b6607dfd5f9217b681dc1a81b0789b464c3995105d93083f7f0a86fca01a1bed27e9f9303ae58d01746e3b20443480bea56198e65bfc5";
assert_eq!(expected, hash_reader(64, "hi\n".as_bytes()).unwrap());
} | identifier_body |
main.rs | use docopt::Docopt;
use eyre::bail;
use serde::Deserialize;
use std::fs::File;
use std::io::{self, BufRead, BufReader};
use std::path::Path;
use std::process;
const USAGE: &str = "
Print or check BLAKE2 (512-bit) checksums.
With no FILE, or when FILE is -, read standard input.
Usage:
    b2sum [options] [<filename>]...
b2sum (-h | --help)
b2sum --version
Options:
-c, --check read BLAKE2 sums from the FILEs and check them
-l, --length=BITS digest length in bits; must not exceed the maximum for the
blake2 algorithm and must be a multiple of 8 [default: 512]
--tag create a BSD-style checksum
The following five options are useful only when verifying checksums:
--ignore-missing don't fail or report status for missing files
--quiet don't print OK for each successfully verified file
--status don't output anything, status code shows success
--strict exit non-zero for improperly formatted checksum lines
-w, --warn warn about improperly formatted checksum lines
-h, --help display this help and exit
--version output version information and exit
The sums are computed as described in RFC 7693. When checking, the input
should be a former output of this program. The default mode is to print
a line with checksum and name for each FILE.
";
#[derive(Debug, Deserialize)]
struct Args {
arg_filename: Vec<String>,
flag_check: bool,
flag_ignore_missing: bool,
flag_length: usize,
flag_quiet: bool,
flag_status: bool,
flag_strict: bool,
flag_tag: bool,
flag_version: bool,
flag_warn: bool,
}
fn print_version() -> ! {
let version = env!("CARGO_PKG_VERSION");
println!("b2sum-rs {}", version);
process::exit(0)
}
fn hash_reader<R>(length: usize, mut reader: R) -> eyre::Result<String>
where
R: BufRead,
{
let mut digest = blake2b_simd::Params::new().hash_length(length).to_state();
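    // Hash the input in streaming fashion: fill_buf() borrows the next chunk, update() feeds it to the digest, and consume() marks it as read.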
loop {
let count = {
let data = reader.fill_buf()?;
if data.is_empty() {
break;
}
digest.update(data);
data.len()
};
reader.consume(count);
}
let output = digest.finalize();
let result = output.to_hex().to_ascii_lowercase();
Ok(result)
}
fn hash_file<P>(length: usize, path: P) -> eyre::Result<String>
where
P: AsRef<Path>,
{
let file = File::open(path)?;
let reader = BufReader::new(file);
hash_reader(length, reader)
}
fn split_check_line(line: &str) -> eyre::Result<(&str, &str)> {
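    // A check line is "<hex digest><two separator characters><filename>", as printed by this program; any two separator characters are accepted.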
    let hash_length = line.chars().position(|c| !c.is_digit(16)).unwrap_or(0);
    if hash_length < 2 || hash_length % 2 != 0 || hash_length > 128 {
bail!("Invalid hash length: {}", hash_length);
}
let hash = &line[0..hash_length];
let line = &line[hash_length..];
if line.len() < 3 {
bail!("Malformed line");
}
let filename = &line[2..];
Ok((hash, filename))
}
fn check_input<R>(args: &Args, check_filename: &str, reader: R) -> eyre::Result<bool>
where
R: BufRead,
{
    let print_result = !(args.flag_quiet || args.flag_status);
let mut errors = false;
for (i, line) in reader.lines().enumerate() {
let line = line?;
let line = line.trim();
if line.starts_with('#') {
continue;
}
let (hash, filename) = match split_check_line(line) {
Ok((hash, filename)) => (hash, filename),
Err(e) => {
if args.flag_strict {
errors = true;
}
if args.flag_warn {
println!("{}:{}: {}", check_filename, i + 1, e)
}
continue;
}
};
let length = hash.len() / 2;
let calculated_hash = match hash_file(length, filename) {
Ok(h) => h,
Err(e) => {
if let Some(io_err) = e.downcast_ref::<io::Error>() {
if io_err.kind() == io::ErrorKind::NotFound && args.flag_ignore_missing {
continue;
}
}
errors = true;
                if !args.flag_status {
println!("{}: FAILED {}", filename, e);
}
continue;
}
};
let matched = hash == calculated_hash;
        if !matched {
errors = true;
}
if print_result {
print!("{}: ", filename);
if matched {
println!("OK");
} else { | Ok(errors)
}
fn check_args(args: Args) -> eyre::Result<i32> {
let filename = args.arg_filename[0].as_str();
let errors = if filename == "-" {
let stdin = io::stdin();
check_input(&args, filename, stdin.lock())?
} else {
let file = File::open(filename)?;
let reader = BufReader::new(file);
check_input(&args, filename, reader)?
};
let code = if errors { 1 } else { 0 };
Ok(code)
}
fn hash_args(args: Args) -> eyre::Result<i32> {
let length = args.flag_length / 8;
for filename in args.arg_filename {
let hash = if filename == "-" {
let stdin = io::stdin();
hash_reader(length, stdin.lock())?
} else {
hash_file(length, &filename)?
};
if args.flag_tag {
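            // BSD-style line, e.g. "BLAKE2b-256 (file.txt) = <hex digest>"; the bit length is omitted for the default 512.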
print!("BLAKE2b");
if args.flag_length < 512 {
print!("-{}", args.flag_length);
}
println!(" ({}) = {}", filename, hash);
} else {
println!("{} {}", hash, filename);
}
}
Ok(0)
}
fn main() -> eyre::Result<()> {
stable_eyre::install()?;
let mut args: Args = Docopt::new(USAGE).and_then(|d| d.deserialize()).unwrap_or_else(|e| e.exit());
if args.flag_version {
print_version();
}
if args.arg_filename.is_empty() {
args.arg_filename.push("-".to_string());
}
let result = if args.flag_check { check_args(args)? } else { hash_args(args)? };
process::exit(result)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn split_check_line_with_valid_line() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534 test";
let (hash, filename) = split_check_line(line).unwrap();
assert_eq!(
"c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534",
hash
);
assert_eq!("test", filename);
}
#[test]
fn split_check_line_with_truncated_line() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534 ";
let result = split_check_line(line).unwrap_err();
assert_eq!("Malformed line", result.to_string());
}
#[test]
fn split_check_line_with_missing_filename() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534 ";
let result = split_check_line(line).unwrap_err();
assert_eq!("Malformed line", result.to_string());
}
#[test]
fn split_check_line_with_too_small_hash() {
let line = "c test";
let result = split_check_line(line).unwrap_err();
assert_eq!("Invalid hash length: 1", result.to_string());
}
#[test]
fn split_check_line_with_too_long_hash() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea2353400 test";
let result = split_check_line(line).unwrap_err();
assert_eq!("Invalid hash length: 130", result.to_string());
}
#[test]
fn split_check_line_with_non_even_hash() {
let line = "c0ae0 test";
let result = split_check_line(line).unwrap_err();
assert_eq!("Invalid hash length: 5", result.to_string());
}
#[test]
fn test_hash_formatting() {
let expected = "7ea59e7a000ec003846b6607dfd5f9217b681dc1a81b0789b464c3995105d93083f7f0a86fca01a1bed27e9f9303ae58d01746e3b20443480bea56198e65bfc5";
assert_eq!(expected, hash_reader(64, "hi\n".as_bytes()).unwrap());
}
} | println!("FAILED");
}
}
}
| random_line_split |
main.rs | use docopt::Docopt;
use eyre::bail;
use serde::Deserialize;
use std::fs::File;
use std::io::{self, BufRead, BufReader};
use std::path::Path;
use std::process;
const USAGE: &str = "
Print or check BLAKE2 (512-bit) checksums.
With no FILE, or when FILE is -, read standard input.
Usage:
    b2sum [options] [<filename>]...
b2sum (-h | --help)
b2sum --version
Options:
-c, --check read BLAKE2 sums from the FILEs and check them
-l, --length=BITS digest length in bits; must not exceed the maximum for the
blake2 algorithm and must be a multiple of 8 [default: 512]
--tag create a BSD-style checksum
The following five options are useful only when verifying checksums:
--ignore-missing don't fail or report status for missing files
--quiet don't print OK for each successfully verified file
--status don't output anything, status code shows success
--strict exit non-zero for improperly formatted checksum lines
-w, --warn warn about improperly formatted checksum lines
-h, --help display this help and exit
--version output version information and exit
The sums are computed as described in RFC 7693. When checking, the input
should be a former output of this program. The default mode is to print
a line with checksum and name for each FILE.
";
#[derive(Debug, Deserialize)]
struct Args {
arg_filename: Vec<String>,
flag_check: bool,
flag_ignore_missing: bool,
flag_length: usize,
flag_quiet: bool,
flag_status: bool,
flag_strict: bool,
flag_tag: bool,
flag_version: bool,
flag_warn: bool,
}
fn print_version() -> ! {
let version = env!("CARGO_PKG_VERSION");
println!("b2sum-rs {}", version);
process::exit(0)
}
fn hash_reader<R>(length: usize, mut reader: R) -> eyre::Result<String>
where
R: BufRead,
{
let mut digest = blake2b_simd::Params::new().hash_length(length).to_state();
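    // Hash the input in streaming fashion: fill_buf() borrows the next chunk, update() feeds it to the digest, and consume() marks it as read.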
loop {
let count = {
let data = reader.fill_buf()?;
if data.is_empty() {
break;
}
digest.update(data);
data.len()
};
reader.consume(count);
}
let output = digest.finalize();
let result = output.to_hex().to_ascii_lowercase();
Ok(result)
}
fn hash_file<P>(length: usize, path: P) -> eyre::Result<String>
where
P: AsRef<Path>,
{
let file = File::open(path)?;
let reader = BufReader::new(file);
hash_reader(length, reader)
}
fn split_check_line(line: &str) -> eyre::Result<(&str, &str)> {
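    // A check line is "<hex digest><two separator characters><filename>", as printed by this program; any two separator characters are accepted.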
    let hash_length = line.chars().position(|c| !c.is_digit(16)).unwrap_or(0);
    if hash_length < 2 || hash_length % 2 != 0 || hash_length > 128 {
bail!("Invalid hash length: {}", hash_length);
}
let hash = &line[0..hash_length];
let line = &line[hash_length..];
if line.len() < 3 {
bail!("Malformed line");
}
let filename = &line[2..];
Ok((hash, filename))
}
fn check_input<R>(args: &Args, check_filename: &str, reader: R) -> eyre::Result<bool>
where
R: BufRead,
{
    let print_result = !(args.flag_quiet || args.flag_status);
let mut errors = false;
for (i, line) in reader.lines().enumerate() {
let line = line?;
let line = line.trim();
if line.starts_with('#') {
continue;
}
let (hash, filename) = match split_check_line(line) {
Ok((hash, filename)) => (hash, filename),
Err(e) => {
if args.flag_strict {
errors = true;
}
if args.flag_warn {
println!("{}:{}: {}", check_filename, i + 1, e)
}
continue;
}
};
let length = hash.len() / 2;
let calculated_hash = match hash_file(length, filename) {
Ok(h) => h,
Err(e) => {
if let Some(io_err) = e.downcast_ref::<io::Error>() {
if io_err.kind() == io::ErrorKind::NotFound && args.flag_ignore_missing {
continue;
}
}
errors = true;
                if !args.flag_status {
println!("{}: FAILED {}", filename, e);
}
continue;
}
};
let matched = hash == calculated_hash;
        if !matched {
errors = true;
}
if print_result {
print!("{}: ", filename);
if matched {
println!("OK");
} else {
println!("FAILED");
}
}
}
Ok(errors)
}
fn | (args: Args) -> eyre::Result<i32> {
let filename = args.arg_filename[0].as_str();
let errors = if filename == "-" {
let stdin = io::stdin();
check_input(&args, filename, stdin.lock())?
} else {
let file = File::open(filename)?;
let reader = BufReader::new(file);
check_input(&args, filename, reader)?
};
let code = if errors { 1 } else { 0 };
Ok(code)
}
fn hash_args(args: Args) -> eyre::Result<i32> {
let length = args.flag_length / 8;
for filename in args.arg_filename {
let hash = if filename == "-" {
let stdin = io::stdin();
hash_reader(length, stdin.lock())?
} else {
hash_file(length, &filename)?
};
if args.flag_tag {
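            // BSD-style line, e.g. "BLAKE2b-256 (file.txt) = <hex digest>"; the bit length is omitted for the default 512.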
print!("BLAKE2b");
if args.flag_length < 512 {
print!("-{}", args.flag_length);
}
println!(" ({}) = {}", filename, hash);
} else {
println!("{} {}", hash, filename);
}
}
Ok(0)
}
fn main() -> eyre::Result<()> {
stable_eyre::install()?;
let mut args: Args = Docopt::new(USAGE).and_then(|d| d.deserialize()).unwrap_or_else(|e| e.exit());
if args.flag_version {
print_version();
}
if args.arg_filename.is_empty() {
args.arg_filename.push("-".to_string());
}
let result = if args.flag_check { check_args(args)? } else { hash_args(args)? };
process::exit(result)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn split_check_line_with_valid_line() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534 test";
let (hash, filename) = split_check_line(line).unwrap();
assert_eq!(
"c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534",
hash
);
assert_eq!("test", filename);
}
#[test]
fn split_check_line_with_truncated_line() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534 ";
let result = split_check_line(line).unwrap_err();
assert_eq!("Malformed line", result.to_string());
}
#[test]
fn split_check_line_with_missing_filename() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534 ";
let result = split_check_line(line).unwrap_err();
assert_eq!("Malformed line", result.to_string());
}
#[test]
fn split_check_line_with_too_small_hash() {
let line = "c test";
let result = split_check_line(line).unwrap_err();
assert_eq!("Invalid hash length: 1", result.to_string());
}
#[test]
fn split_check_line_with_too_long_hash() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea2353400 test";
let result = split_check_line(line).unwrap_err();
assert_eq!("Invalid hash length: 130", result.to_string());
}
#[test]
fn split_check_line_with_non_even_hash() {
let line = "c0ae0 test";
let result = split_check_line(line).unwrap_err();
assert_eq!("Invalid hash length: 5", result.to_string());
}
#[test]
fn test_hash_formatting() {
let expected = "7ea59e7a000ec003846b6607dfd5f9217b681dc1a81b0789b464c3995105d93083f7f0a86fca01a1bed27e9f9303ae58d01746e3b20443480bea56198e65bfc5";
assert_eq!(expected, hash_reader(64, "hi\n".as_bytes()).unwrap());
}
}
| check_args | identifier_name |
main.rs | use docopt::Docopt;
use eyre::bail;
use serde::Deserialize;
use std::fs::File;
use std::io::{self, BufRead, BufReader};
use std::path::Path;
use std::process;
const USAGE: &str = "
Print or check BLAKE2 (512-bit) checksums.
With no FILE, or when FILE is -, read standard input.
Usage:
    b2sum [options] [<filename>]...
b2sum (-h | --help)
b2sum --version
Options:
-c, --check read BLAKE2 sums from the FILEs and check them
-l, --length=BITS digest length in bits; must not exceed the maximum for the
blake2 algorithm and must be a multiple of 8 [default: 512]
--tag create a BSD-style checksum
The following five options are useful only when verifying checksums:
--ignore-missing don't fail or report status for missing files
--quiet don't print OK for each successfully verified file
--status don't output anything, status code shows success
--strict exit non-zero for improperly formatted checksum lines
-w, --warn warn about improperly formatted checksum lines
-h, --help display this help and exit
--version output version information and exit
The sums are computed as described in RFC 7693. When checking, the input
should be a former output of this program. The default mode is to print
a line with checksum and name for each FILE.
";
#[derive(Debug, Deserialize)]
struct Args {
arg_filename: Vec<String>,
flag_check: bool,
flag_ignore_missing: bool,
flag_length: usize,
flag_quiet: bool,
flag_status: bool,
flag_strict: bool,
flag_tag: bool,
flag_version: bool,
flag_warn: bool,
}
fn print_version() -> ! {
let version = env!("CARGO_PKG_VERSION");
println!("b2sum-rs {}", version);
process::exit(0)
}
fn hash_reader<R>(length: usize, mut reader: R) -> eyre::Result<String>
where
R: BufRead,
{
let mut digest = blake2b_simd::Params::new().hash_length(length).to_state();
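    // Hash the input in streaming fashion: fill_buf() borrows the next chunk, update() feeds it to the digest, and consume() marks it as read.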
loop {
let count = {
let data = reader.fill_buf()?;
if data.is_empty() {
break;
}
digest.update(data);
data.len()
};
reader.consume(count);
}
let output = digest.finalize();
let result = output.to_hex().to_ascii_lowercase();
Ok(result)
}
fn hash_file<P>(length: usize, path: P) -> eyre::Result<String>
where
P: AsRef<Path>,
{
let file = File::open(path)?;
let reader = BufReader::new(file);
hash_reader(length, reader)
}
fn split_check_line(line: &str) -> eyre::Result<(&str, &str)> {
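    // A check line is "<hex digest><two separator characters><filename>", as printed by this program; any two separator characters are accepted.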
    let hash_length = line.chars().position(|c| !c.is_digit(16)).unwrap_or(0);
    if hash_length < 2 || hash_length % 2 != 0 || hash_length > 128 {
bail!("Invalid hash length: {}", hash_length);
}
let hash = &line[0..hash_length];
let line = &line[hash_length..];
if line.len() < 3 {
bail!("Malformed line");
}
let filename = &line[2..];
Ok((hash, filename))
}
fn check_input<R>(args: &Args, check_filename: &str, reader: R) -> eyre::Result<bool>
where
R: BufRead,
{
    let print_result = !(args.flag_quiet || args.flag_status);
let mut errors = false;
for (i, line) in reader.lines().enumerate() {
let line = line?;
let line = line.trim();
if line.starts_with('#') {
continue;
}
let (hash, filename) = match split_check_line(line) {
Ok((hash, filename)) => (hash, filename),
Err(e) => {
if args.flag_strict {
errors = true;
}
if args.flag_warn {
println!("{}:{}: {}", check_filename, i + 1, e)
}
continue;
}
};
let length = hash.len() / 2;
let calculated_hash = match hash_file(length, filename) {
Ok(h) => h,
Err(e) => {
if let Some(io_err) = e.downcast_ref::<io::Error>() {
if io_err.kind() == io::ErrorKind::NotFound && args.flag_ignore_missing {
continue;
}
}
errors = true;
                if !args.flag_status {
println!("{}: FAILED {}", filename, e);
}
continue;
}
};
let matched = hash == calculated_hash;
        if !matched {
errors = true;
}
if print_result {
print!("{}: ", filename);
if matched {
println!("OK");
} else {
println!("FAILED");
}
}
}
Ok(errors)
}
fn check_args(args: Args) -> eyre::Result<i32> {
let filename = args.arg_filename[0].as_str();
let errors = if filename == "-" {
let stdin = io::stdin();
check_input(&args, filename, stdin.lock())?
} else {
let file = File::open(filename)?;
let reader = BufReader::new(file);
check_input(&args, filename, reader)?
};
let code = if errors { 1 } else { 0 };
Ok(code)
}
fn hash_args(args: Args) -> eyre::Result<i32> {
let length = args.flag_length / 8;
for filename in args.arg_filename {
let hash = if filename == "-" {
let stdin = io::stdin();
hash_reader(length, stdin.lock())?
} else {
hash_file(length, &filename)?
};
if args.flag_tag {
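            // BSD-style line, e.g. "BLAKE2b-256 (file.txt) = <hex digest>"; the bit length is omitted for the default 512.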
print!("BLAKE2b");
if args.flag_length < 512 {
print!("-{}", args.flag_length);
}
println!(" ({}) = {}", filename, hash);
} else {
println!("{} {}", hash, filename);
}
}
Ok(0)
}
fn main() -> eyre::Result<()> {
stable_eyre::install()?;
let mut args: Args = Docopt::new(USAGE).and_then(|d| d.deserialize()).unwrap_or_else(|e| e.exit());
if args.flag_version {
print_version();
}
if args.arg_filename.is_empty() {
args.arg_filename.push("-".to_string());
}
let result = if args.flag_check { check_args(args)? } else | ;
process::exit(result)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn split_check_line_with_valid_line() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534 test";
let (hash, filename) = split_check_line(line).unwrap();
assert_eq!(
"c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534",
hash
);
assert_eq!("test", filename);
}
#[test]
fn split_check_line_with_truncated_line() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534 ";
let result = split_check_line(line).unwrap_err();
assert_eq!("Malformed line", result.to_string());
}
#[test]
fn split_check_line_with_missing_filename() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea23534 ";
let result = split_check_line(line).unwrap_err();
assert_eq!("Malformed line", result.to_string());
}
#[test]
fn split_check_line_with_too_small_hash() {
let line = "c test";
let result = split_check_line(line).unwrap_err();
assert_eq!("Invalid hash length: 1", result.to_string());
}
#[test]
fn split_check_line_with_too_long_hash() {
let line = "c0ae24f806df19d850565b234bc37afd5035e7536388290db9413c98578394313f38b093143ecfbc208425d54b9bfef0d9917a9e93910f7914a97e73fea2353400 test";
let result = split_check_line(line).unwrap_err();
assert_eq!("Invalid hash length: 130", result.to_string());
}
#[test]
fn split_check_line_with_non_even_hash() {
let line = "c0ae0 test";
let result = split_check_line(line).unwrap_err();
assert_eq!("Invalid hash length: 5", result.to_string());
}
#[test]
fn test_hash_formatting() {
let expected = "7ea59e7a000ec003846b6607dfd5f9217b681dc1a81b0789b464c3995105d93083f7f0a86fca01a1bed27e9f9303ae58d01746e3b20443480bea56198e65bfc5";
assert_eq!(expected, hash_reader(64, "hi\n".as_bytes()).unwrap());
}
}
| { hash_args(args)? } | conditional_block |
simple-struct.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-win32 Broken because of LLVM bug: http://llvm.org/bugs/show_bug.cgi?id=16249
// compile-flags:-Z extra-debug-info
// debugger:set print pretty off
// debugger:break zzz
// debugger:run
// debugger:finish
// debugger:print no_padding16
// check:$1 = {x = 10000, y = -10001}
// debugger:print no_padding32
// check:$2 = {x = -10002, y = -10003.5, z = 10004}
// debugger:print no_padding64
// check:$3 = {x = -10005.5, y = 10006, z = 10007}
// debugger:print no_padding163264
// check:$4 = {a = -10008, b = 10009, c = 10010, d = 10011}
|
// debugger:print padding_at_end
// check:$6 = {x = -10014, y = 10015}
struct NoPadding16 {
x: u16,
y: i16
}
struct NoPadding32 {
x: i32,
y: f32,
z: u32
}
struct NoPadding64 {
x: f64,
y: i64,
z: u64
}
struct NoPadding163264 {
a: i16,
b: u16,
c: i32,
d: u64
}
struct InternalPadding {
x: u16,
y: i64
}
struct PaddingAtEnd {
x: i64,
y: u16
}
fn main() {
let no_padding16 = NoPadding16 { x: 10000, y: -10001 };
let no_padding32 = NoPadding32 { x: -10002, y: -10003.5, z: 10004 };
let no_padding64 = NoPadding64 { x: -10005.5, y: 10006, z: 10007 };
let no_padding163264 = NoPadding163264 { a: -10008, b: 10009, c: 10010, d: 10011 };
let internal_padding = InternalPadding { x: 10012, y: -10013 };
let padding_at_end = PaddingAtEnd { x: -10014, y: 10015 };
zzz();
}
fn zzz() {()} | // debugger:print internal_padding
// check:$5 = {x = 10012, y = -10013} | random_line_split |
simple-struct.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// xfail-win32 Broken because of LLVM bug: http://llvm.org/bugs/show_bug.cgi?id=16249
// compile-flags:-Z extra-debug-info
// debugger:set print pretty off
// debugger:break zzz
// debugger:run
// debugger:finish
// debugger:print no_padding16
// check:$1 = {x = 10000, y = -10001}
// debugger:print no_padding32
// check:$2 = {x = -10002, y = -10003.5, z = 10004}
// debugger:print no_padding64
// check:$3 = {x = -10005.5, y = 10006, z = 10007}
// debugger:print no_padding163264
// check:$4 = {a = -10008, b = 10009, c = 10010, d = 10011}
// debugger:print internal_padding
// check:$5 = {x = 10012, y = -10013}
// debugger:print padding_at_end
// check:$6 = {x = -10014, y = 10015}
struct NoPadding16 {
x: u16,
y: i16
}
struct NoPadding32 {
x: i32,
y: f32,
z: u32
}
struct NoPadding64 {
x: f64,
y: i64,
z: u64
}
struct NoPadding163264 {
a: i16,
b: u16,
c: i32,
d: u64
}
struct InternalPadding {
x: u16,
y: i64
}
struct | {
x: i64,
y: u16
}
fn main() {
let no_padding16 = NoPadding16 { x: 10000, y: -10001 };
let no_padding32 = NoPadding32 { x: -10002, y: -10003.5, z: 10004 };
let no_padding64 = NoPadding64 { x: -10005.5, y: 10006, z: 10007 };
let no_padding163264 = NoPadding163264 { a: -10008, b: 10009, c: 10010, d: 10011 };
let internal_padding = InternalPadding { x: 10012, y: -10013 };
let padding_at_end = PaddingAtEnd { x: -10014, y: 10015 };
zzz();
}
fn zzz() {()} | PaddingAtEnd | identifier_name |
lib.rs | // The MIT License (MIT)
//
// Copyright (c) 2016 Marvin Böcker
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! This crate is a simple digital signature crate and can be used to verify data integrity by
//! using public-key cryptography. It uses the "super-fast, super-secure" elliptic curve and
//! digital signature algorithm [Ed25519](https://ed25519.cr.yp.to/).
//!
//! It provides the struct `Certificate`, which holds the public key, metadata and a signature.
//!
//! # The basics
//! A `Certificate` can be signed by a master key, or another `Certificate`. The top-most `Certificate`
//! must be signed with the master key, or it will not be valid. For validation, the master public
//! key will be given. This way, a `Certificate` can only be valid, if it has been signed with a
//! trust chain, which top-most `Certificate` has been signed with the right private key.
//!
//! See also [here](https://en.wikipedia.org/wiki/EdDSA).
//!
//! ## Other crates
//!
//! To use the edcert ecosystem, there are a few other crates to make your life simpler:
//!
//! - [edcert-letter](https://crates.io/crates/edcert-letter), which provides a container for
//! signed data, Letter<T>.
//! - [edcert-restrevoke](https://crates.io/crates/edcert-restrevoke), which provides a REST-based
//! revokation system.
//! - [edcert-compressor](https://crates.io/crates/edcert-compressor), which provides methods to
//! (de)compress `Certificate`s using JSON/LZMA and manages loading/saving certificates for you.
//! - [edcert-tools](https://crates.io/crates/edcert-tools), which provides a binary for
//! generation, signing, validation, etc using edcert (and all of the above).
#![deny(missing_docs)]
// External crates.
extern crate chrono;
extern crate rustc_serialize;
extern crate sodiumoxide;
// Internal modules
mod bytescontainer;
pub mod ed25519;
// External modules
pub mod certificate;
pub mod fingerprint;
pub mod meta;
pub mod signature;
// Validation
pub mod validator;
pub mod root_validator;
pub mod trust_validator;
// Revokation
pub mod revoker;
/// This is a simple copy function. This should be equivalent to memcpy.
pub fn copy_bytes(dest: &mut [u8], src: &[u8], start_dest: usize, start_src: usize, len: usize) {
let end_dest = start_dest + len;
let end_src = start_src + len;
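    // Like the slice indexing it relies on, this panics if either range runs past the end of its slice.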
dest[start_dest..end_dest].copy_from_slice(&src[start_src..end_src]);
}
#[test]
fn t | ) {
use chrono::Timelike;
use chrono::UTC;
use chrono::duration::Duration;
use meta::Meta;
use certificate::Certificate;
use validator::Validator;
use root_validator::RootValidator;
use trust_validator::TrustValidator;
use revoker::NoRevoker;
use fingerprint::Fingerprint;
// create random master key
let (mpk, msk) = ed25519::generate_keypair();
// create random certificate
let meta = Meta::new_empty();
let expires = UTC::now()
.checked_add(Duration::days(90))
.expect("Failed to add 90 days to expiration date.")
.with_nanosecond(0)
.unwrap();
let mut cert = Certificate::generate_random(meta, expires);
// sign certificate with master key
cert.sign_with_master(&msk);
// we can use a RootValidator, which analyzes the trust chain.
// in this case, the top-most certificate must be signed with the right private key for mpk.
let cv = RootValidator::new(&mpk, NoRevoker);
// now we use the CV to validate certificates
assert_eq!(true, cv.is_valid(&cert).is_ok());
// we could also use a TrustValidator. It's like RootValidator, but you can also give trusted
// certificates. If the chain contains one of these, the upper certificates aren't checked
// with the master public key. We can give any 32 byte key here, it doesn't matter.
let mut tcv = TrustValidator::new(NoRevoker);
tcv.add_trusted_certificates(vec![cert.fingerprint()]);
// even though we gave a wrong master key, this certificate is valid, because it is trusted.
assert_eq!(true, tcv.is_valid(&cert).is_ok());
// now we sign data with it
let data = [1; 42];
// and sign the data with the certificate
let signature = cert.sign(&data)
.expect("This fails, if no private key is known to the certificate.");
// the signature must be valid
assert_eq!(true, cert.verify(&data, &signature));
}
| est_readme_example( | identifier_name |
lib.rs | // The MIT License (MIT)
//
// Copyright (c) 2016 Marvin Böcker
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! This crate is a simple digital signature crate and can be used to verify data integrity by
//! using public-key cryptography. It uses the "super-fast, super-secure" elliptic curve and
//! digital signature algorithm [Ed25519](https://ed25519.cr.yp.to/).
//!
//! It provides the struct `Certificate`, which holds the public key, metadata and a signature.
//!
//! # The basics
//! A `Certificate` can be signed by a master key, or another `Certificate`. The top-most `Certificate`
//! must be signed with the master key, or it will not be valid. For validation, the master public
//! key will be given. This way, a `Certificate` can only be valid, if it has been signed with a
//! trust chain, which top-most `Certificate` has been signed with the right private key.
//!
//! See also [here](https://en.wikipedia.org/wiki/EdDSA).
//!
//! ## Other crates
//!
//! To use the edcert ecosystem, there are a few other crates to make your life simpler:
//!
//! - [edcert-letter](https://crates.io/crates/edcert-letter), which provides a container for
//! signed data, Letter<T>.
//! - [edcert-restrevoke](https://crates.io/crates/edcert-restrevoke), which provides a REST-based
//! revokation system.
//! - [edcert-compressor](https://crates.io/crates/edcert-compressor), which provides methods to
//! (de)compress `Certificate`s using JSON/LZMA and manages loading/saving certificates for you.
//! - [edcert-tools](https://crates.io/crates/edcert-tools), which provides a binary for
//! generation, signing, validation, etc using edcert (and all of the above).
#![deny(missing_docs)]
// External crates.
extern crate chrono;
extern crate rustc_serialize;
extern crate sodiumoxide;
// Internal modules
mod bytescontainer;
pub mod ed25519;
// External modules
pub mod certificate;
pub mod fingerprint;
pub mod meta;
pub mod signature;
// Validation
pub mod validator;
pub mod root_validator;
pub mod trust_validator;
// Revokation
pub mod revoker;
/// This is a simple copy function. This should be equivalent to memcpy.
pub fn copy_bytes(dest: &mut [u8], src: &[u8], start_dest: usize, start_src: usize, len: usize) {
let end_dest = start_dest + len;
let end_src = start_src + len;
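    // Like the slice indexing it relies on, this panics if either range runs past the end of its slice.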
dest[start_dest..end_dest].copy_from_slice(&src[start_src..end_src]);
}
#[test]
fn test_readme_example() { | .with_nanosecond(0)
.unwrap();
let mut cert = Certificate::generate_random(meta, expires);
// sign certificate with master key
cert.sign_with_master(&msk);
// we can use a RootValidator, which analyzes the trust chain.
// in this case, the top-most certificate must be signed with the right private key for mpk.
let cv = RootValidator::new(&mpk, NoRevoker);
// now we use the CV to validate certificates
assert_eq!(true, cv.is_valid(&cert).is_ok());
// we could also use a TrustValidator. It's like RootValidator, but you can also give trusted
// certificates. If the chain contains one of these, the upper certificates aren't checked
// with the master public key. We can give any 32 byte key here, it doesn't matter.
let mut tcv = TrustValidator::new(NoRevoker);
tcv.add_trusted_certificates(vec![cert.fingerprint()]);
// even though we gave a wrong master key, this certificate is valid, because it is trusted.
assert_eq!(true, tcv.is_valid(&cert).is_ok());
// now we sign data with it
let data = [1; 42];
// and sign the data with the certificate
let signature = cert.sign(&data)
.expect("This fails, if no private key is known to the certificate.");
// the signature must be valid
assert_eq!(true, cert.verify(&data, &signature));
}
|
use chrono::Timelike;
use chrono::UTC;
use chrono::duration::Duration;
use meta::Meta;
use certificate::Certificate;
use validator::Validator;
use root_validator::RootValidator;
use trust_validator::TrustValidator;
use revoker::NoRevoker;
use fingerprint::Fingerprint;
// create random master key
let (mpk, msk) = ed25519::generate_keypair();
// create random certificate
let meta = Meta::new_empty();
let expires = UTC::now()
.checked_add(Duration::days(90))
.expect("Failed to add 90 days to expiration date.") | identifier_body |
lib.rs | // The MIT License (MIT)
//
// Copyright (c) 2016 Marvin Böcker
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | //! This crate is a simple digital signature crate and can be used to verify data integrity by
//! using public-key cryptography. It uses the "super-fast, super-secure" elliptic curve and
//! digital signature algorithm [Ed25519](https://ed25519.cr.yp.to/).
//!
//! It provides the struct `Certificate`, which holds the public key, metadata and a signature.
//!
//! # The basics
//! A `Certificate` can be signed by a master key, or another `Certificate`. The top-most `Certificate`
//! must be signed with the master key, or it will not be valid. For validation, the master public
//! key will be given. This way, a `Certificate` can only be valid, if it has been signed with a
//! trust chain, which top-most `Certificate` has been signed with the right private key.
//!
//! See also [here](https://en.wikipedia.org/wiki/EdDSA).
//!
//! ## Other crates
//!
//! To use the edcert ecosystem, there are a few other crates to make your life simpler:
//!
//! - [edcert-letter](https://crates.io/crates/edcert-letter), which provides a container for
//! signed data, Letter<T>.
//! - [edcert-restrevoke](https://crates.io/crates/edcert-restrevoke), which provides a REST-based
//! revokation system.
//! - [edcert-compressor](https://crates.io/crates/edcert-compressor), which provides methods to
//! (de)compress `Certificate`s using JSON/LZMA and manages loading/saving certificates for you.
//! - [edcert-tools](https://crates.io/crates/edcert-tools), which provides a binary for
//! generation, signing, validation, etc using edcert (and all of the above).
#![deny(missing_docs)]
// External crates.
extern crate chrono;
extern crate rustc_serialize;
extern crate sodiumoxide;
// Internal modules
mod bytescontainer;
pub mod ed25519;
// External modules
pub mod certificate;
pub mod fingerprint;
pub mod meta;
pub mod signature;
// Validation
pub mod validator;
pub mod root_validator;
pub mod trust_validator;
// Revokation
pub mod revoker;
/// This is a simple copy function. This should be equivalent to memcpy.
pub fn copy_bytes(dest: &mut [u8], src: &[u8], start_dest: usize, start_src: usize, len: usize) {
let end_dest = start_dest + len;
let end_src = start_src + len;
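    // Like the slice indexing it relies on, this panics if either range runs past the end of its slice.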
dest[start_dest..end_dest].copy_from_slice(&src[start_src..end_src]);
}
#[test]
fn test_readme_example() {
use chrono::Timelike;
use chrono::UTC;
use chrono::duration::Duration;
use meta::Meta;
use certificate::Certificate;
use validator::Validator;
use root_validator::RootValidator;
use trust_validator::TrustValidator;
use revoker::NoRevoker;
use fingerprint::Fingerprint;
// create random master key
let (mpk, msk) = ed25519::generate_keypair();
// create random certificate
let meta = Meta::new_empty();
let expires = UTC::now()
.checked_add(Duration::days(90))
.expect("Failed to add 90 days to expiration date.")
.with_nanosecond(0)
.unwrap();
let mut cert = Certificate::generate_random(meta, expires);
// sign certificate with master key
cert.sign_with_master(&msk);
// we can use a RootValidator, which analyzes the trust chain.
// in this case, the top-most certificate must be signed with the right private key for mpk.
let cv = RootValidator::new(&mpk, NoRevoker);
// now we use the CV to validate certificates
assert_eq!(true, cv.is_valid(&cert).is_ok());
// we could also use a TrustValidator. It's like RootValidator, but you can also give trusted
// certificates. If the chain contains one of these, the upper certificates aren't checked
// with the master public key. We can give any 32-byte key here; it doesn't matter.
let mut tcv = TrustValidator::new(NoRevoker);
tcv.add_trusted_certificates(vec![cert.fingerprint()]);
// even though we gave a wrong master key, this certificate is valid, because it is trusted.
assert_eq!(true, tcv.is_valid(&cert).is_ok());
// now we sign data with it
let data = [1; 42];
// and sign the data with the certificate
let signature = cert.sign(&data)
.expect("This fails, if no private key is known to the certificate.");
// the signature must be valid
assert_eq!(true, cert.verify(&data, &signature));
} | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
| random_line_split |
aliased.rs | use expression::{Expression, NonAggregate, SelectableExpression};
use query_builder::*;
use query_source::*;
#[derive(Debug, Clone, Copy)] | expr: Expr,
alias: &'a str,
}
impl<'a, Expr> Aliased<'a, Expr> {
pub fn new(expr: Expr, alias: &'a str) -> Self {
Aliased {
expr: expr,
alias: alias,
}
}
}
pub struct FromEverywhere;
impl<'a, T> Expression for Aliased<'a, T> where T: Expression {
type SqlType = T::SqlType;
fn to_sql(&self, out: &mut QueryBuilder) -> BuildQueryResult {
out.push_identifier(&self.alias)
}
}
// FIXME This is incorrect, should only be selectable from WithQuerySource
impl<'a, T, QS> SelectableExpression<QS> for Aliased<'a, T> where
Aliased<'a, T>: Expression,
{
}
impl<'a, T: Expression> QuerySource for Aliased<'a, T> {
fn from_clause(&self, out: &mut QueryBuilder) -> BuildQueryResult {
try!(self.expr.to_sql(out));
out.push_sql(" ");
out.push_identifier(&self.alias)
}
}
impl<'a, T> NonAggregate for Aliased<'a, T> where Aliased<'a, T>: Expression {
} | pub struct Aliased<'a, Expr> { | random_line_split |
aliased.rs | use expression::{Expression, NonAggregate, SelectableExpression};
use query_builder::*;
use query_source::*;
#[derive(Debug, Clone, Copy)]
pub struct Aliased<'a, Expr> {
expr: Expr,
alias: &'a str,
}
impl<'a, Expr> Aliased<'a, Expr> {
pub fn new(expr: Expr, alias: &'a str) -> Self {
Aliased {
expr: expr,
alias: alias,
}
}
}
pub struct FromEverywhere;
impl<'a, T> Expression for Aliased<'a, T> where T: Expression {
type SqlType = T::SqlType;
fn | (&self, out: &mut QueryBuilder) -> BuildQueryResult {
out.push_identifier(&self.alias)
}
}
// FIXME This is incorrect, should only be selectable from WithQuerySource
impl<'a, T, QS> SelectableExpression<QS> for Aliased<'a, T> where
Aliased<'a, T>: Expression,
{
}
impl<'a, T: Expression> QuerySource for Aliased<'a, T> {
fn from_clause(&self, out: &mut QueryBuilder) -> BuildQueryResult {
try!(self.expr.to_sql(out));
out.push_sql(" ");
out.push_identifier(&self.alias)
}
}
impl<'a, T> NonAggregate for Aliased<'a, T> where Aliased<'a, T>: Expression {
}
| to_sql | identifier_name |
font_context.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use font::{Font, FontGroup};
use font::SpecifiedFontStyle;
use platform::font_context::FontContextHandle;
use style::computed_values::font_style;
use font_cache_task::FontCacheTask;
use font_template::FontTemplateDescriptor;
use platform::font_template::FontTemplateData;
use font::FontHandleMethods;
use platform::font::FontHandle;
use servo_util::cache::HashCache;
use std::rc::{Rc, Weak};
use std::cell::RefCell;
use sync::Arc;
use azure::AzFloat;
use azure::azure_hl::BackendType;
use azure::scaled_font::ScaledFont;
#[cfg(target_os="linux")]
#[cfg(target_os="android")]
use azure::scaled_font::FontData;
#[cfg(target_os="linux")]
#[cfg(target_os="android")]
fn create_scaled_font(backend: BackendType, template: &Arc<FontTemplateData>, pt_size: f64) -> ScaledFont {
ScaledFont::new(backend, FontData(&template.bytes), pt_size as AzFloat)
}
#[cfg(target_os="macos")]
fn create_scaled_font(backend: BackendType, template: &Arc<FontTemplateData>, pt_size: f64) -> ScaledFont {
let cgfont = template.ctfont.copy_to_CGFont();
ScaledFont::new(backend, &cgfont, pt_size as AzFloat)
}
/// A cached azure font (per render task) that
/// can be shared by multiple text runs.
struct RenderFontCacheEntry {
pt_size: f64,
identifier: String,
font: Rc<RefCell<ScaledFont>>,
}
/// The FontContext represents the per-thread/task state necessary for
/// working with fonts. It is the public API used by the layout and
/// render code. It talks directly to the font cache task where
/// required.
pub struct FontContext {
platform_handle: FontContextHandle,
font_cache_task: FontCacheTask,
/// Weak reference as the layout FontContext is persistent.
layout_font_cache: Vec<Weak<RefCell<Font>>>,
/// Strong reference as the render FontContext is (for now) recycled
/// per frame. TODO: Make this weak when incremental redraw is done.
render_font_cache: Vec<RenderFontCacheEntry>,
}
impl FontContext {
pub fn new(font_cache_task: FontCacheTask) -> FontContext {
let handle = FontContextHandle::new();
FontContext {
platform_handle: handle,
font_cache_task: font_cache_task,
layout_font_cache: vec!(),
render_font_cache: vec!(),
}
}
/// Create a font for use in layout calculations.
fn | (&self, template: Arc<FontTemplateData>,
descriptor: FontTemplateDescriptor, pt_size: f64) -> Font {
let handle: FontHandle = FontHandleMethods::new_from_template(&self.platform_handle, template, Some(pt_size)).unwrap();
let metrics = handle.get_metrics();
Font {
handle: handle,
shaper: None,
descriptor: descriptor,
pt_size: pt_size,
metrics: metrics,
shape_cache: HashCache::new(),
glyph_advance_cache: HashCache::new(),
}
}
/// Create a group of fonts for use in layout calculations. May return
/// a cached font if this font instance has already been used by
/// this context.
pub fn get_layout_font_group_for_style(&mut self, style: &SpecifiedFontStyle) -> FontGroup {
// Remove all weak pointers that have been dropped.
self.layout_font_cache.retain(|maybe_font| {
maybe_font.upgrade().is_some()
});
let mut fonts: Vec<Rc<RefCell<Font>>> = vec!();
for family in style.families.iter() {
let desc = FontTemplateDescriptor::new(style.weight, style.style == font_style::italic);
// GWTODO: Check on real pages if this is faster as Vec() or HashMap().
let mut cache_hit = false;
for maybe_cached_font in self.layout_font_cache.iter() {
let cached_font = maybe_cached_font.upgrade().unwrap();
if cached_font.borrow().descriptor == desc {
fonts.push(cached_font.clone());
cache_hit = true;
break;
}
}
if!cache_hit {
let font_template = self.font_cache_task.get_font_template(family.clone(), desc.clone());
let layout_font = Rc::new(RefCell::new(self.create_layout_font(font_template, desc.clone(), style.pt_size)));
self.layout_font_cache.push(layout_font.downgrade());
fonts.push(layout_font);
}
}
FontGroup::new(fonts)
}
/// Create a render font for use with azure. May return a cached
/// reference if already used by this font context.
pub fn get_render_font_from_template(&mut self, template: &Arc<FontTemplateData>, pt_size: f64, backend: BackendType) -> Rc<RefCell<ScaledFont>> {
for cached_font in self.render_font_cache.iter() {
if cached_font.pt_size == pt_size &&
cached_font.identifier == template.identifier {
return cached_font.font.clone();
}
}
let render_font = Rc::new(RefCell::new(create_scaled_font(backend, template, pt_size)));
self.render_font_cache.push(RenderFontCacheEntry{
font: render_font.clone(),
pt_size: pt_size,
identifier: template.identifier.clone(),
});
render_font
}
}
| create_layout_font | identifier_name |
font_context.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use font::{Font, FontGroup};
use font::SpecifiedFontStyle;
use platform::font_context::FontContextHandle;
use style::computed_values::font_style;
use font_cache_task::FontCacheTask;
use font_template::FontTemplateDescriptor;
use platform::font_template::FontTemplateData;
use font::FontHandleMethods;
use platform::font::FontHandle;
use servo_util::cache::HashCache;
use std::rc::{Rc, Weak};
use std::cell::RefCell;
use sync::Arc;
use azure::AzFloat;
use azure::azure_hl::BackendType;
use azure::scaled_font::ScaledFont;
#[cfg(target_os="linux")]
#[cfg(target_os="android")]
use azure::scaled_font::FontData;
#[cfg(target_os="linux")]
#[cfg(target_os="android")]
fn create_scaled_font(backend: BackendType, template: &Arc<FontTemplateData>, pt_size: f64) -> ScaledFont {
ScaledFont::new(backend, FontData(&template.bytes), pt_size as AzFloat)
}
#[cfg(target_os="macos")]
fn create_scaled_font(backend: BackendType, template: &Arc<FontTemplateData>, pt_size: f64) -> ScaledFont {
let cgfont = template.ctfont.copy_to_CGFont();
ScaledFont::new(backend, &cgfont, pt_size as AzFloat)
}
/// A cached azure font (per render task) that
/// can be shared by multiple text runs.
struct RenderFontCacheEntry {
pt_size: f64,
identifier: String,
font: Rc<RefCell<ScaledFont>>,
}
/// The FontContext represents the per-thread/task state necessary for
/// working with fonts. It is the public API used by the layout and
/// render code. It talks directly to the font cache task where
/// required.
pub struct FontContext {
platform_handle: FontContextHandle,
font_cache_task: FontCacheTask,
/// Weak reference as the layout FontContext is persistent.
layout_font_cache: Vec<Weak<RefCell<Font>>>,
/// Strong reference as the render FontContext is (for now) recycled
/// per frame. TODO: Make this weak when incremental redraw is done.
render_font_cache: Vec<RenderFontCacheEntry>,
}
impl FontContext {
pub fn new(font_cache_task: FontCacheTask) -> FontContext {
let handle = FontContextHandle::new();
FontContext {
platform_handle: handle,
font_cache_task: font_cache_task,
layout_font_cache: vec!(),
render_font_cache: vec!(),
}
}
/// Create a font for use in layout calculations.
fn create_layout_font(&self, template: Arc<FontTemplateData>,
descriptor: FontTemplateDescriptor, pt_size: f64) -> Font {
let handle: FontHandle = FontHandleMethods::new_from_template(&self.platform_handle, template, Some(pt_size)).unwrap();
let metrics = handle.get_metrics();
Font {
handle: handle,
shaper: None,
descriptor: descriptor,
pt_size: pt_size,
metrics: metrics,
shape_cache: HashCache::new(),
glyph_advance_cache: HashCache::new(),
}
}
/// Create a group of fonts for use in layout calculations. May return
/// a cached font if this font instance has already been used by
/// this context.
pub fn get_layout_font_group_for_style(&mut self, style: &SpecifiedFontStyle) -> FontGroup {
// Remove all weak pointers that have been dropped.
self.layout_font_cache.retain(|maybe_font| {
maybe_font.upgrade().is_some()
});
let mut fonts: Vec<Rc<RefCell<Font>>> = vec!();
for family in style.families.iter() {
let desc = FontTemplateDescriptor::new(style.weight, style.style == font_style::italic);
// GWTODO: Check on real pages if this is faster as Vec() or HashMap().
let mut cache_hit = false;
for maybe_cached_font in self.layout_font_cache.iter() {
let cached_font = maybe_cached_font.upgrade().unwrap();
if cached_font.borrow().descriptor == desc {
fonts.push(cached_font.clone());
cache_hit = true;
break;
}
}
if!cache_hit {
let font_template = self.font_cache_task.get_font_template(family.clone(), desc.clone());
let layout_font = Rc::new(RefCell::new(self.create_layout_font(font_template, desc.clone(), style.pt_size)));
self.layout_font_cache.push(layout_font.downgrade());
fonts.push(layout_font);
}
}
FontGroup::new(fonts)
}
/// Create a render font for use with azure. May return a cached
/// reference if already used by this font context.
pub fn get_render_font_from_template(&mut self, template: &Arc<FontTemplateData>, pt_size: f64, backend: BackendType) -> Rc<RefCell<ScaledFont>> {
for cached_font in self.render_font_cache.iter() {
if cached_font.pt_size == pt_size &&
cached_font.identifier == template.identifier |
}
let render_font = Rc::new(RefCell::new(create_scaled_font(backend, template, pt_size)));
self.render_font_cache.push(RenderFontCacheEntry{
font: render_font.clone(),
pt_size: pt_size,
identifier: template.identifier.clone(),
});
render_font
}
}
| {
return cached_font.font.clone();
} | conditional_block |
font_context.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use font::{Font, FontGroup};
use font::SpecifiedFontStyle;
use platform::font_context::FontContextHandle;
use style::computed_values::font_style;
use font_cache_task::FontCacheTask;
use font_template::FontTemplateDescriptor;
use platform::font_template::FontTemplateData;
use font::FontHandleMethods;
use platform::font::FontHandle;
use servo_util::cache::HashCache;
use std::rc::{Rc, Weak};
use std::cell::RefCell;
use sync::Arc;
use azure::AzFloat;
use azure::azure_hl::BackendType;
use azure::scaled_font::ScaledFont;
#[cfg(target_os="linux")]
#[cfg(target_os="android")]
use azure::scaled_font::FontData;
#[cfg(target_os="linux")]
#[cfg(target_os="android")]
fn create_scaled_font(backend: BackendType, template: &Arc<FontTemplateData>, pt_size: f64) -> ScaledFont {
ScaledFont::new(backend, FontData(&template.bytes), pt_size as AzFloat)
}
#[cfg(target_os="macos")]
fn create_scaled_font(backend: BackendType, template: &Arc<FontTemplateData>, pt_size: f64) -> ScaledFont {
let cgfont = template.ctfont.copy_to_CGFont();
ScaledFont::new(backend, &cgfont, pt_size as AzFloat)
}
/// A cached azure font (per render task) that
/// can be shared by multiple text runs.
struct RenderFontCacheEntry {
pt_size: f64,
identifier: String,
font: Rc<RefCell<ScaledFont>>,
}
/// The FontContext represents the per-thread/task state necessary for
/// working with fonts. It is the public API used by the layout and
/// render code. It talks directly to the font cache task where
/// required.
pub struct FontContext {
platform_handle: FontContextHandle,
font_cache_task: FontCacheTask,
/// Weak reference as the layout FontContext is persistent.
layout_font_cache: Vec<Weak<RefCell<Font>>>,
/// Strong reference as the render FontContext is (for now) recycled
/// per frame. TODO: Make this weak when incremental redraw is done.
render_font_cache: Vec<RenderFontCacheEntry>,
}
impl FontContext {
pub fn new(font_cache_task: FontCacheTask) -> FontContext |
/// Create a font for use in layout calculations.
fn create_layout_font(&self, template: Arc<FontTemplateData>,
descriptor: FontTemplateDescriptor, pt_size: f64) -> Font {
let handle: FontHandle = FontHandleMethods::new_from_template(&self.platform_handle, template, Some(pt_size)).unwrap();
let metrics = handle.get_metrics();
Font {
handle: handle,
shaper: None,
descriptor: descriptor,
pt_size: pt_size,
metrics: metrics,
shape_cache: HashCache::new(),
glyph_advance_cache: HashCache::new(),
}
}
/// Create a group of fonts for use in layout calculations. May return
/// a cached font if this font instance has already been used by
/// this context.
pub fn get_layout_font_group_for_style(&mut self, style: &SpecifiedFontStyle) -> FontGroup {
// Remove all weak pointers that have been dropped.
self.layout_font_cache.retain(|maybe_font| {
maybe_font.upgrade().is_some()
});
let mut fonts: Vec<Rc<RefCell<Font>>> = vec!();
for family in style.families.iter() {
let desc = FontTemplateDescriptor::new(style.weight, style.style == font_style::italic);
// GWTODO: Check on real pages if this is faster as Vec() or HashMap().
let mut cache_hit = false;
for maybe_cached_font in self.layout_font_cache.iter() {
let cached_font = maybe_cached_font.upgrade().unwrap();
if cached_font.borrow().descriptor == desc {
fonts.push(cached_font.clone());
cache_hit = true;
break;
}
}
if!cache_hit {
let font_template = self.font_cache_task.get_font_template(family.clone(), desc.clone());
let layout_font = Rc::new(RefCell::new(self.create_layout_font(font_template, desc.clone(), style.pt_size)));
self.layout_font_cache.push(layout_font.downgrade());
fonts.push(layout_font);
}
}
FontGroup::new(fonts)
}
/// Create a render font for use with azure. May return a cached
/// reference if already used by this font context.
pub fn get_render_font_from_template(&mut self, template: &Arc<FontTemplateData>, pt_size: f64, backend: BackendType) -> Rc<RefCell<ScaledFont>> {
for cached_font in self.render_font_cache.iter() {
if cached_font.pt_size == pt_size &&
cached_font.identifier == template.identifier {
return cached_font.font.clone();
}
}
let render_font = Rc::new(RefCell::new(create_scaled_font(backend, template, pt_size)));
self.render_font_cache.push(RenderFontCacheEntry{
font: render_font.clone(),
pt_size: pt_size,
identifier: template.identifier.clone(),
});
render_font
}
}
| {
let handle = FontContextHandle::new();
FontContext {
platform_handle: handle,
font_cache_task: font_cache_task,
layout_font_cache: vec!(),
render_font_cache: vec!(),
}
} | identifier_body |
font_context.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use font::{Font, FontGroup};
use font::SpecifiedFontStyle;
use platform::font_context::FontContextHandle;
use style::computed_values::font_style;
use font_cache_task::FontCacheTask;
use font_template::FontTemplateDescriptor;
use platform::font_template::FontTemplateData;
use font::FontHandleMethods;
use platform::font::FontHandle;
use servo_util::cache::HashCache;
use std::rc::{Rc, Weak};
use std::cell::RefCell;
use sync::Arc;
use azure::AzFloat;
use azure::azure_hl::BackendType;
use azure::scaled_font::ScaledFont;
#[cfg(target_os="linux")]
#[cfg(target_os="android")]
use azure::scaled_font::FontData;
#[cfg(target_os="linux")]
#[cfg(target_os="android")]
fn create_scaled_font(backend: BackendType, template: &Arc<FontTemplateData>, pt_size: f64) -> ScaledFont {
ScaledFont::new(backend, FontData(&template.bytes), pt_size as AzFloat)
}
| fn create_scaled_font(backend: BackendType, template: &Arc<FontTemplateData>, pt_size: f64) -> ScaledFont {
let cgfont = template.ctfont.copy_to_CGFont();
ScaledFont::new(backend, &cgfont, pt_size as AzFloat)
}
/// A cached azure font (per render task) that
/// can be shared by multiple text runs.
struct RenderFontCacheEntry {
pt_size: f64,
identifier: String,
font: Rc<RefCell<ScaledFont>>,
}
/// The FontContext represents the per-thread/task state necessary for
/// working with fonts. It is the public API used by the layout and
/// render code. It talks directly to the font cache task where
/// required.
pub struct FontContext {
platform_handle: FontContextHandle,
font_cache_task: FontCacheTask,
/// Weak reference as the layout FontContext is persistent.
layout_font_cache: Vec<Weak<RefCell<Font>>>,
/// Strong reference as the render FontContext is (for now) recycled
/// per frame. TODO: Make this weak when incremental redraw is done.
render_font_cache: Vec<RenderFontCacheEntry>,
}
impl FontContext {
pub fn new(font_cache_task: FontCacheTask) -> FontContext {
let handle = FontContextHandle::new();
FontContext {
platform_handle: handle,
font_cache_task: font_cache_task,
layout_font_cache: vec!(),
render_font_cache: vec!(),
}
}
/// Create a font for use in layout calculations.
fn create_layout_font(&self, template: Arc<FontTemplateData>,
descriptor: FontTemplateDescriptor, pt_size: f64) -> Font {
let handle: FontHandle = FontHandleMethods::new_from_template(&self.platform_handle, template, Some(pt_size)).unwrap();
let metrics = handle.get_metrics();
Font {
handle: handle,
shaper: None,
descriptor: descriptor,
pt_size: pt_size,
metrics: metrics,
shape_cache: HashCache::new(),
glyph_advance_cache: HashCache::new(),
}
}
/// Create a group of fonts for use in layout calculations. May return
/// a cached font if this font instance has already been used by
/// this context.
pub fn get_layout_font_group_for_style(&mut self, style: &SpecifiedFontStyle) -> FontGroup {
// Remove all weak pointers that have been dropped.
self.layout_font_cache.retain(|maybe_font| {
maybe_font.upgrade().is_some()
});
let mut fonts: Vec<Rc<RefCell<Font>>> = vec!();
for family in style.families.iter() {
let desc = FontTemplateDescriptor::new(style.weight, style.style == font_style::italic);
// GWTODO: Check on real pages if this is faster as Vec() or HashMap().
let mut cache_hit = false;
for maybe_cached_font in self.layout_font_cache.iter() {
let cached_font = maybe_cached_font.upgrade().unwrap();
if cached_font.borrow().descriptor == desc {
fonts.push(cached_font.clone());
cache_hit = true;
break;
}
}
if!cache_hit {
let font_template = self.font_cache_task.get_font_template(family.clone(), desc.clone());
let layout_font = Rc::new(RefCell::new(self.create_layout_font(font_template, desc.clone(), style.pt_size)));
self.layout_font_cache.push(layout_font.downgrade());
fonts.push(layout_font);
}
}
FontGroup::new(fonts)
}
/// Create a render font for use with azure. May return a cached
/// reference if already used by this font context.
pub fn get_render_font_from_template(&mut self, template: &Arc<FontTemplateData>, pt_size: f64, backend: BackendType) -> Rc<RefCell<ScaledFont>> {
for cached_font in self.render_font_cache.iter() {
if cached_font.pt_size == pt_size &&
cached_font.identifier == template.identifier {
return cached_font.font.clone();
}
}
let render_font = Rc::new(RefCell::new(create_scaled_font(backend, template, pt_size)));
self.render_font_cache.push(RenderFontCacheEntry{
font: render_font.clone(),
pt_size: pt_size,
identifier: template.identifier.clone(),
});
render_font
}
} | #[cfg(target_os="macos")] | random_line_split |
inline.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use llvm::{AvailableExternallyLinkage, InternalLinkage, SetLinkage};
use metadata::csearch;
use metadata::inline::InlinedItem;
use middle::astencode;
use middle::subst::Substs;
use trans::base::{push_ctxt, trans_item, get_item_val, trans_fn};
use trans::common::*;
use syntax::ast;
use syntax::ast_util::local_def;
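// `instantiate_inline` pulls the AST of a cross-crate item out of the crate
// metadata via `csearch::maybe_get_item_ast`, translates it into the current
// crate, and records the external-to-local def-id mapping in `ccx.external()`
// (and the reverse in `ccx.external_srcs()`), so later requests reuse the
// already-inlined copy instead of decoding it again.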
fn instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId)
-> Option<ast::DefId> {
debug!("instantiate_inline({:?})", fn_id);
let _icx = push_ctxt("instantiate_inline");
match ccx.external().borrow().get(&fn_id) {
Some(&Some(node_id)) => {
// Already inline
debug!("instantiate_inline({}): already inline as node id {}",
ccx.tcx().item_path_str(fn_id), node_id);
return Some(local_def(node_id));
}
Some(&None) => {
return None; // Not inlinable
}
None => {
// Not seen yet
}
}
let csearch_result =
csearch::maybe_get_item_ast(
ccx.tcx(), fn_id,
Box::new(|a,b,c,d| astencode::decode_inlined_item(a, b, c, d)));
let inline_id = match csearch_result {
csearch::FoundAst::NotFound => {
ccx.external().borrow_mut().insert(fn_id, None);
return None;
}
csearch::FoundAst::Found(&InlinedItem::Item(ref item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(item.id));
ccx.external_srcs().borrow_mut().insert(item.id, fn_id);
ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1);
trans_item(ccx, item);
let linkage = match item.node {
ast::ItemFn(_, _, _, _, ref generics, _) => {
if generics.is_type_parameterized() {
// Generics have no symbol, so they can't be given any
// linkage.
None
} else {
if ccx.sess().opts.cg.codegen_units == 1 {
// We could use AvailableExternallyLinkage here,
// but InternalLinkage allows LLVM to optimize more
// aggressively (at the cost of sometimes
// duplicating code).
Some(InternalLinkage)
} else {
// With multiple compilation units, duplicated code
// is more of a problem. Also, `codegen_units > 1`
// means the user is okay with losing some
// performance.
Some(AvailableExternallyLinkage)
}
}
}
ast::ItemConst(..) => None,
_ => unreachable!(),
};
match linkage {
Some(linkage) => {
let g = get_item_val(ccx, item.id);
SetLinkage(g, linkage);
}
None => {}
}
item.id
}
csearch::FoundAst::Found(&InlinedItem::Foreign(ref item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(item.id));
ccx.external_srcs().borrow_mut().insert(item.id, fn_id);
item.id
}
csearch::FoundAst::FoundParent(parent_id, &InlinedItem::Item(ref item)) => {
ccx.external().borrow_mut().insert(parent_id, Some(item.id));
ccx.external_srcs().borrow_mut().insert(item.id, parent_id);
let mut my_id = 0;
match item.node {
ast::ItemEnum(ref ast_def, _) => {
let ast_vs = &ast_def.variants;
let ty_vs = &ccx.tcx().lookup_adt_def(parent_id).variants;
assert_eq!(ast_vs.len(), ty_vs.len());
for (ast_v, ty_v) in ast_vs.iter().zip(ty_vs.iter()) {
if ty_v.did == fn_id { my_id = ast_v.node.id; }
ccx.external().borrow_mut().insert(ty_v.did, Some(ast_v.node.id));
}
}
ast::ItemStruct(ref struct_def, _) => {
match struct_def.ctor_id {
None => ccx.sess().bug("instantiate_inline: called on a \
non-tuple struct"),
Some(ctor_id) => {
ccx.external().borrow_mut().insert(fn_id, Some(ctor_id));
my_id = ctor_id;
}
}
}
_ => ccx.sess().bug("instantiate_inline: item has a \
non-enum, non-struct parent")
}
trans_item(ccx, &**item);
my_id
}
csearch::FoundAst::FoundParent(_, _) => {
ccx.sess().bug("maybe_get_item_ast returned a FoundParent \
with a non-item parent");
}
csearch::FoundAst::Found(&InlinedItem::TraitItem(_, ref trait_item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(trait_item.id));
ccx.external_srcs().borrow_mut().insert(trait_item.id, fn_id);
ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1);
// Associated consts already have to be evaluated in `typeck`, so
// the logic to do that already exists in `middle`. In order to
// reuse that code, it needs to be able to look up the traits for
// inlined items.
let ty_trait_item = ccx.tcx().impl_or_trait_item(fn_id).clone();
ccx.tcx().impl_or_trait_items.borrow_mut()
.insert(local_def(trait_item.id), ty_trait_item);
// If this is a default method, we can't look up the
// impl type. But we aren't going to translate anyways, so
// don't.
trait_item.id
}
csearch::FoundAst::Found(&InlinedItem::ImplItem(impl_did, ref impl_item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(impl_item.id));
ccx.external_srcs().borrow_mut().insert(impl_item.id, fn_id);
ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1);
// Translate monomorphic impl methods immediately.
if let ast::MethodImplItem(ref sig, ref body) = impl_item.node {
let impl_tpt = ccx.tcx().lookup_item_type(impl_did);
if impl_tpt.generics.types.is_empty() &&
sig.generics.ty_params.is_empty() {
let empty_substs = ccx.tcx().mk_substs(Substs::trans_empty());
let llfn = get_item_val(ccx, impl_item.id); | llfn,
empty_substs,
impl_item.id,
&[]);
// See linkage comments on items.
if ccx.sess().opts.cg.codegen_units == 1 {
SetLinkage(llfn, InternalLinkage);
} else {
SetLinkage(llfn, AvailableExternallyLinkage);
}
}
}
impl_item.id
}
};
Some(local_def(inline_id))
}
pub fn get_local_instance(ccx: &CrateContext, fn_id: ast::DefId)
-> Option<ast::DefId> {
if fn_id.krate == ast::LOCAL_CRATE {
Some(fn_id)
} else {
instantiate_inline(ccx, fn_id)
}
}
pub fn maybe_instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId) -> ast::DefId {
get_local_instance(ccx, fn_id).unwrap_or(fn_id)
} | trans_fn(ccx,
&sig.decl,
body, | random_line_split |
inline.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use llvm::{AvailableExternallyLinkage, InternalLinkage, SetLinkage};
use metadata::csearch;
use metadata::inline::InlinedItem;
use middle::astencode;
use middle::subst::Substs;
use trans::base::{push_ctxt, trans_item, get_item_val, trans_fn};
use trans::common::*;
use syntax::ast;
use syntax::ast_util::local_def;
fn instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId)
-> Option<ast::DefId> {
debug!("instantiate_inline({:?})", fn_id);
let _icx = push_ctxt("instantiate_inline");
match ccx.external().borrow().get(&fn_id) {
Some(&Some(node_id)) => {
// Already inline
debug!("instantiate_inline({}): already inline as node id {}",
ccx.tcx().item_path_str(fn_id), node_id);
return Some(local_def(node_id));
}
Some(&None) => {
return None; // Not inlinable
}
None => {
// Not seen yet
}
}
let csearch_result =
csearch::maybe_get_item_ast(
ccx.tcx(), fn_id,
Box::new(|a,b,c,d| astencode::decode_inlined_item(a, b, c, d)));
let inline_id = match csearch_result {
csearch::FoundAst::NotFound => {
ccx.external().borrow_mut().insert(fn_id, None);
return None;
}
csearch::FoundAst::Found(&InlinedItem::Item(ref item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(item.id));
ccx.external_srcs().borrow_mut().insert(item.id, fn_id);
ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1);
trans_item(ccx, item);
let linkage = match item.node {
ast::ItemFn(_, _, _, _, ref generics, _) => {
if generics.is_type_parameterized() {
// Generics have no symbol, so they can't be given any
// linkage.
None
} else {
if ccx.sess().opts.cg.codegen_units == 1 {
// We could use AvailableExternallyLinkage here,
// but InternalLinkage allows LLVM to optimize more
// aggressively (at the cost of sometimes
// duplicating code).
Some(InternalLinkage)
} else {
// With multiple compilation units, duplicated code
// is more of a problem. Also, `codegen_units > 1`
// means the user is okay with losing some
// performance.
Some(AvailableExternallyLinkage)
}
}
}
ast::ItemConst(..) => None,
_ => unreachable!(),
};
match linkage {
Some(linkage) => {
let g = get_item_val(ccx, item.id);
SetLinkage(g, linkage);
}
None => {}
}
item.id
}
csearch::FoundAst::Found(&InlinedItem::Foreign(ref item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(item.id));
ccx.external_srcs().borrow_mut().insert(item.id, fn_id);
item.id
}
csearch::FoundAst::FoundParent(parent_id, &InlinedItem::Item(ref item)) => {
ccx.external().borrow_mut().insert(parent_id, Some(item.id));
ccx.external_srcs().borrow_mut().insert(item.id, parent_id);
let mut my_id = 0;
match item.node {
ast::ItemEnum(ref ast_def, _) => {
let ast_vs = &ast_def.variants;
let ty_vs = &ccx.tcx().lookup_adt_def(parent_id).variants;
assert_eq!(ast_vs.len(), ty_vs.len());
for (ast_v, ty_v) in ast_vs.iter().zip(ty_vs.iter()) {
if ty_v.did == fn_id { my_id = ast_v.node.id; }
ccx.external().borrow_mut().insert(ty_v.did, Some(ast_v.node.id));
}
}
ast::ItemStruct(ref struct_def, _) => {
match struct_def.ctor_id {
None => ccx.sess().bug("instantiate_inline: called on a \
non-tuple struct"),
Some(ctor_id) => {
ccx.external().borrow_mut().insert(fn_id, Some(ctor_id));
my_id = ctor_id;
}
}
}
_ => ccx.sess().bug("instantiate_inline: item has a \
non-enum, non-struct parent")
}
trans_item(ccx, &**item);
my_id
}
csearch::FoundAst::FoundParent(_, _) => {
ccx.sess().bug("maybe_get_item_ast returned a FoundParent \
with a non-item parent");
}
csearch::FoundAst::Found(&InlinedItem::TraitItem(_, ref trait_item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(trait_item.id));
ccx.external_srcs().borrow_mut().insert(trait_item.id, fn_id);
ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1);
// Associated consts already have to be evaluated in `typeck`, so
// the logic to do that already exists in `middle`. In order to
// reuse that code, it needs to be able to look up the traits for
// inlined items.
let ty_trait_item = ccx.tcx().impl_or_trait_item(fn_id).clone();
ccx.tcx().impl_or_trait_items.borrow_mut()
.insert(local_def(trait_item.id), ty_trait_item);
// If this is a default method, we can't look up the
// impl type. But we aren't going to translate anyways, so
// don't.
trait_item.id
}
csearch::FoundAst::Found(&InlinedItem::ImplItem(impl_did, ref impl_item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(impl_item.id));
ccx.external_srcs().borrow_mut().insert(impl_item.id, fn_id);
ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1);
// Translate monomorphic impl methods immediately.
if let ast::MethodImplItem(ref sig, ref body) = impl_item.node {
let impl_tpt = ccx.tcx().lookup_item_type(impl_did);
if impl_tpt.generics.types.is_empty() &&
sig.generics.ty_params.is_empty() {
let empty_substs = ccx.tcx().mk_substs(Substs::trans_empty());
let llfn = get_item_val(ccx, impl_item.id);
trans_fn(ccx,
&sig.decl,
body,
llfn,
empty_substs,
impl_item.id,
&[]);
// See linkage comments on items.
if ccx.sess().opts.cg.codegen_units == 1 {
SetLinkage(llfn, InternalLinkage);
} else {
SetLinkage(llfn, AvailableExternallyLinkage);
}
}
}
impl_item.id
}
};
Some(local_def(inline_id))
}
pub fn get_local_instance(ccx: &CrateContext, fn_id: ast::DefId)
-> Option<ast::DefId> {
if fn_id.krate == ast::LOCAL_CRATE {
Some(fn_id)
} else {
instantiate_inline(ccx, fn_id)
}
}
pub fn maybe_instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId) -> ast::DefId | {
get_local_instance(ccx, fn_id).unwrap_or(fn_id)
} | identifier_body |
|
inline.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use llvm::{AvailableExternallyLinkage, InternalLinkage, SetLinkage};
use metadata::csearch;
use metadata::inline::InlinedItem;
use middle::astencode;
use middle::subst::Substs;
use trans::base::{push_ctxt, trans_item, get_item_val, trans_fn};
use trans::common::*;
use syntax::ast;
use syntax::ast_util::local_def;
fn | (ccx: &CrateContext, fn_id: ast::DefId)
-> Option<ast::DefId> {
debug!("instantiate_inline({:?})", fn_id);
let _icx = push_ctxt("instantiate_inline");
match ccx.external().borrow().get(&fn_id) {
Some(&Some(node_id)) => {
// Already inline
debug!("instantiate_inline({}): already inline as node id {}",
ccx.tcx().item_path_str(fn_id), node_id);
return Some(local_def(node_id));
}
Some(&None) => {
return None; // Not inlinable
}
None => {
// Not seen yet
}
}
let csearch_result =
csearch::maybe_get_item_ast(
ccx.tcx(), fn_id,
Box::new(|a,b,c,d| astencode::decode_inlined_item(a, b, c, d)));
let inline_id = match csearch_result {
csearch::FoundAst::NotFound => {
ccx.external().borrow_mut().insert(fn_id, None);
return None;
}
csearch::FoundAst::Found(&InlinedItem::Item(ref item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(item.id));
ccx.external_srcs().borrow_mut().insert(item.id, fn_id);
ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1);
trans_item(ccx, item);
let linkage = match item.node {
ast::ItemFn(_, _, _, _, ref generics, _) => {
if generics.is_type_parameterized() {
// Generics have no symbol, so they can't be given any
// linkage.
None
} else {
if ccx.sess().opts.cg.codegen_units == 1 {
// We could use AvailableExternallyLinkage here,
// but InternalLinkage allows LLVM to optimize more
// aggressively (at the cost of sometimes
// duplicating code).
Some(InternalLinkage)
} else {
// With multiple compilation units, duplicated code
// is more of a problem. Also, `codegen_units > 1`
// means the user is okay with losing some
// performance.
Some(AvailableExternallyLinkage)
}
}
}
ast::ItemConst(..) => None,
_ => unreachable!(),
};
match linkage {
Some(linkage) => {
let g = get_item_val(ccx, item.id);
SetLinkage(g, linkage);
}
None => {}
}
item.id
}
csearch::FoundAst::Found(&InlinedItem::Foreign(ref item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(item.id));
ccx.external_srcs().borrow_mut().insert(item.id, fn_id);
item.id
}
csearch::FoundAst::FoundParent(parent_id, &InlinedItem::Item(ref item)) => {
ccx.external().borrow_mut().insert(parent_id, Some(item.id));
ccx.external_srcs().borrow_mut().insert(item.id, parent_id);
let mut my_id = 0;
match item.node {
ast::ItemEnum(ref ast_def, _) => {
let ast_vs = &ast_def.variants;
let ty_vs = &ccx.tcx().lookup_adt_def(parent_id).variants;
assert_eq!(ast_vs.len(), ty_vs.len());
for (ast_v, ty_v) in ast_vs.iter().zip(ty_vs.iter()) {
if ty_v.did == fn_id { my_id = ast_v.node.id; }
ccx.external().borrow_mut().insert(ty_v.did, Some(ast_v.node.id));
}
}
ast::ItemStruct(ref struct_def, _) => {
match struct_def.ctor_id {
None => ccx.sess().bug("instantiate_inline: called on a \
non-tuple struct"),
Some(ctor_id) => {
ccx.external().borrow_mut().insert(fn_id, Some(ctor_id));
my_id = ctor_id;
}
}
}
_ => ccx.sess().bug("instantiate_inline: item has a \
non-enum, non-struct parent")
}
trans_item(ccx, &**item);
my_id
}
csearch::FoundAst::FoundParent(_, _) => {
ccx.sess().bug("maybe_get_item_ast returned a FoundParent \
with a non-item parent");
}
csearch::FoundAst::Found(&InlinedItem::TraitItem(_, ref trait_item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(trait_item.id));
ccx.external_srcs().borrow_mut().insert(trait_item.id, fn_id);
ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1);
// Associated consts already have to be evaluated in `typeck`, so
// the logic to do that already exists in `middle`. In order to
// reuse that code, it needs to be able to look up the traits for
// inlined items.
let ty_trait_item = ccx.tcx().impl_or_trait_item(fn_id).clone();
ccx.tcx().impl_or_trait_items.borrow_mut()
.insert(local_def(trait_item.id), ty_trait_item);
// If this is a default method, we can't look up the
// impl type. But we aren't going to translate anyways, so
// don't.
trait_item.id
}
csearch::FoundAst::Found(&InlinedItem::ImplItem(impl_did, ref impl_item)) => {
ccx.external().borrow_mut().insert(fn_id, Some(impl_item.id));
ccx.external_srcs().borrow_mut().insert(impl_item.id, fn_id);
ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1);
// Translate monomorphic impl methods immediately.
if let ast::MethodImplItem(ref sig, ref body) = impl_item.node {
let impl_tpt = ccx.tcx().lookup_item_type(impl_did);
if impl_tpt.generics.types.is_empty() &&
sig.generics.ty_params.is_empty() {
let empty_substs = ccx.tcx().mk_substs(Substs::trans_empty());
let llfn = get_item_val(ccx, impl_item.id);
trans_fn(ccx,
&sig.decl,
body,
llfn,
empty_substs,
impl_item.id,
&[]);
// See linkage comments on items.
if ccx.sess().opts.cg.codegen_units == 1 {
SetLinkage(llfn, InternalLinkage);
} else {
SetLinkage(llfn, AvailableExternallyLinkage);
}
}
}
impl_item.id
}
};
Some(local_def(inline_id))
}
pub fn get_local_instance(ccx: &CrateContext, fn_id: ast::DefId)
-> Option<ast::DefId> {
if fn_id.krate == ast::LOCAL_CRATE {
Some(fn_id)
} else {
instantiate_inline(ccx, fn_id)
}
}
pub fn maybe_instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId) -> ast::DefId {
get_local_instance(ccx, fn_id).unwrap_or(fn_id)
}
| instantiate_inline | identifier_name |
attribute-with-error.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT. | // except according to those terms.
// aux-build:attribute-with-error.rs
#![feature(custom_inner_attributes)]
extern crate attribute_with_error;
use attribute_with_error::foo;
#[foo]
fn test1() {
let a: i32 = "foo";
//~^ ERROR: mismatched types
let b: i32 = "f'oo";
//~^ ERROR: mismatched types
}
fn test2() {
#![foo]
// FIXME: should have a type error here and assert it works but it doesn't
}
trait A {
// FIXME: should have a #[foo] attribute here and assert that it works
fn foo(&self) {
let a: i32 = "foo";
//~^ ERROR: mismatched types
}
}
struct B;
impl A for B {
#[foo]
fn foo(&self) {
let a: i32 = "foo";
//~^ ERROR: mismatched types
}
}
#[foo]
fn main() {
} | //
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed | random_line_split |
attribute-with-error.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:attribute-with-error.rs
#![feature(custom_inner_attributes)]
extern crate attribute_with_error;
use attribute_with_error::foo;
#[foo]
fn test1() |
fn test2() {
#![foo]
// FIXME: should have a type error here and assert it works but it doesn't
}
trait A {
// FIXME: should have a #[foo] attribute here and assert that it works
fn foo(&self) {
let a: i32 = "foo";
//~^ ERROR: mismatched types
}
}
struct B;
impl A for B {
#[foo]
fn foo(&self) {
let a: i32 = "foo";
//~^ ERROR: mismatched types
}
}
#[foo]
fn main() {
}
| {
let a: i32 = "foo";
//~^ ERROR: mismatched types
let b: i32 = "f'oo";
//~^ ERROR: mismatched types
} | identifier_body |
attribute-with-error.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:attribute-with-error.rs
#![feature(custom_inner_attributes)]
extern crate attribute_with_error;
use attribute_with_error::foo;
#[foo]
fn test1() {
let a: i32 = "foo";
//~^ ERROR: mismatched types
let b: i32 = "f'oo";
//~^ ERROR: mismatched types
}
fn test2() {
#![foo]
// FIXME: should have a type error here and assert it works but it doesn't
}
trait A {
// FIXME: should have a #[foo] attribute here and assert that it works
fn | (&self) {
let a: i32 = "foo";
//~^ ERROR: mismatched types
}
}
struct B;
impl A for B {
#[foo]
fn foo(&self) {
let a: i32 = "foo";
//~^ ERROR: mismatched types
}
}
#[foo]
fn main() {
}
| foo | identifier_name |
shootout-spectralnorm.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Based on spectralnorm.gcc by Sebastien Loisel
extern mod std;
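// The program estimates the spectral norm (largest singular value) of the
// infinite matrix A with entries A[i][j] = 1/((i+j)(i+j+1)/2 + i + 1):
// `eval_A` computes one entry, `eval_AtA_times_u` applies B = A^T * A to a
// vector, and `main` runs a few rounds of power iteration with B before
// reporting sqrt(vBv / vv) as the estimate.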
fn eval_A(i: uint, j: uint) -> float {
1.0/(((i+j)*(i+j+1u)/2u+i+1u) as float)
}
fn eval_A_times_u(u: &const [float], Au: &mut [float]) {
let N = vec::len(u);
let mut i = 0u;
while i < N {
Au[i] = 0.0;
let mut j = 0u;
while j < N {
Au[i] += eval_A(i, j) * u[j];
j += 1u;
}
i += 1u;
}
}
fn eval_At_times_u(u: &const [float], Au: &mut [float]) {
let N = vec::len(u);
let mut i = 0u;
while i < N {
Au[i] = 0.0;
let mut j = 0u;
while j < N {
Au[i] += eval_A(j, i) * u[j];
j += 1u;
}
i += 1u;
}
}
fn eval_AtA_times_u(u: &const [float], AtAu: &mut [float]) {
let mut v = vec::from_elem(vec::len(u), 0.0);
eval_A_times_u(u, v);
eval_At_times_u(v, AtAu);
}
fn main() {
let args = os::args();
let args = if os::getenv(~"RUST_BENCH").is_some() {
~[~"", ~"2000"]
} else if args.len() <= 1u | else {
args
};
let N = uint::from_str(args[1]).get();
let mut u = vec::from_elem(N, 1.0);
let mut v = vec::from_elem(N, 0.0);
let mut i = 0u;
while i < 10u {
eval_AtA_times_u(u, v);
eval_AtA_times_u(v, u);
i += 1u;
}
let mut vBv = 0.0;
let mut vv = 0.0;
let mut i = 0u;
while i < N {
vBv += u[i] * v[i];
vv += v[i] * v[i];
i += 1u;
}
io::println(fmt!("%0.9f\n", float::sqrt(vBv / vv)));
}
| {
~[~"", ~"1000"]
} | conditional_block |
shootout-spectralnorm.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Based on spectralnorm.gcc by Sebastien Loisel
extern mod std;
fn eval_A(i: uint, j: uint) -> float {
1.0/(((i+j)*(i+j+1u)/2u+i+1u) as float)
}
fn eval_A_times_u(u: &const [float], Au: &mut [float]) {
let N = vec::len(u);
let mut i = 0u;
while i < N {
Au[i] = 0.0;
let mut j = 0u;
while j < N {
Au[i] += eval_A(i, j) * u[j];
j += 1u;
}
i += 1u;
}
}
fn eval_At_times_u(u: &const [float], Au: &mut [float]) {
let N = vec::len(u);
let mut i = 0u;
while i < N {
Au[i] = 0.0;
let mut j = 0u;
while j < N {
Au[i] += eval_A(j, i) * u[j];
j += 1u;
}
i += 1u;
}
}
fn eval_AtA_times_u(u: &const [float], AtAu: &mut [float]) {
let mut v = vec::from_elem(vec::len(u), 0.0);
eval_A_times_u(u, v);
eval_At_times_u(v, AtAu);
}
fn main() {
let args = os::args();
let args = if os::getenv(~"RUST_BENCH").is_some() {
~[~"", ~"2000"]
} else if args.len() <= 1u {
~[~"", ~"1000"]
} else {
args
}; | let mut v = vec::from_elem(N, 0.0);
let mut i = 0u;
while i < 10u {
eval_AtA_times_u(u, v);
eval_AtA_times_u(v, u);
i += 1u;
}
let mut vBv = 0.0;
let mut vv = 0.0;
let mut i = 0u;
while i < N {
vBv += u[i] * v[i];
vv += v[i] * v[i];
i += 1u;
}
io::println(fmt!("%0.9f\n", float::sqrt(vBv / vv)));
} |
let N = uint::from_str(args[1]).get();
let mut u = vec::from_elem(N, 1.0); | random_line_split |
shootout-spectralnorm.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Based on spectralnorm.gcc by Sebastien Loisel
extern mod std;
fn eval_A(i: uint, j: uint) -> float {
1.0/(((i+j)*(i+j+1u)/2u+i+1u) as float)
}
fn | (u: &const [float], Au: &mut [float]) {
let N = vec::len(u);
let mut i = 0u;
while i < N {
Au[i] = 0.0;
let mut j = 0u;
while j < N {
Au[i] += eval_A(i, j) * u[j];
j += 1u;
}
i += 1u;
}
}
fn eval_At_times_u(u: &const [float], Au: &mut [float]) {
let N = vec::len(u);
let mut i = 0u;
while i < N {
Au[i] = 0.0;
let mut j = 0u;
while j < N {
Au[i] += eval_A(j, i) * u[j];
j += 1u;
}
i += 1u;
}
}
fn eval_AtA_times_u(u: &const [float], AtAu: &mut [float]) {
let mut v = vec::from_elem(vec::len(u), 0.0);
eval_A_times_u(u, v);
eval_At_times_u(v, AtAu);
}
fn main() {
let args = os::args();
let args = if os::getenv(~"RUST_BENCH").is_some() {
~[~"", ~"2000"]
} else if args.len() <= 1u {
~[~"", ~"1000"]
} else {
args
};
let N = uint::from_str(args[1]).get();
let mut u = vec::from_elem(N, 1.0);
let mut v = vec::from_elem(N, 0.0);
let mut i = 0u;
while i < 10u {
eval_AtA_times_u(u, v);
eval_AtA_times_u(v, u);
i += 1u;
}
let mut vBv = 0.0;
let mut vv = 0.0;
let mut i = 0u;
while i < N {
vBv += u[i] * v[i];
vv += v[i] * v[i];
i += 1u;
}
io::println(fmt!("%0.9f\n", float::sqrt(vBv / vv)));
}
| eval_A_times_u | identifier_name |
shootout-spectralnorm.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Based on spectralnorm.gcc by Sebastien Loisel
extern mod std;
fn eval_A(i: uint, j: uint) -> float {
1.0/(((i+j)*(i+j+1u)/2u+i+1u) as float)
}
fn eval_A_times_u(u: &const [float], Au: &mut [float]) |
fn eval_At_times_u(u: &const [float], Au: &mut [float]) {
let N = vec::len(u);
let mut i = 0u;
while i < N {
Au[i] = 0.0;
let mut j = 0u;
while j < N {
Au[i] += eval_A(j, i) * u[j];
j += 1u;
}
i += 1u;
}
}
fn eval_AtA_times_u(u: &const [float], AtAu: &mut [float]) {
let mut v = vec::from_elem(vec::len(u), 0.0);
eval_A_times_u(u, v);
eval_At_times_u(v, AtAu);
}
fn main() {
let args = os::args();
let args = if os::getenv(~"RUST_BENCH").is_some() {
~[~"", ~"2000"]
} else if args.len() <= 1u {
~[~"", ~"1000"]
} else {
args
};
let N = uint::from_str(args[1]).get();
let mut u = vec::from_elem(N, 1.0);
let mut v = vec::from_elem(N, 0.0);
let mut i = 0u;
while i < 10u {
eval_AtA_times_u(u, v);
eval_AtA_times_u(v, u);
i += 1u;
}
let mut vBv = 0.0;
let mut vv = 0.0;
let mut i = 0u;
while i < N {
vBv += u[i] * v[i];
vv += v[i] * v[i];
i += 1u;
}
io::println(fmt!("%0.9f\n", float::sqrt(vBv / vv)));
}
| {
let N = vec::len(u);
let mut i = 0u;
while i < N {
Au[i] = 0.0;
let mut j = 0u;
while j < N {
Au[i] += eval_A(i, j) * u[j];
j += 1u;
}
i += 1u;
}
} | identifier_body |
context.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use option::*;
use super::stack::StackSegment;
use libc::c_void;
use cast::{transmute, transmute_mut_unsafe,
transmute_region, transmute_mut_region};
// FIXME #7761: Registers is boxed so that it is 16-byte aligned, for storing
// SSE regs. It would be marginally better not to do this. In C++ we
// use an attribute on a struct.
// FIXME #7761: It would be nice to define regs as `~Option<Registers>` since
// the registers are sometimes empty, but the discriminant would
// then misalign the regs again.
pub struct Context {
/// The context entry point, saved here for later destruction
start: Option<~~fn()>,
/// Hold the registers while the task or scheduler is suspended
regs: ~Registers
}
impl Context {
pub fn empty() -> Context {
Context {
start: None,
regs: new_regs()
}
}
/// Create a new context that will resume execution by running ~fn()
pub fn new(start: ~fn(), stack: &mut StackSegment) -> Context {
// FIXME #7767: Putting main into a ~ so it's a thin pointer and can
// be passed to the spawn function. Another unfortunate
// allocation
let start = ~start;
// The C-ABI function that is the task entry point
extern fn task_start_wrapper(f: &~fn()) { (*f)() }
let fp: *c_void = task_start_wrapper as *c_void;
let argp: *c_void = unsafe { transmute::<&~fn(), *c_void>(&*start) };
let stack_base: *uint = stack.start();
let sp: *uint = stack.end();
let sp: *mut uint = unsafe { transmute_mut_unsafe(sp) };
// Save and then immediately load the current context,
// which we will then modify to call the given function when restored
let mut regs = new_regs();
unsafe {
swap_registers(transmute_mut_region(&mut *regs), transmute_region(&*regs));
};
initialize_call_frame(&mut *regs, fp, argp, sp, stack_base);
return Context {
start: Some(start),
regs: regs
}
}
/* Switch contexts
Suspend the current execution context and resume another by
saving the register values of the executing thread to a Context
then loading the registers from a previously saved Context.
*/
pub fn swap(out_context: &mut Context, in_context: &Context) {
rtdebug!("swapping contexts");
let out_regs: &mut Registers = match out_context {
&Context { regs: ~ref mut r, _ } => r
};
let in_regs: &Registers = match in_context {
&Context { regs: ~ref r, _ } => r
};
rtdebug!("doing raw swap");
unsafe { swap_registers(out_regs, in_regs) };
}
}
extern {
#[rust_stack]
fn swap_registers(out_regs: *mut Registers, in_regs: *Registers);
}
#[cfg(target_arch = "x86")]
struct Registers {
eax: u32, ebx: u32, ecx: u32, edx: u32,
ebp: u32, esi: u32, edi: u32, esp: u32,
cs: u16, ds: u16, ss: u16, es: u16, fs: u16, gs: u16,
eflags: u32, eip: u32
}
#[cfg(target_arch = "x86")]
fn new_regs() -> ~Registers {
~Registers {
eax: 0, ebx: 0, ecx: 0, edx: 0,
ebp: 0, esi: 0, edi: 0, esp: 0,
cs: 0, ds: 0, ss: 0, es: 0, fs: 0, gs: 0,
eflags: 0, eip: 0
}
}
#[cfg(target_arch = "x86")]
fn initialize_call_frame(regs: &mut Registers, fptr: *c_void, arg: *c_void,
sp: *mut uint, _stack_base: *uint) {
let sp = align_down(sp);
let sp = mut_offset(sp, -4);
unsafe { *sp = arg as uint };
let sp = mut_offset(sp, -1);
unsafe { *sp = 0 }; // The final return address
regs.esp = sp as u32;
regs.eip = fptr as u32;
// Last base pointer on the stack is 0
regs.ebp = 0;
}
#[cfg(windows, target_arch = "x86_64")]
type Registers = [uint,..34];
#[cfg(not(windows), target_arch = "x86_64")]
type Registers = [uint,..22];
#[cfg(windows, target_arch = "x86_64")]
fn new_regs() -> ~Registers { ~([0,.. 34]) }
#[cfg(not(windows), target_arch = "x86_64")]
fn new_regs() -> ~Registers { ~([0,.. 22]) }
#[cfg(target_arch = "x86_64")]
fn initialize_call_frame(regs: &mut Registers, fptr: *c_void, arg: *c_void,
sp: *mut uint, stack_base: *uint) {
// Redefinitions from regs.h
static RUSTRT_ARG0: uint = 3;
static RUSTRT_RSP: uint = 1;
static RUSTRT_IP: uint = 8;
static RUSTRT_RBP: uint = 2;
#[cfg(windows)]
fn initialize_tib(regs: &mut Registers, sp: *mut uint, stack_base: *uint) {
// Redefinitions from regs.h
static RUSTRT_ST1: uint = 11; // stack bottom
static RUSTRT_ST2: uint = 12; // stack top
regs[RUSTRT_ST1] = sp as uint;
regs[RUSTRT_ST2] = stack_base as uint;
}
#[cfg(not(windows))]
fn initialize_tib(_: &mut Registers, _: *mut uint, _: *uint) {
}
// Win64 manages stack range at TIB: %gs:0x08 (top) and %gs:0x10 (bottom)
initialize_tib(regs, sp, stack_base);
let sp = align_down(sp);
let sp = mut_offset(sp, -1);
// The final return address. 0 indicates the bottom of the stack
unsafe { *sp = 0; }
rtdebug!("creating call frame");
rtdebug!("fptr {}", fptr as uint);
rtdebug!("arg {}", arg as uint);
rtdebug!("sp {}", sp as uint);
regs[RUSTRT_ARG0] = arg as uint;
regs[RUSTRT_RSP] = sp as uint;
regs[RUSTRT_IP] = fptr as uint;
// Last base pointer on the stack should be 0
regs[RUSTRT_RBP] = 0;
}
#[cfg(target_arch = "arm")]
type Registers = [uint,..32];
#[cfg(target_arch = "arm")]
fn new_regs() -> ~Registers { ~([0,.. 32]) }
#[cfg(target_arch = "arm")]
fn initialize_call_frame(regs: &mut Registers, fptr: *c_void, arg: *c_void,
sp: *mut uint, _stack_base: *uint) {
let sp = align_down(sp);
// sp of arm eabi is 8-byte aligned
let sp = mut_offset(sp, -2);
// The final return address. 0 indicates the bottom of the stack
unsafe { *sp = 0; }
regs[0] = arg as uint; // r0
regs[13] = sp as uint; // #53 sp, r13
regs[14] = fptr as uint; // #60 pc, r15 --> lr
}
#[cfg(target_arch = "mips")]
type Registers = [uint, ..32];
#[cfg(target_arch = "mips")]
fn new_regs() -> ~Registers { ~([0, .. 32]) }
#[cfg(target_arch = "mips")]
fn | (regs: &mut Registers, fptr: *c_void, arg: *c_void,
sp: *mut uint, _stack_base: *uint) {
let sp = align_down(sp);
// sp of mips o32 is 8-byte aligned
let sp = mut_offset(sp, -2);
// The final return address. 0 indicates the bottom of the stack
unsafe { *sp = 0; }
regs[4] = arg as uint;
regs[29] = sp as uint;
regs[25] = fptr as uint;
regs[31] = fptr as uint;
}
fn align_down(sp: *mut uint) -> *mut uint {
unsafe {
let sp: uint = transmute(sp);
let sp = sp & !(16 - 1);
transmute::<uint, *mut uint>(sp)
}
}
// ptr::mut_offset is positive ints only
#[inline]
pub fn mut_offset<T>(ptr: *mut T, count: int) -> *mut T {
use std::sys::size_of;
(ptr as int + count * (size_of::<T>() as int)) as *mut T
}
| initialize_call_frame | identifier_name |
context.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use option::*;
use super::stack::StackSegment;
use libc::c_void;
use cast::{transmute, transmute_mut_unsafe,
transmute_region, transmute_mut_region};
// FIXME #7761: Registers is boxed so that it is 16-byte aligned, for storing
// SSE regs. It would be marginally better not to do this. In C++ we
// use an attribute on a struct.
// FIXME #7761: It would be nice to define regs as `~Option<Registers>` since
// the registers are sometimes empty, but the discriminant would
// then misalign the regs again.
pub struct Context {
/// The context entry point, saved here for later destruction
start: Option<~~fn()>,
/// Hold the registers while the task or scheduler is suspended
regs: ~Registers
}
impl Context {
pub fn empty() -> Context {
Context {
start: None,
regs: new_regs()
}
}
/// Create a new context that will resume execution by running ~fn()
pub fn new(start: ~fn(), stack: &mut StackSegment) -> Context {
// FIXME #7767: Putting main into a ~ so it's a thin pointer and can
// be passed to the spawn function. Another unfortunate
// allocation
let start = ~start;
// The C-ABI function that is the task entry point
extern fn task_start_wrapper(f: &~fn()) { (*f)() }
let fp: *c_void = task_start_wrapper as *c_void;
let argp: *c_void = unsafe { transmute::<&~fn(), *c_void>(&*start) };
let stack_base: *uint = stack.start();
let sp: *uint = stack.end();
let sp: *mut uint = unsafe { transmute_mut_unsafe(sp) };
// Save and then immediately load the current context,
// which we will then modify to call the given function when restored
let mut regs = new_regs();
unsafe {
swap_registers(transmute_mut_region(&mut *regs), transmute_region(&*regs));
};
initialize_call_frame(&mut *regs, fp, argp, sp, stack_base);
return Context {
start: Some(start),
regs: regs
}
}
/* Switch contexts
Suspend the current execution context and resume another by
saving the register values of the executing thread to a Context
then loading the registers from a previously saved Context.
*/
pub fn swap(out_context: &mut Context, in_context: &Context) {
rtdebug!("swapping contexts");
let out_regs: &mut Registers = match out_context {
&Context { regs: ~ref mut r, _ } => r
};
let in_regs: &Registers = match in_context {
&Context { regs: ~ref r, _ } => r
};
rtdebug!("doing raw swap");
unsafe { swap_registers(out_regs, in_regs) };
}
}
extern {
#[rust_stack]
fn swap_registers(out_regs: *mut Registers, in_regs: *Registers);
}
#[cfg(target_arch = "x86")]
struct Registers {
eax: u32, ebx: u32, ecx: u32, edx: u32,
ebp: u32, esi: u32, edi: u32, esp: u32,
cs: u16, ds: u16, ss: u16, es: u16, fs: u16, gs: u16,
eflags: u32, eip: u32
}
#[cfg(target_arch = "x86")]
fn new_regs() -> ~Registers {
~Registers {
eax: 0, ebx: 0, ecx: 0, edx: 0,
ebp: 0, esi: 0, edi: 0, esp: 0,
cs: 0, ds: 0, ss: 0, es: 0, fs: 0, gs: 0,
eflags: 0, eip: 0
}
}
#[cfg(target_arch = "x86")]
fn initialize_call_frame(regs: &mut Registers, fptr: *c_void, arg: *c_void,
sp: *mut uint, _stack_base: *uint) {
let sp = align_down(sp);
let sp = mut_offset(sp, -4);
unsafe { *sp = arg as uint };
let sp = mut_offset(sp, -1);
unsafe { *sp = 0 }; // The final return address
regs.esp = sp as u32;
regs.eip = fptr as u32;
// Last base pointer on the stack is 0
regs.ebp = 0;
}
#[cfg(windows, target_arch = "x86_64")]
type Registers = [uint, ..34];
#[cfg(not(windows), target_arch = "x86_64")]
type Registers = [uint, ..22];
#[cfg(windows, target_arch = "x86_64")]
fn new_regs() -> ~Registers { ~([0, .. 34]) }
#[cfg(not(windows), target_arch = "x86_64")]
fn new_regs() -> ~Registers { ~([0, .. 22]) }
#[cfg(target_arch = "x86_64")]
fn initialize_call_frame(regs: &mut Registers, fptr: *c_void, arg: *c_void,
sp: *mut uint, stack_base: *uint) {
// Redefinitions from regs.h
static RUSTRT_ARG0: uint = 3;
static RUSTRT_RSP: uint = 1;
static RUSTRT_IP: uint = 8;
static RUSTRT_RBP: uint = 2;
#[cfg(windows)]
fn initialize_tib(regs: &mut Registers, sp: *mut uint, stack_base: *uint) {
// Redefinitions from regs.h
static RUSTRT_ST1: uint = 11; // stack bottom
static RUSTRT_ST2: uint = 12; // stack top
regs[RUSTRT_ST1] = sp as uint;
regs[RUSTRT_ST2] = stack_base as uint;
}
#[cfg(not(windows))]
fn initialize_tib(_: &mut Registers, _: *mut uint, _: *uint) {
}
// Win64 manages stack range at TIB: %gs:0x08 (top) and %gs:0x10 (bottom)
initialize_tib(regs, sp, stack_base);
let sp = align_down(sp);
let sp = mut_offset(sp, -1);
// The final return address. 0 indicates the bottom of the stack
unsafe { *sp = 0; }
rtdebug!("creating call frame");
rtdebug!("fptr {}", fptr as uint);
rtdebug!("arg {}", arg as uint);
rtdebug!("sp {}", sp as uint);
regs[RUSTRT_ARG0] = arg as uint;
regs[RUSTRT_RSP] = sp as uint;
regs[RUSTRT_IP] = fptr as uint;
// Last base pointer on the stack should be 0
regs[RUSTRT_RBP] = 0;
}
#[cfg(target_arch = "arm")]
type Registers = [uint, ..32];
#[cfg(target_arch = "arm")]
fn new_regs() -> ~Registers { ~([0, .. 32]) }
#[cfg(target_arch = "arm")]
fn initialize_call_frame(regs: &mut Registers, fptr: *c_void, arg: *c_void,
sp: *mut uint, _stack_base: *uint) {
let sp = align_down(sp);
// sp of arm eabi is 8-byte aligned
let sp = mut_offset(sp, -2);
// The final return address. 0 indicates the bottom of the stack
unsafe { *sp = 0; }
regs[0] = arg as uint; // r0
regs[13] = sp as uint; // #53 sp, r13
regs[14] = fptr as uint; // #60 pc, r15 --> lr
}
#[cfg(target_arch = "mips")]
type Registers = [uint, ..32];
#[cfg(target_arch = "mips")]
fn new_regs() -> ~Registers { ~([0, .. 32]) }
#[cfg(target_arch = "mips")]
fn initialize_call_frame(regs: &mut Registers, fptr: *c_void, arg: *c_void,
sp: *mut uint, _stack_base: *uint) {
let sp = align_down(sp);
// sp of mips o32 is 8-byte aligned
let sp = mut_offset(sp, -2);
// The final return address. 0 indicates the bottom of the stack
unsafe { *sp = 0; }
regs[4] = arg as uint;
regs[29] = sp as uint;
regs[25] = fptr as uint;
regs[31] = fptr as uint;
}
fn align_down(sp: *mut uint) -> *mut uint {
unsafe {
let sp: uint = transmute(sp);
let sp = sp & !(16 - 1);
transmute::<uint, *mut uint>(sp)
}
}
// ptr::mut_offset is positive ints only
#[inline]
pub fn mut_offset<T>(ptr: *mut T, count: int) -> *mut T | {
use std::sys::size_of;
(ptr as int + count * (size_of::<T>() as int)) as *mut T
} | identifier_body |
|
context.rs | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT. | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use option::*;
use super::stack::StackSegment;
use libc::c_void;
use cast::{transmute, transmute_mut_unsafe,
transmute_region, transmute_mut_region};
// FIXME #7761: Registers is boxed so that it is 16-byte aligned, for storing
// SSE regs. It would be marginally better not to do this. In C++ we
// use an attribute on a struct.
// FIXME #7761: It would be nice to define regs as `~Option<Registers>` since
// the registers are sometimes empty, but the discriminant would
// then misalign the regs again.
pub struct Context {
/// The context entry point, saved here for later destruction
start: Option<~~fn()>,
/// Hold the registers while the task or scheduler is suspended
regs: ~Registers
}
impl Context {
pub fn empty() -> Context {
Context {
start: None,
regs: new_regs()
}
}
/// Create a new context that will resume execution by running ~fn()
pub fn new(start: ~fn(), stack: &mut StackSegment) -> Context {
// FIXME #7767: Putting main into a ~ so it's a thin pointer and can
// be passed to the spawn function. Another unfortunate
// allocation
let start = ~start;
// The C-ABI function that is the task entry point
extern fn task_start_wrapper(f: &~fn()) { (*f)() }
let fp: *c_void = task_start_wrapper as *c_void;
let argp: *c_void = unsafe { transmute::<&~fn(), *c_void>(&*start) };
let stack_base: *uint = stack.start();
let sp: *uint = stack.end();
let sp: *mut uint = unsafe { transmute_mut_unsafe(sp) };
// Save and then immediately load the current context,
// which we will then modify to call the given function when restored
let mut regs = new_regs();
unsafe {
swap_registers(transmute_mut_region(&mut *regs), transmute_region(&*regs));
};
initialize_call_frame(&mut *regs, fp, argp, sp, stack_base);
return Context {
start: Some(start),
regs: regs
}
}
/* Switch contexts
Suspend the current execution context and resume another by
saving the register values of the executing thread to a Context
then loading the registers from a previously saved Context.
*/
pub fn swap(out_context: &mut Context, in_context: &Context) {
rtdebug!("swapping contexts");
let out_regs: &mut Registers = match out_context {
&Context { regs: ~ref mut r, _ } => r
};
let in_regs: &Registers = match in_context {
&Context { regs: ~ref r, _ } => r
};
rtdebug!("doing raw swap");
unsafe { swap_registers(out_regs, in_regs) };
}
}
extern {
#[rust_stack]
fn swap_registers(out_regs: *mut Registers, in_regs: *Registers);
}
#[cfg(target_arch = "x86")]
struct Registers {
eax: u32, ebx: u32, ecx: u32, edx: u32,
ebp: u32, esi: u32, edi: u32, esp: u32,
cs: u16, ds: u16, ss: u16, es: u16, fs: u16, gs: u16,
eflags: u32, eip: u32
}
#[cfg(target_arch = "x86")]
fn new_regs() -> ~Registers {
~Registers {
eax: 0, ebx: 0, ecx: 0, edx: 0,
ebp: 0, esi: 0, edi: 0, esp: 0,
cs: 0, ds: 0, ss: 0, es: 0, fs: 0, gs: 0,
eflags: 0, eip: 0
}
}
#[cfg(target_arch = "x86")]
fn initialize_call_frame(regs: &mut Registers, fptr: *c_void, arg: *c_void,
sp: *mut uint, _stack_base: *uint) {
let sp = align_down(sp);
let sp = mut_offset(sp, -4);
unsafe { *sp = arg as uint };
let sp = mut_offset(sp, -1);
unsafe { *sp = 0 }; // The final return address
regs.esp = sp as u32;
regs.eip = fptr as u32;
// Last base pointer on the stack is 0
regs.ebp = 0;
}
#[cfg(windows, target_arch = "x86_64")]
type Registers = [uint, ..34];
#[cfg(not(windows), target_arch = "x86_64")]
type Registers = [uint, ..22];
#[cfg(windows, target_arch = "x86_64")]
fn new_regs() -> ~Registers { ~([0, .. 34]) }
#[cfg(not(windows), target_arch = "x86_64")]
fn new_regs() -> ~Registers { ~([0, .. 22]) }
#[cfg(target_arch = "x86_64")]
fn initialize_call_frame(regs: &mut Registers, fptr: *c_void, arg: *c_void,
sp: *mut uint, stack_base: *uint) {
// Redefinitions from regs.h
static RUSTRT_ARG0: uint = 3;
static RUSTRT_RSP: uint = 1;
static RUSTRT_IP: uint = 8;
static RUSTRT_RBP: uint = 2;
#[cfg(windows)]
fn initialize_tib(regs: &mut Registers, sp: *mut uint, stack_base: *uint) {
// Redefinitions from regs.h
static RUSTRT_ST1: uint = 11; // stack bottom
static RUSTRT_ST2: uint = 12; // stack top
regs[RUSTRT_ST1] = sp as uint;
regs[RUSTRT_ST2] = stack_base as uint;
}
#[cfg(not(windows))]
fn initialize_tib(_: &mut Registers, _: *mut uint, _: *uint) {
}
// Win64 manages stack range at TIB: %gs:0x08 (top) and %gs:0x10 (bottom)
initialize_tib(regs, sp, stack_base);
let sp = align_down(sp);
let sp = mut_offset(sp, -1);
// The final return address. 0 indicates the bottom of the stack
unsafe { *sp = 0; }
rtdebug!("creating call frame");
rtdebug!("fptr {}", fptr as uint);
rtdebug!("arg {}", arg as uint);
rtdebug!("sp {}", sp as uint);
regs[RUSTRT_ARG0] = arg as uint;
regs[RUSTRT_RSP] = sp as uint;
regs[RUSTRT_IP] = fptr as uint;
// Last base pointer on the stack should be 0
regs[RUSTRT_RBP] = 0;
}
#[cfg(target_arch = "arm")]
type Registers = [uint, ..32];
#[cfg(target_arch = "arm")]
fn new_regs() -> ~Registers { ~([0, .. 32]) }
#[cfg(target_arch = "arm")]
fn initialize_call_frame(regs: &mut Registers, fptr: *c_void, arg: *c_void,
sp: *mut uint, _stack_base: *uint) {
let sp = align_down(sp);
// sp of arm eabi is 8-byte aligned
let sp = mut_offset(sp, -2);
// The final return address. 0 indicates the bottom of the stack
unsafe { *sp = 0; }
regs[0] = arg as uint; // r0
regs[13] = sp as uint; // #53 sp, r13
regs[14] = fptr as uint; // #60 pc, r15 --> lr
}
#[cfg(target_arch = "mips")]
type Registers = [uint, ..32];
#[cfg(target_arch = "mips")]
fn new_regs() -> ~Registers { ~([0, .. 32]) }
#[cfg(target_arch = "mips")]
fn initialize_call_frame(regs: &mut Registers, fptr: *c_void, arg: *c_void,
sp: *mut uint, _stack_base: *uint) {
let sp = align_down(sp);
// sp of mips o32 is 8-byte aligned
let sp = mut_offset(sp, -2);
// The final return address. 0 indicates the bottom of the stack
unsafe { *sp = 0; }
regs[4] = arg as uint;
regs[29] = sp as uint;
regs[25] = fptr as uint;
regs[31] = fptr as uint;
}
fn align_down(sp: *mut uint) -> *mut uint {
unsafe {
let sp: uint = transmute(sp);
let sp = sp & !(16 - 1);
transmute::<uint, *mut uint>(sp)
}
}
// ptr::mut_offset is positive ints only
#[inline]
pub fn mut_offset<T>(ptr: *mut T, count: int) -> *mut T {
use std::sys::size_of;
(ptr as int + count * (size_of::<T>() as int)) as *mut T
} | // | random_line_split |
test_stmt.rs | #![allow(clippy::non_ascii_literal)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use std::iter::FromIterator;
use syn::Stmt;
#[test]
fn test_raw_operator() {
let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
snapshot!(stmt, @r###"
Local(Local {
pat: Pat::Wild,
init: Some(Verbatim(`& raw const x`)),
})
"###);
}
#[test]
fn test_raw_variable() {
let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
snapshot!(stmt, @r###"
Local(Local {
pat: Pat::Wild,
init: Some(Expr::Reference {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "raw",
arguments: None,
},
],
},
},
}),
})
"###);
}
#[test]
fn test_raw_invalid() {
assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
}
#[test]
fn test_none_group() | output: Default,
},
block: Block,
})
"###);
}
| {
// <Ø async fn f() {} Ø>
let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("async", Span::call_site())),
TokenTree::Ident(Ident::new("fn", Span::call_site())),
TokenTree::Ident(Ident::new("f", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]),
))]);
snapshot!(tokens as Stmt, @r###"
Item(Item::Fn {
vis: Inherited,
sig: Signature {
asyncness: Some,
ident: "f",
generics: Generics, | identifier_body |
test_stmt.rs | #![allow(clippy::non_ascii_literal)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use std::iter::FromIterator;
use syn::Stmt;
#[test]
fn test_raw_operator() {
let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
snapshot!(stmt, @r###"
Local(Local {
pat: Pat::Wild,
init: Some(Verbatim(`& raw const x`)),
})
"###);
}
#[test]
fn test_raw_variable() { | Local(Local {
pat: Pat::Wild,
init: Some(Expr::Reference {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "raw",
arguments: None,
},
],
},
},
}),
})
"###);
}
#[test]
fn test_raw_invalid() {
assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
}
#[test]
fn test_none_group() {
// <Ø async fn f() {} Ø>
let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("async", Span::call_site())),
TokenTree::Ident(Ident::new("fn", Span::call_site())),
TokenTree::Ident(Ident::new("f", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]),
))]);
snapshot!(tokens as Stmt, @r###"
Item(Item::Fn {
vis: Inherited,
sig: Signature {
asyncness: Some,
ident: "f",
generics: Generics,
output: Default,
},
block: Block,
})
"###);
} | let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
snapshot!(stmt, @r###" | random_line_split |
test_stmt.rs | #![allow(clippy::non_ascii_literal)]
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use std::iter::FromIterator;
use syn::Stmt;
#[test]
fn test_raw_operator() {
let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
snapshot!(stmt, @r###"
Local(Local {
pat: Pat::Wild,
init: Some(Verbatim(`& raw const x`)),
})
"###);
}
#[test]
fn | () {
let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
snapshot!(stmt, @r###"
Local(Local {
pat: Pat::Wild,
init: Some(Expr::Reference {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "raw",
arguments: None,
},
],
},
},
}),
})
"###);
}
#[test]
fn test_raw_invalid() {
assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
}
#[test]
fn test_none_group() {
// <Ø async fn f() {} Ø>
let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("async", Span::call_site())),
TokenTree::Ident(Ident::new("fn", Span::call_site())),
TokenTree::Ident(Ident::new("f", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]),
))]);
snapshot!(tokens as Stmt, @r###"
Item(Item::Fn {
vis: Inherited,
sig: Signature {
asyncness: Some,
ident: "f",
generics: Generics,
output: Default,
},
block: Block,
})
"###);
}
| test_raw_variable | identifier_name |
no_0014_longest_common_prefix.rs | struct Solution;
impl Solution {
pub fn longest_common_prefix(strs: Vec<String>) -> String {
match strs.len() {
0 => return String::new(),
1 => return strs[0].clone(),
_ => (),
};
let mut buf = String::new();
for (i, c) in strs[0].as_bytes().iter().enumerate() {
for j in 1..strs.len() {
if i >= strs[j].len() || strs[j].as_bytes()[i] != *c {
return buf;
}
} | }
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_longest_common_prefix() {
let strs = vec!["flower", "flow", "flight"]
.iter()
.map(|x| x.to_string())
.collect();
assert_eq!(Solution::longest_common_prefix(strs), "fl".to_owned());
let strs = vec!["dog", "racecar", "car"]
.iter()
.map(|x| x.to_string())
.collect();
assert_eq!(Solution::longest_common_prefix(strs), "".to_owned());
let strs = vec!["aa", "a"].iter().map(|x| x.to_string()).collect();
assert_eq!(Solution::longest_common_prefix(strs), "a".to_owned());
}
} | buf.push(*c as char);
}
buf | random_line_split |
no_0014_longest_common_prefix.rs | struct Solution;
impl Solution {
pub fn longest_common_prefix(strs: Vec<String>) -> String {
match strs.len() {
0 => return String::new(),
1 => return strs[0].clone(),
_ => (),
};
let mut buf = String::new();
for (i, c) in strs[0].as_bytes().iter().enumerate() {
for j in 1..strs.len() {
if i >= strs[j].len() || strs[j].as_bytes()[i]!= *c |
}
buf.push(*c as char);
}
buf
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_longest_common_prefix() {
let strs = vec!["flower", "flow", "flight"]
.iter()
.map(|x| x.to_string())
.collect();
assert_eq!(Solution::longest_common_prefix(strs), "fl".to_owned());
let strs = vec!["dog", "racecar", "car"]
.iter()
.map(|x| x.to_string())
.collect();
assert_eq!(Solution::longest_common_prefix(strs), "".to_owned());
let strs = vec!["aa", "a"].iter().map(|x| x.to_string()).collect();
assert_eq!(Solution::longest_common_prefix(strs), "a".to_owned());
}
}
| {
return buf;
} | conditional_block |
no_0014_longest_common_prefix.rs | struct Solution;
impl Solution {
pub fn | (strs: Vec<String>) -> String {
match strs.len() {
0 => return String::new(),
1 => return strs[0].clone(),
_ => (),
};
let mut buf = String::new();
for (i, c) in strs[0].as_bytes().iter().enumerate() {
for j in 1..strs.len() {
if i >= strs[j].len() || strs[j].as_bytes()[i] != *c {
return buf;
}
}
buf.push(*c as char);
}
buf
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_longest_common_prefix() {
let strs = vec!["flower", "flow", "flight"]
.iter()
.map(|x| x.to_string())
.collect();
assert_eq!(Solution::longest_common_prefix(strs), "fl".to_owned());
let strs = vec!["dog", "racecar", "car"]
.iter()
.map(|x| x.to_string())
.collect();
assert_eq!(Solution::longest_common_prefix(strs), "".to_owned());
let strs = vec!["aa", "a"].iter().map(|x| x.to_string()).collect();
assert_eq!(Solution::longest_common_prefix(strs), "a".to_owned());
}
}
| longest_common_prefix | identifier_name |
parse_movie.rs | // Copyright (c) 2017 Simon Dickson
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use std::path::Path;
use failure::Error;
use regex::Regex;
use data::Movie;
pub fn parse<'a>(search_path: &Path, path: &'a Path) -> Result<Movie, Error> {
let (title, year) = parse_title(search_path, path)?;
Ok(Movie {
title: title.to_owned(),
year: year,
file_path: path.to_str().ok_or(format_err!("should be a path"))?.to_owned()
})
}
fn parse_title<'a>(base_path: &Path, path: &'a Path) -> Result<(&'a str, Option<u16>), Error> | Some (cap) => {
let title = cap.get(1).map(|m| m.as_str()).ok_or(format_err!("failed to parse title"))?;
Ok((title, None))
},
None => {
Ok((folder_name, None))
},
}
},
}
}
#[test]
fn a_clockwork_orange(){
match parse(Path::new("/storage/movies/"), Path::new("/storage/movies/A Clockwork Orange (1971).mkv")) {
Ok(Movie { ref title, year: Some (1971), .. }) if title == "A Clockwork Orange" => (),
result => assert!(false, "{:?}", result)
}
}
#[test]
fn american_history_x(){
match parse(Path::new("/storage/movies/"), Path::new("/storage/movies/American History X.mp4")) {
Ok(Movie { ref title, year: None, .. }) if title == "American History X" => (),
result => assert!(false, "{:?}", result)
}
}
#[test]
fn great_escape(){
match parse(Path::new("/storage/movies/"), Path::new("/storage/movies/Great Escape.m4v")) {
Ok(Movie { ref title, year: None, .. }) if title == "Great Escape" => (),
result => assert!(false, "{:?}", result)
}
}
#[test]
fn die_hard(){
match parse(Path::new("/storage/movies/"), Path::new("/storage/movies/Die Hard.m4v")) {
Ok(Movie { ref title, year: None, .. }) if title == "Die Hard" => (),
result => assert!(false, "{:?}", result)
}
}
| {
lazy_static! {
static ref TITLE_FORMAT_1: Regex = Regex::new(r"([^']+)\s+\((\d{4})\)").unwrap();
}
lazy_static! {
static ref TITLE_FORMAT_2: Regex = Regex::new(r"([^']+)\.").unwrap();
}
let folder_name =
path.strip_prefix(base_path)?.components().next().ok_or(format_err!("failed to parse folder"))?
.as_os_str().to_str().ok_or(format_err!("failed to parse folder"))?;
match TITLE_FORMAT_1.captures_iter(folder_name).nth(0) {
Some (cap) => {
let title = cap.get(1).map(|m| m.as_str()).ok_or(format_err!("failed to parse title"))?;
let year = cap.get(2).map(|m| m.as_str()).ok_or(format_err!("failed to parse year"))?.parse::<u16>()?;
Ok((title, Some(year)))
},
None => {
match TITLE_FORMAT_2.captures_iter(folder_name).nth(0) { | identifier_body |
parse_movie.rs | // Copyright (c) 2017 Simon Dickson
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use std::path::Path;
use failure::Error;
use regex::Regex;
use data::Movie;
pub fn parse<'a>(search_path: &Path, path: &'a Path) -> Result<Movie, Error> {
let (title, year) = parse_title(search_path, path)?;
Ok(Movie {
title: title.to_owned(),
year: year,
file_path: path.to_str().ok_or(format_err!("should be a path"))?.to_owned()
})
}
fn parse_title<'a>(base_path: &Path, path: &'a Path) -> Result<(&'a str, Option<u16>), Error> {
lazy_static! {
static ref TITLE_FORMAT_1: Regex = Regex::new(r"([^']+)\s+\((\d{4})\)").unwrap();
}
lazy_static! {
static ref TITLE_FORMAT_2: Regex = Regex::new(r"([^']+)\.").unwrap();
}
let folder_name =
path.strip_prefix(base_path)?.components().next().ok_or(format_err!("failed to parse folder"))?
.as_os_str().to_str().ok_or(format_err!("failed to parse folder"))?;
match TITLE_FORMAT_1.captures_iter(folder_name).nth(0) {
Some (cap) => {
let title = cap.get(1).map(|m| m.as_str()).ok_or(format_err!("failed to parse title"))?;
let year = cap.get(2).map(|m| m.as_str()).ok_or(format_err!("failed to parse year"))?.parse::<u16>()?;
Ok((title, Some(year)))
},
None => {
match TITLE_FORMAT_2.captures_iter(folder_name).nth(0) {
Some (cap) => {
let title = cap.get(1).map(|m| m.as_str()).ok_or(format_err!("failed to parse title"))?;
Ok((title, None))
},
None => {
Ok((folder_name, None))
},
}
},
}
}
#[test]
fn a_clockwork_orange(){
match parse(Path::new("/storage/movies/"), Path::new("/storage/movies/A Clockwork Orange (1971).mkv")) {
Ok(Movie { ref title, year: Some (1971), .. }) if title == "A Clockwork Orange" => (),
result => assert!(false, "{:?}", result)
}
}
#[test]
fn american_history_x(){
match parse(Path::new("/storage/movies/"), Path::new("/storage/movies/American History X.mp4")) {
Ok(Movie { ref title, year: None, .. }) if title == "American History X" => (),
result => assert!(false, "{:?}", result)
}
}
#[test]
fn great_escape(){
match parse(Path::new("/storage/movies/"), Path::new("/storage/movies/Great Escape.m4v")) {
Ok(Movie { ref title, year: None, .. }) if title == "Great Escape" => (),
result => assert!(false, "{:?}", result)
}
}
#[test]
fn die_hard(){
match parse(Path::new("/storage/movies/"), Path::new("/storage/movies/Die Hard.m4v")) {
Ok(Movie { ref title, year: None, .. }) if title == "Die Hard" => (),
result => assert!(false, "{:?}", result)
} | } | random_line_split |
|
parse_movie.rs | // Copyright (c) 2017 Simon Dickson
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use std::path::Path;
use failure::Error;
use regex::Regex;
use data::Movie;
pub fn parse<'a>(search_path: &Path, path: &'a Path) -> Result<Movie, Error> {
let (title, year) = parse_title(search_path, path)?;
Ok(Movie {
title: title.to_owned(),
year: year,
file_path: path.to_str().ok_or(format_err!("should be a path"))?.to_owned()
})
}
fn parse_title<'a>(base_path: &Path, path: &'a Path) -> Result<(&'a str, Option<u16>), Error> {
lazy_static! {
static ref TITLE_FORMAT_1: Regex = Regex::new(r"([^']+)\s+\((\d{4})\)").unwrap();
}
lazy_static! {
static ref TITLE_FORMAT_2: Regex = Regex::new(r"([^']+)\.").unwrap();
}
let folder_name =
path.strip_prefix(base_path)?.components().next().ok_or(format_err!("failed to parse folder"))?
.as_os_str().to_str().ok_or(format_err!("failed to parse folder"))?;
match TITLE_FORMAT_1.captures_iter(folder_name).nth(0) {
Some (cap) => {
let title = cap.get(1).map(|m| m.as_str()).ok_or(format_err!("failed to parse title"))?;
let year = cap.get(2).map(|m| m.as_str()).ok_or(format_err!("failed to parse year"))?.parse::<u16>()?;
Ok((title, Some(year)))
},
None => {
match TITLE_FORMAT_2.captures_iter(folder_name).nth(0) {
Some (cap) => {
let title = cap.get(1).map(|m| m.as_str()).ok_or(format_err!("failed to parse title"))?;
Ok((title, None))
},
None => {
Ok((folder_name, None))
},
}
},
}
}
#[test]
fn a_clockwork_orange(){
match parse(Path::new("/storage/movies/"), Path::new("/storage/movies/A Clockwork Orange (1971).mkv")) {
Ok(Movie { ref title, year: Some (1971), .. }) if title == "A Clockwork Orange" => (),
result => assert!(false, "{:?}", result)
}
}
#[test]
fn american_history_x(){
match parse(Path::new("/storage/movies/"), Path::new("/storage/movies/American History X.mp4")) {
Ok(Movie { ref title, year: None, .. }) if title == "American History X" => (),
result => assert!(false, "{:?}", result)
}
}
#[test]
fn | (){
match parse(Path::new("/storage/movies/"), Path::new("/storage/movies/Great Escape.m4v")) {
Ok(Movie { ref title, year: None, .. }) if title == "Great Escape" => (),
result => assert!(false, "{:?}", result)
}
}
#[test]
fn die_hard(){
match parse(Path::new("/storage/movies/"), Path::new("/storage/movies/Die Hard.m4v")) {
Ok(Movie { ref title, year: None, .. }) if title == "Die Hard" => (),
result => assert!(false, "{:?}", result)
}
}
| great_escape | identifier_name |
eth_pubsub.rs | // Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Eth PUB-SUB rpc implementation.
use std::sync::Arc;
use std::collections::BTreeMap;
use futures::{self, future, BoxFuture, Future};
use jsonrpc_core::Error;
use jsonrpc_macros::Trailing;
use jsonrpc_macros::pubsub::{Sink, Subscriber};
use jsonrpc_pubsub::SubscriptionId;
use v1::helpers::{errors, limit_logs, Subscribers};
use v1::helpers::light_fetch::LightFetch;
use v1::metadata::Metadata;
use v1::traits::EthPubSub;
use v1::types::{pubsub, RichHeader, Log};
use ethcore::encoded;
use ethcore::filter::Filter as EthFilter;
use ethcore::client::{BlockChainClient, ChainNotify, BlockId};
use ethsync::LightSync;
use light::cache::Cache;
use light::on_demand::OnDemand;
use light::client::{LightChainClient, LightChainNotify};
use parity_reactor::Remote;
use util::{RwLock, Mutex, H256, Bytes};
type Client = Sink<pubsub::Result>;
/// Eth PubSub implementation.
pub struct EthPubSubClient<C> {
handler: Arc<ChainNotificationHandler<C>>,
heads_subscribers: Arc<RwLock<Subscribers<Client>>>,
logs_subscribers: Arc<RwLock<Subscribers<(Client, EthFilter)>>>,
}
impl<C> EthPubSubClient<C> {
/// Creates new `EthPubSubClient`.
pub fn new(client: Arc<C>, remote: Remote) -> Self {
let heads_subscribers = Arc::new(RwLock::new(Subscribers::default()));
let logs_subscribers = Arc::new(RwLock::new(Subscribers::default()));
EthPubSubClient {
handler: Arc::new(ChainNotificationHandler {
client,
remote,
heads_subscribers: heads_subscribers.clone(),
logs_subscribers: logs_subscribers.clone(),
}),
heads_subscribers,
logs_subscribers,
}
}
/// Creates new `EthPubSubClient` with deterministic subscription ids.
#[cfg(test)]
pub fn new_test(client: Arc<C>, remote: Remote) -> Self {
let client = Self::new(client, remote);
*client.heads_subscribers.write() = Subscribers::new_test();
*client.logs_subscribers.write() = Subscribers::new_test();
client
}
/// Returns a chain notification handler.
pub fn handler(&self) -> Arc<ChainNotificationHandler<C>> {
self.handler.clone()
}
}
impl EthPubSubClient<LightFetch> {
/// Creates a new `EthPubSubClient` for `LightClient`.
pub fn light(
client: Arc<LightChainClient>,
on_demand: Arc<OnDemand>,
sync: Arc<LightSync>,
cache: Arc<Mutex<Cache>>,
remote: Remote,
) -> Self {
let fetch = LightFetch {
client,
on_demand,
sync,
cache
};
EthPubSubClient::new(Arc::new(fetch), remote)
}
}
/// PubSub Notification handler.
pub struct ChainNotificationHandler<C> {
client: Arc<C>,
remote: Remote,
heads_subscribers: Arc<RwLock<Subscribers<Client>>>,
logs_subscribers: Arc<RwLock<Subscribers<(Client, EthFilter)>>>,
}
impl<C> ChainNotificationHandler<C> {
fn notify(remote: &Remote, subscriber: &Client, result: pubsub::Result) {
remote.spawn(subscriber
.notify(Ok(result))
.map(|_| ())
.map_err(|e| warn!(target: "rpc", "Unable to send notification: {}", e))
);
}
fn notify_heads(&self, headers: &[(encoded::Header, BTreeMap<String, String>)]) {
for subscriber in self.heads_subscribers.read().values() {
for &(ref header, ref extra_info) in headers {
Self::notify(&self.remote, subscriber, pubsub::Result::Header(RichHeader {
inner: header.into(),
extra_info: extra_info.clone(),
}));
}
}
}
fn notify_logs<F>(&self, enacted: &[H256], logs: F) where
F: Fn(EthFilter) -> BoxFuture<Vec<Log>, Error>,
{
for &(ref subscriber, ref filter) in self.logs_subscribers.read().values() {
let logs = futures::future::join_all(enacted
.iter()
.map(|hash| {
let mut filter = filter.clone();
filter.from_block = BlockId::Hash(*hash);
filter.to_block = filter.from_block.clone();
logs(filter)
})
.collect::<Vec<_>>()
);
let limit = filter.limit;
let remote = self.remote.clone();
let subscriber = subscriber.clone();
self.remote.spawn(logs
.map(move |logs| {
let logs = logs.into_iter().flat_map(|log| log).collect();
let logs = limit_logs(logs, limit);
if!logs.is_empty() |
})
.map_err(|e| warn!("Unable to fetch latest logs: {:?}", e))
);
}
}
}
/// A light client wrapper struct.
pub trait LightClient: Send + Sync {
/// Get a recent block header.
fn block_header(&self, id: BlockId) -> Option<encoded::Header>;
/// Fetch logs.
fn logs(&self, filter: EthFilter) -> BoxFuture<Vec<Log>, Error>;
}
impl LightClient for LightFetch {
fn block_header(&self, id: BlockId) -> Option<encoded::Header> {
self.client.block_header(id)
}
fn logs(&self, filter: EthFilter) -> BoxFuture<Vec<Log>, Error> {
LightFetch::logs(self, filter)
}
}
impl<C: LightClient> LightChainNotify for ChainNotificationHandler<C> {
fn new_headers(
&self,
enacted: &[H256],
) {
let headers = enacted
.iter()
.filter_map(|hash| self.client.block_header(BlockId::Hash(*hash)))
.map(|header| (header, Default::default()))
.collect::<Vec<_>>();
self.notify_heads(&headers);
self.notify_logs(&enacted, |filter| self.client.logs(filter))
}
}
impl<C: BlockChainClient> ChainNotify for ChainNotificationHandler<C> {
fn new_blocks(
&self,
_imported: Vec<H256>,
_invalid: Vec<H256>,
enacted: Vec<H256>,
retracted: Vec<H256>,
_sealed: Vec<H256>,
// Block bytes.
_proposed: Vec<Bytes>,
_duration: u64,
) {
const EXTRA_INFO_PROOF: &'static str = "Object exists in blockchain (fetched earlier), extra_info is always available if object exists; qed";
let headers = enacted
.iter()
.filter_map(|hash| self.client.block_header(BlockId::Hash(*hash)))
.map(|header| {
let hash = header.hash();
(header, self.client.block_extra_info(BlockId::Hash(hash)).expect(EXTRA_INFO_PROOF))
})
.collect::<Vec<_>>();
// Headers
self.notify_heads(&headers);
// Enacted logs
self.notify_logs(&enacted, |filter| {
future::ok(self.client.logs(filter).into_iter().map(Into::into).collect()).boxed()
});
// Retracted logs
self.notify_logs(&retracted, |filter| {
future::ok(self.client.logs(filter).into_iter().map(Into::into).map(|mut log: Log| {
log.log_type = "removed".into();
log
}).collect()).boxed()
});
}
}
impl<C: Send + Sync + 'static> EthPubSub for EthPubSubClient<C> {
type Metadata = Metadata;
fn subscribe(
&self,
_meta: Metadata,
subscriber: Subscriber<pubsub::Result>,
kind: pubsub::Kind,
params: Trailing<pubsub::Params>,
) {
match (kind, params.into()) {
(pubsub::Kind::NewHeads, None) => {
self.heads_subscribers.write().push(subscriber)
},
(pubsub::Kind::Logs, Some(pubsub::Params::Logs(filter))) => {
self.logs_subscribers.write().push(subscriber, filter.into());
},
_ => {
let _ = subscriber.reject(errors::unimplemented(None));
},
}
}
fn unsubscribe(&self, id: SubscriptionId) -> BoxFuture<bool, Error> {
let res = self.heads_subscribers.write().remove(&id).is_some();
let res2 = self.logs_subscribers.write().remove(&id).is_some();
future::ok(res || res2).boxed()
}
}
| {
Self::notify(&remote, &subscriber, pubsub::Result::Logs(logs));
} | conditional_block |
eth_pubsub.rs | // Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Eth PUB-SUB rpc implementation.
use std::sync::Arc;
use std::collections::BTreeMap;
use futures::{self, future, BoxFuture, Future};
use jsonrpc_core::Error;
use jsonrpc_macros::Trailing;
use jsonrpc_macros::pubsub::{Sink, Subscriber};
use jsonrpc_pubsub::SubscriptionId;
use v1::helpers::{errors, limit_logs, Subscribers};
use v1::helpers::light_fetch::LightFetch;
use v1::metadata::Metadata;
use v1::traits::EthPubSub;
use v1::types::{pubsub, RichHeader, Log};
use ethcore::encoded;
use ethcore::filter::Filter as EthFilter;
use ethcore::client::{BlockChainClient, ChainNotify, BlockId};
use ethsync::LightSync;
use light::cache::Cache;
use light::on_demand::OnDemand;
use light::client::{LightChainClient, LightChainNotify};
use parity_reactor::Remote;
use util::{RwLock, Mutex, H256, Bytes};
type Client = Sink<pubsub::Result>;
/// Eth PubSub implementation.
pub struct EthPubSubClient<C> {
handler: Arc<ChainNotificationHandler<C>>,
heads_subscribers: Arc<RwLock<Subscribers<Client>>>,
logs_subscribers: Arc<RwLock<Subscribers<(Client, EthFilter)>>>,
}
impl<C> EthPubSubClient<C> {
/// Creates new `EthPubSubClient`.
pub fn new(client: Arc<C>, remote: Remote) -> Self {
let heads_subscribers = Arc::new(RwLock::new(Subscribers::default()));
let logs_subscribers = Arc::new(RwLock::new(Subscribers::default()));
EthPubSubClient {
handler: Arc::new(ChainNotificationHandler {
client,
remote,
heads_subscribers: heads_subscribers.clone(),
logs_subscribers: logs_subscribers.clone(),
}),
heads_subscribers,
logs_subscribers,
}
}
/// Creates new `EthPubSubClient` with deterministic subscription ids.
#[cfg(test)]
pub fn new_test(client: Arc<C>, remote: Remote) -> Self {
let client = Self::new(client, remote);
*client.heads_subscribers.write() = Subscribers::new_test();
*client.logs_subscribers.write() = Subscribers::new_test();
client
}
/// Returns a chain notification handler.
pub fn handler(&self) -> Arc<ChainNotificationHandler<C>> {
self.handler.clone()
}
}
impl EthPubSubClient<LightFetch> {
/// Creates a new `EthPubSubClient` for `LightClient`.
pub fn light(
client: Arc<LightChainClient>,
on_demand: Arc<OnDemand>,
sync: Arc<LightSync>,
cache: Arc<Mutex<Cache>>,
remote: Remote,
) -> Self {
let fetch = LightFetch {
client,
on_demand,
sync,
cache
};
EthPubSubClient::new(Arc::new(fetch), remote)
}
}
/// PubSub Notification handler.
pub struct ChainNotificationHandler<C> {
client: Arc<C>,
remote: Remote,
heads_subscribers: Arc<RwLock<Subscribers<Client>>>,
logs_subscribers: Arc<RwLock<Subscribers<(Client, EthFilter)>>>,
}
impl<C> ChainNotificationHandler<C> {
fn notify(remote: &Remote, subscriber: &Client, result: pubsub::Result) {
remote.spawn(subscriber
.notify(Ok(result))
.map(|_| ())
.map_err(|e| warn!(target: "rpc", "Unable to send notification: {}", e))
);
}
fn notify_heads(&self, headers: &[(encoded::Header, BTreeMap<String, String>)]) {
for subscriber in self.heads_subscribers.read().values() {
for &(ref header, ref extra_info) in headers {
Self::notify(&self.remote, subscriber, pubsub::Result::Header(RichHeader {
inner: header.into(),
extra_info: extra_info.clone(),
}));
}
}
}
fn notify_logs<F>(&self, enacted: &[H256], logs: F) where
F: Fn(EthFilter) -> BoxFuture<Vec<Log>, Error>,
{
for &(ref subscriber, ref filter) in self.logs_subscribers.read().values() {
let logs = futures::future::join_all(enacted
.iter()
.map(|hash| {
let mut filter = filter.clone();
filter.from_block = BlockId::Hash(*hash);
filter.to_block = filter.from_block.clone();
logs(filter)
})
.collect::<Vec<_>>()
);
let limit = filter.limit;
let remote = self.remote.clone();
let subscriber = subscriber.clone();
self.remote.spawn(logs
.map(move |logs| {
let logs = logs.into_iter().flat_map(|log| log).collect();
let logs = limit_logs(logs, limit);
if !logs.is_empty() {
Self::notify(&remote, &subscriber, pubsub::Result::Logs(logs));
}
})
.map_err(|e| warn!("Unable to fetch latest logs: {:?}", e))
);
}
}
}
/// A light client wrapper struct.
pub trait LightClient: Send + Sync {
/// Get a recent block header.
fn block_header(&self, id: BlockId) -> Option<encoded::Header>;
/// Fetch logs.
fn logs(&self, filter: EthFilter) -> BoxFuture<Vec<Log>, Error>;
}
impl LightClient for LightFetch {
fn block_header(&self, id: BlockId) -> Option<encoded::Header> {
self.client.block_header(id)
}
fn logs(&self, filter: EthFilter) -> BoxFuture<Vec<Log>, Error> {
LightFetch::logs(self, filter)
}
}
impl<C: LightClient> LightChainNotify for ChainNotificationHandler<C> {
fn new_headers(
&self,
enacted: &[H256],
) {
let headers = enacted
.iter()
.filter_map(|hash| self.client.block_header(BlockId::Hash(*hash)))
.map(|header| (header, Default::default()))
.collect::<Vec<_>>();
self.notify_heads(&headers);
self.notify_logs(&enacted, |filter| self.client.logs(filter))
}
}
impl<C: BlockChainClient> ChainNotify for ChainNotificationHandler<C> {
fn new_blocks(
&self,
_imported: Vec<H256>,
_invalid: Vec<H256>,
enacted: Vec<H256>,
retracted: Vec<H256>,
_sealed: Vec<H256>,
// Block bytes.
_proposed: Vec<Bytes>,
_duration: u64,
) {
const EXTRA_INFO_PROOF: &'static str = "Object exists in blockchain (fetched earlier), extra_info is always available if object exists; qed";
let headers = enacted
.iter()
.filter_map(|hash| self.client.block_header(BlockId::Hash(*hash)))
.map(|header| {
let hash = header.hash();
(header, self.client.block_extra_info(BlockId::Hash(hash)).expect(EXTRA_INFO_PROOF))
})
.collect::<Vec<_>>();
// Headers
self.notify_heads(&headers);
// Enacted logs
self.notify_logs(&enacted, |filter| {
future::ok(self.client.logs(filter).into_iter().map(Into::into).collect()).boxed()
});
// Retracted logs
self.notify_logs(&retracted, |filter| {
future::ok(self.client.logs(filter).into_iter().map(Into::into).map(|mut log: Log| {
log.log_type = "removed".into();
log
}).collect()).boxed()
});
}
}
impl<C: Send + Sync + 'static> EthPubSub for EthPubSubClient<C> {
type Metadata = Metadata;
fn | (
&self,
_meta: Metadata,
subscriber: Subscriber<pubsub::Result>,
kind: pubsub::Kind,
params: Trailing<pubsub::Params>,
) {
match (kind, params.into()) {
(pubsub::Kind::NewHeads, None) => {
self.heads_subscribers.write().push(subscriber)
},
(pubsub::Kind::Logs, Some(pubsub::Params::Logs(filter))) => {
self.logs_subscribers.write().push(subscriber, filter.into());
},
_ => {
let _ = subscriber.reject(errors::unimplemented(None));
},
}
}
fn unsubscribe(&self, id: SubscriptionId) -> BoxFuture<bool, Error> {
let res = self.heads_subscribers.write().remove(&id).is_some();
let res2 = self.logs_subscribers.write().remove(&id).is_some();
future::ok(res || res2).boxed()
}
}
| subscribe | identifier_name |
eth_pubsub.rs | // Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Eth PUB-SUB rpc implementation.
use std::sync::Arc;
use std::collections::BTreeMap;
use futures::{self, future, BoxFuture, Future};
use jsonrpc_core::Error;
use jsonrpc_macros::Trailing;
use jsonrpc_macros::pubsub::{Sink, Subscriber};
use jsonrpc_pubsub::SubscriptionId;
use v1::helpers::{errors, limit_logs, Subscribers};
use v1::helpers::light_fetch::LightFetch;
use v1::metadata::Metadata;
use v1::traits::EthPubSub;
use v1::types::{pubsub, RichHeader, Log};
use ethcore::encoded;
use ethcore::filter::Filter as EthFilter;
use ethcore::client::{BlockChainClient, ChainNotify, BlockId};
use ethsync::LightSync;
use light::cache::Cache;
use light::on_demand::OnDemand;
use light::client::{LightChainClient, LightChainNotify};
use parity_reactor::Remote;
use util::{RwLock, Mutex, H256, Bytes};
type Client = Sink<pubsub::Result>;
/// Eth PubSub implementation.
pub struct EthPubSubClient<C> {
handler: Arc<ChainNotificationHandler<C>>,
heads_subscribers: Arc<RwLock<Subscribers<Client>>>,
logs_subscribers: Arc<RwLock<Subscribers<(Client, EthFilter)>>>,
}
impl<C> EthPubSubClient<C> {
/// Creates new `EthPubSubClient`.
pub fn new(client: Arc<C>, remote: Remote) -> Self {
let heads_subscribers = Arc::new(RwLock::new(Subscribers::default()));
let logs_subscribers = Arc::new(RwLock::new(Subscribers::default()));
EthPubSubClient {
handler: Arc::new(ChainNotificationHandler {
client,
remote,
heads_subscribers: heads_subscribers.clone(),
logs_subscribers: logs_subscribers.clone(),
}),
heads_subscribers,
logs_subscribers,
}
}
/// Creates new `EthPubSubClient` with deterministic subscription ids.
#[cfg(test)]
pub fn new_test(client: Arc<C>, remote: Remote) -> Self {
let client = Self::new(client, remote);
*client.heads_subscribers.write() = Subscribers::new_test();
*client.logs_subscribers.write() = Subscribers::new_test();
client
}
/// Returns a chain notification handler.
pub fn handler(&self) -> Arc<ChainNotificationHandler<C>> {
self.handler.clone()
}
}
impl EthPubSubClient<LightFetch> {
/// Creates a new `EthPubSubClient` for `LightClient`.
pub fn light(
client: Arc<LightChainClient>,
on_demand: Arc<OnDemand>,
sync: Arc<LightSync>,
cache: Arc<Mutex<Cache>>,
remote: Remote,
) -> Self {
let fetch = LightFetch {
client,
on_demand,
sync,
cache
};
EthPubSubClient::new(Arc::new(fetch), remote)
}
}
/// PubSub Notification handler.
pub struct ChainNotificationHandler<C> {
client: Arc<C>,
remote: Remote,
heads_subscribers: Arc<RwLock<Subscribers<Client>>>,
logs_subscribers: Arc<RwLock<Subscribers<(Client, EthFilter)>>>,
}
impl<C> ChainNotificationHandler<C> {
fn notify(remote: &Remote, subscriber: &Client, result: pubsub::Result) {
remote.spawn(subscriber
.notify(Ok(result))
.map(|_| ())
.map_err(|e| warn!(target: "rpc", "Unable to send notification: {}", e))
);
}
fn notify_heads(&self, headers: &[(encoded::Header, BTreeMap<String, String>)]) |
fn notify_logs<F>(&self, enacted: &[H256], logs: F) where
F: Fn(EthFilter) -> BoxFuture<Vec<Log>, Error>,
{
for &(ref subscriber, ref filter) in self.logs_subscribers.read().values() {
let logs = futures::future::join_all(enacted
.iter()
.map(|hash| {
let mut filter = filter.clone();
filter.from_block = BlockId::Hash(*hash);
filter.to_block = filter.from_block.clone();
logs(filter)
})
.collect::<Vec<_>>()
);
let limit = filter.limit;
let remote = self.remote.clone();
let subscriber = subscriber.clone();
self.remote.spawn(logs
.map(move |logs| {
let logs = logs.into_iter().flat_map(|log| log).collect();
let logs = limit_logs(logs, limit);
if !logs.is_empty() {
Self::notify(&remote, &subscriber, pubsub::Result::Logs(logs));
}
})
.map_err(|e| warn!("Unable to fetch latest logs: {:?}", e))
);
}
}
}
/// A light client wrapper struct.
pub trait LightClient: Send + Sync {
/// Get a recent block header.
fn block_header(&self, id: BlockId) -> Option<encoded::Header>;
/// Fetch logs.
fn logs(&self, filter: EthFilter) -> BoxFuture<Vec<Log>, Error>;
}
impl LightClient for LightFetch {
fn block_header(&self, id: BlockId) -> Option<encoded::Header> {
self.client.block_header(id)
}
fn logs(&self, filter: EthFilter) -> BoxFuture<Vec<Log>, Error> {
LightFetch::logs(self, filter)
}
}
impl<C: LightClient> LightChainNotify for ChainNotificationHandler<C> {
fn new_headers(
&self,
enacted: &[H256],
) {
let headers = enacted
.iter()
.filter_map(|hash| self.client.block_header(BlockId::Hash(*hash)))
.map(|header| (header, Default::default()))
.collect::<Vec<_>>();
self.notify_heads(&headers);
self.notify_logs(&enacted, |filter| self.client.logs(filter))
}
}
impl<C: BlockChainClient> ChainNotify for ChainNotificationHandler<C> {
fn new_blocks(
&self,
_imported: Vec<H256>,
_invalid: Vec<H256>,
enacted: Vec<H256>,
retracted: Vec<H256>,
_sealed: Vec<H256>,
// Block bytes.
_proposed: Vec<Bytes>,
_duration: u64,
) {
const EXTRA_INFO_PROOF: &'static str = "Object exists in blockchain (fetched earlier), extra_info is always available if object exists; qed";
let headers = enacted
.iter()
.filter_map(|hash| self.client.block_header(BlockId::Hash(*hash)))
.map(|header| {
let hash = header.hash();
(header, self.client.block_extra_info(BlockId::Hash(hash)).expect(EXTRA_INFO_PROOF))
})
.collect::<Vec<_>>();
// Headers
self.notify_heads(&headers);
// Enacted logs
self.notify_logs(&enacted, |filter| {
future::ok(self.client.logs(filter).into_iter().map(Into::into).collect()).boxed()
});
// Retracted logs
self.notify_logs(&retracted, |filter| {
future::ok(self.client.logs(filter).into_iter().map(Into::into).map(|mut log: Log| {
log.log_type = "removed".into();
log
}).collect()).boxed()
});
}
}
impl<C: Send + Sync + 'static> EthPubSub for EthPubSubClient<C> {
type Metadata = Metadata;
fn subscribe(
&self,
_meta: Metadata,
subscriber: Subscriber<pubsub::Result>,
kind: pubsub::Kind,
params: Trailing<pubsub::Params>,
) {
match (kind, params.into()) {
(pubsub::Kind::NewHeads, None) => {
self.heads_subscribers.write().push(subscriber)
},
(pubsub::Kind::Logs, Some(pubsub::Params::Logs(filter))) => {
self.logs_subscribers.write().push(subscriber, filter.into());
},
_ => {
let _ = subscriber.reject(errors::unimplemented(None));
},
}
}
fn unsubscribe(&self, id: SubscriptionId) -> BoxFuture<bool, Error> {
let res = self.heads_subscribers.write().remove(&id).is_some();
let res2 = self.logs_subscribers.write().remove(&id).is_some();
future::ok(res || res2).boxed()
}
}
| {
for subscriber in self.heads_subscribers.read().values() {
for &(ref header, ref extra_info) in headers {
Self::notify(&self.remote, subscriber, pubsub::Result::Header(RichHeader {
inner: header.into(),
extra_info: extra_info.clone(),
}));
}
}
} | identifier_body |
eth_pubsub.rs | // Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Eth PUB-SUB rpc implementation.
use std::sync::Arc;
use std::collections::BTreeMap;
use futures::{self, future, BoxFuture, Future};
use jsonrpc_core::Error;
use jsonrpc_macros::Trailing;
use jsonrpc_macros::pubsub::{Sink, Subscriber};
use jsonrpc_pubsub::SubscriptionId;
use v1::helpers::{errors, limit_logs, Subscribers};
use v1::helpers::light_fetch::LightFetch;
use v1::metadata::Metadata;
use v1::traits::EthPubSub;
use v1::types::{pubsub, RichHeader, Log};
use ethcore::encoded;
use ethcore::filter::Filter as EthFilter;
use ethcore::client::{BlockChainClient, ChainNotify, BlockId};
use ethsync::LightSync;
use light::cache::Cache;
use light::on_demand::OnDemand;
use light::client::{LightChainClient, LightChainNotify};
use parity_reactor::Remote;
use util::{RwLock, Mutex, H256, Bytes};
type Client = Sink<pubsub::Result>;
/// Eth PubSub implementation.
pub struct EthPubSubClient<C> {
handler: Arc<ChainNotificationHandler<C>>,
heads_subscribers: Arc<RwLock<Subscribers<Client>>>,
logs_subscribers: Arc<RwLock<Subscribers<(Client, EthFilter)>>>,
}
impl<C> EthPubSubClient<C> {
/// Creates new `EthPubSubClient`.
pub fn new(client: Arc<C>, remote: Remote) -> Self {
let heads_subscribers = Arc::new(RwLock::new(Subscribers::default()));
let logs_subscribers = Arc::new(RwLock::new(Subscribers::default()));
EthPubSubClient {
handler: Arc::new(ChainNotificationHandler {
client,
remote,
heads_subscribers: heads_subscribers.clone(),
logs_subscribers: logs_subscribers.clone(),
}),
heads_subscribers,
logs_subscribers,
}
}
/// Creates new `EthPubSubClient` with deterministic subscription ids.
#[cfg(test)]
pub fn new_test(client: Arc<C>, remote: Remote) -> Self {
let client = Self::new(client, remote);
*client.heads_subscribers.write() = Subscribers::new_test();
*client.logs_subscribers.write() = Subscribers::new_test();
client
}
/// Returns a chain notification handler.
pub fn handler(&self) -> Arc<ChainNotificationHandler<C>> {
self.handler.clone()
}
}
impl EthPubSubClient<LightFetch> {
/// Creates a new `EthPubSubClient` for `LightClient`.
pub fn light(
client: Arc<LightChainClient>,
on_demand: Arc<OnDemand>,
sync: Arc<LightSync>,
cache: Arc<Mutex<Cache>>,
remote: Remote,
) -> Self {
let fetch = LightFetch {
client,
on_demand,
sync,
cache
};
EthPubSubClient::new(Arc::new(fetch), remote)
}
}
/// PubSub Notification handler.
pub struct ChainNotificationHandler<C> {
client: Arc<C>,
remote: Remote,
heads_subscribers: Arc<RwLock<Subscribers<Client>>>,
logs_subscribers: Arc<RwLock<Subscribers<(Client, EthFilter)>>>,
}
impl<C> ChainNotificationHandler<C> {
fn notify(remote: &Remote, subscriber: &Client, result: pubsub::Result) { | .notify(Ok(result))
.map(|_| ())
.map_err(|e| warn!(target: "rpc", "Unable to send notification: {}", e))
);
}
fn notify_heads(&self, headers: &[(encoded::Header, BTreeMap<String, String>)]) {
for subscriber in self.heads_subscribers.read().values() {
for &(ref header, ref extra_info) in headers {
Self::notify(&self.remote, subscriber, pubsub::Result::Header(RichHeader {
inner: header.into(),
extra_info: extra_info.clone(),
}));
}
}
}
fn notify_logs<F>(&self, enacted: &[H256], logs: F) where
F: Fn(EthFilter) -> BoxFuture<Vec<Log>, Error>,
{
for &(ref subscriber, ref filter) in self.logs_subscribers.read().values() {
let logs = futures::future::join_all(enacted
.iter()
.map(|hash| {
let mut filter = filter.clone();
filter.from_block = BlockId::Hash(*hash);
filter.to_block = filter.from_block.clone();
logs(filter)
})
.collect::<Vec<_>>()
);
let limit = filter.limit;
let remote = self.remote.clone();
let subscriber = subscriber.clone();
self.remote.spawn(logs
.map(move |logs| {
let logs = logs.into_iter().flat_map(|log| log).collect();
let logs = limit_logs(logs, limit);
if !logs.is_empty() {
Self::notify(&remote, &subscriber, pubsub::Result::Logs(logs));
}
})
.map_err(|e| warn!("Unable to fetch latest logs: {:?}", e))
);
}
}
}
/// A light client wrapper struct.
pub trait LightClient: Send + Sync {
/// Get a recent block header.
fn block_header(&self, id: BlockId) -> Option<encoded::Header>;
/// Fetch logs.
fn logs(&self, filter: EthFilter) -> BoxFuture<Vec<Log>, Error>;
}
impl LightClient for LightFetch {
fn block_header(&self, id: BlockId) -> Option<encoded::Header> {
self.client.block_header(id)
}
fn logs(&self, filter: EthFilter) -> BoxFuture<Vec<Log>, Error> {
LightFetch::logs(self, filter)
}
}
impl<C: LightClient> LightChainNotify for ChainNotificationHandler<C> {
fn new_headers(
&self,
enacted: &[H256],
) {
let headers = enacted
.iter()
.filter_map(|hash| self.client.block_header(BlockId::Hash(*hash)))
.map(|header| (header, Default::default()))
.collect::<Vec<_>>();
self.notify_heads(&headers);
self.notify_logs(&enacted, |filter| self.client.logs(filter))
}
}
impl<C: BlockChainClient> ChainNotify for ChainNotificationHandler<C> {
fn new_blocks(
&self,
_imported: Vec<H256>,
_invalid: Vec<H256>,
enacted: Vec<H256>,
retracted: Vec<H256>,
_sealed: Vec<H256>,
// Block bytes.
_proposed: Vec<Bytes>,
_duration: u64,
) {
const EXTRA_INFO_PROOF: &'static str = "Object exists in blockchain (fetched earlier), extra_info is always available if object exists; qed";
let headers = enacted
.iter()
.filter_map(|hash| self.client.block_header(BlockId::Hash(*hash)))
.map(|header| {
let hash = header.hash();
(header, self.client.block_extra_info(BlockId::Hash(hash)).expect(EXTRA_INFO_PROOF))
})
.collect::<Vec<_>>();
// Headers
self.notify_heads(&headers);
// Enacted logs
self.notify_logs(&enacted, |filter| {
future::ok(self.client.logs(filter).into_iter().map(Into::into).collect()).boxed()
});
// Retracted logs
self.notify_logs(&retracted, |filter| {
future::ok(self.client.logs(filter).into_iter().map(Into::into).map(|mut log: Log| {
log.log_type = "removed".into();
log
}).collect()).boxed()
});
}
}
impl<C: Send + Sync + 'static> EthPubSub for EthPubSubClient<C> {
type Metadata = Metadata;
fn subscribe(
&self,
_meta: Metadata,
subscriber: Subscriber<pubsub::Result>,
kind: pubsub::Kind,
params: Trailing<pubsub::Params>,
) {
match (kind, params.into()) {
(pubsub::Kind::NewHeads, None) => {
self.heads_subscribers.write().push(subscriber)
},
(pubsub::Kind::Logs, Some(pubsub::Params::Logs(filter))) => {
self.logs_subscribers.write().push(subscriber, filter.into());
},
_ => {
let _ = subscriber.reject(errors::unimplemented(None));
},
}
}
fn unsubscribe(&self, id: SubscriptionId) -> BoxFuture<bool, Error> {
let res = self.heads_subscribers.write().remove(&id).is_some();
let res2 = self.logs_subscribers.write().remove(&id).is_some();
future::ok(res || res2).boxed()
}
} | remote.spawn(subscriber | random_line_split |
mutability-inherits-through-fixed-length-vec.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn test1() {
let mut ints = [0i, ..32];
ints[0] += 1;
assert_eq!(ints[0], 1); | for i in ints.iter_mut() { *i += 22; }
for i in ints.iter() { assert!(*i == 22); }
}
pub fn main() {
test1();
test2();
} | }
fn test2() {
let mut ints = [0i, ..32]; | random_line_split |
mutability-inherits-through-fixed-length-vec.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn test1() |
fn test2() {
let mut ints = [0i, ..32];
for i in ints.iter_mut() { *i += 22; }
for i in ints.iter() { assert!(*i == 22); }
}
pub fn main() {
test1();
test2();
}
| {
let mut ints = [0i, ..32];
ints[0] += 1;
assert_eq!(ints[0], 1);
} | identifier_body |
mutability-inherits-through-fixed-length-vec.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn test1() {
let mut ints = [0i, ..32];
ints[0] += 1;
assert_eq!(ints[0], 1);
}
fn | () {
let mut ints = [0i, ..32];
for i in ints.iter_mut() { *i += 22; }
for i in ints.iter() { assert!(*i == 22); }
}
pub fn main() {
test1();
test2();
}
| test2 | identifier_name |
lib.rs | pub mod math;
pub mod objective;
pub mod tableau;
use math::variables::is_gen_arti_var;
use objective::problems::ProblemType;
use objective::functions::Function;
use objective::constraints::SystemOfConstraints;
use objective::solvers::{transform_constraint_rels_to_eq, rearrange_fun_eq_zero};
use tableau::tables::Table;
use tableau::initials::{get_initial_table_from, append_function};
use tableau::enter_vars::{enter_var_pivot_optimal, enter_var_pivot_feasible};
use tableau::leave_vars::leave_var;
use tableau::pivots::{pivot_around, apply_transition_rule};
pub type Num = f32;
pub fn optimise(function: &mut Function,
constraints: &mut SystemOfConstraints)
-> Vec<(String, Num)> {
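    // Worked sketch of the two-phase flow below (illustrative numbers only):
    // maximise P = x subject to x >= 2, x <= 4, x >= 0.
    //   Phase I: the ">=" row needs an artificial variable a, so we minimise
    //     W = a over x - s + a = 2 and x + t = 4 (s surplus, t slack). The
    //     optimum reaches W = 0 at x = 2, which is the "W" feasibility check
    //     performed below; any artificial variable still in the basis at level
    //     zero is swapped out by the Transition Rule.
    //   Phase II: drop the Phase I objective row and maximise P from that
    //     feasible corner, ending at x = 4.
    // If Phase I cannot reach W = 0, the problem has no feasible point and the
    // code below panics.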
rearrange_fun_eq_zero(function);
if let Some(mut phase1_fun) = transform_constraint_rels_to_eq(constraints) {
rearrange_fun_eq_zero(&mut phase1_fun);
let mut phase1_table = get_initial_table_from(function, constraints);
// Set Phase I function to work with.
append_function(&phase1_fun, &mut phase1_table);
let phase1_solution = run_simplex(&phase1_fun, &mut phase1_table);
if phase1_solution.contains(&("W".to_string(), 0.0)) {
// Check to see if there are any artificial variables in the Phase I solution.
let arti_vars_in_solution = phase1_solution.into_iter()
.filter(|basic_var| is_gen_arti_var(&basic_var.0))
.collect::<Vec<(String, Num)>>();
if arti_vars_in_solution.is_empty() {
// Carry out Phase II - no need for Transition Rule.
return run_phase_2_from_1(function, &mut phase1_table);
} else {
// Remove artificial variables from the basis by applying the Transition Rule.
apply_transition_rule(arti_vars_in_solution, constraints, &mut phase1_table);
return run_phase_2_from_1(function, &mut phase1_table);
}
} else {
panic!("Could not find a feasible solution to start Phase II.");
} | } else {
// Carry on with Phase II.
let mut table = get_initial_table_from(function, constraints);
return run_simplex(function, &mut table);
}
}
fn run_simplex(function: &Function, table: &mut Table) -> Vec<(String, Num)> {
loop {
match table.get_basic_solution() {
Ok(mut basic_solution) => {
if table.is_solution_optimal() {
if function.p_type() == &ProblemType::MIN {
// Give solution for MIN as currently it is given as MAX.
let (pos, _) =
basic_solution.iter()
.enumerate()
.find(|&entry| (entry.1).0 == "Q")
.expect("Failed to locate value of \"Q\" in optimal solution.");
basic_solution[pos] = (function.name().clone(),
basic_solution[pos].1 * -1.0);
return basic_solution;
} else {
return basic_solution;
}
} else {
let enter_var_index = enter_var_pivot_optimal(&table);
pivot_around(enter_var_index, leave_var(enter_var_index, &table), table);
}
}
Err(index_report) => {
let enter_var_index = enter_var_pivot_feasible(&table,
index_report.0,
index_report.1)
.expect("Could not find a leftmost \
positive value cell for pivoting \
to enter feasible region.");
pivot_around(enter_var_index, leave_var(enter_var_index, &table), table);
}
}
}
}
fn run_phase_2_from_1(fun: &Function, table: &mut Table) -> Vec<(String, Num)> {
// Set original function to work with.
table.remove_last_row();
let old_num_fun_rows = table.get_num_fun_rows();
table.set_num_fun_rows(old_num_fun_rows - 1);
run_simplex(fun, table)
} | random_line_split |
|
lib.rs | pub mod math;
pub mod objective;
pub mod tableau;
use math::variables::is_gen_arti_var;
use objective::problems::ProblemType;
use objective::functions::Function;
use objective::constraints::SystemOfConstraints;
use objective::solvers::{transform_constraint_rels_to_eq, rearrange_fun_eq_zero};
use tableau::tables::Table;
use tableau::initials::{get_initial_table_from, append_function};
use tableau::enter_vars::{enter_var_pivot_optimal, enter_var_pivot_feasible};
use tableau::leave_vars::leave_var;
use tableau::pivots::{pivot_around, apply_transition_rule};
pub type Num = f32;
pub fn optimise(function: &mut Function,
constraints: &mut SystemOfConstraints)
-> Vec<(String, Num)> {
rearrange_fun_eq_zero(function);
if let Some(mut phase1_fun) = transform_constraint_rels_to_eq(constraints) {
rearrange_fun_eq_zero(&mut phase1_fun);
let mut phase1_table = get_initial_table_from(function, constraints);
// Set Phase I function to work with.
append_function(&phase1_fun, &mut phase1_table);
let phase1_solution = run_simplex(&phase1_fun, &mut phase1_table);
if phase1_solution.contains(&("W".to_string(), 0.0)) {
// Check to see if there are any artificial variables in the Phase I solution.
let arti_vars_in_solution = phase1_solution.into_iter()
.filter(|basic_var| is_gen_arti_var(&basic_var.0))
.collect::<Vec<(String, Num)>>();
if arti_vars_in_solution.is_empty() {
// Carry out Phase II - no need for Transition Rule.
return run_phase_2_from_1(function, &mut phase1_table);
} else {
// Remove artificial variables from the basis by applying the Transition Rule.
apply_transition_rule(arti_vars_in_solution, constraints, &mut phase1_table);
return run_phase_2_from_1(function, &mut phase1_table);
}
} else {
panic!("Could not find a feasible solution to start Phase II.");
}
} else |
}
fn run_simplex(function: &Function, table: &mut Table) -> Vec<(String, Num)> {
loop {
match table.get_basic_solution() {
Ok(mut basic_solution) => {
if table.is_solution_optimal() {
if function.p_type() == &ProblemType::MIN {
// Give solution for MIN as currently it is given as MAX.
let (pos, _) =
basic_solution.iter()
.enumerate()
.find(|&entry| (entry.1).0 == "Q")
.expect("Failed to locate value of \"Q\" in optimal solution.");
basic_solution[pos] = (function.name().clone(),
basic_solution[pos].1 * -1.0);
return basic_solution;
} else {
return basic_solution;
}
} else {
let enter_var_index = enter_var_pivot_optimal(&table);
pivot_around(enter_var_index, leave_var(enter_var_index, &table), table);
}
}
Err(index_report) => {
let enter_var_index = enter_var_pivot_feasible(&table,
index_report.0,
index_report.1)
.expect("Could not find a leftmost \
positive value cell for pivoting \
to enter feasible region.");
pivot_around(enter_var_index, leave_var(enter_var_index, &table), table);
}
}
}
}
fn run_phase_2_from_1(fun: &Function, table: &mut Table) -> Vec<(String, Num)> {
// Set original function to work with.
table.remove_last_row();
let old_num_fun_rows = table.get_num_fun_rows();
table.set_num_fun_rows(old_num_fun_rows - 1);
run_simplex(fun, table)
}
| {
// Carry on with Phase II.
let mut table = get_initial_table_from(function, constraints);
return run_simplex(function, &mut table);
} | conditional_block |
lib.rs | pub mod math;
pub mod objective;
pub mod tableau;
use math::variables::is_gen_arti_var;
use objective::problems::ProblemType;
use objective::functions::Function;
use objective::constraints::SystemOfConstraints;
use objective::solvers::{transform_constraint_rels_to_eq, rearrange_fun_eq_zero};
use tableau::tables::Table;
use tableau::initials::{get_initial_table_from, append_function};
use tableau::enter_vars::{enter_var_pivot_optimal, enter_var_pivot_feasible};
use tableau::leave_vars::leave_var;
use tableau::pivots::{pivot_around, apply_transition_rule};
pub type Num = f32;
pub fn optimise(function: &mut Function,
constraints: &mut SystemOfConstraints)
-> Vec<(String, Num)> | }
} else {
panic!("Could not find a feasible solution to start Phase II.");
}
} else {
// Carry on with Phase II.
let mut table = get_initial_table_from(function, constraints);
return run_simplex(function, &mut table);
}
}
fn run_simplex(function: &Function, table: &mut Table) -> Vec<(String, Num)> {
loop {
match table.get_basic_solution() {
Ok(mut basic_solution) => {
if table.is_solution_optimal() {
if function.p_type() == &ProblemType::MIN {
// Give solution for MIN as currently it is given as MAX.
let (pos, _) =
basic_solution.iter()
.enumerate()
.find(|&entry| (entry.1).0 == "Q")
.expect("Failed to locate value of \"Q\" in optimal solution.");
basic_solution[pos] = (function.name().clone(),
basic_solution[pos].1 * -1.0);
return basic_solution;
} else {
return basic_solution;
}
} else {
let enter_var_index = enter_var_pivot_optimal(&table);
pivot_around(enter_var_index, leave_var(enter_var_index, &table), table);
}
}
Err(index_report) => {
let enter_var_index = enter_var_pivot_feasible(&table,
index_report.0,
index_report.1)
.expect("Could not find a leftmost \
positive value cell for pivoting \
to enter feasible region.");
pivot_around(enter_var_index, leave_var(enter_var_index, &table), table);
}
}
}
}
fn run_phase_2_from_1(fun: &Function, table: &mut Table) -> Vec<(String, Num)> {
// Set original function to work with.
table.remove_last_row();
let old_num_fun_rows = table.get_num_fun_rows();
table.set_num_fun_rows(old_num_fun_rows - 1);
run_simplex(fun, table)
}
| {
rearrange_fun_eq_zero(function);
if let Some(mut phase1_fun) = transform_constraint_rels_to_eq(constraints) {
rearrange_fun_eq_zero(&mut phase1_fun);
let mut phase1_table = get_initial_table_from(function, constraints);
// Set Phase I function to work with.
append_function(&phase1_fun, &mut phase1_table);
let phase1_solution = run_simplex(&phase1_fun, &mut phase1_table);
if phase1_solution.contains(&("W".to_string(), 0.0)) {
// Check to see if there are any artificial variables in the Phase I solution.
let arti_vars_in_solution = phase1_solution.into_iter()
.filter(|basic_var| is_gen_arti_var(&basic_var.0))
.collect::<Vec<(String, Num)>>();
if arti_vars_in_solution.is_empty() {
// Carry out Phase II - no need for Transition Rule.
return run_phase_2_from_1(function, &mut phase1_table);
} else {
// Remove artificial variables from the basis by applying the Transition Rule.
apply_transition_rule(arti_vars_in_solution, constraints, &mut phase1_table);
return run_phase_2_from_1(function, &mut phase1_table); | identifier_body |
lib.rs | pub mod math;
pub mod objective;
pub mod tableau;
use math::variables::is_gen_arti_var;
use objective::problems::ProblemType;
use objective::functions::Function;
use objective::constraints::SystemOfConstraints;
use objective::solvers::{transform_constraint_rels_to_eq, rearrange_fun_eq_zero};
use tableau::tables::Table;
use tableau::initials::{get_initial_table_from, append_function};
use tableau::enter_vars::{enter_var_pivot_optimal, enter_var_pivot_feasible};
use tableau::leave_vars::leave_var;
use tableau::pivots::{pivot_around, apply_transition_rule};
pub type Num = f32;
pub fn optimise(function: &mut Function,
constraints: &mut SystemOfConstraints)
-> Vec<(String, Num)> {
rearrange_fun_eq_zero(function);
if let Some(mut phase1_fun) = transform_constraint_rels_to_eq(constraints) {
rearrange_fun_eq_zero(&mut phase1_fun);
let mut phase1_table = get_initial_table_from(function, constraints);
// Set Phase I function to work with.
append_function(&phase1_fun, &mut phase1_table);
let phase1_solution = run_simplex(&phase1_fun, &mut phase1_table);
if phase1_solution.contains(&("W".to_string(), 0.0)) {
// Check to see if there are any artificial variables in the Phase I solution.
let arti_vars_in_solution = phase1_solution.into_iter()
.filter(|basic_var| is_gen_arti_var(&basic_var.0))
.collect::<Vec<(String, Num)>>();
if arti_vars_in_solution.is_empty() {
// Carry out Phase II - no need for Transition Rule.
return run_phase_2_from_1(function, &mut phase1_table);
} else {
// Remove artificial variables from the basis by applying the Transition Rule.
apply_transition_rule(arti_vars_in_solution, constraints, &mut phase1_table);
return run_phase_2_from_1(function, &mut phase1_table);
}
} else {
panic!("Could not find a feasible solution to start Phase II.");
}
} else {
// Carry on with Phase II.
let mut table = get_initial_table_from(function, constraints);
return run_simplex(function, &mut table);
}
}
fn | (function: &Function, table: &mut Table) -> Vec<(String, Num)> {
loop {
match table.get_basic_solution() {
Ok(mut basic_solution) => {
if table.is_solution_optimal() {
if function.p_type() == &ProblemType::MIN {
// Give solution for MIN as currently it is given as MAX.
let (pos, _) =
basic_solution.iter()
.enumerate()
.find(|&entry| (entry.1).0 == "Q")
.expect("Failed to locate value of \"Q\" in optimal solution.");
basic_solution[pos] = (function.name().clone(),
basic_solution[pos].1 * -1.0);
return basic_solution;
} else {
return basic_solution;
}
} else {
let enter_var_index = enter_var_pivot_optimal(&table);
pivot_around(enter_var_index, leave_var(enter_var_index, &table), table);
}
}
Err(index_report) => {
let enter_var_index = enter_var_pivot_feasible(&table,
index_report.0,
index_report.1)
.expect("Could not find a leftmost \
positive value cell for pivoting \
to enter feasible region.");
pivot_around(enter_var_index, leave_var(enter_var_index, &table), table);
}
}
}
}
fn run_phase_2_from_1(fun: &Function, table: &mut Table) -> Vec<(String, Num)> {
// Set original function to work with.
table.remove_last_row();
let old_num_fun_rows = table.get_num_fun_rows();
table.set_num_fun_rows(old_num_fun_rows - 1);
run_simplex(fun, table)
}
| run_simplex | identifier_name |
input_test.rs | //! Example that just prints out all the input events.
use ggez::event::{self, Axis, Button, GamepadId, KeyCode, KeyMods, MouseButton};
use ggez::graphics::{self, Color, DrawMode};
use ggez::{conf, input};
use ggez::{Context, GameResult};
use glam::*;
struct MainState {
pos_x: f32,
pos_y: f32,
mouse_down: bool,
}
impl MainState {
fn new() -> MainState {
MainState {
pos_x: 100.0,
pos_y: 100.0,
mouse_down: false,
}
}
}
impl event::EventHandler<ggez::GameError> for MainState {
fn update(&mut self, ctx: &mut Context) -> GameResult {
if input::keyboard::is_key_pressed(ctx, KeyCode::A) {
println!("The A key is pressed");
if input::keyboard::is_mod_active(ctx, input::keyboard::KeyMods::SHIFT) {
println!("The shift key is held too.");
}
println!(
"Full list of pressed keys: {:?}",
input::keyboard::pressed_keys(ctx)
);
}
Ok(())
}
fn draw(&mut self, ctx: &mut Context) -> GameResult {
graphics::clear(ctx, [0.1, 0.2, 0.3, 1.0].into());
let rectangle = graphics::Mesh::new_rectangle(
ctx,
DrawMode::fill(),
graphics::Rect {
x: self.pos_x,
y: self.pos_y,
w: 400.0,
h: 300.0,
},
Color::WHITE,
)?;
graphics::draw(ctx, &rectangle, (glam::Vec2::new(0.0, 0.0),))?;
graphics::present(ctx)?;
Ok(())
}
fn mouse_button_down_event(&mut self, _ctx: &mut Context, button: MouseButton, x: f32, y: f32) {
self.mouse_down = true;
println!("Mouse button pressed: {:?}, x: {}, y: {}", button, x, y);
}
fn mouse_button_up_event(&mut self, _ctx: &mut Context, button: MouseButton, x: f32, y: f32) {
self.mouse_down = false;
println!("Mouse button released: {:?}, x: {}, y: {}", button, x, y);
}
fn mouse_motion_event(&mut self, _ctx: &mut Context, x: f32, y: f32, xrel: f32, yrel: f32) {
if self.mouse_down {
// Mouse coordinates are PHYSICAL coordinates, but here we want logical coordinates.
// If you simply use the initial coordinate system, then physical and logical
// coordinates are identical.
self.pos_x = x;
self.pos_y = y;
// If you change your screen coordinate system you need to calculate the
// logical coordinates like this:
/*
let screen_rect = graphics::screen_coordinates(_ctx);
let size = graphics::window(_ctx).inner_size();
self.pos_x = (x / (size.width as f32)) * screen_rect.w + screen_rect.x;
self.pos_y = (y / (size.height as f32)) * screen_rect.h + screen_rect.y;
*/
}
println!(
"Mouse motion, x: {}, y: {}, relative x: {}, relative y: {}",
x, y, xrel, yrel
);
}
fn mouse_wheel_event(&mut self, _ctx: &mut Context, x: f32, y: f32) {
println!("Mousewheel event, x: {}, y: {}", x, y);
}
fn key_down_event(
&mut self,
_ctx: &mut Context,
keycode: KeyCode,
keymod: KeyMods,
repeat: bool,
) {
println!(
"Key pressed: {:?}, modifier {:?}, repeat: {}",
keycode, keymod, repeat
);
}
fn key_up_event(&mut self, _ctx: &mut Context, keycode: KeyCode, keymod: KeyMods) {
println!("Key released: {:?}, modifier {:?}", keycode, keymod);
}
fn text_input_event(&mut self, _ctx: &mut Context, ch: char) {
println!("Text input: {}", ch);
}
fn gamepad_button_down_event(&mut self, _ctx: &mut Context, btn: Button, id: GamepadId) {
println!("Gamepad button pressed: {:?} Gamepad_Id: {:?}", btn, id);
}
fn gamepad_button_up_event(&mut self, _ctx: &mut Context, btn: Button, id: GamepadId) {
println!("Gamepad button released: {:?} Gamepad_Id: {:?}", btn, id);
}
fn gamepad_axis_event(&mut self, _ctx: &mut Context, axis: Axis, value: f32, id: GamepadId) {
println!(
"Axis Event: {:?} Value: {} Gamepad_Id: {:?}",
axis, value, id
);
}
fn focus_event(&mut self, _ctx: &mut Context, gained: bool) {
if gained {
println!("Focus gained");
} else |
}
}
pub fn main() -> GameResult {
let cb = ggez::ContextBuilder::new("input_test", "ggez").window_mode(
conf::WindowMode::default()
.fullscreen_type(conf::FullscreenType::Windowed)
.resizable(true),
);
let (ctx, event_loop) = cb.build()?;
// remove the comment to see how physical mouse coordinates can differ
// from logical game coordinates when the screen coordinate system changes
// graphics::set_screen_coordinates(&mut ctx, Rect::new(20., 50., 2000., 1000.));
// alternatively, resizing the window also leads to screen coordinates
// and physical window size being out of sync
let state = MainState::new();
event::run(ctx, event_loop, state)
}
| {
println!("Focus lost");
} | conditional_block |
input_test.rs | //! Example that just prints out all the input events.
use ggez::event::{self, Axis, Button, GamepadId, KeyCode, KeyMods, MouseButton};
use ggez::graphics::{self, Color, DrawMode};
use ggez::{conf, input};
use ggez::{Context, GameResult};
use glam::*;
struct MainState {
pos_x: f32,
pos_y: f32,
mouse_down: bool,
}
impl MainState {
fn new() -> MainState {
MainState {
pos_x: 100.0,
pos_y: 100.0,
mouse_down: false,
}
}
}
impl event::EventHandler<ggez::GameError> for MainState {
fn update(&mut self, ctx: &mut Context) -> GameResult {
if input::keyboard::is_key_pressed(ctx, KeyCode::A) {
println!("The A key is pressed");
if input::keyboard::is_mod_active(ctx, input::keyboard::KeyMods::SHIFT) {
println!("The shift key is held too.");
}
println!(
"Full list of pressed keys: {:?}",
input::keyboard::pressed_keys(ctx)
);
}
Ok(())
}
fn draw(&mut self, ctx: &mut Context) -> GameResult {
graphics::clear(ctx, [0.1, 0.2, 0.3, 1.0].into());
let rectangle = graphics::Mesh::new_rectangle(
ctx,
DrawMode::fill(),
graphics::Rect {
x: self.pos_x,
y: self.pos_y,
w: 400.0,
h: 300.0,
},
Color::WHITE,
)?;
graphics::draw(ctx, &rectangle, (glam::Vec2::new(0.0, 0.0),))?;
graphics::present(ctx)?;
Ok(())
}
fn mouse_button_down_event(&mut self, _ctx: &mut Context, button: MouseButton, x: f32, y: f32) {
self.mouse_down = true;
println!("Mouse button pressed: {:?}, x: {}, y: {}", button, x, y);
}
fn mouse_button_up_event(&mut self, _ctx: &mut Context, button: MouseButton, x: f32, y: f32) {
self.mouse_down = false;
println!("Mouse button released: {:?}, x: {}, y: {}", button, x, y);
}
fn mouse_motion_event(&mut self, _ctx: &mut Context, x: f32, y: f32, xrel: f32, yrel: f32) {
if self.mouse_down {
// Mouse coordinates are PHYSICAL coordinates, but here we want logical coordinates.
// If you simply use the initial coordinate system, then physical and logical
// coordinates are identical.
self.pos_x = x;
self.pos_y = y;
// If you change your screen coordinate system you need to calculate the
// logical coordinates like this:
/*
let screen_rect = graphics::screen_coordinates(_ctx);
let size = graphics::window(_ctx).inner_size();
self.pos_x = (x / (size.width as f32)) * screen_rect.w + screen_rect.x;
self.pos_y = (y / (size.height as f32)) * screen_rect.h + screen_rect.y;
*/
}
println!(
"Mouse motion, x: {}, y: {}, relative x: {}, relative y: {}",
x, y, xrel, yrel
);
}
fn mouse_wheel_event(&mut self, _ctx: &mut Context, x: f32, y: f32) {
println!("Mousewheel event, x: {}, y: {}", x, y);
}
fn key_down_event(
&mut self,
_ctx: &mut Context,
keycode: KeyCode,
keymod: KeyMods,
repeat: bool,
) {
println!(
"Key pressed: {:?}, modifier {:?}, repeat: {}",
keycode, keymod, repeat
);
}
fn key_up_event(&mut self, _ctx: &mut Context, keycode: KeyCode, keymod: KeyMods) {
println!("Key released: {:?}, modifier {:?}", keycode, keymod);
}
fn | (&mut self, _ctx: &mut Context, ch: char) {
println!("Text input: {}", ch);
}
fn gamepad_button_down_event(&mut self, _ctx: &mut Context, btn: Button, id: GamepadId) {
println!("Gamepad button pressed: {:?} Gamepad_Id: {:?}", btn, id);
}
fn gamepad_button_up_event(&mut self, _ctx: &mut Context, btn: Button, id: GamepadId) {
println!("Gamepad button released: {:?} Gamepad_Id: {:?}", btn, id);
}
fn gamepad_axis_event(&mut self, _ctx: &mut Context, axis: Axis, value: f32, id: GamepadId) {
println!(
"Axis Event: {:?} Value: {} Gamepad_Id: {:?}",
axis, value, id
);
}
fn focus_event(&mut self, _ctx: &mut Context, gained: bool) {
if gained {
println!("Focus gained");
} else {
println!("Focus lost");
}
}
}
pub fn main() -> GameResult {
let cb = ggez::ContextBuilder::new("input_test", "ggez").window_mode(
conf::WindowMode::default()
.fullscreen_type(conf::FullscreenType::Windowed)
.resizable(true),
);
let (ctx, event_loop) = cb.build()?;
// remove the comment to see how physical mouse coordinates can differ
// from logical game coordinates when the screen coordinate system changes
// graphics::set_screen_coordinates(&mut ctx, Rect::new(20., 50., 2000., 1000.));
// alternatively, resizing the window also leads to screen coordinates
// and physical window size being out of sync
let state = MainState::new();
event::run(ctx, event_loop, state)
}
| text_input_event | identifier_name |
input_test.rs | //! Example that just prints out all the input events.
use ggez::event::{self, Axis, Button, GamepadId, KeyCode, KeyMods, MouseButton};
use ggez::graphics::{self, Color, DrawMode};
use ggez::{conf, input};
use ggez::{Context, GameResult};
use glam::*;
struct MainState {
pos_x: f32,
pos_y: f32,
mouse_down: bool,
}
impl MainState {
fn new() -> MainState {
MainState {
pos_x: 100.0,
pos_y: 100.0,
mouse_down: false,
}
}
}
impl event::EventHandler<ggez::GameError> for MainState {
fn update(&mut self, ctx: &mut Context) -> GameResult {
if input::keyboard::is_key_pressed(ctx, KeyCode::A) {
println!("The A key is pressed");
if input::keyboard::is_mod_active(ctx, input::keyboard::KeyMods::SHIFT) {
println!("The shift key is held too.");
}
println!(
"Full list of pressed keys: {:?}",
input::keyboard::pressed_keys(ctx)
);
}
Ok(())
}
fn draw(&mut self, ctx: &mut Context) -> GameResult {
graphics::clear(ctx, [0.1, 0.2, 0.3, 1.0].into());
let rectangle = graphics::Mesh::new_rectangle(
ctx,
DrawMode::fill(),
graphics::Rect {
x: self.pos_x,
y: self.pos_y,
w: 400.0,
h: 300.0,
},
Color::WHITE,
)?;
graphics::draw(ctx, &rectangle, (glam::Vec2::new(0.0, 0.0),))?;
graphics::present(ctx)?;
Ok(())
}
fn mouse_button_down_event(&mut self, _ctx: &mut Context, button: MouseButton, x: f32, y: f32) {
self.mouse_down = true;
println!("Mouse button pressed: {:?}, x: {}, y: {}", button, x, y);
}
fn mouse_button_up_event(&mut self, _ctx: &mut Context, button: MouseButton, x: f32, y: f32) {
self.mouse_down = false;
println!("Mouse button released: {:?}, x: {}, y: {}", button, x, y);
}
fn mouse_motion_event(&mut self, _ctx: &mut Context, x: f32, y: f32, xrel: f32, yrel: f32) {
if self.mouse_down {
// Mouse coordinates are PHYSICAL coordinates, but here we want logical coordinates.
// If you simply use the initial coordinate system, then physical and logical
// coordinates are identical.
self.pos_x = x;
self.pos_y = y;
// If you change your screen coordinate system you need to calculate the
// logical coordinates like this:
/*
let screen_rect = graphics::screen_coordinates(_ctx);
let size = graphics::window(_ctx).inner_size();
self.pos_x = (x / (size.width as f32)) * screen_rect.w + screen_rect.x;
self.pos_y = (y / (size.height as f32)) * screen_rect.h + screen_rect.y;
*/
}
println!(
"Mouse motion, x: {}, y: {}, relative x: {}, relative y: {}",
x, y, xrel, yrel
);
}
fn mouse_wheel_event(&mut self, _ctx: &mut Context, x: f32, y: f32) {
println!("Mousewheel event, x: {}, y: {}", x, y);
}
fn key_down_event(
&mut self,
_ctx: &mut Context,
keycode: KeyCode,
keymod: KeyMods,
repeat: bool,
) {
println!(
"Key pressed: {:?}, modifier {:?}, repeat: {}", | println!("Key released: {:?}, modifier {:?}", keycode, keymod);
}
fn text_input_event(&mut self, _ctx: &mut Context, ch: char) {
println!("Text input: {}", ch);
}
fn gamepad_button_down_event(&mut self, _ctx: &mut Context, btn: Button, id: GamepadId) {
println!("Gamepad button pressed: {:?} Gamepad_Id: {:?}", btn, id);
}
fn gamepad_button_up_event(&mut self, _ctx: &mut Context, btn: Button, id: GamepadId) {
println!("Gamepad button released: {:?} Gamepad_Id: {:?}", btn, id);
}
fn gamepad_axis_event(&mut self, _ctx: &mut Context, axis: Axis, value: f32, id: GamepadId) {
println!(
"Axis Event: {:?} Value: {} Gamepad_Id: {:?}",
axis, value, id
);
}
fn focus_event(&mut self, _ctx: &mut Context, gained: bool) {
if gained {
println!("Focus gained");
} else {
println!("Focus lost");
}
}
}
pub fn main() -> GameResult {
let cb = ggez::ContextBuilder::new("input_test", "ggez").window_mode(
conf::WindowMode::default()
.fullscreen_type(conf::FullscreenType::Windowed)
.resizable(true),
);
let (ctx, event_loop) = cb.build()?;
// remove the comment to see how physical mouse coordinates can differ
// from logical game coordinates when the screen coordinate system changes
// graphics::set_screen_coordinates(&mut ctx, Rect::new(20., 50., 2000., 1000.));
// alternatively, resizing the window also leads to screen coordinates
// and physical window size being out of sync
let state = MainState::new();
event::run(ctx, event_loop, state)
} | keycode, keymod, repeat
);
}
fn key_up_event(&mut self, _ctx: &mut Context, keycode: KeyCode, keymod: KeyMods) { | random_line_split |
input_test.rs | //! Example that just prints out all the input events.
use ggez::event::{self, Axis, Button, GamepadId, KeyCode, KeyMods, MouseButton};
use ggez::graphics::{self, Color, DrawMode};
use ggez::{conf, input};
use ggez::{Context, GameResult};
use glam::*;
struct MainState {
pos_x: f32,
pos_y: f32,
mouse_down: bool,
}
impl MainState {
fn new() -> MainState {
MainState {
pos_x: 100.0,
pos_y: 100.0,
mouse_down: false,
}
}
}
impl event::EventHandler<ggez::GameError> for MainState {
fn update(&mut self, ctx: &mut Context) -> GameResult {
if input::keyboard::is_key_pressed(ctx, KeyCode::A) {
println!("The A key is pressed");
if input::keyboard::is_mod_active(ctx, input::keyboard::KeyMods::SHIFT) {
println!("The shift key is held too.");
}
println!(
"Full list of pressed keys: {:?}",
input::keyboard::pressed_keys(ctx)
);
}
Ok(())
}
fn draw(&mut self, ctx: &mut Context) -> GameResult {
graphics::clear(ctx, [0.1, 0.2, 0.3, 1.0].into());
let rectangle = graphics::Mesh::new_rectangle(
ctx,
DrawMode::fill(),
graphics::Rect {
x: self.pos_x,
y: self.pos_y,
w: 400.0,
h: 300.0,
},
Color::WHITE,
)?;
graphics::draw(ctx, &rectangle, (glam::Vec2::new(0.0, 0.0),))?;
graphics::present(ctx)?;
Ok(())
}
fn mouse_button_down_event(&mut self, _ctx: &mut Context, button: MouseButton, x: f32, y: f32) {
self.mouse_down = true;
println!("Mouse button pressed: {:?}, x: {}, y: {}", button, x, y);
}
fn mouse_button_up_event(&mut self, _ctx: &mut Context, button: MouseButton, x: f32, y: f32) {
self.mouse_down = false;
println!("Mouse button released: {:?}, x: {}, y: {}", button, x, y);
}
fn mouse_motion_event(&mut self, _ctx: &mut Context, x: f32, y: f32, xrel: f32, yrel: f32) {
if self.mouse_down {
// Mouse coordinates are PHYSICAL coordinates, but here we want logical coordinates.
// If you simply use the initial coordinate system, then physical and logical
// coordinates are identical.
self.pos_x = x;
self.pos_y = y;
// If you change your screen coordinate system you need to calculate the
// logical coordinates like this:
/*
let screen_rect = graphics::screen_coordinates(_ctx);
let size = graphics::window(_ctx).inner_size();
self.pos_x = (x / (size.width as f32)) * screen_rect.w + screen_rect.x;
self.pos_y = (y / (size.height as f32)) * screen_rect.h + screen_rect.y;
*/
}
println!(
"Mouse motion, x: {}, y: {}, relative x: {}, relative y: {}",
x, y, xrel, yrel
);
}
fn mouse_wheel_event(&mut self, _ctx: &mut Context, x: f32, y: f32) {
println!("Mousewheel event, x: {}, y: {}", x, y);
}
fn key_down_event(
&mut self,
_ctx: &mut Context,
keycode: KeyCode,
keymod: KeyMods,
repeat: bool,
) {
println!(
"Key pressed: {:?}, modifier {:?}, repeat: {}",
keycode, keymod, repeat
);
}
fn key_up_event(&mut self, _ctx: &mut Context, keycode: KeyCode, keymod: KeyMods) {
println!("Key released: {:?}, modifier {:?}", keycode, keymod);
}
fn text_input_event(&mut self, _ctx: &mut Context, ch: char) {
println!("Text input: {}", ch);
}
fn gamepad_button_down_event(&mut self, _ctx: &mut Context, btn: Button, id: GamepadId) {
println!("Gamepad button pressed: {:?} Gamepad_Id: {:?}", btn, id);
}
fn gamepad_button_up_event(&mut self, _ctx: &mut Context, btn: Button, id: GamepadId) {
println!("Gamepad button released: {:?} Gamepad_Id: {:?}", btn, id);
}
fn gamepad_axis_event(&mut self, _ctx: &mut Context, axis: Axis, value: f32, id: GamepadId) {
println!(
"Axis Event: {:?} Value: {} Gamepad_Id: {:?}",
axis, value, id
);
}
fn focus_event(&mut self, _ctx: &mut Context, gained: bool) {
if gained {
println!("Focus gained");
} else {
println!("Focus lost");
}
}
}
pub fn main() -> GameResult | {
let cb = ggez::ContextBuilder::new("input_test", "ggez").window_mode(
conf::WindowMode::default()
.fullscreen_type(conf::FullscreenType::Windowed)
.resizable(true),
);
let (ctx, event_loop) = cb.build()?;
// remove the comment to see how physical mouse coordinates can differ
// from logical game coordinates when the screen coordinate system changes
// graphics::set_screen_coordinates(&mut ctx, Rect::new(20., 50., 2000., 1000.));
// alternatively, resizing the window also leads to screen coordinates
// and physical window size being out of sync
let state = MainState::new();
event::run(ctx, event_loop, state)
} | identifier_body |
|
uuid.rs | #![allow(clippy::needless_lifetimes)]
use uuid::Uuid;
use crate::{
parser::{ParseError, ScalarToken, Token},
value::ParseScalarResult,
Value,
};
#[crate::graphql_scalar(description = "Uuid")]
impl<S> GraphQLScalar for Uuid
where
S: ScalarValue,
{
fn resolve(&self) -> Value {
Value::scalar(self.to_string())
}
fn from_input_value(v: &InputValue) -> Option<Uuid> {
v.as_string_value().and_then(|s| Uuid::parse_str(s).ok())
}
fn from_str<'a>(value: ScalarToken<'a>) -> ParseScalarResult<'a, S> {
if let ScalarToken::String(value) = value {
Ok(S::from(value.to_owned()))
} else {
Err(ParseError::UnexpectedToken(Token::Scalar(value)))
}
}
}
#[cfg(test)]
mod test {
use crate::{value::DefaultScalarValue, InputValue};
use uuid::Uuid;
#[test]
fn uuid_from_input_value() {
let raw = "123e4567-e89b-12d3-a456-426655440000";
let input: InputValue<DefaultScalarValue> = InputValue::scalar(raw.to_string());
let parsed: Uuid = crate::FromInputValue::from_input_value(&input).unwrap();
let id = Uuid::parse_str(raw).unwrap(); | assert_eq!(parsed, id);
}
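    // Companion sketch (hypothetical test, not in the original suite): a string
    // that is not a UUID makes `Uuid::parse_str` fail, and the `.ok()` inside
    // `from_input_value` turns that failure into `None`.
    #[test]
    fn uuid_from_invalid_input_value() {
        let raw = "not-a-uuid";
        let input: InputValue<DefaultScalarValue> = InputValue::scalar(raw.to_string());
        let parsed: Option<Uuid> = crate::FromInputValue::from_input_value(&input);
        assert!(parsed.is_none());
    }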
} | random_line_split |
|
uuid.rs | #![allow(clippy::needless_lifetimes)]
use uuid::Uuid;
use crate::{
parser::{ParseError, ScalarToken, Token},
value::ParseScalarResult,
Value,
};
#[crate::graphql_scalar(description = "Uuid")]
impl<S> GraphQLScalar for Uuid
where
S: ScalarValue,
{
fn resolve(&self) -> Value {
Value::scalar(self.to_string())
}
fn from_input_value(v: &InputValue) -> Option<Uuid> {
v.as_string_value().and_then(|s| Uuid::parse_str(s).ok())
}
fn from_str<'a>(value: ScalarToken<'a>) -> ParseScalarResult<'a, S> {
if let ScalarToken::String(value) = value | else {
Err(ParseError::UnexpectedToken(Token::Scalar(value)))
}
}
}
#[cfg(test)]
mod test {
use crate::{value::DefaultScalarValue, InputValue};
use uuid::Uuid;
#[test]
fn uuid_from_input_value() {
let raw = "123e4567-e89b-12d3-a456-426655440000";
let input: InputValue<DefaultScalarValue> = InputValue::scalar(raw.to_string());
let parsed: Uuid = crate::FromInputValue::from_input_value(&input).unwrap();
let id = Uuid::parse_str(raw).unwrap();
assert_eq!(parsed, id);
}
}
| {
Ok(S::from(value.to_owned()))
} | conditional_block |
uuid.rs | #![allow(clippy::needless_lifetimes)]
use uuid::Uuid;
use crate::{
parser::{ParseError, ScalarToken, Token},
value::ParseScalarResult,
Value,
};
#[crate::graphql_scalar(description = "Uuid")]
impl<S> GraphQLScalar for Uuid
where
S: ScalarValue,
{
fn resolve(&self) -> Value {
Value::scalar(self.to_string())
}
fn | (v: &InputValue) -> Option<Uuid> {
v.as_string_value().and_then(|s| Uuid::parse_str(s).ok())
}
fn from_str<'a>(value: ScalarToken<'a>) -> ParseScalarResult<'a, S> {
if let ScalarToken::String(value) = value {
Ok(S::from(value.to_owned()))
} else {
Err(ParseError::UnexpectedToken(Token::Scalar(value)))
}
}
}
#[cfg(test)]
mod test {
use crate::{value::DefaultScalarValue, InputValue};
use uuid::Uuid;
#[test]
fn uuid_from_input_value() {
let raw = "123e4567-e89b-12d3-a456-426655440000";
let input: InputValue<DefaultScalarValue> = InputValue::scalar(raw.to_string());
let parsed: Uuid = crate::FromInputValue::from_input_value(&input).unwrap();
let id = Uuid::parse_str(raw).unwrap();
assert_eq!(parsed, id);
}
}
| from_input_value | identifier_name |
uuid.rs | #![allow(clippy::needless_lifetimes)]
use uuid::Uuid;
use crate::{
parser::{ParseError, ScalarToken, Token},
value::ParseScalarResult,
Value,
};
#[crate::graphql_scalar(description = "Uuid")]
impl<S> GraphQLScalar for Uuid
where
S: ScalarValue,
{
fn resolve(&self) -> Value {
Value::scalar(self.to_string())
}
fn from_input_value(v: &InputValue) -> Option<Uuid> {
v.as_string_value().and_then(|s| Uuid::parse_str(s).ok())
}
fn from_str<'a>(value: ScalarToken<'a>) -> ParseScalarResult<'a, S> |
}
#[cfg(test)]
mod test {
use crate::{value::DefaultScalarValue, InputValue};
use uuid::Uuid;
#[test]
fn uuid_from_input_value() {
let raw = "123e4567-e89b-12d3-a456-426655440000";
let input: InputValue<DefaultScalarValue> = InputValue::scalar(raw.to_string());
let parsed: Uuid = crate::FromInputValue::from_input_value(&input).unwrap();
let id = Uuid::parse_str(raw).unwrap();
assert_eq!(parsed, id);
}
}
| {
if let ScalarToken::String(value) = value {
Ok(S::from(value.to_owned()))
} else {
Err(ParseError::UnexpectedToken(Token::Scalar(value)))
}
} | identifier_body |
path-lookahead.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![warn(unused)]
// Parser test for #37765
fn with_parens<T: ToString>(arg: T) -> String { //~WARN function is never used: `with_parens`
return (<T as ToString>::to_string(&arg)); //~WARN unnecessary parentheses around `return` value
}
fn no_parens<T: ToString>(arg: T) -> String { //~WARN function is never used: `no_parens`
return <T as ToString>::to_string(&arg);
}
fn | () {
}
| main | identifier_name |
path-lookahead.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![warn(unused)]
// Parser test for #37765
fn with_parens<T: ToString>(arg: T) -> String { //~WARN function is never used: `with_parens`
return (<T as ToString>::to_string(&arg)); //~WARN unnecessary parentheses around `return` value
}
fn no_parens<T: ToString>(arg: T) -> String |
fn main() {
}
| { //~WARN function is never used: `no_parens`
return <T as ToString>::to_string(&arg);
} | identifier_body |
path-lookahead.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![warn(unused)]
|
fn with_parens<T: ToString>(arg: T) -> String { //~WARN function is never used: `with_parens`
return (<T as ToString>::to_string(&arg)); //~WARN unnecessary parentheses around `return` value
}
fn no_parens<T: ToString>(arg: T) -> String { //~WARN function is never used: `no_parens`
return <T as ToString>::to_string(&arg);
}
fn main() {
} | // Parser test for #37765 | random_line_split |
error.rs | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Library error_lib creates a generic error type for rust
// This allows simplified chaining of error callbacks using the ? operator
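// Illustrative sketch (hypothetical helper, not part of this library): the
// `From` impls below let unrelated error sources bubble up through `?` into a
// single `SgeResult`. The file path and number parsing are made up for the example.
#[allow(dead_code)]
fn example_read_number(path: &str) -> SgeResult<i32> {
    // `std::io::Error` converts via `From<std::io::Error>` into `SgeError::IO`.
    let text = std::fs::read_to_string(path)?;
    // A `String` error converts via `From<String>` into `SgeError::Message`.
    let n = text
        .trim()
        .parse::<i32>()
        .map_err(|e| format!("not a number: {}", e))?;
    Ok(n)
}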
#[derive(Debug)] | pub enum SgeError {
IO(std::io::Error),
StdErr(Box<dyn std::error::Error>),
Literal(&'static str),
Message(String),
}
pub type SgeResult<T> = Result<T, SgeError>;
impl From<std::io::Error> for SgeError {
fn from(e: std::io::Error) -> Self {
SgeError::IO(e)
}
}
impl From<Box<dyn std::error::Error>> for SgeError {
fn from(e: Box<dyn std::error::Error>) -> Self {
SgeError::StdErr(e)
}
}
impl From<&'static str> for SgeError {
fn from(e: &'static str) -> Self {
SgeError::Literal(e)
}
}
impl From<String> for SgeError {
fn from(e: String) -> Self {
SgeError::Message(e)
}
}
impl From<std::fmt::Error> for SgeError {
fn from(e: std::fmt::Error) -> Self {
SgeError::Message(format!("{:?}", e))
}
}
impl From<std::ffi::NulError> for SgeError {
fn from(_: std::ffi::NulError) -> Self {
SgeError::Literal("Null error")
}
}
impl From<()> for SgeError {
fn from(_: ()) -> Self {
SgeError::Literal("")
}
}
impl Into<&'static str> for SgeError {
fn into(self) -> &'static str {
match self {
SgeError::IO(_) => "io error",
SgeError::StdErr(_) => "std err",
SgeError::Literal(_) => "literal",
SgeError::Message(_) => "message",
}
}
}
impl std::error::Error for SgeError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match *self {
SgeError::IO(ref e) => Some(e),
SgeError::StdErr(_) => None,
SgeError::Literal(_) => None,
SgeError::Message(_) => None,
}
}
}
impl std::fmt::Display for SgeError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match *self {
SgeError::IO(ref e) => e.fmt(f),
SgeError::StdErr(_) => write!(f, "std err"),
SgeError::Literal(ref lit) => write!(f, "{}", lit),
SgeError::Message(ref msg) => write!(f, "{}", msg),
}
}
}
impl Clone for SgeError {
fn clone(&self) -> Self {
match self {
SgeError::IO(m) => SgeError::Message(format!("{}", m)),
SgeError::StdErr(m) => SgeError::Message(format!("{}", m)),
SgeError::Literal(m) => SgeError::Literal(*m),
SgeError::Message(m) => SgeError::Message(m.into()),
}
}
}
impl PartialEq for SgeError {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(SgeError::IO(_), SgeError::IO(_)) => true,
(SgeError::StdErr(_), SgeError::StdErr(_)) => true,
(SgeError::Literal(a), SgeError::Literal(b)) => a == b,
(SgeError::Message(a), SgeError::Message(b)) => a == b,
(_, _) => false,
}
}
}
pub fn err_logged<T>(msg: &'static str) -> Result<T, &'static str> {
println!("{}", msg);
Err(msg)
} | random_line_split |
|
error.rs | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Library error_lib creates a generic error type for rust
// This allows simplified chaining of error callbacks using the ? operator
#[derive(Debug)]
pub enum SgeError {
IO(std::io::Error),
StdErr(Box<dyn std::error::Error>),
Literal(&'static str),
Message(String),
}
pub type SgeResult<T> = Result<T, SgeError>;
impl From<std::io::Error> for SgeError {
fn from(e: std::io::Error) -> Self {
SgeError::IO(e)
}
}
impl From<Box<dyn std::error::Error>> for SgeError {
fn from(e: Box<dyn std::error::Error>) -> Self {
SgeError::StdErr(e)
}
}
impl From<&'static str> for SgeError {
fn from(e: &'static str) -> Self {
SgeError::Literal(e)
}
}
impl From<String> for SgeError {
fn from(e: String) -> Self {
SgeError::Message(e)
}
}
impl From<std::fmt::Error> for SgeError {
fn from(e: std::fmt::Error) -> Self {
SgeError::Message(format!("{:?}", e))
}
}
impl From<std::ffi::NulError> for SgeError {
fn from(_: std::ffi::NulError) -> Self {
SgeError::Literal("Null error")
}
}
impl From<()> for SgeError {
fn from(_: ()) -> Self {
SgeError::Literal("")
}
}
impl Into<&'static str> for SgeError {
fn | (self) -> &'static str {
match self {
SgeError::IO(_) => "io error",
SgeError::StdErr(_) => "std err",
SgeError::Literal(_) => "literal",
SgeError::Message(_) => "message",
}
}
}
impl std::error::Error for SgeError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match *self {
SgeError::IO(ref e) => Some(e),
SgeError::StdErr(_) => None,
SgeError::Literal(_) => None,
SgeError::Message(_) => None,
}
}
}
impl std::fmt::Display for SgeError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match *self {
SgeError::IO(ref e) => e.fmt(f),
SgeError::StdErr(_) => write!(f, "std err"),
SgeError::Literal(ref lit) => write!(f, "{}", lit),
SgeError::Message(ref msg) => write!(f, "{}", msg),
}
}
}
impl Clone for SgeError {
fn clone(&self) -> Self {
match self {
SgeError::IO(m) => SgeError::Message(format!("{}", m)),
SgeError::StdErr(m) => SgeError::Message(format!("{}", m)),
SgeError::Literal(m) => SgeError::Literal(*m),
SgeError::Message(m) => SgeError::Message(m.into()),
}
}
}
impl PartialEq for SgeError {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(SgeError::IO(_), SgeError::IO(_)) => true,
(SgeError::StdErr(_), SgeError::StdErr(_)) => true,
(SgeError::Literal(a), SgeError::Literal(b)) => a == b,
(SgeError::Message(a), SgeError::Message(b)) => a == b,
(_, _) => false,
}
}
}
pub fn err_logged<T>(msg: &'static str) -> Result<T, &'static str> {
println!("{}", msg);
Err(msg)
}
| into | identifier_name |
error.rs | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Library error_lib creates a generic error type for rust
// This allows simplified chaining of error callbacks using the ? operator
#[derive(Debug)]
pub enum SgeError {
IO(std::io::Error),
StdErr(Box<dyn std::error::Error>),
Literal(&'static str),
Message(String),
}
pub type SgeResult<T> = Result<T, SgeError>;
impl From<std::io::Error> for SgeError {
fn from(e: std::io::Error) -> Self |
}
impl From<Box<dyn std::error::Error>> for SgeError {
fn from(e: Box<dyn std::error::Error>) -> Self {
SgeError::StdErr(e)
}
}
impl From<&'static str> for SgeError {
fn from(e: &'static str) -> Self {
SgeError::Literal(e)
}
}
impl From<String> for SgeError {
fn from(e: String) -> Self {
SgeError::Message(e)
}
}
impl From<std::fmt::Error> for SgeError {
fn from(e: std::fmt::Error) -> Self {
SgeError::Message(format!("{:?}", e))
}
}
impl From<std::ffi::NulError> for SgeError {
fn from(_: std::ffi::NulError) -> Self {
SgeError::Literal("Null error")
}
}
impl From<()> for SgeError {
fn from(_: ()) -> Self {
SgeError::Literal("")
}
}
impl Into<&'static str> for SgeError {
fn into(self) -> &'static str {
match self {
SgeError::IO(_) => "io error",
SgeError::StdErr(_) => "std err",
SgeError::Literal(_) => "literal",
SgeError::Message(_) => "message",
}
}
}
impl std::error::Error for SgeError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match *self {
SgeError::IO(ref e) => Some(e),
SgeError::StdErr(_) => None,
SgeError::Literal(_) => None,
SgeError::Message(_) => None,
}
}
}
impl std::fmt::Display for SgeError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match *self {
SgeError::IO(ref e) => e.fmt(f),
SgeError::StdErr(_) => write!(f, "std err"),
SgeError::Literal(ref lit) => write!(f, "{}", lit),
SgeError::Message(ref msg) => write!(f, "{}", msg),
}
}
}
impl Clone for SgeError {
fn clone(&self) -> Self {
match self {
SgeError::IO(m) => SgeError::Message(format!("{}", m)),
SgeError::StdErr(m) => SgeError::Message(format!("{}", m)),
SgeError::Literal(m) => SgeError::Literal(*m),
SgeError::Message(m) => SgeError::Message(m.into()),
}
}
}
impl PartialEq for SgeError {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(SgeError::IO(_), SgeError::IO(_)) => true,
(SgeError::StdErr(_), SgeError::StdErr(_)) => true,
(SgeError::Literal(a), SgeError::Literal(b)) => a == b,
(SgeError::Message(a), SgeError::Message(b)) => a == b,
(_, _) => false,
}
}
}
pub fn err_logged<T>(msg: &'static str) -> Result<T, &'static str> {
println!("{}", msg);
Err(msg)
}
| {
SgeError::IO(e)
} | identifier_body |
parser-unicode-whitespace.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
// | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Beware editing: it has numerous whitespace characters which are important.
// It contains one ranges from the 'PATTERN_WHITE_SPACE' property outlined in
// http://unicode.org/Public/UNIDATA/PropList.txt
//
// The characters in the first expression of the assertion can be generated
// from: "4\u{0C}+\n\t\r7\t*\u{20}2\u{85}/\u{200E}3\u{200F}*\u{2028}2\u{2029}"
pub fn main() {
assert_eq!(4+
7 * 2
/3*
2
, 4 + 7 * 2 / 3 * 2);
} | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or | random_line_split |
parser-unicode-whitespace.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Beware editing: it has numerous whitespace characters which are important.
// It contains one ranges from the 'PATTERN_WHITE_SPACE' property outlined in
// http://unicode.org/Public/UNIDATA/PropList.txt
//
// The characters in the first expression of the assertion can be generated
// from: "4\u{0C}+\n\t\r7\t*\u{20}2\u{85}/\u{200E}3\u{200F}*\u{2028}2\u{2029}"
pub fn main() | {
assert_eq!(4+
7 * 2
/3*
2
, 4 + 7 * 2 / 3 * 2);
}
| identifier_body |
|
parser-unicode-whitespace.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Beware editing: it has numerous whitespace characters which are important.
// It contains one ranges from the 'PATTERN_WHITE_SPACE' property outlined in
// http://unicode.org/Public/UNIDATA/PropList.txt
//
// The characters in the first expression of the assertion can be generated
// from: "4\u{0C}+\n\t\r7\t*\u{20}2\u{85}/\u{200E}3\u{200F}*\u{2028}2\u{2029}"
pub fn | () {
assert_eq!(4+
7 * 2
/3*
2
, 4 + 7 * 2 / 3 * 2);
}
| main | identifier_name |
test.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(rustc_private)]
#![feature(libc)]
extern crate libc;
extern crate rustc;
extern crate rustc_driver;
extern crate rustc_lint;
extern crate rustc_resolve;
extern crate syntax;
use std::ffi::{CStr, CString};
use std::mem::transmute;
use std::path::PathBuf;
use std::thread::Builder;
use rustc::ast_map;
use rustc::llvm;
use rustc::metadata::cstore::RequireDynamic;
use rustc::middle::ty;
use rustc::session::config::{self, basic_options, build_configuration, Input, Options};
use rustc::session::build_session;
use rustc_driver::driver;
use rustc_resolve::MakeGlobMap;
use libc::c_void;
use syntax::diagnostics::registry::Registry;
| fn main() {
let program = r#"
#[no_mangle]
pub static TEST_STATIC: i32 = 42;
"#;
let program2 = r#"
#[no_mangle]
pub fn test_add(a: i32, b: i32) -> i32 { a + b }
"#;
let mut path = match std::env::args().nth(2) {
Some(path) => PathBuf::from(&path),
None => panic!("missing rustc path")
};
// Remove two segments from rustc path to get sysroot.
path.pop();
path.pop();
let mut ee = ExecutionEngine::new(program, path);
let test_static = match ee.get_global("TEST_STATIC") {
Some(g) => g as *const i32,
None => panic!("failed to get global")
};
assert_eq!(unsafe { *test_static }, 42);
ee.add_module(program2);
let test_add: fn(i32, i32) -> i32;
test_add = match ee.get_function("test_add") {
Some(f) => unsafe { transmute(f) },
None => panic!("failed to get function")
};
assert_eq!(test_add(1, 2), 3);
}
struct ExecutionEngine {
ee: llvm::ExecutionEngineRef,
modules: Vec<llvm::ModuleRef>,
sysroot: PathBuf,
}
impl ExecutionEngine {
pub fn new(program: &str, sysroot: PathBuf) -> ExecutionEngine {
let (llmod, deps) = compile_program(program, sysroot.clone())
.expect("failed to compile program");
let ee = unsafe { llvm::LLVMBuildExecutionEngine(llmod) };
if ee.is_null() {
panic!("Failed to create ExecutionEngine: {}", llvm_error());
}
let ee = ExecutionEngine{
ee: ee,
modules: vec![llmod],
sysroot: sysroot,
};
ee.load_deps(&deps);
ee
}
pub fn add_module(&mut self, program: &str) {
let (llmod, deps) = compile_program(program, self.sysroot.clone())
.expect("failed to compile program in add_module");
unsafe { llvm::LLVMExecutionEngineAddModule(self.ee, llmod); }
self.modules.push(llmod);
self.load_deps(&deps);
}
/// Returns a raw pointer to the named function.
pub fn get_function(&mut self, name: &str) -> Option<*const c_void> {
let s = CString::new(name.as_bytes()).unwrap();
for &m in &self.modules {
let fv = unsafe { llvm::LLVMGetNamedFunction(m, s.as_ptr()) };
if !fv.is_null() {
let fp = unsafe { llvm::LLVMGetPointerToGlobal(self.ee, fv) };
assert!(!fp.is_null());
return Some(fp);
}
}
None
}
/// Returns a raw pointer to the named global item.
pub fn get_global(&mut self, name: &str) -> Option<*const c_void> {
let s = CString::new(name.as_bytes()).unwrap();
for &m in &self.modules {
let gv = unsafe { llvm::LLVMGetNamedGlobal(m, s.as_ptr()) };
if !gv.is_null() {
let gp = unsafe { llvm::LLVMGetPointerToGlobal(self.ee, gv) };
assert!(!gp.is_null());
return Some(gp);
}
}
None
}
/// Loads all dependencies of compiled code.
/// Expects a series of paths to dynamic library files.
fn load_deps(&self, deps: &[PathBuf]) {
for path in deps {
let s = match path.as_os_str().to_str() {
Some(s) => s,
None => panic!(
"Could not convert crate path to UTF-8 string: {:?}", path)
};
let cs = CString::new(s).unwrap();
let res = unsafe { llvm::LLVMRustLoadDynamicLibrary(cs.as_ptr()) };
if res == 0 {
panic!("Failed to load crate {:?}: {}",
path.display(), llvm_error());
}
}
}
}
impl Drop for ExecutionEngine {
fn drop(&mut self) {
unsafe { llvm::LLVMDisposeExecutionEngine(self.ee) };
}
}
/// Returns last error from LLVM wrapper code.
fn llvm_error() -> String {
String::from_utf8_lossy(
unsafe { CStr::from_ptr(llvm::LLVMRustGetLastError()).to_bytes() })
.into_owned()
}
fn build_exec_options(sysroot: PathBuf) -> Options {
let mut opts = basic_options();
// librustc derives sysroot from the executable name.
// Since we are not rustc, we must specify it.
opts.maybe_sysroot = Some(sysroot);
// Prefer faster build time
opts.optimize = config::No;
// Don't require a `main` function
opts.crate_types = vec![config::CrateTypeDylib];
opts
}
/// Compiles input up to phase 4, translation to LLVM.
///
/// Returns the LLVM `ModuleRef` and a series of paths to dynamic libraries
/// for crates used in the given input.
fn compile_program(input: &str, sysroot: PathBuf)
-> Option<(llvm::ModuleRef, Vec<PathBuf>)> {
let input = Input::Str(input.to_string());
let thread = Builder::new().name("compile_program".to_string());
let handle = thread.spawn(move || {
let opts = build_exec_options(sysroot);
let sess = build_session(opts, None, Registry::new(&rustc::DIAGNOSTICS));
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let cfg = build_configuration(&sess);
let id = "input".to_string();
let krate = driver::phase_1_parse_input(&sess, cfg, &input);
let krate = driver::phase_2_configure_and_expand(&sess, krate, &id, None)
.expect("phase_2 returned `None`");
let mut forest = ast_map::Forest::new(krate);
let arenas = ty::CtxtArenas::new();
let ast_map = driver::assign_node_ids_and_map(&sess, &mut forest);
driver::phase_3_run_analysis_passes(
sess, ast_map, &arenas, id, MakeGlobMap::No, |tcx, analysis| {
let trans = driver::phase_4_translate_to_llvm(tcx, analysis);
let crates = tcx.sess.cstore.get_used_crates(RequireDynamic);
// Collect crates used in the session.
// Reverse order finds dependencies first.
let deps = crates.into_iter().rev()
.filter_map(|(_, p)| p).collect();
assert_eq!(trans.modules.len(), 1);
let llmod = trans.modules[0].llmod;
// Workaround because raw pointers do not impl Send
let modp = llmod as usize;
(modp, deps)
}).1
}).unwrap();
match handle.join() {
Ok((llmod, deps)) => Some((llmod as llvm::ModuleRef, deps)),
Err(_) => None
}
} | random_line_split |
|
test.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(rustc_private)]
#![feature(libc)]
extern crate libc;
extern crate rustc;
extern crate rustc_driver;
extern crate rustc_lint;
extern crate rustc_resolve;
extern crate syntax;
use std::ffi::{CStr, CString};
use std::mem::transmute;
use std::path::PathBuf;
use std::thread::Builder;
use rustc::ast_map;
use rustc::llvm;
use rustc::metadata::cstore::RequireDynamic;
use rustc::middle::ty;
use rustc::session::config::{self, basic_options, build_configuration, Input, Options};
use rustc::session::build_session;
use rustc_driver::driver;
use rustc_resolve::MakeGlobMap;
use libc::c_void;
use syntax::diagnostics::registry::Registry;
fn main() {
let program = r#"
#[no_mangle]
pub static TEST_STATIC: i32 = 42;
"#;
let program2 = r#"
#[no_mangle]
pub fn test_add(a: i32, b: i32) -> i32 { a + b }
"#;
let mut path = match std::env::args().nth(2) {
Some(path) => PathBuf::from(&path),
None => panic!("missing rustc path")
};
// Remove two segments from rustc path to get sysroot.
path.pop();
path.pop();
let mut ee = ExecutionEngine::new(program, path);
let test_static = match ee.get_global("TEST_STATIC") {
Some(g) => g as *const i32,
None => panic!("failed to get global")
};
assert_eq!(unsafe { *test_static }, 42);
ee.add_module(program2);
let test_add: fn(i32, i32) -> i32;
test_add = match ee.get_function("test_add") {
Some(f) => unsafe { transmute(f) },
None => panic!("failed to get function")
};
assert_eq!(test_add(1, 2), 3);
}
struct ExecutionEngine {
ee: llvm::ExecutionEngineRef,
modules: Vec<llvm::ModuleRef>,
sysroot: PathBuf,
}
impl ExecutionEngine {
pub fn new(program: &str, sysroot: PathBuf) -> ExecutionEngine {
let (llmod, deps) = compile_program(program, sysroot.clone())
.expect("failed to compile program");
let ee = unsafe { llvm::LLVMBuildExecutionEngine(llmod) };
if ee.is_null() {
panic!("Failed to create ExecutionEngine: {}", llvm_error());
}
let ee = ExecutionEngine{
ee: ee,
modules: vec![llmod],
sysroot: sysroot,
};
ee.load_deps(&deps);
ee
}
pub fn add_module(&mut self, program: &str) {
let (llmod, deps) = compile_program(program, self.sysroot.clone())
.expect("failed to compile program in add_module");
unsafe { llvm::LLVMExecutionEngineAddModule(self.ee, llmod); }
self.modules.push(llmod);
self.load_deps(&deps);
}
/// Returns a raw pointer to the named function.
pub fn get_function(&mut self, name: &str) -> Option<*const c_void> {
let s = CString::new(name.as_bytes()).unwrap();
for &m in &self.modules {
let fv = unsafe { llvm::LLVMGetNamedFunction(m, s.as_ptr()) };
if !fv.is_null() {
let fp = unsafe { llvm::LLVMGetPointerToGlobal(self.ee, fv) };
assert!(!fp.is_null());
return Some(fp);
}
}
None
}
/// Returns a raw pointer to the named global item.
pub fn get_global(&mut self, name: &str) -> Option<*const c_void> {
let s = CString::new(name.as_bytes()).unwrap();
for &m in &self.modules {
let gv = unsafe { llvm::LLVMGetNamedGlobal(m, s.as_ptr()) };
if !gv.is_null() {
let gp = unsafe { llvm::LLVMGetPointerToGlobal(self.ee, gv) };
assert!(!gp.is_null());
return Some(gp);
}
}
None
}
/// Loads all dependencies of compiled code.
/// Expects a series of paths to dynamic library files.
fn load_deps(&self, deps: &[PathBuf]) {
for path in deps {
let s = match path.as_os_str().to_str() {
Some(s) => s,
None => panic!(
"Could not convert crate path to UTF-8 string: {:?}", path)
};
let cs = CString::new(s).unwrap();
let res = unsafe { llvm::LLVMRustLoadDynamicLibrary(cs.as_ptr()) };
if res == 0 {
panic!("Failed to load crate {:?}: {}",
path.display(), llvm_error());
}
}
}
}
impl Drop for ExecutionEngine {
fn drop(&mut self) {
unsafe { llvm::LLVMDisposeExecutionEngine(self.ee) };
}
}
/// Returns last error from LLVM wrapper code.
fn llvm_error() -> String {
String::from_utf8_lossy(
unsafe { CStr::from_ptr(llvm::LLVMRustGetLastError()).to_bytes() })
.into_owned()
}
fn build_exec_options(sysroot: PathBuf) -> Options {
let mut opts = basic_options();
// librustc derives sysroot from the executable name.
// Since we are not rustc, we must specify it.
opts.maybe_sysroot = Some(sysroot);
// Prefer faster build time
opts.optimize = config::No;
// Don't require a `main` function
opts.crate_types = vec![config::CrateTypeDylib];
opts
}
/// Compiles input up to phase 4, translation to LLVM.
///
/// Returns the LLVM `ModuleRef` and a series of paths to dynamic libraries
/// for crates used in the given input.
fn compile_program(input: &str, sysroot: PathBuf)
-> Option<(llvm::ModuleRef, Vec<PathBuf>)> | let ast_map = driver::assign_node_ids_and_map(&sess, &mut forest);
driver::phase_3_run_analysis_passes(
sess, ast_map, &arenas, id, MakeGlobMap::No, |tcx, analysis| {
let trans = driver::phase_4_translate_to_llvm(tcx, analysis);
let crates = tcx.sess.cstore.get_used_crates(RequireDynamic);
// Collect crates used in the session.
// Reverse order finds dependencies first.
let deps = crates.into_iter().rev()
.filter_map(|(_, p)| p).collect();
assert_eq!(trans.modules.len(), 1);
let llmod = trans.modules[0].llmod;
// Workaround because raw pointers do not impl Send
let modp = llmod as usize;
(modp, deps)
}).1
}).unwrap();
match handle.join() {
Ok((llmod, deps)) => Some((llmod as llvm::ModuleRef, deps)),
Err(_) => None
}
}
| {
let input = Input::Str(input.to_string());
let thread = Builder::new().name("compile_program".to_string());
let handle = thread.spawn(move || {
let opts = build_exec_options(sysroot);
let sess = build_session(opts, None, Registry::new(&rustc::DIAGNOSTICS));
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let cfg = build_configuration(&sess);
let id = "input".to_string();
let krate = driver::phase_1_parse_input(&sess, cfg, &input);
let krate = driver::phase_2_configure_and_expand(&sess, krate, &id, None)
.expect("phase_2 returned `None`");
let mut forest = ast_map::Forest::new(krate);
let arenas = ty::CtxtArenas::new(); | identifier_body |
test.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(rustc_private)]
#![feature(libc)]
extern crate libc;
extern crate rustc;
extern crate rustc_driver;
extern crate rustc_lint;
extern crate rustc_resolve;
extern crate syntax;
use std::ffi::{CStr, CString};
use std::mem::transmute;
use std::path::PathBuf;
use std::thread::Builder;
use rustc::ast_map;
use rustc::llvm;
use rustc::metadata::cstore::RequireDynamic;
use rustc::middle::ty;
use rustc::session::config::{self, basic_options, build_configuration, Input, Options};
use rustc::session::build_session;
use rustc_driver::driver;
use rustc_resolve::MakeGlobMap;
use libc::c_void;
use syntax::diagnostics::registry::Registry;
fn main() {
let program = r#"
#[no_mangle]
pub static TEST_STATIC: i32 = 42;
"#;
let program2 = r#"
#[no_mangle]
pub fn test_add(a: i32, b: i32) -> i32 { a + b }
"#;
let mut path = match std::env::args().nth(2) {
Some(path) => PathBuf::from(&path),
None => panic!("missing rustc path")
};
// Remove two segments from rustc path to get sysroot.
path.pop();
path.pop();
let mut ee = ExecutionEngine::new(program, path);
let test_static = match ee.get_global("TEST_STATIC") {
Some(g) => g as *const i32,
None => panic!("failed to get global")
};
assert_eq!(unsafe { *test_static }, 42);
ee.add_module(program2);
let test_add: fn(i32, i32) -> i32;
test_add = match ee.get_function("test_add") {
Some(f) => unsafe { transmute(f) },
None => panic!("failed to get function")
};
assert_eq!(test_add(1, 2), 3);
}
struct ExecutionEngine {
ee: llvm::ExecutionEngineRef,
modules: Vec<llvm::ModuleRef>,
sysroot: PathBuf,
}
impl ExecutionEngine {
pub fn new(program: &str, sysroot: PathBuf) -> ExecutionEngine {
let (llmod, deps) = compile_program(program, sysroot.clone())
.expect("failed to compile program");
let ee = unsafe { llvm::LLVMBuildExecutionEngine(llmod) };
if ee.is_null() {
panic!("Failed to create ExecutionEngine: {}", llvm_error());
}
let ee = ExecutionEngine{
ee: ee,
modules: vec![llmod],
sysroot: sysroot,
};
ee.load_deps(&deps);
ee
}
pub fn add_module(&mut self, program: &str) {
let (llmod, deps) = compile_program(program, self.sysroot.clone())
.expect("failed to compile program in add_module");
unsafe { llvm::LLVMExecutionEngineAddModule(self.ee, llmod); }
self.modules.push(llmod);
self.load_deps(&deps);
}
/// Returns a raw pointer to the named function.
pub fn get_function(&mut self, name: &str) -> Option<*const c_void> {
let s = CString::new(name.as_bytes()).unwrap();
for &m in &self.modules {
let fv = unsafe { llvm::LLVMGetNamedFunction(m, s.as_ptr()) };
if !fv.is_null() {
let fp = unsafe { llvm::LLVMGetPointerToGlobal(self.ee, fv) };
assert!(!fp.is_null());
return Some(fp);
}
}
None
}
/// Returns a raw pointer to the named global item.
pub fn get_global(&mut self, name: &str) -> Option<*const c_void> {
let s = CString::new(name.as_bytes()).unwrap();
for &m in &self.modules {
let gv = unsafe { llvm::LLVMGetNamedGlobal(m, s.as_ptr()) };
if !gv.is_null() {
let gp = unsafe { llvm::LLVMGetPointerToGlobal(self.ee, gv) };
assert!(!gp.is_null());
return Some(gp);
}
}
None
}
/// Loads all dependencies of compiled code.
/// Expects a series of paths to dynamic library files.
fn load_deps(&self, deps: &[PathBuf]) {
for path in deps {
let s = match path.as_os_str().to_str() {
Some(s) => s,
None => panic!(
"Could not convert crate path to UTF-8 string: {:?}", path)
};
let cs = CString::new(s).unwrap();
let res = unsafe { llvm::LLVMRustLoadDynamicLibrary(cs.as_ptr()) };
if res == 0 {
panic!("Failed to load crate {:?}: {}",
path.display(), llvm_error());
}
}
}
}
impl Drop for ExecutionEngine {
fn drop(&mut self) {
unsafe { llvm::LLVMDisposeExecutionEngine(self.ee) };
}
}
/// Returns last error from LLVM wrapper code.
fn llvm_error() -> String {
String::from_utf8_lossy(
unsafe { CStr::from_ptr(llvm::LLVMRustGetLastError()).to_bytes() })
.into_owned()
}
fn build_exec_options(sysroot: PathBuf) -> Options {
let mut opts = basic_options();
// librustc derives sysroot from the executable name.
// Since we are not rustc, we must specify it.
opts.maybe_sysroot = Some(sysroot);
// Prefer faster build time
opts.optimize = config::No;
// Don't require a `main` function
opts.crate_types = vec![config::CrateTypeDylib];
opts
}
/// Compiles input up to phase 4, translation to LLVM.
///
/// Returns the LLVM `ModuleRef` and a series of paths to dynamic libraries
/// for crates used in the given input.
fn | (input: &str, sysroot: PathBuf)
-> Option<(llvm::ModuleRef, Vec<PathBuf>)> {
let input = Input::Str(input.to_string());
let thread = Builder::new().name("compile_program".to_string());
let handle = thread.spawn(move || {
let opts = build_exec_options(sysroot);
let sess = build_session(opts, None, Registry::new(&rustc::DIAGNOSTICS));
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let cfg = build_configuration(&sess);
let id = "input".to_string();
let krate = driver::phase_1_parse_input(&sess, cfg, &input);
let krate = driver::phase_2_configure_and_expand(&sess, krate, &id, None)
.expect("phase_2 returned `None`");
let mut forest = ast_map::Forest::new(krate);
let arenas = ty::CtxtArenas::new();
let ast_map = driver::assign_node_ids_and_map(&sess, &mut forest);
driver::phase_3_run_analysis_passes(
sess, ast_map, &arenas, id, MakeGlobMap::No, |tcx, analysis| {
let trans = driver::phase_4_translate_to_llvm(tcx, analysis);
let crates = tcx.sess.cstore.get_used_crates(RequireDynamic);
// Collect crates used in the session.
// Reverse order finds dependencies first.
let deps = crates.into_iter().rev()
.filter_map(|(_, p)| p).collect();
assert_eq!(trans.modules.len(), 1);
let llmod = trans.modules[0].llmod;
// Workaround because raw pointers do not impl Send
let modp = llmod as usize;
(modp, deps)
}).1
}).unwrap();
match handle.join() {
Ok((llmod, deps)) => Some((llmod as llvm::ModuleRef, deps)),
Err(_) => None
}
}
| compile_program | identifier_name |
function_wrapper_cpp.rs | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::conversion::{
analysis::fun::function_wrapper::{CppConversionType, TypeConversionPolicy},
ConvertError,
};
use crate::known_types::type_lacks_copy_constructor; | use super::type_to_cpp::{type_to_cpp, CppNameMap};
impl TypeConversionPolicy {
pub(super) fn unconverted_type(
&self,
cpp_name_map: &CppNameMap,
) -> Result<String, ConvertError> {
match self.cpp_conversion {
CppConversionType::FromUniquePtrToValue => self.wrapped_type(cpp_name_map),
_ => self.unwrapped_type_as_string(cpp_name_map),
}
}
pub(super) fn converted_type(&self, cpp_name_map: &CppNameMap) -> Result<String, ConvertError> {
match self.cpp_conversion {
CppConversionType::FromValueToUniquePtr => self.wrapped_type(cpp_name_map),
_ => self.unwrapped_type_as_string(cpp_name_map),
}
}
fn unwrapped_type_as_string(&self, cpp_name_map: &CppNameMap) -> Result<String, ConvertError> {
type_to_cpp(&self.unwrapped_type, cpp_name_map)
}
fn wrapped_type(&self, original_name_map: &CppNameMap) -> Result<String, ConvertError> {
Ok(format!(
"std::unique_ptr<{}>",
self.unwrapped_type_as_string(original_name_map)?
))
}
pub(super) fn cpp_conversion(
&self,
var_name: &str,
cpp_name_map: &CppNameMap,
use_rvo: bool,
) -> Result<String, ConvertError> {
Ok(match self.cpp_conversion {
CppConversionType::None => {
if type_lacks_copy_constructor(&self.unwrapped_type) && !use_rvo {
format!("std::move({})", var_name)
} else {
var_name.to_string()
}
}
CppConversionType::FromUniquePtrToValue | CppConversionType::FromPtrToMove => {
format!("std::move(*{})", var_name)
}
CppConversionType::FromValueToUniquePtr => format!(
"std::make_unique<{}>({})",
self.unconverted_type(cpp_name_map)?,
var_name
),
})
}
} | random_line_split |
|
function_wrapper_cpp.rs | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::conversion::{
analysis::fun::function_wrapper::{CppConversionType, TypeConversionPolicy},
ConvertError,
};
use crate::known_types::type_lacks_copy_constructor;
use super::type_to_cpp::{type_to_cpp, CppNameMap};
impl TypeConversionPolicy {
pub(super) fn unconverted_type(
&self,
cpp_name_map: &CppNameMap,
) -> Result<String, ConvertError> {
match self.cpp_conversion {
CppConversionType::FromUniquePtrToValue => self.wrapped_type(cpp_name_map),
_ => self.unwrapped_type_as_string(cpp_name_map),
}
}
pub(super) fn converted_type(&self, cpp_name_map: &CppNameMap) -> Result<String, ConvertError> {
match self.cpp_conversion {
CppConversionType::FromValueToUniquePtr => self.wrapped_type(cpp_name_map),
_ => self.unwrapped_type_as_string(cpp_name_map),
}
}
fn unwrapped_type_as_string(&self, cpp_name_map: &CppNameMap) -> Result<String, ConvertError> {
type_to_cpp(&self.unwrapped_type, cpp_name_map)
}
fn wrapped_type(&self, original_name_map: &CppNameMap) -> Result<String, ConvertError> {
Ok(format!(
"std::unique_ptr<{}>",
self.unwrapped_type_as_string(original_name_map)?
))
}
pub(super) fn cpp_conversion(
&self,
var_name: &str,
cpp_name_map: &CppNameMap,
use_rvo: bool,
) -> Result<String, ConvertError> |
}
| {
Ok(match self.cpp_conversion {
CppConversionType::None => {
if type_lacks_copy_constructor(&self.unwrapped_type) && !use_rvo {
format!("std::move({})", var_name)
} else {
var_name.to_string()
}
}
CppConversionType::FromUniquePtrToValue | CppConversionType::FromPtrToMove => {
format!("std::move(*{})", var_name)
}
CppConversionType::FromValueToUniquePtr => format!(
"std::make_unique<{}>({})",
self.unconverted_type(cpp_name_map)?,
var_name
),
})
} | identifier_body |
function_wrapper_cpp.rs | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::conversion::{
analysis::fun::function_wrapper::{CppConversionType, TypeConversionPolicy},
ConvertError,
};
use crate::known_types::type_lacks_copy_constructor;
use super::type_to_cpp::{type_to_cpp, CppNameMap};
impl TypeConversionPolicy {
pub(super) fn unconverted_type(
&self,
cpp_name_map: &CppNameMap,
) -> Result<String, ConvertError> {
match self.cpp_conversion {
CppConversionType::FromUniquePtrToValue => self.wrapped_type(cpp_name_map),
_ => self.unwrapped_type_as_string(cpp_name_map),
}
}
pub(super) fn | (&self, cpp_name_map: &CppNameMap) -> Result<String, ConvertError> {
match self.cpp_conversion {
CppConversionType::FromValueToUniquePtr => self.wrapped_type(cpp_name_map),
_ => self.unwrapped_type_as_string(cpp_name_map),
}
}
fn unwrapped_type_as_string(&self, cpp_name_map: &CppNameMap) -> Result<String, ConvertError> {
type_to_cpp(&self.unwrapped_type, cpp_name_map)
}
fn wrapped_type(&self, original_name_map: &CppNameMap) -> Result<String, ConvertError> {
Ok(format!(
"std::unique_ptr<{}>",
self.unwrapped_type_as_string(original_name_map)?
))
}
pub(super) fn cpp_conversion(
&self,
var_name: &str,
cpp_name_map: &CppNameMap,
use_rvo: bool,
) -> Result<String, ConvertError> {
Ok(match self.cpp_conversion {
CppConversionType::None => {
if type_lacks_copy_constructor(&self.unwrapped_type) && !use_rvo {
format!("std::move({})", var_name)
} else {
var_name.to_string()
}
}
CppConversionType::FromUniquePtrToValue | CppConversionType::FromPtrToMove => {
format!("std::move(*{})", var_name)
}
CppConversionType::FromValueToUniquePtr => format!(
"std::make_unique<{}>({})",
self.unconverted_type(cpp_name_map)?,
var_name
),
})
}
}
| converted_type | identifier_name |
task-comm-13.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
| while i < number_of_messages { tx.send(start + i); i += 1; }
}
pub fn main() {
println!("Check that we don't deadlock.");
let (tx, rx) = channel();
task::try(proc() { start(&tx, 0, 10) });
println!("Joined task");
} | use std::task;
fn start(tx: &Sender<int>, start: int, number_of_messages: int) {
let mut i: int = 0; | random_line_split |
task-comm-13.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::task;
fn start(tx: &Sender<int>, start: int, number_of_messages: int) |
pub fn main() {
println!("Check that we don't deadlock.");
let (tx, rx) = channel();
task::try(proc() { start(&tx, 0, 10) });
println!("Joined task");
}
| {
let mut i: int = 0;
while i < number_of_messages { tx.send(start + i); i += 1; }
} | identifier_body |
task-comm-13.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::task;
fn start(tx: &Sender<int>, start: int, number_of_messages: int) {
let mut i: int = 0;
while i < number_of_messages { tx.send(start + i); i += 1; }
}
pub fn | () {
println!("Check that we don't deadlock.");
let (tx, rx) = channel();
task::try(proc() { start(&tx, 0, 10) });
println!("Joined task");
}
| main | identifier_name |
config.rs | use std::io::Read;
use std;
use serde_json;
#[derive(Serialize, Deserialize)]
pub struct ConfigData{
pub username:String,
pub password:String,
pub channels:Vec<String>,
pub admins:Vec<String>,
pub nyaa:Nyaa,
}
#[derive(Serialize, Deserialize,Clone)]
pub struct Nyaa{
pub delay:u64,
}
#[derive(Debug)]
pub enum ConfigErr{
Parse,
Open,
Read
}
impl ConfigData{
pub fn new(file: &str)->Result<ConfigData,ConfigErr>{
let s = try!(file_to_string(file));
serde_json::from_str(&s).map_err(|_|ConfigErr::Parse)
}
}
fn file_to_string(file: &str)->Result<String,ConfigErr>{
let mut f = try!(std::fs::File::open(file).map_err(|_|ConfigErr::Open));
let mut s = String::new();
match f.read_to_string(&mut s){
Ok(_)=>Ok(s),
Err(_)=>Err(ConfigErr::Read),
}
}
#[cfg(test)]
mod test {
#[test]
fn new_config_data(){ | assert_eq!("name",cd.username());
assert_eq!("oauth:1234",cd.password());
assert_eq!("___4Header",cd.channels()[0]);
assert_eq!("PagChomp",cd.channels()[1]);
assert_eq!("Keepo",cd.channels()[2]);
assert_eq!(3,cd.channels().len());
assert_eq!("443297327",cd.admins()[0]);
assert_eq!("443417327",cd.admins()[1]);
assert_eq!(2,cd.admins().len());
assert_eq!(100,cd.nyaa().delay().to_owned());
}
} | let mut cd = ConfigData::new("tests/config_test.json").unwrap(); | random_line_split |
config.rs | use std::io::Read;
use std;
use serde_json;
#[derive(Serialize, Deserialize)]
pub struct ConfigData{
pub username:String,
pub password:String,
pub channels:Vec<String>,
pub admins:Vec<String>,
pub nyaa:Nyaa,
}
#[derive(Serialize, Deserialize,Clone)]
pub struct | {
pub delay:u64,
}
#[derive(Debug)]
pub enum ConfigErr{
Parse,
Open,
Read
}
impl ConfigData{
pub fn new(file: &str)->Result<ConfigData,ConfigErr>{
let s = try!(file_to_string(file));
serde_json::from_str(&s).map_err(|_|ConfigErr::Parse)
}
}
fn file_to_string(file: &str)->Result<String,ConfigErr>{
let mut f = try!(std::fs::File::open(file).map_err(|_|ConfigErr::Open));
let mut s = String::new();
match f.read_to_string(&mut s){
Ok(_)=>Ok(s),
Err(_)=>Err(ConfigErr::Read),
}
}
#[cfg(test)]
mod test {
#[test]
fn new_config_data(){
let mut cd = ConfigData::new("tests/config_test.json").unwrap();
assert_eq!("name",cd.username());
assert_eq!("oauth:1234",cd.password());
assert_eq!("___4Header",cd.channels()[0]);
assert_eq!("PagChomp",cd.channels()[1]);
assert_eq!("Keepo",cd.channels()[2]);
assert_eq!(3,cd.channels().len());
assert_eq!("443297327",cd.admins()[0]);
assert_eq!("443417327",cd.admins()[1]);
assert_eq!(2,cd.admins().len());
assert_eq!(100,cd.nyaa().delay().to_owned());
}
}
| Nyaa | identifier_name |
mv.rs | use board::chess::board::*;
use pgn_traits::pgn::PgnBoard;
use board_game_traits::board::Board;
|
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct ChessReverseMove {
pub from : Square,
pub to : Square,
pub capture : PieceType,
pub prom : bool,
pub old_castling_en_passant : u8,
pub old_half_move_clock : u8,
pub old_past_move_hashes: Option<Vec<u64>>,
}
impl ChessReverseMove {
/// Returns the corresponding undo move for a move
/// Must be called before the move was done on the board
pub fn from_move(c_move: ChessMove, board: &ChessBoard) -> ChessReverseMove {
ChessReverseMove { from: c_move.from, to: c_move.to, capture: board[c_move.to].piece_type(),
prom: c_move.prom.is_some(),
old_castling_en_passant: board.castling_en_passant,
old_half_move_clock: board.half_move_clock,
old_past_move_hashes: None,
}
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct ChessMove {
pub from : Square,
pub to : Square,
pub prom : Option<PieceType>,
}
impl fmt::Display for ChessMove {
fn fmt(&self, fmt : &mut fmt::Formatter) -> Result<(), fmt::Error> {
fmt.write_str(&format!("{}", ChessBoard::start_board().move_to_lan(self))).unwrap();
Ok(())
}
}
impl fmt::Debug for ChessMove {
fn fmt(&self, fmt : &mut fmt::Formatter) -> Result<(), fmt::Error> {
fmt::Display::fmt(self, fmt)
}
}
impl ChessMove {
pub fn new(from : Square, to : Square) -> ChessMove {
ChessMove { from, to, prom: None }
}
pub fn new_prom(from : Square, to : Square, prom : PieceType) -> ChessMove {
ChessMove { from, to, prom: Some(prom) }
}
}
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct ChessReverseNullMove {
pub old_castling_en_passant : u8,
} | use std::fmt; | random_line_split |
mv.rs | use board::chess::board::*;
use pgn_traits::pgn::PgnBoard;
use board_game_traits::board::Board;
use std::fmt;
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct ChessReverseMove {
pub from : Square,
pub to : Square,
pub capture : PieceType,
pub prom : bool,
pub old_castling_en_passant : u8,
pub old_half_move_clock : u8,
pub old_past_move_hashes: Option<Vec<u64>>,
}
impl ChessReverseMove {
/// Returns the corresponding undo move for a move
/// Must be called before the move was done on the board
pub fn from_move(c_move: ChessMove, board: &ChessBoard) -> ChessReverseMove {
ChessReverseMove { from: c_move.from, to: c_move.to, capture: board[c_move.to].piece_type(),
prom: c_move.prom.is_some(),
old_castling_en_passant: board.castling_en_passant,
old_half_move_clock: board.half_move_clock,
old_past_move_hashes: None,
}
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct ChessMove {
pub from : Square,
pub to : Square,
pub prom : Option<PieceType>,
}
impl fmt::Display for ChessMove {
fn | (&self, fmt : &mut fmt::Formatter) -> Result<(), fmt::Error> {
fmt.write_str(&format!("{}", ChessBoard::start_board().move_to_lan(self))).unwrap();
Ok(())
}
}
impl fmt::Debug for ChessMove {
fn fmt(&self, fmt : &mut fmt::Formatter) -> Result<(), fmt::Error> {
fmt::Display::fmt(self, fmt)
}
}
impl ChessMove {
pub fn new(from : Square, to : Square) -> ChessMove {
ChessMove { from, to, prom: None }
}
pub fn new_prom(from : Square, to : Square, prom : PieceType) -> ChessMove {
ChessMove { from, to, prom: Some(prom) }
}
}
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct ChessReverseNullMove {
pub old_castling_en_passant : u8,
} | fmt | identifier_name |
tests.rs | use rocket::testing::MockRequest;
use rocket::http::Method::*;
use rocket::http::{ContentType, Status};
use super::rocket;
fn test_login(username: &str, password: &str, age: isize, status: Status,
body: Option<&'static str>) {
let rocket = rocket();
let mut req = MockRequest::new(Post, "/login")
.header(ContentType::Form)
.body(&format!("username={}&password={}&age={}", username, password, age));
let mut response = req.dispatch_with(&rocket);
let body_str = response.body_string();
println!("Checking: {:?}/{:?}/{:?}/{:?}", username, password, age, body_str);
assert_eq!(response.status(), status);
if let Some(string) = body {
assert!(body_str.map_or(true, |s| s.contains(string)));
}
}
#[test]
fn test_good_login() {
test_login("Sergio", "password", 30, Status::SeeOther, None);
}
const OK: Status = self::Status::Ok;
#[test]
fn test_bad_login() {
test_login("Sergio", "password", 20, OK, Some("Sorry, 20 is too young!"));
test_login("Sergio", "password", 200, OK, Some("Are you sure you're 200?"));
test_login("Sergio", "jk", -100, OK, Some("'-100' is not a valid integer."));
test_login("Sergio", "ok", 30, OK, Some("Wrong password!"));
test_login("Mike", "password", 30, OK, Some("Unrecognized user, 'Mike'."));
}
fn check_bad_form(form_str: &str, status: Status) |
#[test]
fn test_bad_form() {
check_bad_form("&", Status::BadRequest);
check_bad_form("=", Status::BadRequest);
check_bad_form("&&&===&", Status::BadRequest);
check_bad_form("username=Sergio", Status::UnprocessableEntity);
check_bad_form("username=Sergio&", Status::UnprocessableEntity);
check_bad_form("username=Sergio&pass=something", Status::UnprocessableEntity);
check_bad_form("user=Sergio&password=something", Status::UnprocessableEntity);
check_bad_form("password=something", Status::UnprocessableEntity);
}
| {
let rocket = rocket();
let mut req = MockRequest::new(Post, "/login")
.header(ContentType::Form)
.body(form_str);
let response = req.dispatch_with(&rocket);
assert_eq!(response.status(), status);
} | identifier_body |
tests.rs | use rocket::testing::MockRequest;
use rocket::http::Method::*;
use rocket::http::{ContentType, Status};
use super::rocket;
fn test_login(username: &str, password: &str, age: isize, status: Status,
body: Option<&'static str>) {
let rocket = rocket();
let mut req = MockRequest::new(Post, "/login")
.header(ContentType::Form)
.body(&format!("username={}&password={}&age={}", username, password, age));
let mut response = req.dispatch_with(&rocket);
let body_str = response.body_string();
println!("Checking: {:?}/{:?}/{:?}/{:?}", username, password, age, body_str);
assert_eq!(response.status(), status);
if let Some(string) = body {
assert!(body_str.map_or(true, |s| s.contains(string)));
}
}
#[test]
fn | () {
test_login("Sergio", "password", 30, Status::SeeOther, None);
}
const OK: Status = self::Status::Ok;
#[test]
fn test_bad_login() {
test_login("Sergio", "password", 20, OK, Some("Sorry, 20 is too young!"));
test_login("Sergio", "password", 200, OK, Some("Are you sure you're 200?"));
test_login("Sergio", "jk", -100, OK, Some("'-100' is not a valid integer."));
test_login("Sergio", "ok", 30, OK, Some("Wrong password!"));
test_login("Mike", "password", 30, OK, Some("Unrecognized user, 'Mike'."));
}
fn check_bad_form(form_str: &str, status: Status) {
let rocket = rocket();
let mut req = MockRequest::new(Post, "/login")
.header(ContentType::Form)
.body(form_str);
let response = req.dispatch_with(&rocket);
assert_eq!(response.status(), status);
}
#[test]
fn test_bad_form() {
check_bad_form("&", Status::BadRequest);
check_bad_form("=", Status::BadRequest);
check_bad_form("&&&===&", Status::BadRequest);
check_bad_form("username=Sergio", Status::UnprocessableEntity);
check_bad_form("username=Sergio&", Status::UnprocessableEntity);
check_bad_form("username=Sergio&pass=something", Status::UnprocessableEntity);
check_bad_form("user=Sergio&password=something", Status::UnprocessableEntity);
check_bad_form("password=something", Status::UnprocessableEntity);
}
| test_good_login | identifier_name |
tests.rs | use rocket::testing::MockRequest;
use rocket::http::Method::*;
use rocket::http::{ContentType, Status};
use super::rocket;
fn test_login(username: &str, password: &str, age: isize, status: Status,
body: Option<&'static str>) {
let rocket = rocket();
let mut req = MockRequest::new(Post, "/login")
.header(ContentType::Form)
.body(&format!("username={}&password={}&age={}", username, password, age));
let mut response = req.dispatch_with(&rocket);
let body_str = response.body_string();
println!("Checking: {:?}/{:?}/{:?}/{:?}", username, password, age, body_str);
assert_eq!(response.status(), status);
if let Some(string) = body |
}
#[test]
fn test_good_login() {
test_login("Sergio", "password", 30, Status::SeeOther, None);
}
const OK: Status = self::Status::Ok;
#[test]
fn test_bad_login() {
test_login("Sergio", "password", 20, OK, Some("Sorry, 20 is too young!"));
test_login("Sergio", "password", 200, OK, Some("Are you sure you're 200?"));
test_login("Sergio", "jk", -100, OK, Some("'-100' is not a valid integer."));
test_login("Sergio", "ok", 30, OK, Some("Wrong password!"));
test_login("Mike", "password", 30, OK, Some("Unrecognized user, 'Mike'."));
}
fn check_bad_form(form_str: &str, status: Status) {
let rocket = rocket();
let mut req = MockRequest::new(Post, "/login")
.header(ContentType::Form)
.body(form_str);
let response = req.dispatch_with(&rocket);
assert_eq!(response.status(), status);
}
#[test]
fn test_bad_form() {
check_bad_form("&", Status::BadRequest);
check_bad_form("=", Status::BadRequest);
check_bad_form("&&&===&", Status::BadRequest);
check_bad_form("username=Sergio", Status::UnprocessableEntity);
check_bad_form("username=Sergio&", Status::UnprocessableEntity);
check_bad_form("username=Sergio&pass=something", Status::UnprocessableEntity);
check_bad_form("user=Sergio&password=something", Status::UnprocessableEntity);
check_bad_form("password=something", Status::UnprocessableEntity);
}
| {
assert!(body_str.map_or(true, |s| s.contains(string)));
} | conditional_block |
tests.rs | use rocket::testing::MockRequest;
use rocket::http::Method::*;
use rocket::http::{ContentType, Status};
use super::rocket;
fn test_login(username: &str, password: &str, age: isize, status: Status,
body: Option<&'static str>) {
let rocket = rocket();
let mut req = MockRequest::new(Post, "/login")
.header(ContentType::Form)
.body(&format!("username={}&password={}&age={}", username, password, age));
let mut response = req.dispatch_with(&rocket);
let body_str = response.body_string();
println!("Checking: {:?}/{:?}/{:?}/{:?}", username, password, age, body_str);
assert_eq!(response.status(), status);
if let Some(string) = body {
assert!(body_str.map_or(true, |s| s.contains(string)));
}
}
#[test]
fn test_good_login() {
test_login("Sergio", "password", 30, Status::SeeOther, None); | #[test]
fn test_bad_login() {
test_login("Sergio", "password", 20, OK, Some("Sorry, 20 is too young!"));
test_login("Sergio", "password", 200, OK, Some("Are you sure you're 200?"));
test_login("Sergio", "jk", -100, OK, Some("'-100' is not a valid integer."));
test_login("Sergio", "ok", 30, OK, Some("Wrong password!"));
test_login("Mike", "password", 30, OK, Some("Unrecognized user, 'Mike'."));
}
fn check_bad_form(form_str: &str, status: Status) {
let rocket = rocket();
let mut req = MockRequest::new(Post, "/login")
.header(ContentType::Form)
.body(form_str);
let response = req.dispatch_with(&rocket);
assert_eq!(response.status(), status);
}
#[test]
fn test_bad_form() {
check_bad_form("&", Status::BadRequest);
check_bad_form("=", Status::BadRequest);
check_bad_form("&&&===&", Status::BadRequest);
check_bad_form("username=Sergio", Status::UnprocessableEntity);
check_bad_form("username=Sergio&", Status::UnprocessableEntity);
check_bad_form("username=Sergio&pass=something", Status::UnprocessableEntity);
check_bad_form("user=Sergio&password=something", Status::UnprocessableEntity);
check_bad_form("password=something", Status::UnprocessableEntity);
} | }
const OK: Status = self::Status::Ok;
| random_line_split |
build.rs | #![warn(rust_2018_idioms)]
use std::env;
include!("no_atomic.rs");
// The rustc-cfg listed below are considered public API, but it is *unstable*
// and outside of the normal semver guarantees:
//
// - `crossbeam_no_atomic_cas`
// Assume the target does *not* support atomic CAS operations.
// This is usually detected automatically by the build script, but you may
// need to enable it manually when building for custom targets or using
// non-cargo build systems that don't run the build script.
//
// With the exceptions mentioned above, the rustc-cfg strings below are
// *not* public API. Please let us know by opening a GitHub issue if your build
// environment requires some way to enable these cfgs other than by executing
// our build script.
fn main() |
println!("cargo:rerun-if-changed=no_atomic.rs");
}
| {
let target = match env::var("TARGET") {
Ok(target) => target,
Err(e) => {
println!(
"cargo:warning={}: unable to get TARGET environment variable: {}",
env!("CARGO_PKG_NAME"),
e
);
return;
}
};
// Note that this is `no_*`, not `has_*`. This allows treating
// `cfg(target_has_atomic = "ptr")` as true when the build script doesn't
// run. This is needed for compatibility with non-cargo build systems that
// don't run the build script.
if NO_ATOMIC_CAS.contains(&&*target) {
println!("cargo:rustc-cfg=crossbeam_no_atomic_cas");
} | identifier_body |
build.rs | #![warn(rust_2018_idioms)]
use std::env;
include!("no_atomic.rs");
// The rustc-cfg listed below are considered public API, but it is *unstable*
// and outside of the normal semver guarantees:
//
// - `crossbeam_no_atomic_cas`
// Assume the target does *not* support atomic CAS operations.
// This is usually detected automatically by the build script, but you may
// need to enable it manually when building for custom targets or using
// non-cargo build systems that don't run the build script.
//
// With the exceptions mentioned above, the rustc-cfg strings below are
// *not* public API. Please let us know by opening a GitHub issue if your build
// environment requires some way to enable these cfgs other than by executing
// our build script.
fn | () {
let target = match env::var("TARGET") {
Ok(target) => target,
Err(e) => {
println!(
"cargo:warning={}: unable to get TARGET environment variable: {}",
env!("CARGO_PKG_NAME"),
e
);
return;
}
};
// Note that this is `no_*`, not `has_*`. This allows treating
// `cfg(target_has_atomic = "ptr")` as true when the build script doesn't
// run. This is needed for compatibility with non-cargo build systems that
// don't run the build script.
if NO_ATOMIC_CAS.contains(&&*target) {
println!("cargo:rustc-cfg=crossbeam_no_atomic_cas");
}
println!("cargo:rerun-if-changed=no_atomic.rs");
}
| main | identifier_name |
build.rs | #![warn(rust_2018_idioms)]
use std::env;
include!("no_atomic.rs");
// The rustc-cfg listed below are considered public API, but it is *unstable*
// and outside of the normal semver guarantees:
//
// - `crossbeam_no_atomic_cas`
// Assume the target does *not* support atomic CAS operations.
// This is usually detected automatically by the build script, but you may
// need to enable it manually when building for custom targets or using
// non-cargo build systems that don't run the build script.
//
// With the exceptions mentioned above, the rustc-cfg strings below are
// *not* public API. Please let us know by opening a GitHub issue if your build
// environment requires some way to enable these cfgs other than by executing
// our build script. | Err(e) => {
println!(
"cargo:warning={}: unable to get TARGET environment variable: {}",
env!("CARGO_PKG_NAME"),
e
);
return;
}
};
// Note that this is `no_*`, not `has_*`. This allows treating
// `cfg(target_has_atomic = "ptr")` as true when the build script doesn't
// run. This is needed for compatibility with non-cargo build systems that
// don't run the build script.
if NO_ATOMIC_CAS.contains(&&*target) {
println!("cargo:rustc-cfg=crossbeam_no_atomic_cas");
}
println!("cargo:rerun-if-changed=no_atomic.rs");
} | fn main() {
let target = match env::var("TARGET") {
Ok(target) => target, | random_line_split |
lr.template.rs | #![allow(dead_code)]
#![allow(unused_mut)]
#![allow(unreachable_code)]
extern crate onig;
#[macro_use]
extern crate lazy_static;
use onig::{Regex, Syntax, RegexOptions};
use std::collections::HashMap;
/**
* Stack value.
*/
enum SV {
Undefined,
{{{SV_ENUM}}}
}
/**
* Lex rules.
*/
static LEX_RULES: {{{LEX_RULES}}};
/**
* EOF value.
*/
static EOF: &'static str = "$";
/**
* A macro for map literals.
*
* hashmap!{ 1 => "one", 2 => "two" };
*/
macro_rules! hashmap(
{ $($key:expr => $value:expr),+ } => {
{
let mut m = ::std::collections::HashMap::new();
$(
m.insert($key, $value);
)+
m
}
};
);
/**
* Unwraps a SV for the result. The result type is known from the grammar.
*/
macro_rules! get_result {
($r:expr, $ty:ident) => (match $r { SV::$ty(v) => v, _ => unreachable!() });
}
/**
* Pops a SV with needed enum value.
*/
macro_rules! pop {
($s:expr, $ty:ident) => (get_result!($s.pop().unwrap(), $ty));
}
/**
* Productions data.
*
* 0 - encoded non-terminal, 1 - length of RHS to pop from the stack
*/
static PRODUCTIONS : {{{PRODUCTIONS}}};
/**
* Table entry.
*/
enum TE {
Accept,
// Shift, and transit to the state.
Shift(usize),
// Reduce by a production number.
Reduce(usize),
// Simple state transition.
Transit(usize),
}
lazy_static! {
/**
* Lexical rules grouped by lexer state (by start condition).
*/
static ref LEX_RULES_BY_START_CONDITIONS: HashMap<&'static str, Vec<i32>> = {{{LEX_RULES_BY_START_CONDITIONS}}};
/**
* Maps a string name of a token type to its encoded number (the first
* token number starts after all numbers for non-terminal).
*/
static ref TOKENS_MAP: HashMap<&'static str, i32> = {{{TOKENS}}};
/**
* Parsing table.
*
* Vector index is the state number, value is a map
* from an encoded symbol to table entry (TE).
*/
static ref TABLE: Vec<HashMap<i32, TE>>= {{{TABLE}}};
}
// ------------------------------------
// Module include prologue.
//
// Should include at least result type:
//
// type TResult = <...>;
//
// Can also include parsing hooks:
//
// fn on_parse_begin(parser: &mut Parser, string: &str) {
// ...
// }
//
// fn on_parse_end(parser: &mut Parser, result: &TResult) {
// ...
// }
//
{{{MODULE_INCLUDE}}}
// --- end of Module include ---------
{{{TOKENIZER}}}
// ------------------------------------------------------------------
// Parser.
/**
* Parser.
*/
pub struct Parser<'t> {
/**
* Parsing stack: semantic values.
*/
values_stack: Vec<SV>,
/**
* Parsing stack: state numbers.
*/
states_stack: Vec<usize>,
/**
* Tokenizer instance.
*/
tokenizer: Tokenizer<'t>,
/**
* Semantic action handlers.
*/
handlers: [fn(&mut Parser<'t>) -> SV; {{{PRODUCTION_HANDLERS_COUNT}}}],
}
impl<'t> Parser<'t> {
/**
* Creates a new Parser instance.
*/
pub fn new() -> Parser<'t> {
Parser {
// Stacks.
values_stack: Vec::new(),
states_stack: Vec::new(),
tokenizer: Tokenizer::new(),
handlers: {{{PRODUCTION_HANDLERS_ARRAY}}}
}
}
/**
* Parses a string.
*/
pub fn parse(&mut self, string: &'t str) -> TResult {
{{{ON_PARSE_BEGIN_CALL}}}
// Initialize the tokenizer and the string.
self.tokenizer.init_string(string);
// Initialize the stacks.
self.values_stack.clear();
// Initial 0 state.
self.states_stack.clear();
self.states_stack.push(0);
let mut token = self.tokenizer.get_next_token();
let mut shifted_token = token;
loop {
let state = *self.states_stack.last().unwrap();
let column = token.kind;
if !TABLE[state].contains_key(&column) |
let entry = &TABLE[state][&column];
match entry {
// Shift a token, go to state.
&TE::Shift(next_state) => {
// Push token.
self.values_stack.push(SV::_0(token));
// Push next state number: "s5" -> 5
self.states_stack.push(next_state as usize);
shifted_token = token;
token = self.tokenizer.get_next_token();
},
// Reduce by production.
&TE::Reduce(production_number) => {
let production = PRODUCTIONS[production_number];
self.tokenizer.yytext = shifted_token.value;
self.tokenizer.yyleng = shifted_token.value.len();
let mut rhs_length = production[1];
while rhs_length > 0 {
self.states_stack.pop();
rhs_length = rhs_length - 1;
}
// Call the handler, push result onto the stack.
let result_value = self.handlers[production_number](self);
let previous_state = *self.states_stack.last().unwrap();
let symbol_to_reduce_with = production[0];
// Then push LHS onto the stack.
self.values_stack.push(result_value);
let next_state = match &TABLE[previous_state][&symbol_to_reduce_with] {
&TE::Transit(next_state) => next_state,
_ => unreachable!(),
};
self.states_stack.push(next_state);
},
// Accept the string.
&TE::Accept => {
// Pop state number.
self.states_stack.pop();
// Pop the parsed value.
let parsed = self.values_stack.pop().unwrap();
if self.states_stack.len() != 1 ||
self.states_stack.pop().unwrap() != 0 ||
self.tokenizer.has_more_tokens() {
self.unexpected_token(&token);
}
let result = get_result!(parsed, {{{RESULT_TYPE}}});
{{{ON_PARSE_END_CALL}}}
return result;
},
_ => unreachable!(),
}
}
unreachable!();
}
fn unexpected_token(&self, token: &Token) {
{{{ON_PARSE_ERROR_CALL}}}
}
{{{PRODUCTION_HANDLERS}}}
}
| {
self.unexpected_token(&token);
break;
} | conditional_block |
lr.template.rs | #![allow(dead_code)]
#![allow(unused_mut)]
#![allow(unreachable_code)]
extern crate onig;
#[macro_use]
extern crate lazy_static;
use onig::{Regex, Syntax, RegexOptions};
use std::collections::HashMap;
/**
* Stack value.
*/
enum SV {
Undefined,
{{{SV_ENUM}}}
}
/**
* Lex rules.
*/
static LEX_RULES: {{{LEX_RULES}}};
/**
* EOF value.
*/
static EOF: &'static str = "$";
/**
* A macro for map literals.
*
* hashmap!{ 1 => "one", 2 => "two" };
*/
macro_rules! hashmap(
{ $($key:expr => $value:expr),+ } => {
{
let mut m = ::std::collections::HashMap::new();
$(
m.insert($key, $value);
)+
m
}
};
);
/**
* Unwraps a SV for the result. The result type is known from the grammar.
*/
macro_rules! get_result {
($r:expr, $ty:ident) => (match $r { SV::$ty(v) => v, _ => unreachable!() });
}
/**
* Pops a SV with needed enum value.
*/
macro_rules! pop {
($s:expr, $ty:ident) => (get_result!($s.pop().unwrap(), $ty));
}
/**
* Productions data.
*
* 0 - encoded non-terminal, 1 - length of RHS to pop from the stack
*/
static PRODUCTIONS : {{{PRODUCTIONS}}};
/**
* Table entry.
*/
enum | {
Accept,
// Shift, and transit to the state.
Shift(usize),
// Reduce by a production number.
Reduce(usize),
// Simple state transition.
Transit(usize),
}
lazy_static! {
/**
* Lexical rules grouped by lexer state (by start condition).
*/
static ref LEX_RULES_BY_START_CONDITIONS: HashMap<&'static str, Vec<i32>> = {{{LEX_RULES_BY_START_CONDITIONS}}};
/**
* Maps a string name of a token type to its encoded number (the first
* token number starts after all numbers for non-terminal).
*/
static ref TOKENS_MAP: HashMap<&'static str, i32> = {{{TOKENS}}};
/**
* Parsing table.
*
* Vector index is the state number, value is a map
* from an encoded symbol to table entry (TE).
*/
static ref TABLE: Vec<HashMap<i32, TE>>= {{{TABLE}}};
}
// ------------------------------------
// Module include prologue.
//
// Should include at least result type:
//
// type TResult = <...>;
//
// Can also include parsing hooks:
//
// fn on_parse_begin(parser: &mut Parser, string: &str) {
// ...
// }
//
// fn on_parse_end(parser: &mut Parser, result: &TResult) {
// ...
// }
//
{{{MODULE_INCLUDE}}}
// --- end of Module include ---------
{{{TOKENIZER}}}
// ------------------------------------------------------------------
// Parser.
/**
* Parser.
*/
pub struct Parser<'t> {
/**
* Parsing stack: semantic values.
*/
values_stack: Vec<SV>,
/**
* Parsing stack: state numbers.
*/
states_stack: Vec<usize>,
/**
* Tokenizer instance.
*/
tokenizer: Tokenizer<'t>,
/**
* Semantic action handlers.
*/
handlers: [fn(&mut Parser<'t>) -> SV; {{{PRODUCTION_HANDLERS_COUNT}}}],
}
impl<'t> Parser<'t> {
/**
* Creates a new Parser instance.
*/
pub fn new() -> Parser<'t> {
Parser {
// Stacks.
values_stack: Vec::new(),
states_stack: Vec::new(),
tokenizer: Tokenizer::new(),
handlers: {{{PRODUCTION_HANDLERS_ARRAY}}}
}
}
/**
* Parses a string.
*/
pub fn parse(&mut self, string: &'t str) -> TResult {
{{{ON_PARSE_BEGIN_CALL}}}
// Initialize the tokenizer and the string.
self.tokenizer.init_string(string);
// Initialize the stacks.
self.values_stack.clear();
// Initial 0 state.
self.states_stack.clear();
self.states_stack.push(0);
let mut token = self.tokenizer.get_next_token();
let mut shifted_token = token;
loop {
let state = *self.states_stack.last().unwrap();
let column = token.kind;
if !TABLE[state].contains_key(&column) {
self.unexpected_token(&token);
break;
}
let entry = &TABLE[state][&column];
match entry {
// Shift a token, go to state.
&TE::Shift(next_state) => {
// Push token.
self.values_stack.push(SV::_0(token));
// Push next state number: "s5" -> 5
self.states_stack.push(next_state as usize);
shifted_token = token;
token = self.tokenizer.get_next_token();
},
// Reduce by production.
&TE::Reduce(production_number) => {
let production = PRODUCTIONS[production_number];
self.tokenizer.yytext = shifted_token.value;
self.tokenizer.yyleng = shifted_token.value.len();
let mut rhs_length = production[1];
while rhs_length > 0 {
self.states_stack.pop();
rhs_length = rhs_length - 1;
}
// Call the handler, push result onto the stack.
let result_value = self.handlers[production_number](self);
let previous_state = *self.states_stack.last().unwrap();
let symbol_to_reduce_with = production[0];
// Then push LHS onto the stack.
self.values_stack.push(result_value);
let next_state = match &TABLE[previous_state][&symbol_to_reduce_with] {
&TE::Transit(next_state) => next_state,
_ => unreachable!(),
};
self.states_stack.push(next_state);
},
// Accept the string.
&TE::Accept => {
// Pop state number.
self.states_stack.pop();
// Pop the parsed value.
let parsed = self.values_stack.pop().unwrap();
if self.states_stack.len() != 1 ||
self.states_stack.pop().unwrap() != 0 ||
self.tokenizer.has_more_tokens() {
self.unexpected_token(&token);
}
let result = get_result!(parsed, {{{RESULT_TYPE}}});
{{{ON_PARSE_END_CALL}}}
return result;
},
_ => unreachable!(),
}
}
unreachable!();
}
fn unexpected_token(&self, token: &Token) {
{{{ON_PARSE_ERROR_CALL}}}
}
{{{PRODUCTION_HANDLERS}}}
}
| TE | identifier_name |