file_name (large_string, lengths 4–69) | prefix (large_string, lengths 0–26.7k) | suffix (large_string, lengths 0–24.8k) | middle (large_string, lengths 0–2.12k) | fim_type (large_string, 4 classes)
---|---|---|---|---
htmllielement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLLIElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLLIElementDerived;
use dom::bindings::js::{JSRef, Temporary};
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use util::str::DOMString;
#[dom_struct]
pub struct HTMLLIElement {
htmlelement: HTMLElement,
}
impl HTMLLIElementDerived for EventTarget {
fn
|
(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLLIElement)))
}
}
impl HTMLLIElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> HTMLLIElement {
HTMLLIElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLLIElement, localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: JSRef<Document>) -> Temporary<HTMLLIElement> {
let element = HTMLLIElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLLIElementBinding::Wrap)
}
}
|
is_htmllielement
|
identifier_name
|
yubi.rs
|
use yubico::config::{Command, Config, Mode, Slot};
use yubico::configure::DeviceModeConfig;
use yubico::hmacmode::HmacKey;
use yubico::Yubico;
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng};
use hex;
use std::ops::Deref;
pub fn setup_secret() -> Result<(), String> {
let mut yubi = Yubico::new();
// We assume the first device
if let Ok(device) = yubi.find_yubikey() {
let config = Config::default()
.set_vendor_id(device.vendor_id)
.set_product_id(device.product_id)
.set_command(Command::Configuration2);
let mut rng = thread_rng();
let require_press_button = true;
let secret: String = rng.sample_iter(&Alphanumeric).take(20).collect();
let hmac_key: HmacKey = HmacKey::from_slice(secret.as_bytes());
let mut device_config = DeviceModeConfig::default();
device_config.challenge_response_hmac(&hmac_key, false, require_press_button);
if let Err(_) = yubi.write_config(config, &mut device_config) {
return Err("Failed to write configation".into());
} else {
return Ok(());
}
}
Err("Yubikey not found".into())
|
}
pub fn retrieve_secret(token: [u8; 20]) -> Option<String> {
let mut yubi = Yubico::new();
if let Ok(device) = yubi.find_yubikey() {
let config = Config::default()
.set_vendor_id(device.vendor_id)
.set_product_id(device.product_id)
.set_variable_size(true)
.set_mode(Mode::Sha1)
.set_slot(Slot::Slot2);
let hmac_result = yubi.challenge_response_hmac(&token, config).unwrap();
let v: &[u8] = hmac_result.deref();
return Some(hex::encode(v));
} else {
return None;
}
}
|
random_line_split
|
|
yubi.rs
|
use yubico::config::{Command, Config, Mode, Slot};
use yubico::configure::DeviceModeConfig;
use yubico::hmacmode::HmacKey;
use yubico::Yubico;
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng};
use hex;
use std::ops::Deref;
pub fn setup_secret() -> Result<(), String> {
let mut yubi = Yubico::new();
// We assume the first device
if let Ok(device) = yubi.find_yubikey() {
let config = Config::default()
.set_vendor_id(device.vendor_id)
.set_product_id(device.product_id)
.set_command(Command::Configuration2);
let mut rng = thread_rng();
let require_press_button = true;
let secret: String = rng.sample_iter(&Alphanumeric).take(20).collect();
let hmac_key: HmacKey = HmacKey::from_slice(secret.as_bytes());
let mut device_config = DeviceModeConfig::default();
device_config.challenge_response_hmac(&hmac_key, false, require_press_button);
if let Err(_) = yubi.write_config(config, &mut device_config) {
return Err("Failed to write configation".into());
} else {
return Ok(());
}
}
Err("Yubikey not found".into())
}
pub fn retrieve_secret(token: [u8; 20]) -> Option<String> {
let mut yubi = Yubico::new();
if let Ok(device) = yubi.find_yubikey() {
let config = Config::default()
.set_vendor_id(device.vendor_id)
.set_product_id(device.product_id)
.set_variable_size(true)
.set_mode(Mode::Sha1)
.set_slot(Slot::Slot2);
let hmac_result = yubi.challenge_response_hmac(&token, config).unwrap();
let v: &[u8] = hmac_result.deref();
return Some(hex::encode(v));
} else
|
}
|
{
return None;
}
|
conditional_block
|
yubi.rs
|
use yubico::config::{Command, Config, Mode, Slot};
use yubico::configure::DeviceModeConfig;
use yubico::hmacmode::HmacKey;
use yubico::Yubico;
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng};
use hex;
use std::ops::Deref;
pub fn setup_secret() -> Result<(), String>
|
} else {
return Ok(());
}
}
Err("Yubikey not found".into())
}
pub fn retrieve_secret(token: [u8; 20]) -> Option<String> {
let mut yubi = Yubico::new();
if let Ok(device) = yubi.find_yubikey() {
let config = Config::default()
.set_vendor_id(device.vendor_id)
.set_product_id(device.product_id)
.set_variable_size(true)
.set_mode(Mode::Sha1)
.set_slot(Slot::Slot2);
let hmac_result = yubi.challenge_response_hmac(&token, config).unwrap();
let v: &[u8] = hmac_result.deref();
return Some(hex::encode(v));
} else {
return None;
}
}
|
{
let mut yubi = Yubico::new();
// We assume the first device
if let Ok(device) = yubi.find_yubikey() {
let config = Config::default()
.set_vendor_id(device.vendor_id)
.set_product_id(device.product_id)
.set_command(Command::Configuration2);
let mut rng = thread_rng();
let require_press_button = true;
let secret: String = rng.sample_iter(&Alphanumeric).take(20).collect();
let hmac_key: HmacKey = HmacKey::from_slice(secret.as_bytes());
let mut device_config = DeviceModeConfig::default();
device_config.challenge_response_hmac(&hmac_key, false, require_press_button);
if let Err(_) = yubi.write_config(config, &mut device_config) {
return Err("Failed to write configation".into());
|
identifier_body
|
yubi.rs
|
use yubico::config::{Command, Config, Mode, Slot};
use yubico::configure::DeviceModeConfig;
use yubico::hmacmode::HmacKey;
use yubico::Yubico;
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng};
use hex;
use std::ops::Deref;
pub fn
|
() -> Result<(), String> {
let mut yubi = Yubico::new();
// We assume the first device
if let Ok(device) = yubi.find_yubikey() {
let config = Config::default()
.set_vendor_id(device.vendor_id)
.set_product_id(device.product_id)
.set_command(Command::Configuration2);
let mut rng = thread_rng();
let require_press_button = true;
let secret: String = rng.sample_iter(&Alphanumeric).take(20).collect();
let hmac_key: HmacKey = HmacKey::from_slice(secret.as_bytes());
let mut device_config = DeviceModeConfig::default();
device_config.challenge_response_hmac(&hmac_key, false, require_press_button);
if let Err(_) = yubi.write_config(config, &mut device_config) {
return Err("Failed to write configation".into());
} else {
return Ok(());
}
}
Err("Yubikey not found".into())
}
pub fn retrieve_secret(token: [u8; 20]) -> Option<String> {
let mut yubi = Yubico::new();
if let Ok(device) = yubi.find_yubikey() {
let config = Config::default()
.set_vendor_id(device.vendor_id)
.set_product_id(device.product_id)
.set_variable_size(true)
.set_mode(Mode::Sha1)
.set_slot(Slot::Slot2);
let hmac_result = yubi.challenge_response_hmac(&token, config).unwrap();
let v: &[u8] = hmac_result.deref();
return Some(hex::encode(v));
} else {
return None;
}
}
|
setup_secret
|
identifier_name
|
phantom.rs
|
/*
* Copyright 2017 Sreejith Krishnan R
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use super::{Drawable, BoundingBox, MeasureMode};
use ::platform::Context;
use ::paint::{Canvas, Point};
pub struct Phantom<'a> {
wrapped: Option<Box<Drawable + 'a>>,
bounding_box: BoundingBox,
}
impl<'a> Phantom<'a> {
pub fn new() -> Phantom<'a> {
Phantom { wrapped: None, bounding_box: BoundingBox::default() }
}
pub fn wrap(&mut self, wrapped: Option<Box<Drawable + 'a>>) {
self.wrapped = wrapped;
}
}
impl<'a> Drawable for Phantom<'a> {
fn draw(&self, _: &Canvas, _: &Point) {
// do nothing
}
fn
|
(&mut self, context: &Context, width_mode: &MeasureMode, height_mode: &MeasureMode) {
if let Some(ref mut wrapped) = self.wrapped {
wrapped.calculate(context, width_mode, height_mode);
self.bounding_box = wrapped.bounding_box().clone();
} else {
self.bounding_box = BoundingBox::default()
}
}
fn bounding_box(&self) -> &BoundingBox {
&self.bounding_box
}
}
|
calculate
|
identifier_name
|
phantom.rs
|
/*
* Copyright 2017 Sreejith Krishnan R
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use super::{Drawable, BoundingBox, MeasureMode};
use ::platform::Context;
use ::paint::{Canvas, Point};
pub struct Phantom<'a> {
wrapped: Option<Box<Drawable + 'a>>,
bounding_box: BoundingBox,
}
impl<'a> Phantom<'a> {
pub fn new() -> Phantom<'a> {
Phantom { wrapped: None, bounding_box: BoundingBox::default() }
}
pub fn wrap(&mut self, wrapped: Option<Box<Drawable + 'a>>) {
self.wrapped = wrapped;
}
}
impl<'a> Drawable for Phantom<'a> {
fn draw(&self, _: &Canvas, _: &Point) {
// do nothing
}
fn calculate(&mut self, context: &Context, width_mode: &MeasureMode, height_mode: &MeasureMode) {
if let Some(ref mut wrapped) = self.wrapped
|
else {
self.bounding_box = BoundingBox::default()
}
}
fn bounding_box(&self) -> &BoundingBox {
&self.bounding_box
}
}
|
{
wrapped.calculate(context, width_mode, height_mode);
self.bounding_box = wrapped.bounding_box().clone();
}
|
conditional_block
|
phantom.rs
|
/*
* Copyright 2017 Sreejith Krishnan R
|
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use super::{Drawable, BoundingBox, MeasureMode};
use ::platform::Context;
use ::paint::{Canvas, Point};
pub struct Phantom<'a> {
wrapped: Option<Box<Drawable + 'a>>,
bounding_box: BoundingBox,
}
impl<'a> Phantom<'a> {
pub fn new() -> Phantom<'a> {
Phantom { wrapped: None, bounding_box: BoundingBox::default() }
}
pub fn wrap(&mut self, wrapped: Option<Box<Drawable + 'a>>) {
self.wrapped = wrapped;
}
}
impl<'a> Drawable for Phantom<'a> {
fn draw(&self, _: &Canvas, _: &Point) {
// do nothing
}
fn calculate(&mut self, context: &Context, width_mode: &MeasureMode, height_mode: &MeasureMode) {
if let Some(ref mut wrapped) = self.wrapped {
wrapped.calculate(context, width_mode, height_mode);
self.bounding_box = wrapped.bounding_box().clone();
} else {
self.bounding_box = BoundingBox::default()
}
}
fn bounding_box(&self) -> &BoundingBox {
&self.bounding_box
}
}
|
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
|
random_line_split
|
htmlheadingelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLHeadingElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLHeadingElementDerived;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use util::str::DOMString;
#[derive(JSTraceable)]
pub enum HeadingLevel {
Heading1,
Heading2,
Heading3,
Heading4,
Heading5,
Heading6,
}
#[dom_struct]
pub struct HTMLHeadingElement {
htmlelement: HTMLElement,
level: HeadingLevel,
}
impl HTMLHeadingElementDerived for EventTarget {
fn
|
(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLHeadingElement)))
}
}
impl HTMLHeadingElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document,
level: HeadingLevel) -> HTMLHeadingElement {
HTMLHeadingElement {
htmlelement:
HTMLElement::new_inherited(HTMLElementTypeId::HTMLHeadingElement, localName, prefix, document),
level: level,
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document,
level: HeadingLevel) -> Root<HTMLHeadingElement> {
let element = HTMLHeadingElement::new_inherited(localName, prefix, document, level);
Node::reflect_node(box element, document, HTMLHeadingElementBinding::Wrap)
}
}
|
is_htmlheadingelement
|
identifier_name
|
htmlheadingelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLHeadingElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLHeadingElementDerived;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use util::str::DOMString;
#[derive(JSTraceable)]
pub enum HeadingLevel {
Heading1,
Heading2,
Heading3,
Heading4,
Heading5,
Heading6,
}
#[dom_struct]
pub struct HTMLHeadingElement {
htmlelement: HTMLElement,
level: HeadingLevel,
}
impl HTMLHeadingElementDerived for EventTarget {
fn is_htmlheadingelement(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLHeadingElement)))
}
}
impl HTMLHeadingElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document,
level: HeadingLevel) -> HTMLHeadingElement {
HTMLHeadingElement {
htmlelement:
HTMLElement::new_inherited(HTMLElementTypeId::HTMLHeadingElement, localName, prefix, document),
level: level,
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document,
level: HeadingLevel) -> Root<HTMLHeadingElement>
|
}
|
{
let element = HTMLHeadingElement::new_inherited(localName, prefix, document, level);
Node::reflect_node(box element, document, HTMLHeadingElementBinding::Wrap)
}
|
identifier_body
|
htmlheadingelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLHeadingElementBinding;
use dom::bindings::codegen::InheritTypes::HTMLHeadingElementDerived;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeTypeId};
use util::str::DOMString;
#[derive(JSTraceable)]
pub enum HeadingLevel {
Heading1,
Heading2,
Heading3,
Heading4,
Heading5,
|
pub struct HTMLHeadingElement {
htmlelement: HTMLElement,
level: HeadingLevel,
}
impl HTMLHeadingElementDerived for EventTarget {
fn is_htmlheadingelement(&self) -> bool {
*self.type_id() ==
EventTargetTypeId::Node(
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLHeadingElement)))
}
}
impl HTMLHeadingElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document,
level: HeadingLevel) -> HTMLHeadingElement {
HTMLHeadingElement {
htmlelement:
HTMLElement::new_inherited(HTMLElementTypeId::HTMLHeadingElement, localName, prefix, document),
level: level,
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document,
level: HeadingLevel) -> Root<HTMLHeadingElement> {
let element = HTMLHeadingElement::new_inherited(localName, prefix, document, level);
Node::reflect_node(box element, document, HTMLHeadingElementBinding::Wrap)
}
}
|
Heading6,
}
#[dom_struct]
|
random_line_split
|
recursion_limit_macro.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that the recursion limit can be changed and that the compiler
// suggests a fix. In this case, we have a recursing macro that will
// overflow if the number of arguments surpasses the recursion limit.
#![allow(dead_code)]
#![recursion_limit="10"]
macro_rules! recurse {
() => { };
($t:tt $($tail:tt)*) => { recurse!($($tail)*) }; //~ ERROR recursion limit
}
fn
|
() {
recurse!(0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9);
}
|
main
|
identifier_name
|
recursion_limit_macro.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
|
// except according to those terms.
// Test that the recursion limit can be changed and that the compiler
// suggests a fix. In this case, we have a recursing macro that will
// overflow if the number of arguments surpasses the recursion limit.
#![allow(dead_code)]
#![recursion_limit="10"]
macro_rules! recurse {
() => { };
($t:tt $($tail:tt)*) => { recurse!($($tail)*) }; //~ ERROR recursion limit
}
fn main() {
recurse!(0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9);
}
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
|
random_line_split
|
recursion_limit_macro.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that the recursion limit can be changed and that the compiler
// suggests a fix. In this case, we have a recursing macro that will
// overflow if the number of arguments surpasses the recursion limit.
#![allow(dead_code)]
#![recursion_limit="10"]
macro_rules! recurse {
() => { };
($t:tt $($tail:tt)*) => { recurse!($($tail)*) }; //~ ERROR recursion limit
}
fn main()
|
{
recurse!(0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9);
}
|
identifier_body
|
|
trait-static-method-generic-inference.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #3902. We are (at least currently) unable to infer `Self`
// based on `T`, even though there is only a single impl, because of
// the possibility of associated types and other things (basically: no
// constraints on `Self` here at all).
mod base {
pub trait HasNew<T> {
fn new() -> T;
fn
|
(&self) { }
}
pub struct Foo {
dummy: (),
}
impl HasNew<Foo> for Foo {
fn new() -> Foo {
Foo { dummy: () }
}
}
}
pub fn foo() {
let _f: base::Foo = base::HasNew::new();
//~^ ERROR type annotations required
}
fn main() { }
|
dummy
|
identifier_name
|
trait-static-method-generic-inference.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #3902. We are (at least currently) unable to infer `Self`
// based on `T`, even though there is only a single impl, because of
// the possibility of associated types and other things (basically: no
// constraints on `Self` here at all).
mod base {
pub trait HasNew<T> {
fn new() -> T;
fn dummy(&self) { }
}
pub struct Foo {
dummy: (),
}
impl HasNew<Foo> for Foo {
fn new() -> Foo {
Foo { dummy: () }
}
}
}
pub fn foo() {
let _f: base::Foo = base::HasNew::new();
//~^ ERROR type annotations required
|
fn main() { }
|
}
|
random_line_split
|
class-implement-trait-cross-crate.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:cci_class_trait.rs
extern crate cci_class_trait;
use cci_class_trait::animals::noisy;
|
how_hungry : int,
name : String,
}
impl cat {
pub fn eat(&mut self) -> bool {
if self.how_hungry > 0 {
println!("OM NOM NOM");
self.how_hungry -= 2;
return true;
}
else {
println!("Not hungry!");
return false;
}
}
}
impl noisy for cat {
fn speak(&mut self) { self.meow(); }
}
impl cat {
fn meow(&mut self) {
println!("Meow");
self.meows += 1u;
if self.meows % 5u == 0u {
self.how_hungry += 1;
}
}
}
fn cat(in_x : uint, in_y : int, in_name: String) -> cat {
cat {
meows: in_x,
how_hungry: in_y,
name: in_name
}
}
pub fn main() {
let mut nyan = cat(0u, 2, "nyan".to_string());
nyan.eat();
assert!((!nyan.eat()));
for _ in 1u..10u { nyan.speak(); };
assert!((nyan.eat()));
}
|
struct cat {
meows: uint,
|
random_line_split
|
class-implement-trait-cross-crate.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:cci_class_trait.rs
extern crate cci_class_trait;
use cci_class_trait::animals::noisy;
struct cat {
meows: uint,
how_hungry : int,
name : String,
}
impl cat {
pub fn eat(&mut self) -> bool {
if self.how_hungry > 0 {
println!("OM NOM NOM");
self.how_hungry -= 2;
return true;
}
else
|
}
}
impl noisy for cat {
fn speak(&mut self) { self.meow(); }
}
impl cat {
fn meow(&mut self) {
println!("Meow");
self.meows += 1u;
if self.meows % 5u == 0u {
self.how_hungry += 1;
}
}
}
fn cat(in_x : uint, in_y : int, in_name: String) -> cat {
cat {
meows: in_x,
how_hungry: in_y,
name: in_name
}
}
pub fn main() {
let mut nyan = cat(0u, 2, "nyan".to_string());
nyan.eat();
assert!((!nyan.eat()));
for _ in 1u..10u { nyan.speak(); };
assert!((nyan.eat()));
}
|
{
println!("Not hungry!");
return false;
}
|
conditional_block
|
class-implement-trait-cross-crate.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:cci_class_trait.rs
extern crate cci_class_trait;
use cci_class_trait::animals::noisy;
struct
|
{
meows: uint,
how_hungry : int,
name : String,
}
impl cat {
pub fn eat(&mut self) -> bool {
if self.how_hungry > 0 {
println!("OM NOM NOM");
self.how_hungry -= 2;
return true;
}
else {
println!("Not hungry!");
return false;
}
}
}
impl noisy for cat {
fn speak(&mut self) { self.meow(); }
}
impl cat {
fn meow(&mut self) {
println!("Meow");
self.meows += 1u;
if self.meows % 5u == 0u {
self.how_hungry += 1;
}
}
}
fn cat(in_x : uint, in_y : int, in_name: String) -> cat {
cat {
meows: in_x,
how_hungry: in_y,
name: in_name
}
}
pub fn main() {
let mut nyan = cat(0u, 2, "nyan".to_string());
nyan.eat();
assert!((!nyan.eat()));
for _ in 1u..10u { nyan.speak(); };
assert!((nyan.eat()));
}
|
cat
|
identifier_name
|
class-implement-trait-cross-crate.rs
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:cci_class_trait.rs
extern crate cci_class_trait;
use cci_class_trait::animals::noisy;
struct cat {
meows: uint,
how_hungry : int,
name : String,
}
impl cat {
pub fn eat(&mut self) -> bool {
if self.how_hungry > 0 {
println!("OM NOM NOM");
self.how_hungry -= 2;
return true;
}
else {
println!("Not hungry!");
return false;
}
}
}
impl noisy for cat {
fn speak(&mut self) { self.meow(); }
}
impl cat {
fn meow(&mut self) {
println!("Meow");
self.meows += 1u;
if self.meows % 5u == 0u {
self.how_hungry += 1;
}
}
}
fn cat(in_x : uint, in_y : int, in_name: String) -> cat {
cat {
meows: in_x,
how_hungry: in_y,
name: in_name
}
}
pub fn main()
|
{
let mut nyan = cat(0u, 2, "nyan".to_string());
nyan.eat();
assert!((!nyan.eat()));
for _ in 1u..10u { nyan.speak(); };
assert!((nyan.eat()));
}
|
identifier_body
|
|
mod.rs
|
// Copyright 2015-2017 Intecture Developers.
//
// Licensed under the Mozilla Public License 2.0 <LICENSE or
// https://www.tldrlegal.com/l/mpl-2.0>. This file may not be copied,
// modified, or distributed except according to those terms.
//! Manages the connection between the API and a server.
pub mod local;
pub mod remote;
use errors::*;
use futures::Future;
use remote::{Request, Response};
use std::io;
use telemetry::Telemetry;
use tokio_core::reactor::Handle;
use tokio_proto::streaming::{Body, Message};
/// Trait for local and remote host types.
pub trait Host: Clone {
/// Get `Telemetry` for this host.
fn telemetry(&self) -> &Telemetry;
/// Get `Handle` to Tokio reactor.
fn handle(&self) -> &Handle;
#[doc(hidden)]
fn
|
(&self, request: Request) ->
Box<Future<Item = Message<Response, Body<Vec<u8>, io::Error>>, Error = Error>>
{
self.request_msg(Message::WithoutBody(request))
}
#[doc(hidden)]
fn request_msg(&self, Message<Request, Body<Vec<u8>, io::Error>>) ->
Box<Future<Item = Message<Response, Body<Vec<u8>, io::Error>>, Error = Error>>;
}
|
request
|
identifier_name
|
mod.rs
|
// Copyright 2015-2017 Intecture Developers.
//
// Licensed under the Mozilla Public License 2.0 <LICENSE or
// https://www.tldrlegal.com/l/mpl-2.0>. This file may not be copied,
// modified, or distributed except according to those terms.
//! Manages the connection between the API and a server.
pub mod local;
pub mod remote;
use errors::*;
|
use tokio_core::reactor::Handle;
use tokio_proto::streaming::{Body, Message};
/// Trait for local and remote host types.
pub trait Host: Clone {
/// Get `Telemetry` for this host.
fn telemetry(&self) -> &Telemetry;
/// Get `Handle` to Tokio reactor.
fn handle(&self) -> &Handle;
#[doc(hidden)]
fn request(&self, request: Request) ->
Box<Future<Item = Message<Response, Body<Vec<u8>, io::Error>>, Error = Error>>
{
self.request_msg(Message::WithoutBody(request))
}
#[doc(hidden)]
fn request_msg(&self, Message<Request, Body<Vec<u8>, io::Error>>) ->
Box<Future<Item = Message<Response, Body<Vec<u8>, io::Error>>, Error = Error>>;
}
|
use futures::Future;
use remote::{Request, Response};
use std::io;
use telemetry::Telemetry;
|
random_line_split
|
mod.rs
|
// Copyright 2015-2017 Intecture Developers.
//
// Licensed under the Mozilla Public License 2.0 <LICENSE or
// https://www.tldrlegal.com/l/mpl-2.0>. This file may not be copied,
// modified, or distributed except according to those terms.
//! Manages the connection between the API and a server.
pub mod local;
pub mod remote;
use errors::*;
use futures::Future;
use remote::{Request, Response};
use std::io;
use telemetry::Telemetry;
use tokio_core::reactor::Handle;
use tokio_proto::streaming::{Body, Message};
/// Trait for local and remote host types.
pub trait Host: Clone {
/// Get `Telemetry` for this host.
fn telemetry(&self) -> &Telemetry;
/// Get `Handle` to Tokio reactor.
fn handle(&self) -> &Handle;
#[doc(hidden)]
fn request(&self, request: Request) ->
Box<Future<Item = Message<Response, Body<Vec<u8>, io::Error>>, Error = Error>>
|
#[doc(hidden)]
fn request_msg(&self, Message<Request, Body<Vec<u8>, io::Error>>) ->
Box<Future<Item = Message<Response, Body<Vec<u8>, io::Error>>, Error = Error>>;
}
|
{
self.request_msg(Message::WithoutBody(request))
}
|
identifier_body
|
ui.rs
|
//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use clap::{Arg, App, SubCommand};
pub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a>
|
.subcommand(SubCommand::with_name("import")
.about("Import contacts")
.version("0.1")
.arg(Arg::with_name("path")
.index(1)
.takes_value(true)
.required(true)
.multiple(false)
.value_name("PATH")
.help("Import from this file/directory"))
)
.subcommand(SubCommand::with_name("show")
.about("Show contact")
.version("0.1")
.arg(Arg::with_name("hash")
.index(1)
.takes_value(true)
.required(true)
.multiple(false)
.value_name("HASH")
.help("Show the contact pointed to by this reference hash"))
.arg(Arg::with_name("format")
.long("format")
.takes_value(true)
.required(false)
.multiple(false)
.value_name("FORMAT")
.help("Format to format the contact when printing it"))
)
.subcommand(SubCommand::with_name("create")
.about("Create a contact file (.vcf) and track it in imag.")
.version("0.1")
.arg(Arg::with_name("file-location")
.short("F")
.long("file")
.takes_value(true)
.required(false)
.multiple(false)
.value_name("PATH")
.help("Create this file. If a directory is passed, a file with a uuid as name will be created. vcf contents are dumped to stdout if this is not passed."))
.arg(Arg::with_name("dont-track")
.short("T")
.long("no-track")
.takes_value(false)
.required(false)
.multiple(false)
.help("Don't track the new vcf file if one is created."))
)
}
|
{
app
.subcommand(SubCommand::with_name("list")
.about("List contacts")
.version("0.1")
.arg(Arg::with_name("filter")
.index(1)
.takes_value(true)
.required(false)
.multiple(true)
.value_name("FILTER")
.help("Filter by these properties (not implemented yet)"))
.arg(Arg::with_name("format")
.long("format")
.takes_value(true)
.required(false)
.multiple(false)
.value_name("FORMAT")
.help("Format to format the listing"))
)
|
identifier_body
|
ui.rs
|
//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use clap::{Arg, App, SubCommand};
pub fn build_ui<'a>(app: App<'a, 'a>) -> App<'a, 'a> {
app
.subcommand(SubCommand::with_name("list")
.about("List contacts")
.version("0.1")
.arg(Arg::with_name("filter")
.index(1)
.takes_value(true)
.required(false)
.multiple(true)
.value_name("FILTER")
.help("Filter by these properties (not implemented yet)"))
.arg(Arg::with_name("format")
.long("format")
.takes_value(true)
.required(false)
.multiple(false)
.value_name("FORMAT")
.help("Format to format the listing"))
)
.subcommand(SubCommand::with_name("import")
.about("Import contacts")
.version("0.1")
.arg(Arg::with_name("path")
.index(1)
.takes_value(true)
.required(true)
.multiple(false)
.value_name("PATH")
.help("Import from this file/directory"))
)
.subcommand(SubCommand::with_name("show")
.about("Show contact")
.version("0.1")
.arg(Arg::with_name("hash")
.index(1)
.takes_value(true)
.required(true)
.multiple(false)
.value_name("HASH")
.help("Show the contact pointed to by this reference hash"))
.arg(Arg::with_name("format")
.long("format")
|
.value_name("FORMAT")
.help("Format to format the contact when printing it"))
)
.subcommand(SubCommand::with_name("create")
.about("Create a contact file (.vcf) and track it in imag.")
.version("0.1")
.arg(Arg::with_name("file-location")
.short("F")
.long("file")
.takes_value(true)
.required(false)
.multiple(false)
.value_name("PATH")
.help("Create this file. If a directory is passed, a file with a uuid as name will be created. vcf contents are dumped to stdout if this is not passed."))
.arg(Arg::with_name("dont-track")
.short("T")
.long("no-track")
.takes_value(false)
.required(false)
.multiple(false)
.help("Don't track the new vcf file if one is created."))
)
}
|
.takes_value(true)
.required(false)
.multiple(false)
|
random_line_split
|
ui.rs
|
//
// imag - the personal information management suite for the commandline
// Copyright (C) 2015, 2016 Matthias Beyer <[email protected]> and contributors
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; version
// 2.1 of the License.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
use clap::{Arg, App, SubCommand};
pub fn
|
<'a>(app: App<'a, 'a>) -> App<'a, 'a> {
app
.subcommand(SubCommand::with_name("list")
.about("List contacts")
.version("0.1")
.arg(Arg::with_name("filter")
.index(1)
.takes_value(true)
.required(false)
.multiple(true)
.value_name("FILTER")
.help("Filter by these properties (not implemented yet)"))
.arg(Arg::with_name("format")
.long("format")
.takes_value(true)
.required(false)
.multiple(false)
.value_name("FORMAT")
.help("Format to format the listing"))
)
.subcommand(SubCommand::with_name("import")
.about("Import contacts")
.version("0.1")
.arg(Arg::with_name("path")
.index(1)
.takes_value(true)
.required(true)
.multiple(false)
.value_name("PATH")
.help("Import from this file/directory"))
)
.subcommand(SubCommand::with_name("show")
.about("Show contact")
.version("0.1")
.arg(Arg::with_name("hash")
.index(1)
.takes_value(true)
.required(true)
.multiple(false)
.value_name("HASH")
.help("Show the contact pointed to by this reference hash"))
.arg(Arg::with_name("format")
.long("format")
.takes_value(true)
.required(false)
.multiple(false)
.value_name("FORMAT")
.help("Format to format the contact when printing it"))
)
.subcommand(SubCommand::with_name("create")
.about("Create a contact file (.vcf) and track it in imag.")
.version("0.1")
.arg(Arg::with_name("file-location")
.short("F")
.long("file")
.takes_value(true)
.required(false)
.multiple(false)
.value_name("PATH")
.help("Create this file. If a directory is passed, a file with a uuid as name will be created. vcf contents are dumped to stdout if this is not passed."))
.arg(Arg::with_name("dont-track")
.short("T")
.long("no-track")
.takes_value(false)
.required(false)
.multiple(false)
.help("Don't track the new vcf file if one is created."))
)
}
|
build_ui
|
identifier_name
|
css.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, serialize_identifier};
use dom::bindings::codegen::Bindings::WindowBinding::WindowBinding::WindowMethods;
use dom::bindings::error::Fallible;
use dom::bindings::reflector::Reflector;
use dom::bindings::str::DOMString;
use dom::window::Window;
use dom_struct::dom_struct;
use style::context::QuirksMode;
use style::parser::{PARSING_MODE_DEFAULT, ParserContext};
use style::stylesheets::CssRuleType;
use style::supports::{Declaration, parse_condition_or_declaration};
#[dom_struct]
pub struct CSS {
reflector_: Reflector,
}
impl CSS {
/// http://dev.w3.org/csswg/cssom/#serialize-an-identifier
pub fn
|
(_: &Window, ident: DOMString) -> Fallible<DOMString> {
let mut escaped = String::new();
serialize_identifier(&ident, &mut escaped).unwrap();
Ok(DOMString::from(escaped))
}
/// https://drafts.csswg.org/css-conditional/#dom-css-supports
pub fn Supports(win: &Window, property: DOMString, value: DOMString) -> bool {
let decl = Declaration { prop: property.into(), val: value.into() };
let url = win.Document().url();
let context = ParserContext::new_for_cssom(&url, win.css_error_reporter(), Some(CssRuleType::Supports),
PARSING_MODE_DEFAULT,
QuirksMode::NoQuirks);
decl.eval(&context)
}
/// https://drafts.csswg.org/css-conditional/#dom-css-supports
pub fn Supports_(win: &Window, condition: DOMString) -> bool {
let mut input = Parser::new(&condition);
let cond = parse_condition_or_declaration(&mut input);
if let Ok(cond) = cond {
let url = win.Document().url();
let context = ParserContext::new_for_cssom(&url, win.css_error_reporter(), Some(CssRuleType::Supports),
PARSING_MODE_DEFAULT,
QuirksMode::NoQuirks);
cond.eval(&context)
} else {
false
}
}
}
|
Escape
|
identifier_name
|
css.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, serialize_identifier};
use dom::bindings::codegen::Bindings::WindowBinding::WindowBinding::WindowMethods;
use dom::bindings::error::Fallible;
use dom::bindings::reflector::Reflector;
use dom::bindings::str::DOMString;
use dom::window::Window;
use dom_struct::dom_struct;
use style::context::QuirksMode;
use style::parser::{PARSING_MODE_DEFAULT, ParserContext};
use style::stylesheets::CssRuleType;
use style::supports::{Declaration, parse_condition_or_declaration};
#[dom_struct]
pub struct CSS {
reflector_: Reflector,
}
impl CSS {
/// http://dev.w3.org/csswg/cssom/#serialize-an-identifier
pub fn Escape(_: &Window, ident: DOMString) -> Fallible<DOMString> {
let mut escaped = String::new();
serialize_identifier(&ident, &mut escaped).unwrap();
Ok(DOMString::from(escaped))
}
/// https://drafts.csswg.org/css-conditional/#dom-css-supports
pub fn Supports(win: &Window, property: DOMString, value: DOMString) -> bool {
let decl = Declaration { prop: property.into(), val: value.into() };
let url = win.Document().url();
let context = ParserContext::new_for_cssom(&url, win.css_error_reporter(), Some(CssRuleType::Supports),
PARSING_MODE_DEFAULT,
QuirksMode::NoQuirks);
decl.eval(&context)
}
/// https://drafts.csswg.org/css-conditional/#dom-css-supports
pub fn Supports_(win: &Window, condition: DOMString) -> bool {
let mut input = Parser::new(&condition);
let cond = parse_condition_or_declaration(&mut input);
if let Ok(cond) = cond
|
else {
false
}
}
}
|
{
let url = win.Document().url();
let context = ParserContext::new_for_cssom(&url, win.css_error_reporter(), Some(CssRuleType::Supports),
PARSING_MODE_DEFAULT,
QuirksMode::NoQuirks);
cond.eval(&context)
}
|
conditional_block
|
css.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, serialize_identifier};
use dom::bindings::codegen::Bindings::WindowBinding::WindowBinding::WindowMethods;
use dom::bindings::error::Fallible;
use dom::bindings::reflector::Reflector;
use dom::bindings::str::DOMString;
use dom::window::Window;
use dom_struct::dom_struct;
use style::context::QuirksMode;
use style::parser::{PARSING_MODE_DEFAULT, ParserContext};
use style::stylesheets::CssRuleType;
use style::supports::{Declaration, parse_condition_or_declaration};
#[dom_struct]
pub struct CSS {
reflector_: Reflector,
}
impl CSS {
/// http://dev.w3.org/csswg/cssom/#serialize-an-identifier
pub fn Escape(_: &Window, ident: DOMString) -> Fallible<DOMString> {
let mut escaped = String::new();
serialize_identifier(&ident, &mut escaped).unwrap();
Ok(DOMString::from(escaped))
}
/// https://drafts.csswg.org/css-conditional/#dom-css-supports
pub fn Supports(win: &Window, property: DOMString, value: DOMString) -> bool {
let decl = Declaration { prop: property.into(), val: value.into() };
let url = win.Document().url();
let context = ParserContext::new_for_cssom(&url, win.css_error_reporter(), Some(CssRuleType::Supports),
PARSING_MODE_DEFAULT,
QuirksMode::NoQuirks);
decl.eval(&context)
}
|
pub fn Supports_(win: &Window, condition: DOMString) -> bool {
let mut input = Parser::new(&condition);
let cond = parse_condition_or_declaration(&mut input);
if let Ok(cond) = cond {
let url = win.Document().url();
let context = ParserContext::new_for_cssom(&url, win.css_error_reporter(), Some(CssRuleType::Supports),
PARSING_MODE_DEFAULT,
QuirksMode::NoQuirks);
cond.eval(&context)
} else {
false
}
}
}
|
/// https://drafts.csswg.org/css-conditional/#dom-css-supports
|
random_line_split
|
title.rs
|
// Copyright 2013 The GLFW-RS Developers. For a full listing of the authors,
// refer to the AUTHORS file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate native;
extern crate glfw;
use glfw::Context;
#[start]
fn start(argc: int, argv: *const *const u8) -> int {
native::start(argc, argv, main)
}
fn main() {
let glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
let (window, events) = glfw.create_window(400, 400, "English 日本語 русский язык 官話", glfw::Windowed)
.expect("Failed to create GLFW window.");
window.set_key_polling(true);
window.make_current();
glfw.set_swap_interval(1);
while !window.should_close() {
glfw.poll_events();
for (_, event) in glfw::flush_messages(&events) {
handle_window_event(&window, event);
}
}
}
fn handle_window_event(window: &glfw::Window, event: glfw::WindowEvent) {
match event {
|
glfw::KeyEvent(glfw::KeyEscape, _, glfw::Press, _) => {
window.set_should_close(true)
}
_ => {}
}
}
|
identifier_body
|
|
title.rs
|
// Copyright 2013 The GLFW-RS Developers. For a full listing of the authors,
// refer to the AUTHORS file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate native;
extern crate glfw;
use glfw::Context;
#[start]
fn start(argc: int, argv: *const *const u8) -> int {
native::start(argc, argv, main)
}
fn main() {
let glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
let (window, events) = glfw.create_window(400, 400, "English 日本語 русский язык 官話", glfw::Windowed)
.expect("Failed to create GLFW window.");
window.set_key_polling(true);
window.make_current();
glfw.set_swap_interval(1);
while !window.should_close() {
glfw.poll_events();
for (_, event) in glfw::flush_messages(&events) {
handle_window_event(&window, event);
}
}
}
fn handle_window_event(window: &glfw::Window, event: glfw::WindowEvent) {
match event {
glfw::KeyEvent(glfw::KeyEscape, _, glfw::Press, _) => {
window.
|
}
}
|
set_should_close(true)
}
_ => {}
|
conditional_block
|
title.rs
|
// Copyright 2013 The GLFW-RS Developers. For a full listing of the authors,
// refer to the AUTHORS file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate native;
extern crate glfw;
use glfw::Context;
#[start]
fn start(argc: int, argv: *const *const u8) -> int {
native::start(argc, argv, main)
}
fn main() {
let glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
let (window, events) = glfw.create_window(400, 400, "English 日本語 русский язык 官話", glfw::Windowed)
.expect("Failed to create GLFW window.");
window.set_key_polling(true);
window.make_current();
glfw.set_swap_interval(1);
while !window.should_close() {
glfw.poll_events();
for (_, event) in glfw::flush_messages(&events) {
handle_window_event(&window, event);
}
}
}
fn handle_window_event(window: &glfw::Window, event: glfw::WindowEvent) {
match event {
glfw::KeyEvent(glfw::KeyEscape, _, glfw::Press, _) => {
window.set_should_close(true)
}
_ => {}
}
}
|
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
|
random_line_split
|
title.rs
|
// Copyright 2013 The GLFW-RS Developers. For a full listing of the authors,
// refer to the AUTHORS file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
extern crate native;
extern crate glfw;
use glfw::Context;
#[start]
fn
|
(argc: int, argv: *const *const u8) -> int {
native::start(argc, argv, main)
}
fn main() {
let glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
let (window, events) = glfw.create_window(400, 400, "English 日本語 русский язык 官話", glfw::Windowed)
.expect("Failed to create GLFW window.");
window.set_key_polling(true);
window.make_current();
glfw.set_swap_interval(1);
while !window.should_close() {
glfw.poll_events();
for (_, event) in glfw::flush_messages(&events) {
handle_window_event(&window, event);
}
}
}
fn handle_window_event(window: &glfw::Window, event: glfw::WindowEvent) {
match event {
glfw::KeyEvent(glfw::KeyEscape, _, glfw::Press, _) => {
window.set_should_close(true)
}
_ => {}
}
}
|
start
|
identifier_name
|
font.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use euclid::{Point2D, Rect, Size2D};
use font_context::{FontContext, FontSource};
use font_template::FontTemplateDescriptor;
use ordered_float::NotNan;
use platform::font::{FontHandle, FontTable};
use platform::font_context::FontContextHandle;
pub use platform::font_list::fallback_font_families;
use platform::font_template::FontTemplateData;
use servo_atoms::Atom;
use smallvec::SmallVec;
use std::borrow::ToOwned;
use std::cell::RefCell;
use std::collections::HashMap;
use std::iter;
use std::rc::Rc;
use std::str;
use std::sync::Arc;
use std::sync::atomic::{ATOMIC_USIZE_INIT, AtomicUsize, Ordering};
use style::computed_values::{font_stretch, font_style, font_variant_caps, font_weight};
use style::properties::style_structs::Font as FontStyleStruct;
use style::values::computed::font::SingleFontFamily;
use text::Shaper;
use text::glyph::{ByteIndex, GlyphData, GlyphId, GlyphStore};
use text::shaping::ShaperMethods;
use time;
use unicode_script::Script;
use webrender_api;
macro_rules! ot_tag {
($t1:expr, $t2:expr, $t3:expr, $t4:expr) => (
(($t1 as u32) << 24) | (($t2 as u32) << 16) | (($t3 as u32) << 8) | ($t4 as u32)
);
}
pub const GPOS: u32 = ot_tag!('G', 'P', 'O', 'S');
pub const GSUB: u32 = ot_tag!('G', 'S', 'U', 'B');
pub const KERN: u32 = ot_tag!('k', 'e', 'r', 'n');
static TEXT_SHAPING_PERFORMANCE_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
// FontHandle encapsulates access to the platform's font API,
// e.g. quartz, FreeType. It provides access to metrics and tables
// needed by the text shaper as well as access to the underlying font
// resources needed by the graphics layer to draw glyphs.
pub trait FontHandleMethods: Sized {
fn new_from_template(
fctx: &FontContextHandle,
template: Arc<FontTemplateData>,
pt_size: Option<Au>,
) -> Result<Self, ()>;
fn template(&self) -> Arc<FontTemplateData>;
fn family_name(&self) -> String;
fn face_name(&self) -> Option<String>;
fn style(&self) -> font_style::T;
fn boldness(&self) -> font_weight::T;
fn stretchiness(&self) -> font_stretch::T;
fn glyph_index(&self, codepoint: char) -> Option<GlyphId>;
fn glyph_h_advance(&self, GlyphId) -> Option<FractionalPixel>;
fn glyph_h_kerning(&self, glyph0: GlyphId, glyph1: GlyphId) -> FractionalPixel;
/// Can this font do basic horizontal LTR shaping without Harfbuzz?
fn can_do_fast_shaping(&self) -> bool;
fn metrics(&self) -> FontMetrics;
fn table_for_tag(&self, FontTableTag) -> Option<FontTable>;
/// A unique identifier for the font, allowing comparison.
fn identifier(&self) -> Atom;
}
// Used to abstract over the shaper's choice of fixed int representation.
pub type FractionalPixel = f64;
pub type FontTableTag = u32;
trait FontTableTagConversions {
fn tag_to_str(&self) -> String;
}
impl FontTableTagConversions for FontTableTag {
fn tag_to_str(&self) -> String {
let bytes = [(self >> 24) as u8,
(self >> 16) as u8,
(self >> 8) as u8,
(self >> 0) as u8];
str::from_utf8(&bytes).unwrap().to_owned()
}
}
pub trait FontTableMethods {
fn buffer(&self) -> &[u8];
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct FontMetrics {
pub underline_size: Au,
pub underline_offset: Au,
pub strikeout_size: Au,
pub strikeout_offset: Au,
pub leading: Au,
pub x_height: Au,
pub em_size: Au,
pub ascent: Au,
pub descent: Au,
pub max_advance: Au,
pub average_advance: Au,
pub line_gap: Au,
}
/// `FontDescriptor` describes the parameters of a `Font`. It represents rendering a given font
/// template at a particular size, with a particular font-variant-caps applied, etc. This contrasts
/// with `FontTemplateDescriptor` in that the latter represents only the parameters inherent in the
/// font data (weight, stretch, etc.).
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct FontDescriptor {
pub template_descriptor: FontTemplateDescriptor,
pub variant: font_variant_caps::T,
pub pt_size: Au,
}
impl<'a> From<&'a FontStyleStruct> for FontDescriptor {
fn from(style: &'a FontStyleStruct) -> Self {
FontDescriptor {
template_descriptor: FontTemplateDescriptor::from(style),
variant: style.font_variant_caps,
pt_size: style.font_size.size(),
}
}
}
#[derive(Debug)]
pub struct Font {
pub handle: FontHandle,
pub metrics: FontMetrics,
pub descriptor: FontDescriptor,
pub actual_pt_size: Au,
shaper: Option<Shaper>,
shape_cache: RefCell<HashMap<ShapeCacheEntry, Arc<GlyphStore>>>,
glyph_advance_cache: RefCell<HashMap<u32, FractionalPixel>>,
pub font_key: webrender_api::FontInstanceKey,
}
impl Font {
pub fn new(handle: FontHandle,
descriptor: FontDescriptor,
actual_pt_size: Au,
font_key: webrender_api::FontInstanceKey) -> Font {
let metrics = handle.metrics();
Font {
handle: handle,
shaper: None,
descriptor,
actual_pt_size,
metrics,
shape_cache: RefCell::new(HashMap::new()),
glyph_advance_cache: RefCell::new(HashMap::new()),
font_key,
}
}
/// A unique identifier for the font, allowing comparison.
pub fn identifier(&self) -> Atom {
self.handle.identifier()
}
}
bitflags! {
pub struct ShapingFlags: u8 {
#[doc = "Set if the text is entirely whitespace."]
const IS_WHITESPACE_SHAPING_FLAG = 0x01;
#[doc = "Set if we are to ignore ligatures."]
const IGNORE_LIGATURES_SHAPING_FLAG = 0x02;
#[doc = "Set if we are to disable kerning."]
const DISABLE_KERNING_SHAPING_FLAG = 0x04;
#[doc = "Text direction is right-to-left."]
const RTL_FLAG = 0x08;
#[doc = "Set if word-break is set to keep-all."]
const KEEP_ALL_FLAG = 0x10;
}
}
/// Various options that control text shaping.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct ShapingOptions {
/// Spacing to add between each letter. Corresponds to the CSS 2.1 `letter-spacing` property.
/// NB: You will probably want to set the `IGNORE_LIGATURES_SHAPING_FLAG` if this is non-null.
pub letter_spacing: Option<Au>,
/// Spacing to add between each word. Corresponds to the CSS 2.1 `word-spacing` property.
pub word_spacing: (Au, NotNan<f32>),
/// The Unicode script property of the characters in this run.
pub script: Script,
/// Various flags.
pub flags: ShapingFlags,
}
/// An entry in the shape cache.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct ShapeCacheEntry {
text: String,
options: ShapingOptions,
}
impl Font {
pub fn shape_text(&mut self, text: &str, options: &ShapingOptions) -> Arc<GlyphStore> {
let this = self as *const Font;
let mut shaper = self.shaper.take();
let lookup_key = ShapeCacheEntry {
text: text.to_owned(),
options: *options,
};
let result = self.shape_cache.borrow_mut().entry(lookup_key).or_insert_with(|| {
let start_time = time::precise_time_ns();
let mut glyphs = GlyphStore::new(text.len(),
options.flags.contains(ShapingFlags::IS_WHITESPACE_SHAPING_FLAG),
options.flags.contains(ShapingFlags::RTL_FLAG));
if self.can_do_fast_shaping(text, options) {
debug!("shape_text: Using ASCII fast path.");
self.shape_text_fast(text, options, &mut glyphs);
} else {
debug!("shape_text: Using Harfbuzz.");
if shaper.is_none() {
shaper = Some(Shaper::new(this));
}
shaper.as_ref().unwrap().shape_text(text, options, &mut glyphs);
}
let end_time = time::precise_time_ns();
TEXT_SHAPING_PERFORMANCE_COUNTER.fetch_add((end_time - start_time) as usize,
Ordering::Relaxed);
Arc::new(glyphs)
}).clone();
self.shaper = shaper;
result
}
fn can_do_fast_shaping(&self, text: &str, options: &ShapingOptions) -> bool {
options.script == Script::Latin &&
!options.flags.contains(ShapingFlags::RTL_FLAG) &&
self.handle.can_do_fast_shaping() &&
text.is_ascii()
}
/// Fast path for ASCII text that only needs simple horizontal LTR kerning.
fn shape_text_fast(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore) {
let mut prev_glyph_id = None;
for (i, byte) in text.bytes().enumerate() {
let character = byte as char;
let glyph_id = match self.glyph_index(character) {
Some(id) => id,
None => continue,
};
let mut advance = Au::from_f64_px(self.glyph_h_advance(glyph_id));
if character == ' ' {
// https://drafts.csswg.org/css-text-3/#word-spacing-property
let (length, percent) = options.word_spacing;
advance = (advance + length) + Au((advance.0 as f32 * percent.into_inner()) as i32);
}
if let Some(letter_spacing) = options.letter_spacing {
advance += letter_spacing;
}
let offset = prev_glyph_id.map(|prev| {
let h_kerning = Au::from_f64_px(self.glyph_h_kerning(prev, glyph_id));
advance += h_kerning;
Point2D::new(h_kerning, Au(0))
});
let glyph = GlyphData::new(glyph_id, advance, offset, true, true);
glyphs.add_glyph_for_byte_index(ByteIndex(i as isize), character, &glyph);
prev_glyph_id = Some(glyph_id);
}
glyphs.finalize_changes();
}
pub fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
let result = self.handle.table_for_tag(tag);
let status = if result.is_some()
|
else { "Didn't find" };
debug!("{} font table[{}] with family={}, face={}",
status, tag.tag_to_str(),
self.handle.family_name(), self.handle.face_name().unwrap_or("unavailable".to_owned()));
result
}
#[inline]
pub fn glyph_index(&self, codepoint: char) -> Option<GlyphId> {
let codepoint = match self.descriptor.variant {
font_variant_caps::T::SmallCaps => codepoint.to_uppercase().next().unwrap(), //FIXME: #5938
font_variant_caps::T::Normal => codepoint,
};
self.handle.glyph_index(codepoint)
}
pub fn has_glyph_for(&self, codepoint: char) -> bool {
self.glyph_index(codepoint).is_some()
}
pub fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId)
-> FractionalPixel {
self.handle.glyph_h_kerning(first_glyph, second_glyph)
}
pub fn glyph_h_advance(&self, glyph: GlyphId) -> FractionalPixel {
*self.glyph_advance_cache.borrow_mut().entry(glyph).or_insert_with(|| {
match self.handle.glyph_h_advance(glyph) {
Some(adv) => adv,
None => 10f64 as FractionalPixel // FIXME: Need fallback strategy
}
})
}
}
pub type FontRef = Rc<RefCell<Font>>;
/// A `FontGroup` is a prioritised list of fonts for a given set of font styles. It is used by
/// `TextRun` to decide which font to render a character with. If none of the fonts listed in the
/// styles are suitable, a fallback font may be used.
#[derive(Debug)]
pub struct FontGroup {
descriptor: FontDescriptor,
families: SmallVec<[FontGroupFamily; 8]>,
last_matching_fallback: Option<FontRef>,
}
impl FontGroup {
pub fn new(style: &FontStyleStruct) -> FontGroup {
let descriptor = FontDescriptor::from(style);
let families =
style.font_family.0.iter()
.map(|family| FontGroupFamily::new(descriptor.clone(), &family))
.collect();
FontGroup {
descriptor,
families,
last_matching_fallback: None,
}
}
/// Finds the first font, or else the first fallback font, which contains a glyph for
/// `codepoint`. If no such font is found, returns the first available font or fallback font
/// (which will cause a "glyph not found" character to be rendered). If no font at all can be
/// found, returns None.
pub fn find_by_codepoint<S: FontSource>(
&mut self,
mut font_context: &mut FontContext<S>,
codepoint: char
) -> Option<FontRef> {
let has_glyph = |font: &FontRef| font.borrow().has_glyph_for(codepoint);
let font = self.find(&mut font_context, |font| has_glyph(font));
if font.is_some() {
return font
}
if let Some(ref fallback) = self.last_matching_fallback {
if has_glyph(&fallback) {
return self.last_matching_fallback.clone()
}
}
let font = self.find_fallback(&mut font_context, Some(codepoint), has_glyph);
if font.is_some() {
self.last_matching_fallback = font.clone();
return font
}
self.first(&mut font_context)
}
/// Find the first available font in the group, or the first available fallback font.
pub fn first<S: FontSource>(
&mut self,
mut font_context: &mut FontContext<S>
) -> Option<FontRef> {
self.find(&mut font_context, |_| true)
.or_else(|| self.find_fallback(&mut font_context, None, |_| true))
}
/// Find a font which returns true for `predicate`. This method mutates because we may need to
/// load new font data in the process of finding a suitable font.
fn find<S, P>(
&mut self,
mut font_context: &mut FontContext<S>,
predicate: P,
) -> Option<FontRef>
where
S: FontSource,
P: FnMut(&FontRef) -> bool,
{
self.families.iter_mut()
.filter_map(|family| family.font(&mut font_context))
.find(predicate)
}
/// Attempts to find a suitable fallback font which matches the `predicate`. The default
/// family (i.e. "serif") will be tried first, followed by platform-specific family names.
/// If a `codepoint` is provided, then its Unicode block may be used to refine the list of
/// family names which will be tried.
fn find_fallback<S, P>(
&mut self,
font_context: &mut FontContext<S>,
codepoint: Option<char>,
predicate: P,
) -> Option<FontRef>
where
S: FontSource,
P: FnMut(&FontRef) -> bool,
{
iter::once(FontFamilyDescriptor::default())
.chain(
fallback_font_families(codepoint).into_iter().map(|family| {
FontFamilyDescriptor::new(
FontFamilyName::from(family),
FontSearchScope::Local,
)
})
)
.filter_map(|family| font_context.font(&self.descriptor, &family))
.find(predicate)
}
}
/// A `FontGroupFamily` is a single font family in a `FontGroup`. It corresponds to one of the
/// families listed in the `font-family` CSS property. The corresponding font data is lazy-loaded,
/// only if actually needed.
#[derive(Debug)]
struct FontGroupFamily {
font_descriptor: FontDescriptor,
family_descriptor: FontFamilyDescriptor,
loaded: bool,
font: Option<FontRef>,
}
impl FontGroupFamily {
fn new(font_descriptor: FontDescriptor, family: &SingleFontFamily) -> FontGroupFamily {
let family_descriptor = FontFamilyDescriptor::new(
FontFamilyName::from(family),
FontSearchScope::Any
);
FontGroupFamily {
font_descriptor,
family_descriptor,
loaded: false,
font: None,
}
}
/// Returns the font within this family which matches the style. We'll fetch the data from the
/// `FontContext` the first time this method is called, and return a cached reference on
/// subsequent calls.
fn font<S: FontSource>(&mut self, font_context: &mut FontContext<S>) -> Option<FontRef> {
if !self.loaded {
self.font = font_context.font(&self.font_descriptor, &self.family_descriptor);
self.loaded = true;
}
self.font.clone()
}
}
pub struct RunMetrics {
// may be negative due to negative width (i.e., kerning of '.' in 'P.T.')
pub advance_width: Au,
pub ascent: Au, // nonzero
pub descent: Au, // nonzero
// this bounding box is relative to the left origin baseline.
// so, bounding_box.position.y = -ascent
pub bounding_box: Rect<Au>
}
impl RunMetrics {
pub fn new(advance: Au, ascent: Au, descent: Au) -> RunMetrics {
let bounds = Rect::new(Point2D::new(Au(0), -ascent),
Size2D::new(advance, ascent + descent));
// TODO(Issue #125): support loose and tight bounding boxes; using the
// ascent+descent and advance is sometimes too generous and
// looking at actual glyph extents can yield a tighter box.
RunMetrics {
advance_width: advance,
bounding_box: bounds,
ascent: ascent,
descent: descent,
}
}
}
pub fn get_and_reset_text_shaping_performance_counter() -> usize {
let value = TEXT_SHAPING_PERFORMANCE_COUNTER.load(Ordering::SeqCst);
TEXT_SHAPING_PERFORMANCE_COUNTER.store(0, Ordering::SeqCst);
value
}
/// The scope within which we will look for a font.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum FontSearchScope {
/// All fonts will be searched, including those specified via `@font-face` rules.
Any,
/// Only local system fonts will be searched.
Local,
}
/// A font family name used in font selection.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum FontFamilyName {
/// A specific name such as `"Arial"`
Specific(Atom),
/// A generic name such as `sans-serif`
Generic(Atom),
}
impl FontFamilyName {
pub fn name(&self) -> &str {
match *self {
FontFamilyName::Specific(ref name) => name,
FontFamilyName::Generic(ref name) => name,
}
}
}
impl<'a> From<&'a SingleFontFamily> for FontFamilyName {
fn from(other: &'a SingleFontFamily) -> FontFamilyName {
match *other {
SingleFontFamily::FamilyName(ref family_name) =>
FontFamilyName::Specific(family_name.name.clone()),
SingleFontFamily::Generic(ref generic_name) =>
FontFamilyName::Generic(generic_name.clone()),
}
}
}
impl<'a> From<&'a str> for FontFamilyName {
fn from(other: &'a str) -> FontFamilyName {
FontFamilyName::Specific(Atom::from(other))
}
}
/// The font family parameters for font selection.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub struct FontFamilyDescriptor {
pub name: FontFamilyName,
pub scope: FontSearchScope,
}
impl FontFamilyDescriptor {
pub fn new(name: FontFamilyName, scope: FontSearchScope) -> FontFamilyDescriptor {
FontFamilyDescriptor { name, scope }
}
fn default() -> FontFamilyDescriptor {
FontFamilyDescriptor {
name: FontFamilyName::Generic(atom!("serif")),
scope: FontSearchScope::Local,
}
}
pub fn name(&self) -> &str {
|
{ "Found" }
|
conditional_block
|
font.rs
|
the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use euclid::{Point2D, Rect, Size2D};
use font_context::{FontContext, FontSource};
use font_template::FontTemplateDescriptor;
use ordered_float::NotNan;
use platform::font::{FontHandle, FontTable};
use platform::font_context::FontContextHandle;
pub use platform::font_list::fallback_font_families;
use platform::font_template::FontTemplateData;
use servo_atoms::Atom;
use smallvec::SmallVec;
use std::borrow::ToOwned;
use std::cell::RefCell;
use std::collections::HashMap;
use std::iter;
use std::rc::Rc;
use std::str;
use std::sync::Arc;
use std::sync::atomic::{ATOMIC_USIZE_INIT, AtomicUsize, Ordering};
use style::computed_values::{font_stretch, font_style, font_variant_caps, font_weight};
use style::properties::style_structs::Font as FontStyleStruct;
use style::values::computed::font::SingleFontFamily;
use text::Shaper;
use text::glyph::{ByteIndex, GlyphData, GlyphId, GlyphStore};
use text::shaping::ShaperMethods;
use time;
use unicode_script::Script;
use webrender_api;
macro_rules! ot_tag {
($t1:expr, $t2:expr, $t3:expr, $t4:expr) => (
(($t1 as u32) << 24) | (($t2 as u32) << 16) | (($t3 as u32) << 8) | ($t4 as u32)
);
}
pub const GPOS: u32 = ot_tag!('G', 'P', 'O', 'S');
pub const GSUB: u32 = ot_tag!('G', 'S', 'U', 'B');
pub const KERN: u32 = ot_tag!('k', 'e', 'r', 'n');
static TEXT_SHAPING_PERFORMANCE_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
// FontHandle encapsulates access to the platform's font API,
// e.g. quartz, FreeType. It provides access to metrics and tables
// needed by the text shaper as well as access to the underlying font
// resources needed by the graphics layer to draw glyphs.
pub trait FontHandleMethods: Sized {
fn new_from_template(
fctx: &FontContextHandle,
template: Arc<FontTemplateData>,
pt_size: Option<Au>,
) -> Result<Self, ()>;
fn template(&self) -> Arc<FontTemplateData>;
fn family_name(&self) -> String;
fn face_name(&self) -> Option<String>;
fn style(&self) -> font_style::T;
fn boldness(&self) -> font_weight::T;
fn stretchiness(&self) -> font_stretch::T;
fn glyph_index(&self, codepoint: char) -> Option<GlyphId>;
fn glyph_h_advance(&self, GlyphId) -> Option<FractionalPixel>;
fn glyph_h_kerning(&self, glyph0: GlyphId, glyph1: GlyphId) -> FractionalPixel;
/// Can this font do basic horizontal LTR shaping without Harfbuzz?
fn can_do_fast_shaping(&self) -> bool;
fn metrics(&self) -> FontMetrics;
fn table_for_tag(&self, FontTableTag) -> Option<FontTable>;
/// A unique identifier for the font, allowing comparison.
fn identifier(&self) -> Atom;
}
// Used to abstract over the shaper's choice of fixed int representation.
pub type FractionalPixel = f64;
pub type FontTableTag = u32;
trait FontTableTagConversions {
fn tag_to_str(&self) -> String;
}
impl FontTableTagConversions for FontTableTag {
fn tag_to_str(&self) -> String {
let bytes = [(self >> 24) as u8,
(self >> 16) as u8,
(self >> 8) as u8,
(self >> 0) as u8];
str::from_utf8(&bytes).unwrap().to_owned()
}
}
pub trait FontTableMethods {
fn buffer(&self) -> &[u8];
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct FontMetrics {
pub underline_size: Au,
pub underline_offset: Au,
pub strikeout_size: Au,
pub strikeout_offset: Au,
pub leading: Au,
pub x_height: Au,
pub em_size: Au,
pub ascent: Au,
pub descent: Au,
pub max_advance: Au,
pub average_advance: Au,
pub line_gap: Au,
}
/// `FontDescriptor` describes the parameters of a `Font`. It represents rendering a given font
/// template at a particular size, with a particular font-variant-caps applied, etc. This contrasts
/// with `FontTemplateDescriptor` in that the latter represents only the parameters inherent in the
/// font data (weight, stretch, etc.).
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct FontDescriptor {
pub template_descriptor: FontTemplateDescriptor,
pub variant: font_variant_caps::T,
pub pt_size: Au,
}
impl<'a> From<&'a FontStyleStruct> for FontDescriptor {
fn from(style: &'a FontStyleStruct) -> Self {
FontDescriptor {
template_descriptor: FontTemplateDescriptor::from(style),
variant: style.font_variant_caps,
pt_size: style.font_size.size(),
}
}
}
#[derive(Debug)]
pub struct Font {
pub handle: FontHandle,
pub metrics: FontMetrics,
pub descriptor: FontDescriptor,
pub actual_pt_size: Au,
shaper: Option<Shaper>,
shape_cache: RefCell<HashMap<ShapeCacheEntry, Arc<GlyphStore>>>,
glyph_advance_cache: RefCell<HashMap<u32, FractionalPixel>>,
pub font_key: webrender_api::FontInstanceKey,
}
impl Font {
pub fn new(handle: FontHandle,
descriptor: FontDescriptor,
actual_pt_size: Au,
font_key: webrender_api::FontInstanceKey) -> Font {
let metrics = handle.metrics();
Font {
handle: handle,
shaper: None,
descriptor,
actual_pt_size,
metrics,
shape_cache: RefCell::new(HashMap::new()),
glyph_advance_cache: RefCell::new(HashMap::new()),
font_key,
}
}
/// A unique identifier for the font, allowing comparison.
pub fn identifier(&self) -> Atom {
self.handle.identifier()
}
}
bitflags! {
pub struct ShapingFlags: u8 {
#[doc = "Set if the text is entirely whitespace."]
const IS_WHITESPACE_SHAPING_FLAG = 0x01;
#[doc = "Set if we are to ignore ligatures."]
const IGNORE_LIGATURES_SHAPING_FLAG = 0x02;
#[doc = "Set if we are to disable kerning."]
const DISABLE_KERNING_SHAPING_FLAG = 0x04;
#[doc = "Text direction is right-to-left."]
const RTL_FLAG = 0x08;
#[doc = "Set if word-break is set to keep-all."]
const KEEP_ALL_FLAG = 0x10;
}
}
/// Various options that control text shaping.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct ShapingOptions {
/// Spacing to add between each letter. Corresponds to the CSS 2.1 `letter-spacing` property.
/// NB: You will probably want to set the `IGNORE_LIGATURES_SHAPING_FLAG` if this is non-null.
pub letter_spacing: Option<Au>,
/// Spacing to add between each word. Corresponds to the CSS 2.1 `word-spacing` property.
pub word_spacing: (Au, NotNan<f32>),
/// The Unicode script property of the characters in this run.
pub script: Script,
/// Various flags.
pub flags: ShapingFlags,
}
/// An entry in the shape cache.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct ShapeCacheEntry {
text: String,
options: ShapingOptions,
}
impl Font {
pub fn shape_text(&mut self, text: &str, options: &ShapingOptions) -> Arc<GlyphStore> {
let this = self as *const Font;
let mut shaper = self.shaper.take();
let lookup_key = ShapeCacheEntry {
text: text.to_owned(),
options: *options,
};
let result = self.shape_cache.borrow_mut().entry(lookup_key).or_insert_with(|| {
let start_time = time::precise_time_ns();
let mut glyphs = GlyphStore::new(text.len(),
options.flags.contains(ShapingFlags::IS_WHITESPACE_SHAPING_FLAG),
options.flags.contains(ShapingFlags::RTL_FLAG));
if self.can_do_fast_shaping(text, options) {
debug!("shape_text: Using ASCII fast path.");
self.shape_text_fast(text, options, &mut glyphs);
} else {
debug!("shape_text: Using Harfbuzz.");
if shaper.is_none() {
shaper = Some(Shaper::new(this));
}
shaper.as_ref().unwrap().shape_text(text, options, &mut glyphs);
}
let end_time = time::precise_time_ns();
TEXT_SHAPING_PERFORMANCE_COUNTER.fetch_add((end_time - start_time) as usize,
Ordering::Relaxed);
Arc::new(glyphs)
}).clone();
self.shaper = shaper;
result
}
fn can_do_fast_shaping(&self, text: &str, options: &ShapingOptions) -> bool {
options.script == Script::Latin &&
!options.flags.contains(ShapingFlags::RTL_FLAG) &&
self.handle.can_do_fast_shaping() &&
text.is_ascii()
}
/// Fast path for ASCII text that only needs simple horizontal LTR kerning.
fn shape_text_fast(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore) {
let mut prev_glyph_id = None;
for (i, byte) in text.bytes().enumerate() {
let character = byte as char;
let glyph_id = match self.glyph_index(character) {
Some(id) => id,
None => continue,
};
let mut advance = Au::from_f64_px(self.glyph_h_advance(glyph_id));
if character == ' ' {
// https://drafts.csswg.org/css-text-3/#word-spacing-property
let (length, percent) = options.word_spacing;
advance = (advance + length) + Au((advance.0 as f32 * percent.into_inner()) as i32);
}
if let Some(letter_spacing) = options.letter_spacing {
advance += letter_spacing;
}
let offset = prev_glyph_id.map(|prev| {
let h_kerning = Au::from_f64_px(self.glyph_h_kerning(prev, glyph_id));
advance += h_kerning;
Point2D::new(h_kerning, Au(0))
});
let glyph = GlyphData::new(glyph_id, advance, offset, true, true);
glyphs.add_glyph_for_byte_index(ByteIndex(i as isize), character, &glyph);
prev_glyph_id = Some(glyph_id);
}
glyphs.finalize_changes();
}
pub fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
let result = self.handle.table_for_tag(tag);
let status = if result.is_some() { "Found" } else { "Didn't find" };
debug!("{} font table[{}] with family={}, face={}",
status, tag.tag_to_str(),
self.handle.family_name(), self.handle.face_name().unwrap_or("unavailable".to_owned()));
result
}
#[inline]
pub fn glyph_index(&self, codepoint: char) -> Option<GlyphId> {
let codepoint = match self.descriptor.variant {
font_variant_caps::T::SmallCaps => codepoint.to_uppercase().next().unwrap(), //FIXME: #5938
font_variant_caps::T::Normal => codepoint,
};
self.handle.glyph_index(codepoint)
}
pub fn has_glyph_for(&self, codepoint: char) -> bool {
self.glyph_index(codepoint).is_some()
}
pub fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId)
-> FractionalPixel {
self.handle.glyph_h_kerning(first_glyph, second_glyph)
}
pub fn glyph_h_advance(&self, glyph: GlyphId) -> FractionalPixel {
*self.glyph_advance_cache.borrow_mut().entry(glyph).or_insert_with(|| {
match self.handle.glyph_h_advance(glyph) {
Some(adv) => adv,
None => 10f64 as FractionalPixel // FIXME: Need fallback strategy
}
})
}
}
pub type FontRef = Rc<RefCell<Font>>;
/// A `FontGroup` is a prioritised list of fonts for a given set of font styles. It is used by
/// `TextRun` to decide which font to render a character with. If none of the fonts listed in the
/// styles are suitable, a fallback font may be used.
#[derive(Debug)]
pub struct FontGroup {
descriptor: FontDescriptor,
families: SmallVec<[FontGroupFamily; 8]>,
last_matching_fallback: Option<FontRef>,
}
impl FontGroup {
pub fn new(style: &FontStyleStruct) -> FontGroup {
let descriptor = FontDescriptor::from(style);
let families =
style.font_family.0.iter()
.map(|family| FontGroupFamily::new(descriptor.clone(), &family))
.collect();
FontGroup {
descriptor,
families,
last_matching_fallback: None,
}
}
/// Finds the first font, or else the first fallback font, which contains a glyph for
/// `codepoint`. If no such font is found, returns the first available font or fallback font
/// (which will cause a "glyph not found" character to be rendered). If no font at all can be
/// found, returns None.
pub fn find_by_codepoint<S: FontSource>(
&mut self,
mut font_context: &mut FontContext<S>,
codepoint: char
) -> Option<FontRef> {
let has_glyph = |font: &FontRef| font.borrow().has_glyph_for(codepoint);
let font = self.find(&mut font_context, |font| has_glyph(font));
if font.is_some() {
return font
}
if let Some(ref fallback) = self.last_matching_fallback {
if has_glyph(&fallback) {
return self.last_matching_fallback.clone()
}
}
let font = self.find_fallback(&mut font_context, Some(codepoint), has_glyph);
if font.is_some() {
self.last_matching_fallback = font.clone();
return font
}
self.first(&mut font_context)
}
/// Find the first available font in the group, or the first available fallback font.
pub fn first<S: FontSource>(
&mut self,
mut font_context: &mut FontContext<S>
) -> Option<FontRef> {
self.find(&mut font_context, |_| true)
.or_else(|| self.find_fallback(&mut font_context, None, |_| true))
}
/// Find a font which returns true for `predicate`. This method mutates because we may need to
/// load new font data in the process of finding a suitable font.
fn find<S, P>(
&mut self,
mut font_context: &mut FontContext<S>,
predicate: P,
) -> Option<FontRef>
where
S: FontSource,
P: FnMut(&FontRef) -> bool,
{
self.families.iter_mut()
.filter_map(|family| family.font(&mut font_context))
.find(predicate)
}
/// Attempts to find a suitable fallback font which matches the `predicate`. The default
/// family (i.e. "serif") will be tried first, followed by platform-specific family names.
/// If a `codepoint` is provided, then its Unicode block may be used to refine the list of
/// family names which will be tried.
fn find_fallback<S, P>(
&mut self,
font_context: &mut FontContext<S>,
codepoint: Option<char>,
predicate: P,
) -> Option<FontRef>
where
S: FontSource,
P: FnMut(&FontRef) -> bool,
{
iter::once(FontFamilyDescriptor::default())
.chain(
fallback_font_families(codepoint).into_iter().map(|family| {
FontFamilyDescriptor::new(
FontFamilyName::from(family),
FontSearchScope::Local,
)
})
)
.filter_map(|family| font_context.font(&self.descriptor, &family))
.find(predicate)
}
}
/// A `FontGroupFamily` is a single font family in a `FontGroup`. It corresponds to one of the
/// families listed in the `font-family` CSS property. The corresponding font data is lazy-loaded,
/// only if actually needed.
#[derive(Debug)]
struct FontGroupFamily {
font_descriptor: FontDescriptor,
family_descriptor: FontFamilyDescriptor,
loaded: bool,
font: Option<FontRef>,
}
impl FontGroupFamily {
fn new(font_descriptor: FontDescriptor, family: &SingleFontFamily) -> FontGroupFamily {
let family_descriptor = FontFamilyDescriptor::new(
FontFamilyName::from(family),
FontSearchScope::Any
);
FontGroupFamily {
font_descriptor,
family_descriptor,
loaded: false,
font: None,
}
}
/// Returns the font within this family which matches the style. We'll fetch the data from the
/// `FontContext` the first time this method is called, and return a cached reference on
/// subsequent calls.
fn font<S: FontSource>(&mut self, font_context: &mut FontContext<S>) -> Option<FontRef> {
if !self.loaded {
self.font = font_context.font(&self.font_descriptor, &self.family_descriptor);
self.loaded = true;
}
self.font.clone()
}
}
pub struct RunMetrics {
// may be negative due to negative width (i.e., kerning of '.' in 'P.T.')
pub advance_width: Au,
pub ascent: Au, // nonzero
pub descent: Au, // nonzero
// this bounding box is relative to the left origin baseline.
// so, bounding_box.position.y = -ascent
pub bounding_box: Rect<Au>
}
impl RunMetrics {
pub fn new(advance: Au, ascent: Au, descent: Au) -> RunMetrics {
let bounds = Rect::new(Point2D::new(Au(0), -ascent),
Size2D::new(advance, ascent + descent));
// TODO(Issue #125): support loose and tight bounding boxes; using the
// ascent+descent and advance is sometimes too generous and
// looking at actual glyph extents can yield a tighter box.
RunMetrics {
advance_width: advance,
bounding_box: bounds,
ascent: ascent,
descent: descent,
}
}
}
pub fn get_and_reset_text_shaping_performance_counter() -> usize
|
/// The scope within which we will look for a font.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum FontSearchScope {
/// All fonts will be searched, including those specified via `@font-face` rules.
Any,
/// Only local system fonts will be searched.
Local,
}
/// A font family name used in font selection.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum FontFamilyName {
/// A specific name such as `"Arial"`
Specific(Atom),
/// A generic name such as `sans-serif`
Generic(Atom),
}
impl FontFamilyName {
pub fn name(&self) -> &str {
match *self {
FontFamilyName::Specific(ref name) => name,
FontFamilyName::Generic(ref name) => name,
}
}
}
impl<'a> From<&'a SingleFontFamily> for FontFamilyName {
fn from(other: &'a SingleFontFamily) -> FontFamilyName {
match *other {
SingleFontFamily::FamilyName(ref family_name) =>
FontFamilyName::Specific(family_name.name.clone()),
SingleFontFamily::Generic(ref generic_name) =>
FontFamilyName::Generic(generic_name.clone()),
}
}
}
impl<'a> From<&'a str> for FontFamilyName {
fn from(other: &'a str) -> FontFamilyName {
FontFamilyName::Specific(Atom::from(other))
}
}
/// The font family parameters for font selection.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub struct FontFamilyDescriptor {
pub name: FontFamilyName,
pub scope: FontSearchScope,
}
impl FontFamilyDescriptor {
pub fn new(name: FontFamilyName, scope: FontSearchScope) -> FontFamilyDescriptor {
FontFamilyDescriptor { name, scope }
}
fn default() -> FontFamilyDescriptor {
FontFamilyDescriptor {
name: FontFamilyName::Generic(atom!("serif")),
scope: FontSearchScope::Local,
}
}
pub fn name(&self) -> &str {
|
{
let value = TEXT_SHAPING_PERFORMANCE_COUNTER.load(Ordering::SeqCst);
TEXT_SHAPING_PERFORMANCE_COUNTER.store(0, Ordering::SeqCst);
value
}
|
identifier_body
|
font.rs
|
the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use euclid::{Point2D, Rect, Size2D};
use font_context::{FontContext, FontSource};
use font_template::FontTemplateDescriptor;
use ordered_float::NotNan;
use platform::font::{FontHandle, FontTable};
use platform::font_context::FontContextHandle;
pub use platform::font_list::fallback_font_families;
use platform::font_template::FontTemplateData;
use servo_atoms::Atom;
use smallvec::SmallVec;
use std::borrow::ToOwned;
use std::cell::RefCell;
use std::collections::HashMap;
use std::iter;
use std::rc::Rc;
use std::str;
use std::sync::Arc;
use std::sync::atomic::{ATOMIC_USIZE_INIT, AtomicUsize, Ordering};
use style::computed_values::{font_stretch, font_style, font_variant_caps, font_weight};
use style::properties::style_structs::Font as FontStyleStruct;
use style::values::computed::font::SingleFontFamily;
use text::Shaper;
use text::glyph::{ByteIndex, GlyphData, GlyphId, GlyphStore};
use text::shaping::ShaperMethods;
use time;
use unicode_script::Script;
use webrender_api;
macro_rules! ot_tag {
($t1:expr, $t2:expr, $t3:expr, $t4:expr) => (
(($t1 as u32) << 24) | (($t2 as u32) << 16) | (($t3 as u32) << 8) | ($t4 as u32)
);
}
pub const GPOS: u32 = ot_tag!('G', 'P', 'O', 'S');
pub const GSUB: u32 = ot_tag!('G', 'S', 'U', 'B');
pub const KERN: u32 = ot_tag!('k', 'e', 'r', 'n');
static TEXT_SHAPING_PERFORMANCE_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
// FontHandle encapsulates access to the platform's font API,
// e.g. quartz, FreeType. It provides access to metrics and tables
// needed by the text shaper as well as access to the underlying font
// resources needed by the graphics layer to draw glyphs.
pub trait FontHandleMethods: Sized {
fn new_from_template(
fctx: &FontContextHandle,
template: Arc<FontTemplateData>,
pt_size: Option<Au>,
) -> Result<Self, ()>;
fn template(&self) -> Arc<FontTemplateData>;
fn family_name(&self) -> String;
fn face_name(&self) -> Option<String>;
fn style(&self) -> font_style::T;
fn boldness(&self) -> font_weight::T;
fn stretchiness(&self) -> font_stretch::T;
fn glyph_index(&self, codepoint: char) -> Option<GlyphId>;
fn glyph_h_advance(&self, GlyphId) -> Option<FractionalPixel>;
fn glyph_h_kerning(&self, glyph0: GlyphId, glyph1: GlyphId) -> FractionalPixel;
/// Can this font do basic horizontal LTR shaping without Harfbuzz?
fn can_do_fast_shaping(&self) -> bool;
fn metrics(&self) -> FontMetrics;
fn table_for_tag(&self, FontTableTag) -> Option<FontTable>;
/// A unique identifier for the font, allowing comparison.
fn identifier(&self) -> Atom;
}
// Used to abstract over the shaper's choice of fixed int representation.
pub type FractionalPixel = f64;
pub type FontTableTag = u32;
trait FontTableTagConversions {
fn tag_to_str(&self) -> String;
}
impl FontTableTagConversions for FontTableTag {
fn tag_to_str(&self) -> String {
let bytes = [(self >> 24) as u8,
(self >> 16) as u8,
(self >> 8) as u8,
(self >> 0) as u8];
str::from_utf8(&bytes).unwrap().to_owned()
}
}
pub trait FontTableMethods {
fn buffer(&self) -> &[u8];
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct FontMetrics {
pub underline_size: Au,
pub underline_offset: Au,
pub strikeout_size: Au,
pub strikeout_offset: Au,
pub leading: Au,
pub x_height: Au,
pub em_size: Au,
pub ascent: Au,
pub descent: Au,
pub max_advance: Au,
pub average_advance: Au,
pub line_gap: Au,
}
/// `FontDescriptor` describes the parameters of a `Font`. It represents rendering a given font
/// template at a particular size, with a particular font-variant-caps applied, etc. This contrasts
/// with `FontTemplateDescriptor` in that the latter represents only the parameters inherent in the
/// font data (weight, stretch, etc.).
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct FontDescriptor {
pub template_descriptor: FontTemplateDescriptor,
pub variant: font_variant_caps::T,
pub pt_size: Au,
}
impl<'a> From<&'a FontStyleStruct> for FontDescriptor {
fn from(style: &'a FontStyleStruct) -> Self {
FontDescriptor {
template_descriptor: FontTemplateDescriptor::from(style),
variant: style.font_variant_caps,
pt_size: style.font_size.size(),
}
}
}
#[derive(Debug)]
pub struct Font {
pub handle: FontHandle,
pub metrics: FontMetrics,
pub descriptor: FontDescriptor,
pub actual_pt_size: Au,
shaper: Option<Shaper>,
shape_cache: RefCell<HashMap<ShapeCacheEntry, Arc<GlyphStore>>>,
glyph_advance_cache: RefCell<HashMap<u32, FractionalPixel>>,
pub font_key: webrender_api::FontInstanceKey,
}
impl Font {
pub fn new(handle: FontHandle,
descriptor: FontDescriptor,
actual_pt_size: Au,
font_key: webrender_api::FontInstanceKey) -> Font {
let metrics = handle.metrics();
Font {
handle: handle,
shaper: None,
descriptor,
actual_pt_size,
metrics,
shape_cache: RefCell::new(HashMap::new()),
glyph_advance_cache: RefCell::new(HashMap::new()),
font_key,
}
}
/// A unique identifier for the font, allowing comparison.
pub fn identifier(&self) -> Atom {
self.handle.identifier()
}
}
bitflags! {
pub struct ShapingFlags: u8 {
#[doc = "Set if the text is entirely whitespace."]
const IS_WHITESPACE_SHAPING_FLAG = 0x01;
#[doc = "Set if we are to ignore ligatures."]
const IGNORE_LIGATURES_SHAPING_FLAG = 0x02;
#[doc = "Set if we are to disable kerning."]
const DISABLE_KERNING_SHAPING_FLAG = 0x04;
#[doc = "Text direction is right-to-left."]
const RTL_FLAG = 0x08;
#[doc = "Set if word-break is set to keep-all."]
const KEEP_ALL_FLAG = 0x10;
}
}
/// Various options that control text shaping.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct ShapingOptions {
/// Spacing to add between each letter. Corresponds to the CSS 2.1 `letter-spacing` property.
/// NB: You will probably want to set the `IGNORE_LIGATURES_SHAPING_FLAG` if this is non-null.
pub letter_spacing: Option<Au>,
/// Spacing to add between each word. Corresponds to the CSS 2.1 `word-spacing` property.
pub word_spacing: (Au, NotNan<f32>),
/// The Unicode script property of the characters in this run.
pub script: Script,
/// Various flags.
pub flags: ShapingFlags,
}
/// An entry in the shape cache.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct ShapeCacheEntry {
text: String,
options: ShapingOptions,
}
impl Font {
pub fn shape_text(&mut self, text: &str, options: &ShapingOptions) -> Arc<GlyphStore> {
let this = self as *const Font;
let mut shaper = self.shaper.take();
let lookup_key = ShapeCacheEntry {
text: text.to_owned(),
options: *options,
};
let result = self.shape_cache.borrow_mut().entry(lookup_key).or_insert_with(|| {
let start_time = time::precise_time_ns();
let mut glyphs = GlyphStore::new(text.len(),
options.flags.contains(ShapingFlags::IS_WHITESPACE_SHAPING_FLAG),
options.flags.contains(ShapingFlags::RTL_FLAG));
if self.can_do_fast_shaping(text, options) {
debug!("shape_text: Using ASCII fast path.");
self.shape_text_fast(text, options, &mut glyphs);
} else {
debug!("shape_text: Using Harfbuzz.");
if shaper.is_none() {
shaper = Some(Shaper::new(this));
}
shaper.as_ref().unwrap().shape_text(text, options, &mut glyphs);
}
let end_time = time::precise_time_ns();
TEXT_SHAPING_PERFORMANCE_COUNTER.fetch_add((end_time - start_time) as usize,
Ordering::Relaxed);
Arc::new(glyphs)
}).clone();
self.shaper = shaper;
result
}
fn can_do_fast_shaping(&self, text: &str, options: &ShapingOptions) -> bool {
options.script == Script::Latin &&
!options.flags.contains(ShapingFlags::RTL_FLAG) &&
self.handle.can_do_fast_shaping() &&
text.is_ascii()
}
/// Fast path for ASCII text that only needs simple horizontal LTR kerning.
fn shape_text_fast(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore) {
let mut prev_glyph_id = None;
for (i, byte) in text.bytes().enumerate() {
let character = byte as char;
let glyph_id = match self.glyph_index(character) {
Some(id) => id,
None => continue,
};
let mut advance = Au::from_f64_px(self.glyph_h_advance(glyph_id));
if character == ' ' {
// https://drafts.csswg.org/css-text-3/#word-spacing-property
let (length, percent) = options.word_spacing;
advance = (advance + length) + Au((advance.0 as f32 * percent.into_inner()) as i32);
}
if let Some(letter_spacing) = options.letter_spacing {
advance += letter_spacing;
}
let offset = prev_glyph_id.map(|prev| {
let h_kerning = Au::from_f64_px(self.glyph_h_kerning(prev, glyph_id));
advance += h_kerning;
Point2D::new(h_kerning, Au(0))
});
let glyph = GlyphData::new(glyph_id, advance, offset, true, true);
glyphs.add_glyph_for_byte_index(ByteIndex(i as isize), character, &glyph);
prev_glyph_id = Some(glyph_id);
}
glyphs.finalize_changes();
}
pub fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
let result = self.handle.table_for_tag(tag);
let status = if result.is_some() { "Found" } else { "Didn't find" };
debug!("{} font table[{}] with family={}, face={}",
status, tag.tag_to_str(),
self.handle.family_name(), self.handle.face_name().unwrap_or("unavailable".to_owned()));
result
}
#[inline]
pub fn
|
(&self, codepoint: char) -> Option<GlyphId> {
let codepoint = match self.descriptor.variant {
font_variant_caps::T::SmallCaps => codepoint.to_uppercase().next().unwrap(), //FIXME: #5938
font_variant_caps::T::Normal => codepoint,
};
self.handle.glyph_index(codepoint)
}
pub fn has_glyph_for(&self, codepoint: char) -> bool {
self.glyph_index(codepoint).is_some()
}
pub fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId)
-> FractionalPixel {
self.handle.glyph_h_kerning(first_glyph, second_glyph)
}
pub fn glyph_h_advance(&self, glyph: GlyphId) -> FractionalPixel {
*self.glyph_advance_cache.borrow_mut().entry(glyph).or_insert_with(|| {
match self.handle.glyph_h_advance(glyph) {
Some(adv) => adv,
None => 10f64 as FractionalPixel // FIXME: Need fallback strategy
}
})
}
}
pub type FontRef = Rc<RefCell<Font>>;
/// A `FontGroup` is a prioritised list of fonts for a given set of font styles. It is used by
/// `TextRun` to decide which font to render a character with. If none of the fonts listed in the
/// styles are suitable, a fallback font may be used.
#[derive(Debug)]
pub struct FontGroup {
descriptor: FontDescriptor,
families: SmallVec<[FontGroupFamily; 8]>,
last_matching_fallback: Option<FontRef>,
}
impl FontGroup {
pub fn new(style: &FontStyleStruct) -> FontGroup {
let descriptor = FontDescriptor::from(style);
let families =
style.font_family.0.iter()
.map(|family| FontGroupFamily::new(descriptor.clone(), &family))
.collect();
FontGroup {
descriptor,
families,
last_matching_fallback: None,
}
}
/// Finds the first font, or else the first fallback font, which contains a glyph for
/// `codepoint`. If no such font is found, returns the first available font or fallback font
/// (which will cause a "glyph not found" character to be rendered). If no font at all can be
/// found, returns None.
pub fn find_by_codepoint<S: FontSource>(
&mut self,
mut font_context: &mut FontContext<S>,
codepoint: char
) -> Option<FontRef> {
let has_glyph = |font: &FontRef| font.borrow().has_glyph_for(codepoint);
let font = self.find(&mut font_context, |font| has_glyph(font));
if font.is_some() {
return font
}
if let Some(ref fallback) = self.last_matching_fallback {
if has_glyph(&fallback) {
return self.last_matching_fallback.clone()
}
}
let font = self.find_fallback(&mut font_context, Some(codepoint), has_glyph);
if font.is_some() {
self.last_matching_fallback = font.clone();
return font
}
self.first(&mut font_context)
}
/// Find the first available font in the group, or the first available fallback font.
pub fn first<S: FontSource>(
&mut self,
mut font_context: &mut FontContext<S>
) -> Option<FontRef> {
self.find(&mut font_context, |_| true)
.or_else(|| self.find_fallback(&mut font_context, None, |_| true))
}
/// Find a font which returns true for `predicate`. This method mutates because we may need to
/// load new font data in the process of finding a suitable font.
fn find<S, P>(
&mut self,
mut font_context: &mut FontContext<S>,
predicate: P,
) -> Option<FontRef>
where
S: FontSource,
P: FnMut(&FontRef) -> bool,
{
self.families.iter_mut()
.filter_map(|family| family.font(&mut font_context))
.find(predicate)
}
/// Attempts to find a suitable fallback font which matches the `predicate`. The default
/// family (i.e. "serif") will be tried first, followed by platform-specific family names.
/// If a `codepoint` is provided, then its Unicode block may be used to refine the list of
/// family names which will be tried.
fn find_fallback<S, P>(
&mut self,
font_context: &mut FontContext<S>,
codepoint: Option<char>,
predicate: P,
) -> Option<FontRef>
where
S: FontSource,
P: FnMut(&FontRef) -> bool,
{
iter::once(FontFamilyDescriptor::default())
.chain(
fallback_font_families(codepoint).into_iter().map(|family| {
FontFamilyDescriptor::new(
FontFamilyName::from(family),
FontSearchScope::Local,
)
})
)
.filter_map(|family| font_context.font(&self.descriptor, &family))
.find(predicate)
}
}
/// A `FontGroupFamily` is a single font family in a `FontGroup`. It corresponds to one of the
/// families listed in the `font-family` CSS property. The corresponding font data is lazy-loaded,
/// only if actually needed.
#[derive(Debug)]
struct FontGroupFamily {
font_descriptor: FontDescriptor,
family_descriptor: FontFamilyDescriptor,
loaded: bool,
font: Option<FontRef>,
}
impl FontGroupFamily {
fn new(font_descriptor: FontDescriptor, family: &SingleFontFamily) -> FontGroupFamily {
let family_descriptor = FontFamilyDescriptor::new(
FontFamilyName::from(family),
FontSearchScope::Any
);
FontGroupFamily {
font_descriptor,
family_descriptor,
loaded: false,
font: None,
}
}
/// Returns the font within this family which matches the style. We'll fetch the data from the
/// `FontContext` the first time this method is called, and return a cached reference on
/// subsequent calls.
fn font<S: FontSource>(&mut self, font_context: &mut FontContext<S>) -> Option<FontRef> {
if !self.loaded {
self.font = font_context.font(&self.font_descriptor, &self.family_descriptor);
self.loaded = true;
}
self.font.clone()
}
}
pub struct RunMetrics {
// may be negative due to negative width (i.e., kerning of '.' in 'P.T.')
pub advance_width: Au,
pub ascent: Au, // nonzero
pub descent: Au, // nonzero
// this bounding box is relative to the left origin baseline.
// so, bounding_box.position.y = -ascent
pub bounding_box: Rect<Au>
}
impl RunMetrics {
pub fn new(advance: Au, ascent: Au, descent: Au) -> RunMetrics {
let bounds = Rect::new(Point2D::new(Au(0), -ascent),
Size2D::new(advance, ascent + descent));
// TODO(Issue #125): support loose and tight bounding boxes; using the
// ascent+descent and advance is sometimes too generous and
// looking at actual glyph extents can yield a tighter box.
RunMetrics {
advance_width: advance,
bounding_box: bounds,
ascent: ascent,
descent: descent,
}
}
}
pub fn get_and_reset_text_shaping_performance_counter() -> usize {
let value = TEXT_SHAPING_PERFORMANCE_COUNTER.load(Ordering::SeqCst);
TEXT_SHAPING_PERFORMANCE_COUNTER.store(0, Ordering::SeqCst);
value
}
/// The scope within which we will look for a font.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum FontSearchScope {
/// All fonts will be searched, including those specified via `@font-face` rules.
Any,
/// Only local system fonts will be searched.
Local,
}
/// A font family name used in font selection.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum FontFamilyName {
/// A specific name such as `"Arial"`
Specific(Atom),
/// A generic name such as `sans-serif`
Generic(Atom),
}
impl FontFamilyName {
pub fn name(&self) -> &str {
match *self {
FontFamilyName::Specific(ref name) => name,
FontFamilyName::Generic(ref name) => name,
}
}
}
impl<'a> From<&'a SingleFontFamily> for FontFamilyName {
fn from(other: &'a SingleFontFamily) -> FontFamilyName {
match *other {
SingleFontFamily::FamilyName(ref family_name) =>
FontFamilyName::Specific(family_name.name.clone()),
SingleFontFamily::Generic(ref generic_name) =>
FontFamilyName::Generic(generic_name.clone()),
}
}
}
impl<'a> From<&'a str> for FontFamilyName {
fn from(other: &'a str) -> FontFamilyName {
FontFamilyName::Specific(Atom::from(other))
}
}
/// The font family parameters for font selection.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub struct FontFamilyDescriptor {
pub name: FontFamilyName,
pub scope: FontSearchScope,
}
impl FontFamilyDescriptor {
pub fn new(name: FontFamilyName, scope: FontSearchScope) -> FontFamilyDescriptor {
FontFamilyDescriptor { name, scope }
}
fn default() -> FontFamilyDescriptor {
FontFamilyDescriptor {
name: FontFamilyName::Generic(atom!("serif")),
scope: FontSearchScope::Local,
}
}
pub fn name(&self) -> &str {
|
glyph_index
|
identifier_name
|
font.rs
|
subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use euclid::{Point2D, Rect, Size2D};
use font_context::{FontContext, FontSource};
use font_template::FontTemplateDescriptor;
use ordered_float::NotNan;
use platform::font::{FontHandle, FontTable};
use platform::font_context::FontContextHandle;
pub use platform::font_list::fallback_font_families;
use platform::font_template::FontTemplateData;
use servo_atoms::Atom;
use smallvec::SmallVec;
use std::borrow::ToOwned;
use std::cell::RefCell;
use std::collections::HashMap;
use std::iter;
use std::rc::Rc;
use std::str;
use std::sync::Arc;
use std::sync::atomic::{ATOMIC_USIZE_INIT, AtomicUsize, Ordering};
use style::computed_values::{font_stretch, font_style, font_variant_caps, font_weight};
use style::properties::style_structs::Font as FontStyleStruct;
use style::values::computed::font::SingleFontFamily;
use text::Shaper;
use text::glyph::{ByteIndex, GlyphData, GlyphId, GlyphStore};
use text::shaping::ShaperMethods;
use time;
use unicode_script::Script;
use webrender_api;
|
(($t1 as u32) << 24) | (($t2 as u32) << 16) | (($t3 as u32) << 8) | ($t4 as u32)
);
}
pub const GPOS: u32 = ot_tag!('G', 'P', 'O', 'S');
pub const GSUB: u32 = ot_tag!('G', 'S', 'U', 'B');
pub const KERN: u32 = ot_tag!('k', 'e', 'r', 'n');
static TEXT_SHAPING_PERFORMANCE_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
// FontHandle encapsulates access to the platform's font API,
// e.g. quartz, FreeType. It provides access to metrics and tables
// needed by the text shaper as well as access to the underlying font
// resources needed by the graphics layer to draw glyphs.
pub trait FontHandleMethods: Sized {
fn new_from_template(
fctx: &FontContextHandle,
template: Arc<FontTemplateData>,
pt_size: Option<Au>,
) -> Result<Self, ()>;
fn template(&self) -> Arc<FontTemplateData>;
fn family_name(&self) -> String;
fn face_name(&self) -> Option<String>;
fn style(&self) -> font_style::T;
fn boldness(&self) -> font_weight::T;
fn stretchiness(&self) -> font_stretch::T;
fn glyph_index(&self, codepoint: char) -> Option<GlyphId>;
fn glyph_h_advance(&self, GlyphId) -> Option<FractionalPixel>;
fn glyph_h_kerning(&self, glyph0: GlyphId, glyph1: GlyphId) -> FractionalPixel;
/// Can this font do basic horizontal LTR shaping without Harfbuzz?
fn can_do_fast_shaping(&self) -> bool;
fn metrics(&self) -> FontMetrics;
fn table_for_tag(&self, FontTableTag) -> Option<FontTable>;
/// A unique identifier for the font, allowing comparison.
fn identifier(&self) -> Atom;
}
// Used to abstract over the shaper's choice of fixed int representation.
pub type FractionalPixel = f64;
pub type FontTableTag = u32;
trait FontTableTagConversions {
fn tag_to_str(&self) -> String;
}
impl FontTableTagConversions for FontTableTag {
fn tag_to_str(&self) -> String {
let bytes = [(self >> 24) as u8,
(self >> 16) as u8,
(self >> 8) as u8,
(self >> 0) as u8];
str::from_utf8(&bytes).unwrap().to_owned()
}
}
pub trait FontTableMethods {
fn buffer(&self) -> &[u8];
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct FontMetrics {
pub underline_size: Au,
pub underline_offset: Au,
pub strikeout_size: Au,
pub strikeout_offset: Au,
pub leading: Au,
pub x_height: Au,
pub em_size: Au,
pub ascent: Au,
pub descent: Au,
pub max_advance: Au,
pub average_advance: Au,
pub line_gap: Au,
}
/// `FontDescriptor` describes the parameters of a `Font`. It represents rendering a given font
/// template at a particular size, with a particular font-variant-caps applied, etc. This contrasts
/// with `FontTemplateDescriptor` in that the latter represents only the parameters inherent in the
/// font data (weight, stretch, etc.).
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct FontDescriptor {
pub template_descriptor: FontTemplateDescriptor,
pub variant: font_variant_caps::T,
pub pt_size: Au,
}
impl<'a> From<&'a FontStyleStruct> for FontDescriptor {
fn from(style: &'a FontStyleStruct) -> Self {
FontDescriptor {
template_descriptor: FontTemplateDescriptor::from(style),
variant: style.font_variant_caps,
pt_size: style.font_size.size(),
}
}
}
#[derive(Debug)]
pub struct Font {
pub handle: FontHandle,
pub metrics: FontMetrics,
pub descriptor: FontDescriptor,
pub actual_pt_size: Au,
shaper: Option<Shaper>,
shape_cache: RefCell<HashMap<ShapeCacheEntry, Arc<GlyphStore>>>,
glyph_advance_cache: RefCell<HashMap<u32, FractionalPixel>>,
pub font_key: webrender_api::FontInstanceKey,
}
impl Font {
pub fn new(handle: FontHandle,
descriptor: FontDescriptor,
actual_pt_size: Au,
font_key: webrender_api::FontInstanceKey) -> Font {
let metrics = handle.metrics();
Font {
handle: handle,
shaper: None,
descriptor,
actual_pt_size,
metrics,
shape_cache: RefCell::new(HashMap::new()),
glyph_advance_cache: RefCell::new(HashMap::new()),
font_key,
}
}
/// A unique identifier for the font, allowing comparison.
pub fn identifier(&self) -> Atom {
self.handle.identifier()
}
}
bitflags! {
pub struct ShapingFlags: u8 {
#[doc = "Set if the text is entirely whitespace."]
const IS_WHITESPACE_SHAPING_FLAG = 0x01;
#[doc = "Set if we are to ignore ligatures."]
const IGNORE_LIGATURES_SHAPING_FLAG = 0x02;
#[doc = "Set if we are to disable kerning."]
const DISABLE_KERNING_SHAPING_FLAG = 0x04;
#[doc = "Text direction is right-to-left."]
const RTL_FLAG = 0x08;
#[doc = "Set if word-break is set to keep-all."]
const KEEP_ALL_FLAG = 0x10;
}
}
/// Various options that control text shaping.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct ShapingOptions {
/// Spacing to add between each letter. Corresponds to the CSS 2.1 `letter-spacing` property.
/// NB: You will probably want to set the `IGNORE_LIGATURES_SHAPING_FLAG` if this is non-null.
pub letter_spacing: Option<Au>,
/// Spacing to add between each word. Corresponds to the CSS 2.1 `word-spacing` property.
pub word_spacing: (Au, NotNan<f32>),
/// The Unicode script property of the characters in this run.
pub script: Script,
/// Various flags.
pub flags: ShapingFlags,
}
/// An entry in the shape cache.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
struct ShapeCacheEntry {
text: String,
options: ShapingOptions,
}
impl Font {
pub fn shape_text(&mut self, text: &str, options: &ShapingOptions) -> Arc<GlyphStore> {
let this = self as *const Font;
let mut shaper = self.shaper.take();
let lookup_key = ShapeCacheEntry {
text: text.to_owned(),
options: *options,
};
let result = self.shape_cache.borrow_mut().entry(lookup_key).or_insert_with(|| {
let start_time = time::precise_time_ns();
let mut glyphs = GlyphStore::new(text.len(),
options.flags.contains(ShapingFlags::IS_WHITESPACE_SHAPING_FLAG),
options.flags.contains(ShapingFlags::RTL_FLAG));
if self.can_do_fast_shaping(text, options) {
debug!("shape_text: Using ASCII fast path.");
self.shape_text_fast(text, options, &mut glyphs);
} else {
debug!("shape_text: Using Harfbuzz.");
if shaper.is_none() {
shaper = Some(Shaper::new(this));
}
shaper.as_ref().unwrap().shape_text(text, options, &mut glyphs);
}
let end_time = time::precise_time_ns();
TEXT_SHAPING_PERFORMANCE_COUNTER.fetch_add((end_time - start_time) as usize,
Ordering::Relaxed);
Arc::new(glyphs)
}).clone();
self.shaper = shaper;
result
}
fn can_do_fast_shaping(&self, text: &str, options: &ShapingOptions) -> bool {
options.script == Script::Latin &&
!options.flags.contains(ShapingFlags::RTL_FLAG) &&
self.handle.can_do_fast_shaping() &&
text.is_ascii()
}
/// Fast path for ASCII text that only needs simple horizontal LTR kerning.
fn shape_text_fast(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore) {
let mut prev_glyph_id = None;
for (i, byte) in text.bytes().enumerate() {
let character = byte as char;
let glyph_id = match self.glyph_index(character) {
Some(id) => id,
None => continue,
};
let mut advance = Au::from_f64_px(self.glyph_h_advance(glyph_id));
if character == ' ' {
// https://drafts.csswg.org/css-text-3/#word-spacing-property
let (length, percent) = options.word_spacing;
advance = (advance + length) + Au((advance.0 as f32 * percent.into_inner()) as i32);
}
if let Some(letter_spacing) = options.letter_spacing {
advance += letter_spacing;
}
let offset = prev_glyph_id.map(|prev| {
let h_kerning = Au::from_f64_px(self.glyph_h_kerning(prev, glyph_id));
advance += h_kerning;
Point2D::new(h_kerning, Au(0))
});
let glyph = GlyphData::new(glyph_id, advance, offset, true, true);
glyphs.add_glyph_for_byte_index(ByteIndex(i as isize), character, &glyph);
prev_glyph_id = Some(glyph_id);
}
glyphs.finalize_changes();
}
pub fn table_for_tag(&self, tag: FontTableTag) -> Option<FontTable> {
let result = self.handle.table_for_tag(tag);
let status = if result.is_some() { "Found" } else { "Didn't find" };
debug!("{} font table[{}] with family={}, face={}",
status, tag.tag_to_str(),
self.handle.family_name(), self.handle.face_name().unwrap_or("unavailable".to_owned()));
result
}
#[inline]
pub fn glyph_index(&self, codepoint: char) -> Option<GlyphId> {
let codepoint = match self.descriptor.variant {
font_variant_caps::T::SmallCaps => codepoint.to_uppercase().next().unwrap(), //FIXME: #5938
font_variant_caps::T::Normal => codepoint,
};
self.handle.glyph_index(codepoint)
}
pub fn has_glyph_for(&self, codepoint: char) -> bool {
self.glyph_index(codepoint).is_some()
}
pub fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId)
-> FractionalPixel {
self.handle.glyph_h_kerning(first_glyph, second_glyph)
}
pub fn glyph_h_advance(&self, glyph: GlyphId) -> FractionalPixel {
*self.glyph_advance_cache.borrow_mut().entry(glyph).or_insert_with(|| {
match self.handle.glyph_h_advance(glyph) {
Some(adv) => adv,
None => 10f64 as FractionalPixel // FIXME: Need fallback strategy
}
})
}
}
pub type FontRef = Rc<RefCell<Font>>;
/// A `FontGroup` is a prioritised list of fonts for a given set of font styles. It is used by
/// `TextRun` to decide which font to render a character with. If none of the fonts listed in the
/// styles are suitable, a fallback font may be used.
#[derive(Debug)]
pub struct FontGroup {
descriptor: FontDescriptor,
families: SmallVec<[FontGroupFamily; 8]>,
last_matching_fallback: Option<FontRef>,
}
impl FontGroup {
pub fn new(style: &FontStyleStruct) -> FontGroup {
let descriptor = FontDescriptor::from(style);
let families =
style.font_family.0.iter()
.map(|family| FontGroupFamily::new(descriptor.clone(), &family))
.collect();
FontGroup {
descriptor,
families,
last_matching_fallback: None,
}
}
/// Finds the first font, or else the first fallback font, which contains a glyph for
/// `codepoint`. If no such font is found, returns the first available font or fallback font
/// (which will cause a "glyph not found" character to be rendered). If no font at all can be
/// found, returns None.
pub fn find_by_codepoint<S: FontSource>(
&mut self,
mut font_context: &mut FontContext<S>,
codepoint: char
) -> Option<FontRef> {
let has_glyph = |font: &FontRef| font.borrow().has_glyph_for(codepoint);
let font = self.find(&mut font_context, |font| has_glyph(font));
if font.is_some() {
return font
}
if let Some(ref fallback) = self.last_matching_fallback {
if has_glyph(&fallback) {
return self.last_matching_fallback.clone()
}
}
let font = self.find_fallback(&mut font_context, Some(codepoint), has_glyph);
if font.is_some() {
self.last_matching_fallback = font.clone();
return font
}
self.first(&mut font_context)
}
/// Find the first available font in the group, or the first available fallback font.
pub fn first<S: FontSource>(
&mut self,
mut font_context: &mut FontContext<S>
) -> Option<FontRef> {
self.find(&mut font_context, |_| true)
.or_else(|| self.find_fallback(&mut font_context, None, |_| true))
}
/// Find a font which returns true for `predicate`. This method mutates because we may need to
/// load new font data in the process of finding a suitable font.
fn find<S, P>(
&mut self,
mut font_context: &mut FontContext<S>,
predicate: P,
) -> Option<FontRef>
where
S: FontSource,
P: FnMut(&FontRef) -> bool,
{
self.families.iter_mut()
.filter_map(|family| family.font(&mut font_context))
.find(predicate)
}
/// Attempts to find a suitable fallback font which matches the `predicate`. The default
/// family (i.e. "serif") will be tried first, followed by platform-specific family names.
/// If a `codepoint` is provided, then its Unicode block may be used to refine the list of
/// family names which will be tried.
fn find_fallback<S, P>(
&mut self,
font_context: &mut FontContext<S>,
codepoint: Option<char>,
predicate: P,
) -> Option<FontRef>
where
S: FontSource,
P: FnMut(&FontRef) -> bool,
{
iter::once(FontFamilyDescriptor::default())
.chain(
fallback_font_families(codepoint).into_iter().map(|family| {
FontFamilyDescriptor::new(
FontFamilyName::from(family),
FontSearchScope::Local,
)
})
)
.filter_map(|family| font_context.font(&self.descriptor, &family))
.find(predicate)
}
}
/// A `FontGroupFamily` is a single font family in a `FontGroup`. It corresponds to one of the
/// families listed in the `font-family` CSS property. The corresponding font data is lazy-loaded,
/// only if actually needed.
#[derive(Debug)]
struct FontGroupFamily {
font_descriptor: FontDescriptor,
family_descriptor: FontFamilyDescriptor,
loaded: bool,
font: Option<FontRef>,
}
impl FontGroupFamily {
fn new(font_descriptor: FontDescriptor, family: &SingleFontFamily) -> FontGroupFamily {
let family_descriptor = FontFamilyDescriptor::new(
FontFamilyName::from(family),
FontSearchScope::Any
);
FontGroupFamily {
font_descriptor,
family_descriptor,
loaded: false,
font: None,
}
}
/// Returns the font within this family which matches the style. We'll fetch the data from the
/// `FontContext` the first time this method is called, and return a cached reference on
/// subsequent calls.
fn font<S: FontSource>(&mut self, font_context: &mut FontContext<S>) -> Option<FontRef> {
if !self.loaded {
self.font = font_context.font(&self.font_descriptor, &self.family_descriptor);
self.loaded = true;
}
self.font.clone()
}
}
pub struct RunMetrics {
// may be negative due to negative width (i.e., kerning of '.' in 'P.T.')
pub advance_width: Au,
pub ascent: Au, // nonzero
pub descent: Au, // nonzero
// this bounding box is relative to the left origin baseline.
// so, bounding_box.position.y = -ascent
pub bounding_box: Rect<Au>
}
impl RunMetrics {
pub fn new(advance: Au, ascent: Au, descent: Au) -> RunMetrics {
let bounds = Rect::new(Point2D::new(Au(0), -ascent),
Size2D::new(advance, ascent + descent));
// TODO(Issue #125): support loose and tight bounding boxes; using the
// ascent+descent and advance is sometimes too generous and
// looking at actual glyph extents can yield a tighter box.
RunMetrics {
advance_width: advance,
bounding_box: bounds,
ascent: ascent,
descent: descent,
}
}
}
pub fn get_and_reset_text_shaping_performance_counter() -> usize {
let value = TEXT_SHAPING_PERFORMANCE_COUNTER.load(Ordering::SeqCst);
TEXT_SHAPING_PERFORMANCE_COUNTER.store(0, Ordering::SeqCst);
value
}
/// The scope within which we will look for a font.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum FontSearchScope {
/// All fonts will be searched, including those specified via `@font-face` rules.
Any,
/// Only local system fonts will be searched.
Local,
}
/// A font family name used in font selection.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum FontFamilyName {
/// A specific name such as `"Arial"`
Specific(Atom),
/// A generic name such as `sans-serif`
Generic(Atom),
}
impl FontFamilyName {
pub fn name(&self) -> &str {
match *self {
FontFamilyName::Specific(ref name) => name,
FontFamilyName::Generic(ref name) => name,
}
}
}
impl<'a> From<&'a SingleFontFamily> for FontFamilyName {
fn from(other: &'a SingleFontFamily) -> FontFamilyName {
match *other {
SingleFontFamily::FamilyName(ref family_name) =>
FontFamilyName::Specific(family_name.name.clone()),
SingleFontFamily::Generic(ref generic_name) =>
FontFamilyName::Generic(generic_name.clone()),
}
}
}
impl<'a> From<&'a str> for FontFamilyName {
fn from(other: &'a str) -> FontFamilyName {
FontFamilyName::Specific(Atom::from(other))
}
}
/// The font family parameters for font selection.
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub struct FontFamilyDescriptor {
pub name: FontFamilyName,
pub scope: FontSearchScope,
}
impl FontFamilyDescriptor {
pub fn new(name: FontFamilyName, scope: FontSearchScope) -> FontFamilyDescriptor {
FontFamilyDescriptor { name, scope }
}
fn default() -> FontFamilyDescriptor {
FontFamilyDescriptor {
name: FontFamilyName::Generic(atom!("serif")),
scope: FontSearchScope::Local,
}
}
pub fn name(&self) -> &str {
|
macro_rules! ot_tag {
($t1:expr, $t2:expr, $t3:expr, $t4:expr) => (
|
random_line_split
|
event.rs
|
use std::io::Write;
use termion::event::Key;
use Editor;
pub type EventHandler<'a, W> = FnMut(Event<W>) + 'a;
pub struct Event<'a, 'out: 'a, W: Write + 'a> {
pub editor: &'a mut Editor<'out, W>,
|
pub fn new(editor: &'a mut Editor<'out, W>, kind: EventKind) -> Self {
Event {
editor: editor,
kind: kind,
}
}
}
#[derive(Debug)]
pub enum EventKind {
/// Sent before handling a keypress.
BeforeKey(Key),
/// Sent after handling a keypress.
AfterKey(Key),
/// Sent in `Editor.complete()`, before processing the completion.
BeforeComplete,
}
|
pub kind: EventKind,
}
impl<'a, 'out: 'a, W: Write + 'a> Event<'a, 'out, W> {
|
random_line_split
|
event.rs
|
use std::io::Write;
use termion::event::Key;
use Editor;
pub type EventHandler<'a, W> = FnMut(Event<W>) + 'a;
pub struct Event<'a, 'out: 'a, W: Write + 'a> {
pub editor: &'a mut Editor<'out, W>,
pub kind: EventKind,
}
impl<'a, 'out: 'a, W: Write + 'a> Event<'a, 'out, W> {
pub fn new(editor: &'a mut Editor<'out, W>, kind: EventKind) -> Self
|
}
#[derive(Debug)]
pub enum EventKind {
/// Sent before handling a keypress.
BeforeKey(Key),
/// Sent after handling a keypress.
AfterKey(Key),
/// Sent in `Editor.complete()`, before processing the completion.
BeforeComplete,
}
|
{
Event {
editor: editor,
kind: kind,
}
}
|
identifier_body
|
event.rs
|
use std::io::Write;
use termion::event::Key;
use Editor;
pub type EventHandler<'a, W> = FnMut(Event<W>) + 'a;
pub struct
|
<'a, 'out: 'a, W: Write + 'a> {
pub editor: &'a mut Editor<'out, W>,
pub kind: EventKind,
}
impl<'a, 'out: 'a, W: Write + 'a> Event<'a, 'out, W> {
pub fn new(editor: &'a mut Editor<'out, W>, kind: EventKind) -> Self {
Event {
editor: editor,
kind: kind,
}
}
}
#[derive(Debug)]
pub enum EventKind {
/// Sent before handling a keypress.
BeforeKey(Key),
/// Sent after handling a keypress.
AfterKey(Key),
/// Sent in `Editor.complete()`, before processing the completion.
BeforeComplete,
}
|
Event
|
identifier_name
|
lib.rs
|
#![recursion_limit = "1024"] // for error_chain!
extern crate rand;
extern crate scopeguard;
#[macro_use]
extern crate error_chain;
extern crate rustc_serialize;
extern crate sha2;
extern crate url;
extern crate toml;
extern crate download;
extern crate semver;
#[cfg(windows)]
extern crate winapi;
#[cfg(windows)]
extern crate winreg;
#[cfg(windows)]
extern crate shell32;
#[cfg(windows)]
extern crate ole32;
#[cfg(windows)]
extern crate kernel32;
#[cfg(windows)]
extern crate advapi32;
#[cfg(windows)]
extern crate userenv;
#[cfg(windows)]
#[macro_use]
extern crate lazy_static;
#[cfg(unix)]
extern crate libc;
pub mod errors;
pub mod notifications;
pub mod raw;
pub mod tty;
pub mod utils;
pub mod toml_utils;
|
pub use errors::*;
pub use notifications::{Notification};
pub mod notify;
|
random_line_split
|
|
core_application.rs
|
include_generated!();
/// A struct providing valid `argc` and `argv` values for Qt application
/// objects.
///
/// Constructors of `qt_core::core_application::CoreApplication`,
/// `qt_gui::gui_application::GuiApplication` and `qt_widgets::application::Application`
/// require `argc` and `argv` values that are available in C++'s `main` function but
/// not available in Rust. More importantly, `argc` and `argv` must be valid for the entire
/// life of the application. This struct stores a list of arguments in a format compatible with
/// `argc` and `argv`, and can be used to initialize Qt application objects.
/// `CoreApplicationArgs` must live longer than the application object.
///
/// `CoreApplication::create_and_exit` convenience function
/// and similar functions in the other application types
/// can be used instead of `CoreApplicationArgs`.
pub struct CoreApplicationArgs {
_values: Vec<Vec<u8>>,
argc: Box<::libc::c_int>,
argv: Vec<*mut ::libc::c_char>,
}
impl CoreApplicationArgs {
/// Creates an object containing `args`.
pub fn from(mut args: Vec<Vec<u8>>) -> CoreApplicationArgs {
for arg in &mut args {
if !arg.ends_with(&[0]) {
arg.push(0);
}
}
CoreApplicationArgs {
argc: Box::new(args.len() as ::libc::c_int),
argv: args
.iter_mut()
.map(|x| x.as_mut_ptr() as *mut ::libc::c_char)
.collect(),
_values: args,
}
}
/// Creates an object containing an empty list of arguments.
/// Although this is the cheapest way to construct a `CoreApplicationArgs`
/// object, it's not clear whether Qt considers an empty argument list valid.
pub fn empty() -> CoreApplicationArgs {
CoreApplicationArgs::from(Vec::new())
}
/// Returns `(argc, argv)` values in the form accepted by the application objects'
/// constructors.
pub fn get(&mut self) -> (&mut ::libc::c_int, *mut *mut ::libc::c_char) {
(self.argc.as_mut(), self.argv.as_mut_ptr())
}
#[cfg(unix)]
/// Creates an object representing real arguments of the application.
/// On Windows, this function uses empty argument list for performance reasons because
/// Qt doesn't use `argc` and `argv` on Windows at all.
pub fn from_real() -> CoreApplicationArgs
|
#[cfg(windows)]
/// Creates an object representing real arguments of the application.
/// On Windows, this function uses empty argument list for performance reasons because
/// Qt doesn't use `argc` and `argv` on Windows at all.
pub fn from_real() -> CoreApplicationArgs {
// Qt doesn't use argc and argv on Windows anyway
// TODO: check this
CoreApplicationArgs::empty()
}
}
impl ::core_application::CoreApplication {
/// A convenience function for performing proper initialization and de-initialization of
/// a Qt application.
///
/// This function creates `CoreApplication` with valid `argc` and `argv`, calls the passed
/// closure `f(app)` with the application object and exits the process with the exit code
/// returned by the closure. The closure should perform the initialization of the application
/// and either return immediately or call `CoreApplication::exec()` and return its return value:
/// ```
/// fn main() {
/// CoreApplication::create_and_exit(|app| {
/// // initialization goes here
/// CoreApplication::exec()
/// })
/// }
/// ```
pub fn create_and_exit<F: FnOnce(&mut ::core_application::CoreApplication) -> i32>(f: F) -> ! {
let exit_code = {
let mut args = CoreApplicationArgs::from_real();
let mut app = unsafe { ::core_application::CoreApplication::new(args.get()) };
f(app.as_mut())
};
::std::process::exit(exit_code)
}
}
|
{
use std::os::unix::ffi::OsStringExt;
let args = ::std::env::args_os().map(|arg| arg.into_vec()).collect();
CoreApplicationArgs::from(args)
}
|
identifier_body
|
core_application.rs
|
include_generated!();
/// A struct providing valid `argc` and `argv` values for Qt application
/// objects.
|
/// `qt_gui::gui_application::GuiApplication` and `qt_widgets::application::Application`
/// require `argc` and `argv` values that are available in C++'s `main` function but
/// not available in Rust. More importantly, `argc` and `argv` must be valid for the entire
/// life of the application. This struct stores a list of arguments in a format compatible with
/// `argc` and `argv`, and can be used to initialize Qt application objects.
/// `CoreApplicationArgs` must live longer than the application object.
///
/// `CoreApplication::create_and_exit` convenience function
/// and similar functions in the other application types
/// can be used instead of `CoreApplicationArgs`.
pub struct CoreApplicationArgs {
_values: Vec<Vec<u8>>,
argc: Box<::libc::c_int>,
argv: Vec<*mut ::libc::c_char>,
}
impl CoreApplicationArgs {
/// Creates an object containing `args`.
pub fn from(mut args: Vec<Vec<u8>>) -> CoreApplicationArgs {
for arg in &mut args {
if !arg.ends_with(&[0]) {
arg.push(0);
}
}
CoreApplicationArgs {
argc: Box::new(args.len() as ::libc::c_int),
argv: args
.iter_mut()
.map(|x| x.as_mut_ptr() as *mut ::libc::c_char)
.collect(),
_values: args,
}
}
/// Creates an object containing an empty list of arguments.
/// Although this is the cheapest way to construct a `CoreApplicationArgs`
/// object, it's not clear whether Qt considers an empty argument list valid.
pub fn empty() -> CoreApplicationArgs {
CoreApplicationArgs::from(Vec::new())
}
/// Returns `(argc, argv)` values in the form accepted by the application objects'
/// constructors.
pub fn get(&mut self) -> (&mut ::libc::c_int, *mut *mut ::libc::c_char) {
(self.argc.as_mut(), self.argv.as_mut_ptr())
}
#[cfg(unix)]
/// Creates an object representing real arguments of the application.
/// On Windows, this function uses empty argument list for performance reasons because
/// Qt doesn't use `argc` and `argv` on Windows at all.
pub fn from_real() -> CoreApplicationArgs {
use std::os::unix::ffi::OsStringExt;
let args = ::std::env::args_os().map(|arg| arg.into_vec()).collect();
CoreApplicationArgs::from(args)
}
#[cfg(windows)]
/// Creates an object representing real arguments of the application.
/// On Windows, this function uses empty argument list for performance reasons because
/// Qt doesn't use `argc` and `argv` on Windows at all.
pub fn from_real() -> CoreApplicationArgs {
// Qt doesn't use argc and argv on Windows anyway
// TODO: check this
CoreApplicationArgs::empty()
}
}
impl ::core_application::CoreApplication {
/// A convenience function for performing proper initialization and de-initialization of
/// a Qt application.
///
/// This function creates `CoreApplication` with valid `argc` and `argv`, calls the passed
/// closure `f(app)` with the application object and exits the process with the exit code
/// returned by the closure. The closure should perform the initialization of the application
/// and either return immediately or call `CoreApplication::exec()` and return its return value:
/// ```
/// fn main() {
/// CoreApplication::create_and_exit(|app| {
/// // initialization goes here
/// CoreApplication::exec()
/// })
/// }
/// ```
pub fn create_and_exit<F: FnOnce(&mut ::core_application::CoreApplication) -> i32>(f: F) -> ! {
let exit_code = {
let mut args = CoreApplicationArgs::from_real();
let mut app = unsafe { ::core_application::CoreApplication::new(args.get()) };
f(app.as_mut())
};
::std::process::exit(exit_code)
}
}
|
///
/// Constructors of `qt_core::core_application::CoreApplication`,
|
random_line_split
|
core_application.rs
|
include_generated!();
/// A struct providing valid `argc` and `argv` values for Qt application
/// objects.
///
/// Constructors of `qt_core::core_application::CoreApplication`,
/// `qt_gui::gui_application::GuiApplication` and `qt_widgets::application::Application`
/// require `argc` and `argv` values that are available in C++'s `main` function but
/// not available in Rust. More importantly, `argc` and `argv` must be valid for the entire
/// life of the application. This struct stores list of arguments in a format compatible with
/// `argc` and `argv`, and can be used to initialize Qt application objects.
/// `CoreApplicationArgs` must live longer than the application object.
///
/// `CoreApplication::create_and_exit` convenience function
/// and similar functions in the other application types
/// can be used instead of `CoreApplicationArgs`.
pub struct CoreApplicationArgs {
_values: Vec<Vec<u8>>,
argc: Box<::libc::c_int>,
argv: Vec<*mut ::libc::c_char>,
}
impl CoreApplicationArgs {
/// Creates an object containing `args`.
pub fn from(mut args: Vec<Vec<u8>>) -> CoreApplicationArgs {
for arg in &mut args {
if !arg.ends_with(&[0]) {
arg.push(0);
}
}
CoreApplicationArgs {
argc: Box::new(args.len() as ::libc::c_int),
argv: args
.iter_mut()
.map(|x| x.as_mut_ptr() as *mut ::libc::c_char)
.collect(),
_values: args,
}
}
/// Creates an object containing an empty list of arguments.
/// Although this is the cheapest way to construct a `CoreApplicationArgs`
/// object, it's not clear whether Qt considers an empty argument list valid.
pub fn empty() -> CoreApplicationArgs {
CoreApplicationArgs::from(Vec::new())
}
/// Returns `(argc, argv)` values in the form accepted by the application objects'
/// constructors.
pub fn
|
(&mut self) -> (&mut ::libc::c_int, *mut *mut ::libc::c_char) {
(self.argc.as_mut(), self.argv.as_mut_ptr())
}
#[cfg(unix)]
/// Creates an object representing real arguments of the application.
/// On Windows, this function uses empty argument list for performance reasons because
/// Qt doesn't use `argc` and `argv` on Windows at all.
pub fn from_real() -> CoreApplicationArgs {
use std::os::unix::ffi::OsStringExt;
let args = ::std::env::args_os().map(|arg| arg.into_vec()).collect();
CoreApplicationArgs::from(args)
}
#[cfg(windows)]
/// Creates an object representing real arguments of the application.
/// On Windows, this function uses empty argument list for performance reasons because
/// Qt doesn't use `argc` and `argv` on Windows at all.
pub fn from_real() -> CoreApplicationArgs {
// Qt doesn't use argc and argv on Windows anyway
// TODO: check this
CoreApplicationArgs::empty()
}
}
impl ::core_application::CoreApplication {
/// A convenience function for performing proper initialization and de-initialization of
/// a Qt application.
///
/// This function creates `CoreApplication` with valid `argc` and `argv`, calls the passed
/// closure `f(app)` with the application object and exits the process with the exit code
/// returned by the closure. The closure should perform the initialization of the application
/// and either return immediately or call `CoreApplication::exec()` and return its return value:
/// ```
/// fn main() {
/// CoreApplication::create_and_exit(|app| {
/// // initialization goes here
/// CoreApplication::exec()
/// })
/// }
/// ```
pub fn create_and_exit<F: FnOnce(&mut ::core_application::CoreApplication) -> i32>(f: F) -> ! {
let exit_code = {
let mut args = CoreApplicationArgs::from_real();
let mut app = unsafe { ::core_application::CoreApplication::new(args.get()) };
f(app.as_mut())
};
::std::process::exit(exit_code)
}
}
|
get
|
identifier_name
|
core_application.rs
|
include_generated!();
/// A struct providing valid `argc` and `argv` values for Qt application
/// objects.
///
/// Constructors of `qt_core::core_application::CoreApplication`,
/// `qt_gui::gui_application::GuiApplication` and `qt_widgets::application::Application`
/// require `argc` and `argv` values that are available in C++'s `main` function but
/// not available in Rust. More importantly, `argc` and `argv` must be valid for the entire
/// life of the application. This struct stores list of arguments in a format compatible with
/// `argc` and `argv`, and can be used to initialize Qt application objects.
/// `CoreApplicationArgs` must live longer than the application object.
///
/// `CoreApplication::create_and_exit` convenience function
/// and similar functions in the other application types
/// can be used instead of `CoreApplicationArgs`.
pub struct CoreApplicationArgs {
_values: Vec<Vec<u8>>,
argc: Box<::libc::c_int>,
argv: Vec<*mut ::libc::c_char>,
}
impl CoreApplicationArgs {
/// Creates an object containing `args`.
pub fn from(mut args: Vec<Vec<u8>>) -> CoreApplicationArgs {
for arg in &mut args {
if !arg.ends_with(&[0])
|
}
CoreApplicationArgs {
argc: Box::new(args.len() as ::libc::c_int),
argv: args
.iter_mut()
.map(|x| x.as_mut_ptr() as *mut ::libc::c_char)
.collect(),
_values: args,
}
}
/// Creates an object containing an empty list of arguments.
/// Although this is the cheapest way to construct a `CoreApplicationArgs`
/// object, it's not clear whether Qt considers an empty argument list valid.
pub fn empty() -> CoreApplicationArgs {
CoreApplicationArgs::from(Vec::new())
}
/// Returns `(argc, argv)` values in the form accepted by the application objects'
/// constructors.
pub fn get(&mut self) -> (&mut ::libc::c_int, *mut *mut ::libc::c_char) {
(self.argc.as_mut(), self.argv.as_mut_ptr())
}
#[cfg(unix)]
/// Creates an object representing real arguments of the application.
/// On Windows, this function uses empty argument list for performance reasons because
/// Qt doesn't use `argc` and `argv` on Windows at all.
pub fn from_real() -> CoreApplicationArgs {
use std::os::unix::ffi::OsStringExt;
let args = ::std::env::args_os().map(|arg| arg.into_vec()).collect();
CoreApplicationArgs::from(args)
}
#[cfg(windows)]
/// Creates an object representing real arguments of the application.
/// On Windows, this function uses empty argument list for performance reasons because
/// Qt doesn't use `argc` and `argv` on Windows at all.
pub fn from_real() -> CoreApplicationArgs {
// Qt doesn't use argc and argv on Windows anyway
// TODO: check this
CoreApplicationArgs::empty()
}
}
impl ::core_application::CoreApplication {
/// A convenience function for performing proper initialization and de-initialization of
/// a Qt application.
///
/// This function creates `CoreApplication` with valid `argc` and `argv`, calls the passed
/// closure `f(app)` with the application object and exits the process with the exit code
/// returned by the closure. The closure should perform the initialization of the application
/// and either return immediately or call `CoreApplication::exec()` and return its return value:
/// ```
/// fn main() {
/// CoreApplication::create_and_exit(|app| {
/// // initialization goes here
/// CoreApplication::exec()
/// })
/// }
/// ```
pub fn create_and_exit<F: FnOnce(&mut ::core_application::CoreApplication) -> i32>(f: F) -> ! {
let exit_code = {
let mut args = CoreApplicationArgs::from_real();
let mut app = unsafe { ::core_application::CoreApplication::new(args.get()) };
f(app.as_mut())
};
::std::process::exit(exit_code)
}
}
|
{
arg.push(0);
}
|
conditional_block
|
main.rs
|
extern crate dialoguer;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate quote;
use dialoguer::Input;
use std::path::PathBuf;
use std::{
fs::{self, File},
io::Write,
path::Path,
};
mod api_generator;
mod error;
mod rest_spec;
fn main() {
// This must be run from the src root directory, with cargo run -p api_generator
let download_dir = fs::canonicalize(PathBuf::from("./api_generator/rest_specs")).unwrap();
let generated_dir = fs::canonicalize(PathBuf::from("./elasticsearch/src/generated")).unwrap();
let last_downloaded_version = "./api_generator/last_downloaded_version";
let mut download_specs = false;
let mut answer = String::new();
let default_branch = if Path::new(last_downloaded_version).exists() {
fs::read_to_string(last_downloaded_version).expect("Could not read branch into string")
} else {
String::from("master")
};
let mut branch = default_branch.clone();
while answer != "y" && answer != "n" {
answer = Input::new()
.default(String::from("n"))
.show_default(false)
.with_prompt("Download rest specifications [y/N]")
.interact()
.unwrap()
.to_lowercase();
download_specs = answer == "y";
}
if download_specs {
branch = Input::new()
.default(default_branch.clone())
.show_default(false)
.with_prompt(
format!(
"Branch to download specification from [default {}]",
default_branch
)
.as_str(),
)
.interact()
.unwrap();
fs::remove_dir_all(&download_dir).unwrap();
rest_spec::download_specs(&branch, &download_dir);
File::create(last_downloaded_version)
.expect("failed to create last_downloaded_version file")
.write_all(branch.as_bytes())
.expect("unable to write branch to last_downloaded_version file");
}
// only offer to generate if there are downloaded specs
if download_dir
.read_dir()
.map(|mut r| r.next().is_some())
.unwrap_or(false)
{
let mut generate_code = true;
answer = String::new();
while answer != "y" && answer != "n" {
answer = Input::new()
.default(String::from("y"))
.show_default(false)
.with_prompt(
format!("Generate code from rest specifications {} [Y/n]", branch).as_str(),
)
.interact()
.unwrap()
.to_lowercase();
generate_code = answer == "y";
}
if generate_code
|
}
}
|
{
// delete existing generated files if they exist
if generated_dir.exists() {
fs::remove_dir_all(&generated_dir).unwrap();
}
fs::create_dir_all(&generated_dir).unwrap();
api_generator::generate(&branch, &download_dir, &generated_dir).unwrap();
}
|
conditional_block
|
main.rs
|
extern crate dialoguer;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate quote;
use dialoguer::Input;
use std::path::PathBuf;
use std::{
fs::{self, File},
io::Write,
path::Path,
};
mod api_generator;
mod error;
mod rest_spec;
fn main() {
// This must be run from the src root directory, with cargo run -p api_generator
let download_dir = fs::canonicalize(PathBuf::from("./api_generator/rest_specs")).unwrap();
let generated_dir = fs::canonicalize(PathBuf::from("./elasticsearch/src/generated")).unwrap();
let last_downloaded_version = "./api_generator/last_downloaded_version";
let mut download_specs = false;
let mut answer = String::new();
let default_branch = if Path::new(last_downloaded_version).exists() {
fs::read_to_string(last_downloaded_version).expect("Could not read branch into string")
} else {
String::from("master")
};
let mut branch = default_branch.clone();
while answer != "y" && answer != "n" {
answer = Input::new()
.default(String::from("n"))
.show_default(false)
.with_prompt("Download rest specifications [y/N]")
.interact()
.unwrap()
.to_lowercase();
download_specs = answer == "y";
}
if download_specs {
branch = Input::new()
.default(default_branch.clone())
.show_default(false)
.with_prompt(
format!(
"Branch to download specification from [default {}]",
default_branch
)
.as_str(),
)
.interact()
.unwrap();
fs::remove_dir_all(&download_dir).unwrap();
rest_spec::download_specs(&branch, &download_dir);
|
}
// only offer to generate if there are downloaded specs
if download_dir
.read_dir()
.map(|mut r| r.next().is_some())
.unwrap_or(false)
{
let mut generate_code = true;
answer = String::new();
while answer != "y" && answer != "n" {
answer = Input::new()
.default(String::from("y"))
.show_default(false)
.with_prompt(
format!("Generate code from rest specifications {} [Y/n]", branch).as_str(),
)
.interact()
.unwrap()
.to_lowercase();
generate_code = answer == "y";
}
if generate_code {
// delete existing generated files if they exist
if generated_dir.exists() {
fs::remove_dir_all(&generated_dir).unwrap();
}
fs::create_dir_all(&generated_dir).unwrap();
api_generator::generate(&branch, &download_dir, &generated_dir).unwrap();
}
}
}
|
File::create(last_downloaded_version)
.expect("failed to create last_downloaded_version file")
.write_all(branch.as_bytes())
.expect("unable to write branch to last_downloaded_version file");
|
random_line_split
|
main.rs
|
extern crate dialoguer;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate quote;
use dialoguer::Input;
use std::path::PathBuf;
use std::{
fs::{self, File},
io::Write,
path::Path,
};
mod api_generator;
mod error;
mod rest_spec;
fn main()
|
.interact()
.unwrap()
.to_lowercase();
download_specs = answer == "y";
}
if download_specs {
branch = Input::new()
.default(default_branch.clone())
.show_default(false)
.with_prompt(
format!(
"Branch to download specification from [default {}]",
default_branch
)
.as_str(),
)
.interact()
.unwrap();
fs::remove_dir_all(&download_dir).unwrap();
rest_spec::download_specs(&branch, &download_dir);
File::create(last_downloaded_version)
.expect("failed to create last_downloaded_version file")
.write_all(branch.as_bytes())
.expect("unable to write branch to last_downloaded_version file");
}
// only offer to generate if there are downloaded specs
if download_dir
.read_dir()
.map(|mut r| r.next().is_some())
.unwrap_or(false)
{
let mut generate_code = true;
answer = String::new();
while answer != "y" && answer != "n" {
answer = Input::new()
.default(String::from("y"))
.show_default(false)
.with_prompt(
format!("Generate code from rest specifications {} [Y/n]", branch).as_str(),
)
.interact()
.unwrap()
.to_lowercase();
generate_code = answer == "y";
}
if generate_code {
// delete existing generated files if they exist
if generated_dir.exists() {
fs::remove_dir_all(&generated_dir).unwrap();
}
fs::create_dir_all(&generated_dir).unwrap();
api_generator::generate(&branch, &download_dir, &generated_dir).unwrap();
}
}
}
|
{
// This must be run from the src root directory, with cargo run -p api_generator
let download_dir = fs::canonicalize(PathBuf::from("./api_generator/rest_specs")).unwrap();
let generated_dir = fs::canonicalize(PathBuf::from("./elasticsearch/src/generated")).unwrap();
let last_downloaded_version = "./api_generator/last_downloaded_version";
let mut download_specs = false;
let mut answer = String::new();
let default_branch = if Path::new(last_downloaded_version).exists() {
fs::read_to_string(last_downloaded_version).expect("Could not read branch into string")
} else {
String::from("master")
};
let mut branch = default_branch.clone();
while answer != "y" && answer != "n" {
answer = Input::new()
.default(String::from("n"))
.show_default(false)
.with_prompt("Download rest specifications [y/N]")
|
identifier_body
|
main.rs
|
extern crate dialoguer;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate quote;
use dialoguer::Input;
use std::path::PathBuf;
use std::{
fs::{self, File},
io::Write,
path::Path,
};
mod api_generator;
mod error;
mod rest_spec;
fn
|
() {
// This must be run from the src root directory, with cargo run -p api_generator
let download_dir = fs::canonicalize(PathBuf::from("./api_generator/rest_specs")).unwrap();
let generated_dir = fs::canonicalize(PathBuf::from("./elasticsearch/src/generated")).unwrap();
let last_downloaded_version = "./api_generator/last_downloaded_version";
let mut download_specs = false;
let mut answer = String::new();
let default_branch = if Path::new(last_downloaded_version).exists() {
fs::read_to_string(last_downloaded_version).expect("Could not read branch into string")
} else {
String::from("master")
};
let mut branch = default_branch.clone();
while answer != "y" && answer != "n" {
answer = Input::new()
.default(String::from("n"))
.show_default(false)
.with_prompt("Download rest specifications [y/N]")
.interact()
.unwrap()
.to_lowercase();
download_specs = answer == "y";
}
if download_specs {
branch = Input::new()
.default(default_branch.clone())
.show_default(false)
.with_prompt(
format!(
"Branch to download specification from [default {}]",
default_branch
)
.as_str(),
)
.interact()
.unwrap();
fs::remove_dir_all(&download_dir).unwrap();
rest_spec::download_specs(&branch, &download_dir);
File::create(last_downloaded_version)
.expect("failed to create last_downloaded_version file")
.write_all(branch.as_bytes())
.expect("unable to write branch to last_downloaded_version file");
}
// only offer to generate if there are downloaded specs
if download_dir
.read_dir()
.map(|mut r| r.next().is_some())
.unwrap_or(false)
{
let mut generate_code = true;
answer = String::new();
while answer != "y" && answer != "n" {
answer = Input::new()
.default(String::from("y"))
.show_default(false)
.with_prompt(
format!("Generate code from rest specifications {} [Y/n]", branch).as_str(),
)
.interact()
.unwrap()
.to_lowercase();
generate_code = answer == "y";
}
if generate_code {
// delete existing generated files if they exist
if generated_dir.exists() {
fs::remove_dir_all(&generated_dir).unwrap();
}
fs::create_dir_all(&generated_dir).unwrap();
api_generator::generate(&branch, &download_dir, &generated_dir).unwrap();
}
}
}
|
main
|
identifier_name
|
benches.rs
|
#![feature(test)]
extern crate blake2b;
extern crate blake2_rfc;
extern crate test;
use test::{Bencher, black_box};
const DATA: &'static [u8] = include_bytes!("data");
#[bench]
fn blake2b_32kib(bencher: &mut Bencher) {
bencher.bytes = DATA.len() as u64;
bencher.iter(|| black_box(blake2b::blake2b(64, DATA)));
}
#[bench]
fn blake2_rfc_32kib(bencher: &mut Bencher) {
bencher.bytes = DATA.len() as u64;
bencher.iter(|| black_box(blake2_rfc::blake2b::blake2b(64, &[], DATA)));
}
#[bench]
fn blake2_single_block_buffered(bencher: &mut Bencher) {
bencher.bytes = 128;
bencher.iter(|| black_box(blake2b::blake2b(64, &[0x55; 128])));
}
#[bench]
fn
|
(bencher: &mut Bencher) {
bencher.bytes = 128;
bencher.iter(|| black_box(blake2b::unbuffered::Blake2b::default().finish(&[0x5555555555555555; 16], 128)));
}
#[bench]
fn blake2_single_block_raw(bencher: &mut Bencher) {
bencher.bytes = 128;
bencher.iter(|| black_box(blake2b::compress(&[0x5555555555555555; 16], &mut blake2b::ParameterBlock::new().set_digest_len(64).set_fanout(1).set_max_depth(1).xor_with_iv().0, (128, 0), (!0, 0))));
}
|
blake2_single_block_unbuffered
|
identifier_name
|
benches.rs
|
#![feature(test)]
extern crate blake2b;
extern crate blake2_rfc;
extern crate test;
|
use test::{Bencher, black_box};
const DATA: &'static [u8] = include_bytes!("data");
#[bench]
fn blake2b_32kib(bencher: &mut Bencher) {
bencher.bytes = DATA.len() as u64;
bencher.iter(|| black_box(blake2b::blake2b(64, DATA)));
}
#[bench]
fn blake2_rfc_32kib(bencher: &mut Bencher) {
bencher.bytes = DATA.len() as u64;
bencher.iter(|| black_box(blake2_rfc::blake2b::blake2b(64, &[], DATA)));
}
#[bench]
fn blake2_single_block_buffered(bencher: &mut Bencher) {
bencher.bytes = 128;
bencher.iter(|| black_box(blake2b::blake2b(64, &[0x55; 128])));
}
#[bench]
fn blake2_single_block_unbuffered(bencher: &mut Bencher) {
bencher.bytes = 128;
bencher.iter(|| black_box(blake2b::unbuffered::Blake2b::default().finish(&[0x5555555555555555; 16], 128)));
}
#[bench]
fn blake2_single_block_raw(bencher: &mut Bencher) {
bencher.bytes = 128;
bencher.iter(|| black_box(blake2b::compress(&[0x5555555555555555; 16], &mut blake2b::ParameterBlock::new().set_digest_len(64).set_fanout(1).set_max_depth(1).xor_with_iv().0, (128, 0), (!0, 0))));
}
|
random_line_split
|
|
benches.rs
|
#![feature(test)]
extern crate blake2b;
extern crate blake2_rfc;
extern crate test;
use test::{Bencher, black_box};
const DATA: &'static [u8] = include_bytes!("data");
#[bench]
fn blake2b_32kib(bencher: &mut Bencher) {
bencher.bytes = DATA.len() as u64;
bencher.iter(|| black_box(blake2b::blake2b(64, DATA)));
}
#[bench]
fn blake2_rfc_32kib(bencher: &mut Bencher) {
bencher.bytes = DATA.len() as u64;
bencher.iter(|| black_box(blake2_rfc::blake2b::blake2b(64, &[], DATA)));
}
#[bench]
fn blake2_single_block_buffered(bencher: &mut Bencher) {
bencher.bytes = 128;
bencher.iter(|| black_box(blake2b::blake2b(64, &[0x55; 128])));
}
#[bench]
fn blake2_single_block_unbuffered(bencher: &mut Bencher)
|
#[bench]
fn blake2_single_block_raw(bencher: &mut Bencher) {
bencher.bytes = 128;
bencher.iter(|| black_box(blake2b::compress(&[0x5555555555555555; 16], &mut blake2b::ParameterBlock::new().set_digest_len(64).set_fanout(1).set_max_depth(1).xor_with_iv().0, (128, 0), (!0, 0))));
}
|
{
bencher.bytes = 128;
bencher.iter(|| black_box(blake2b::unbuffered::Blake2b::default().finish(&[0x5555555555555555; 16], 128)));
}
|
identifier_body
|
bitvec.rs
|
//! Defines basic operations defined under FixedSizeBitVectors theory in SMTLIB2.
use std::fmt;
use crate::backends::backend::SMTNode;
#[macro_export]
macro_rules! bv_const {
($solver: ident, $i: expr, $n: expr) => { $solver.new_const(bitvec::OpCodes::Const($i, $n)) }
}
#[derive(Clone, Debug)]
pub enum OpCodes {
|
BvNeg,
BvAdd,
BvMul,
BvUDiv,
BvURem,
BvShl,
BvLShr,
BvULt,
BvNand,
BvNor,
BvXor,
BvXnor,
BvComp,
BvSub,
BvSDiv,
BvSRem,
BvSMod,
BvAShr,
// parameterized functions
Repeat(u64),
ZeroExtend(u64),
SignExtend(u64),
RotateLeft(u64),
RotateRight(u64),
// logical functions
BvULe,
BvUGt,
BvUGe,
BvSLt,
BvSLe,
BvSGt,
BvSGe,
Const(u64, usize),
FreeVar(String),
}
impl fmt::Display for OpCodes {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = match *self {
OpCodes::Concat => "concat".to_owned(),
OpCodes::Extract(i, j) => format!("(_ extract {} {})", i, j),
OpCodes::BvNot => "bvnot".to_owned(),
OpCodes::BvAnd => "bvand".to_owned(),
OpCodes::BvOr => "bvor".to_owned(),
OpCodes::BvNeg => "bvneg".to_owned(),
OpCodes::BvAdd => "bvadd".to_owned(),
OpCodes::BvMul => "bvmul".to_owned(),
OpCodes::BvUDiv => "bvudiv".to_owned(),
OpCodes::BvURem => "bvurem".to_owned(),
OpCodes::BvShl => "bvshl".to_owned(),
OpCodes::BvLShr => "bvlshr".to_owned(),
OpCodes::BvULt => "bvult".to_owned(),
OpCodes::BvNand => "bvnand".to_owned(),
OpCodes::BvNor => "bvnor".to_owned(),
OpCodes::BvXor => "bvxor".to_owned(),
OpCodes::BvXnor => "bvxnor".to_owned(),
OpCodes::BvComp => "bvcomp".to_owned(),
OpCodes::BvSub => "bvsub".to_owned(),
OpCodes::BvSDiv => "bvsdiv".to_owned(),
OpCodes::BvSRem => "bvsrem".to_owned(),
OpCodes::BvSMod => "bvsmod".to_owned(),
OpCodes::BvAShr => "bvashr".to_owned(),
OpCodes::Repeat(i) => format!("(_ repeat {})", i),
OpCodes::ZeroExtend(i) => format!("(_ zero_extend {})", i),
OpCodes::SignExtend(i) => format!("(_ sign_extend {})", i),
OpCodes::RotateLeft(i) => format!("(_ rotate_left {})", i),
OpCodes::RotateRight(i) => format!("(_ rotate_right {})", i),
OpCodes::BvULe => "bvule".to_owned(),
OpCodes::BvUGt => "bvugt".to_owned(),
OpCodes::BvUGe => "bvuge".to_owned(),
OpCodes::BvSLt => "bvslt".to_owned(),
OpCodes::BvSLe => "bvsle".to_owned(),
OpCodes::BvSGt => "bvsgt".to_owned(),
OpCodes::BvSGe => "bvsge".to_owned(),
OpCodes::Const(val, n) => format!("(_ bv{} {})", val, n),
OpCodes::FreeVar(ref name) => format!("{}", name),
};
write!(f, "{}", s)
}
}
impl_smt_node!(OpCodes, define vars [OpCodes::FreeVar(_)], define consts [OpCodes::Const(_, _)]);
#[derive(Clone, Debug)]
pub enum Sorts {
BitVector(usize),
}
impl fmt::Display for Sorts {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = match *self {
Sorts::BitVector(ref n) => format!("(_ BitVec {})", n),
};
write!(f, "{}", s)
}
}
|
Concat,
Extract(u64, u64),
BvNot,
BvAnd,
BvOr,
|
random_line_split
|
bitvec.rs
|
//! Defines basic operations defined under FixedSizeBitVectors theory in SMTLIB2.
use std::fmt;
use crate::backends::backend::SMTNode;
#[macro_export]
macro_rules! bv_const {
($solver: ident, $i: expr, $n: expr) => { $solver.new_const(bitvec::OpCodes::Const($i, $n)) }
}
#[derive(Clone, Debug)]
pub enum OpCodes {
Concat,
Extract(u64, u64),
BvNot,
BvAnd,
BvOr,
BvNeg,
BvAdd,
BvMul,
BvUDiv,
BvURem,
BvShl,
BvLShr,
BvULt,
BvNand,
BvNor,
BvXor,
BvXnor,
BvComp,
BvSub,
BvSDiv,
BvSRem,
BvSMod,
BvAShr,
// parameterized functions
Repeat(u64),
ZeroExtend(u64),
SignExtend(u64),
RotateLeft(u64),
RotateRight(u64),
// logical functions
BvULe,
BvUGt,
BvUGe,
BvSLt,
BvSLe,
BvSGt,
BvSGe,
Const(u64, usize),
FreeVar(String),
}
impl fmt::Display for OpCodes {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = match *self {
OpCodes::Concat => "concat".to_owned(),
OpCodes::Extract(i, j) => format!("(_ extract {} {})", i, j),
OpCodes::BvNot => "bvnot".to_owned(),
OpCodes::BvAnd => "bvand".to_owned(),
OpCodes::BvOr => "bvor".to_owned(),
OpCodes::BvNeg => "bvneg".to_owned(),
OpCodes::BvAdd => "bvadd".to_owned(),
OpCodes::BvMul => "bvmul".to_owned(),
OpCodes::BvUDiv => "bvudiv".to_owned(),
OpCodes::BvURem => "bvurem".to_owned(),
OpCodes::BvShl => "bvshl".to_owned(),
OpCodes::BvLShr => "bvlshr".to_owned(),
OpCodes::BvULt => "bvult".to_owned(),
OpCodes::BvNand => "bvnand".to_owned(),
OpCodes::BvNor => "bvnor".to_owned(),
OpCodes::BvXor => "bvxor".to_owned(),
OpCodes::BvXnor => "bvxnor".to_owned(),
OpCodes::BvComp => "bvcomp".to_owned(),
OpCodes::BvSub => "bvsub".to_owned(),
OpCodes::BvSDiv => "bvsdiv".to_owned(),
OpCodes::BvSRem => "bvsrem".to_owned(),
OpCodes::BvSMod => "bvsmod".to_owned(),
OpCodes::BvAShr => "bvashr".to_owned(),
OpCodes::Repeat(i) => format!("(_ repeat {})", i),
OpCodes::ZeroExtend(i) => format!("(_ zero_extend {})", i),
OpCodes::SignExtend(i) => format!("(_ sign_extend {})", i),
OpCodes::RotateLeft(i) => format!("(_ rotate_left {})", i),
OpCodes::RotateRight(i) => format!("(_ rotate_right {})", i),
OpCodes::BvULe => "bvule".to_owned(),
OpCodes::BvUGt => "bvugt".to_owned(),
OpCodes::BvUGe => "bvuge".to_owned(),
OpCodes::BvSLt => "bvslt".to_owned(),
OpCodes::BvSLe => "bvsle".to_owned(),
OpCodes::BvSGt => "bvsgt".to_owned(),
OpCodes::BvSGe => "bvsge".to_owned(),
OpCodes::Const(val, n) => format!("(_ bv{} {})", val, n),
OpCodes::FreeVar(ref name) => format!("{}", name),
};
write!(f, "{}", s)
}
}
impl_smt_node!(OpCodes, define vars [OpCodes::FreeVar(_)], define consts [OpCodes::Const(_, _)]);
#[derive(Clone, Debug)]
pub enum
|
{
BitVector(usize),
}
impl fmt::Display for Sorts {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = match *self {
Sorts::BitVector(ref n) => format!("(_ BitVec {})", n),
};
write!(f, "{}", s)
}
}
|
Sorts
|
identifier_name
|
inner.rs
|
use crate::types::*;
use ndarray::*;
/// Inner Product
///
/// Different from the `Dot` trait, this takes the complex conjugate of `self` elements
///
pub trait InnerProduct {
type Elem: Scalar;
/// Inner product `(self.conjugate, rhs)`
fn inner<S>(&self, rhs: &ArrayBase<S, Ix1>) -> Self::Elem
where
S: Data<Elem = Self::Elem>;
}
impl<A, S> InnerProduct for ArrayBase<S, Ix1>
where
A: Scalar,
S: Data<Elem = A>,
{
type Elem = A;
fn
|
<St: Data<Elem = A>>(&self, rhs: &ArrayBase<St, Ix1>) -> A {
assert_eq!(self.len(), rhs.len());
Zip::from(self)
.and(rhs)
.fold_while(A::zero(), |acc, s, r| {
FoldWhile::Continue(acc + s.conj() * *r)
})
.into_inner()
}
}
|
inner
|
identifier_name
|
inner.rs
|
use crate::types::*;
use ndarray::*;
/// Inner Product
///
/// Different from the `Dot` trait, this takes the complex conjugate of `self` elements
///
pub trait InnerProduct {
type Elem: Scalar;
/// Inner product `(self.conjugate, rhs)`
fn inner<S>(&self, rhs: &ArrayBase<S, Ix1>) -> Self::Elem
where
S: Data<Elem = Self::Elem>;
}
impl<A, S> InnerProduct for ArrayBase<S, Ix1>
where
A: Scalar,
S: Data<Elem = A>,
{
type Elem = A;
fn inner<St: Data<Elem = A>>(&self, rhs: &ArrayBase<St, Ix1>) -> A
|
}
|
{
assert_eq!(self.len(), rhs.len());
Zip::from(self)
.and(rhs)
.fold_while(A::zero(), |acc, s, r| {
FoldWhile::Continue(acc + s.conj() * *r)
})
.into_inner()
}
|
identifier_body
|
inner.rs
|
use crate::types::*;
use ndarray::*;
/// Inner Product
///
/// Different from the `Dot` trait, this takes the complex conjugate of `self` elements
///
pub trait InnerProduct {
type Elem: Scalar;
/// Inner product `(self.conjugate, rhs)`
fn inner<S>(&self, rhs: &ArrayBase<S, Ix1>) -> Self::Elem
where
S: Data<Elem = Self::Elem>;
}
|
S: Data<Elem = A>,
{
type Elem = A;
fn inner<St: Data<Elem = A>>(&self, rhs: &ArrayBase<St, Ix1>) -> A {
assert_eq!(self.len(), rhs.len());
Zip::from(self)
.and(rhs)
.fold_while(A::zero(), |acc, s, r| {
FoldWhile::Continue(acc + s.conj() * *r)
})
.into_inner()
}
}
|
impl<A, S> InnerProduct for ArrayBase<S, Ix1>
where
A: Scalar,
|
random_line_split
|
mod.rs
|
//! A module for grouping things that handle serial data
pub mod command;
pub use self::command::*;
use std::io::Write;
use csv::Writer;
use setup;
use protocol::Message;
use config::DataRecordingConfig;
/// A struct to handle serial data
pub struct SerialHandler {
data_records: Writer<Box<Write>>,
}
impl SerialHandler {
pub fn
|
(config: &DataRecordingConfig) -> Self {
SerialHandler { data_records: setup::data_recording(config) }
}
pub fn handle(&mut self, message: Message) {
match message {
Message::Command(command) => command::CommandHandler::handle(command),
Message::Data(data) => {
if let Err(error) = self.data_records.serialize(data) {
error!(
"Error serializing data to write to records: \"{:?}\"",
error
);
} else if let Err(error) = self.data_records.flush() {
error!("Error flushing csv data record writer: \"{:?}\"", error);
}
}
}
}
}
|
new
|
identifier_name
|
mod.rs
|
//! A module for grouping things that handle serial data
pub mod command;
pub use self::command::*;
use std::io::Write;
use csv::Writer;
use setup;
use protocol::Message;
use config::DataRecordingConfig;
/// A struct to handle serial data
pub struct SerialHandler {
data_records: Writer<Box<Write>>,
}
impl SerialHandler {
pub fn new(config: &DataRecordingConfig) -> Self {
SerialHandler { data_records: setup::data_recording(config) }
}
pub fn handle(&mut self, message: Message)
|
}
|
{
match message {
Message::Command(command) => command::CommandHandler::handle(command),
Message::Data(data) => {
if let Err(error) = self.data_records.serialize(data) {
error!(
"Error serializing data to write to records: \"{:?}\"",
error
);
} else if let Err(error) = self.data_records.flush() {
error!("Error flushing csv data record writer: \"{:?}\"", error);
}
}
}
}
|
identifier_body
|
mod.rs
|
//! A module for grouping things that handle serial data
pub mod command;
pub use self::command::*;
use std::io::Write;
use csv::Writer;
use setup;
use protocol::Message;
use config::DataRecordingConfig;
/// A struct to handle serial data
pub struct SerialHandler {
data_records: Writer<Box<Write>>,
}
impl SerialHandler {
pub fn new(config: &DataRecordingConfig) -> Self {
SerialHandler { data_records: setup::data_recording(config) }
}
pub fn handle(&mut self, message: Message) {
match message {
Message::Command(command) => command::CommandHandler::handle(command),
Message::Data(data) =>
|
}
}
}
|
{
if let Err(error) = self.data_records.serialize(data) {
error!(
"Error serializing data to write to records: \"{:?}\"",
error
);
} else if let Err(error) = self.data_records.flush() {
error!("Error flushing csv data record writer: \"{:?}\"", error);
}
}
|
conditional_block
|
mod.rs
|
//! A module for grouping things that handle serial data
pub mod command;
pub use self::command::*;
|
use setup;
use protocol::Message;
use config::DataRecordingConfig;
/// A struct to handle serial data
pub struct SerialHandler {
data_records: Writer<Box<Write>>,
}
impl SerialHandler {
pub fn new(config: &DataRecordingConfig) -> Self {
SerialHandler { data_records: setup::data_recording(config) }
}
pub fn handle(&mut self, message: Message) {
match message {
Message::Command(command) => command::CommandHandler::handle(command),
Message::Data(data) => {
if let Err(error) = self.data_records.serialize(data) {
error!(
"Error serializing data to write to records: \"{:?}\"",
error
);
} else if let Err(error) = self.data_records.flush() {
error!("Error flushing csv data record writer: \"{:?}\"", error);
}
}
}
}
}
|
use std::io::Write;
use csv::Writer;
|
random_line_split
|
application_window.rs
|
// This file was generated by gir (b7f5189) from gir-files (71d73f0)
// DO NOT EDIT
use Application;
use Bin;
use Container;
#[cfg(feature = "v3_20")]
use ShortcutsWindow;
use Widget;
use Window;
use ffi;
use gio;
use gio_ffi;
use glib::object::Downcast;
use glib::translate::*;
glib_wrapper! {
pub struct ApplicationWindow(Object<ffi::GtkApplicationWindow>): [
Window,
Bin,
Container,
Widget,
gio::ActionGroup => gio_ffi::GActionGroup,
gio::ActionMap => gio_ffi::GActionMap,
];
match fn {
get_type => || ffi::gtk_application_window_get_type(),
}
}
impl ApplicationWindow {
pub fn new(application: &Application) -> ApplicationWindow {
skip_assert_initialized!();
unsafe {
Widget::from_glib_none(ffi::gtk_application_window_new(application.to_glib_none().0)).downcast_unchecked()
}
}
#[cfg(feature = "v3_20")]
pub fn get_help_overlay(&self) -> Option<ShortcutsWindow> {
unsafe {
from_glib_none(ffi::gtk_application_window_get_help_overlay(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_6")]
pub fn get_id(&self) -> u32 {
unsafe {
ffi::gtk_application_window_get_id(self.to_glib_none().0)
}
}
pub fn get_show_menubar(&self) -> bool {
unsafe {
from_glib(ffi::gtk_application_window_get_show_menubar(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_20")]
pub fn set_help_overlay(&self, help_overlay: Option<&ShortcutsWindow>) {
unsafe {
ffi::gtk_application_window_set_help_overlay(self.to_glib_none().0, help_overlay.to_glib_none().0);
}
}
pub fn set_show_menubar(&self, show_menubar: bool)
|
}
|
{
unsafe {
ffi::gtk_application_window_set_show_menubar(self.to_glib_none().0, show_menubar.to_glib());
}
}
|
identifier_body
|
application_window.rs
|
// This file was generated by gir (b7f5189) from gir-files (71d73f0)
// DO NOT EDIT
use Application;
use Bin;
use Container;
#[cfg(feature = "v3_20")]
use ShortcutsWindow;
use Widget;
use Window;
use ffi;
use gio;
use gio_ffi;
use glib::object::Downcast;
use glib::translate::*;
glib_wrapper! {
pub struct ApplicationWindow(Object<ffi::GtkApplicationWindow>): [
Window,
Bin,
Container,
Widget,
gio::ActionGroup => gio_ffi::GActionGroup,
gio::ActionMap => gio_ffi::GActionMap,
];
match fn {
get_type => || ffi::gtk_application_window_get_type(),
}
}
impl ApplicationWindow {
pub fn new(application: &Application) -> ApplicationWindow {
skip_assert_initialized!();
unsafe {
Widget::from_glib_none(ffi::gtk_application_window_new(application.to_glib_none().0)).downcast_unchecked()
}
}
#[cfg(feature = "v3_20")]
pub fn
|
(&self) -> Option<ShortcutsWindow> {
unsafe {
from_glib_none(ffi::gtk_application_window_get_help_overlay(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_6")]
pub fn get_id(&self) -> u32 {
unsafe {
ffi::gtk_application_window_get_id(self.to_glib_none().0)
}
}
pub fn get_show_menubar(&self) -> bool {
unsafe {
from_glib(ffi::gtk_application_window_get_show_menubar(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_20")]
pub fn set_help_overlay(&self, help_overlay: Option<&ShortcutsWindow>) {
unsafe {
ffi::gtk_application_window_set_help_overlay(self.to_glib_none().0, help_overlay.to_glib_none().0);
}
}
pub fn set_show_menubar(&self, show_menubar: bool) {
unsafe {
ffi::gtk_application_window_set_show_menubar(self.to_glib_none().0, show_menubar.to_glib());
}
}
}
|
get_help_overlay
|
identifier_name
|
application_window.rs
|
// This file was generated by gir (b7f5189) from gir-files (71d73f0)
// DO NOT EDIT
use Application;
use Bin;
use Container;
#[cfg(feature = "v3_20")]
use ShortcutsWindow;
use Widget;
use Window;
use ffi;
use gio;
use gio_ffi;
use glib::object::Downcast;
use glib::translate::*;
glib_wrapper! {
|
Widget,
gio::ActionGroup => gio_ffi::GActionGroup,
gio::ActionMap => gio_ffi::GActionMap,
];
match fn {
get_type => || ffi::gtk_application_window_get_type(),
}
}
impl ApplicationWindow {
pub fn new(application: &Application) -> ApplicationWindow {
skip_assert_initialized!();
unsafe {
Widget::from_glib_none(ffi::gtk_application_window_new(application.to_glib_none().0)).downcast_unchecked()
}
}
#[cfg(feature = "v3_20")]
pub fn get_help_overlay(&self) -> Option<ShortcutsWindow> {
unsafe {
from_glib_none(ffi::gtk_application_window_get_help_overlay(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_6")]
pub fn get_id(&self) -> u32 {
unsafe {
ffi::gtk_application_window_get_id(self.to_glib_none().0)
}
}
pub fn get_show_menubar(&self) -> bool {
unsafe {
from_glib(ffi::gtk_application_window_get_show_menubar(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_20")]
pub fn set_help_overlay(&self, help_overlay: Option<&ShortcutsWindow>) {
unsafe {
ffi::gtk_application_window_set_help_overlay(self.to_glib_none().0, help_overlay.to_glib_none().0);
}
}
pub fn set_show_menubar(&self, show_menubar: bool) {
unsafe {
ffi::gtk_application_window_set_show_menubar(self.to_glib_none().0, show_menubar.to_glib());
}
}
}
|
pub struct ApplicationWindow(Object<ffi::GtkApplicationWindow>): [
Window,
Bin,
Container,
|
random_line_split
|
traversal.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Traversals over the DOM and flow trees, running the layout computations.
use construct::FlowConstructor;
use context::LayoutContext;
use display_list_builder::DisplayListBuildState;
use flow::{self, FlowFlags, Flow, ImmutableFlowUtils};
use script_layout_interface::wrapper_traits::{LayoutNode, ThreadSafeLayoutNode};
use servo_config::opts;
use style::context::{SharedStyleContext, StyleContext};
use style::data::ElementData;
use style::dom::{NodeInfo, TElement, TNode};
use style::selector_parser::RestyleDamage;
use style::servo::restyle_damage::ServoRestyleDamage;
use style::traversal::{DomTraversal, recalc_style_at};
use style::traversal::PerLevelTraversalData;
use wrapper::{GetRawData, LayoutNodeLayoutData};
use wrapper::ThreadSafeLayoutNodeHelpers;
pub struct RecalcStyleAndConstructFlows<'a> {
context: LayoutContext<'a>,
}
impl<'a> RecalcStyleAndConstructFlows<'a> {
pub fn layout_context(&self) -> &LayoutContext<'a> {
&self.context
}
}
impl<'a> RecalcStyleAndConstructFlows<'a> {
/// Creates a traversal context, taking ownership of the shared layout context.
pub fn new(context: LayoutContext<'a>) -> Self {
RecalcStyleAndConstructFlows {
context: context,
}
}
/// Consumes this traversal context, returning ownership of the shared layout
/// context to the caller.
pub fn destroy(self) -> LayoutContext<'a> {
self.context
}
}
#[allow(unsafe_code)]
impl<'a, E> DomTraversal<E> for RecalcStyleAndConstructFlows<'a>
where E: TElement,
E::ConcreteNode: LayoutNode,
E::FontMetricsProvider: Send,
{
fn
|
<F>(&self, traversal_data: &PerLevelTraversalData,
context: &mut StyleContext<E>, node: E::ConcreteNode,
note_child: F)
where F: FnMut(E::ConcreteNode)
{
// FIXME(pcwalton): Stop allocating here. Ideally this should just be
// done by the HTML parser.
unsafe { node.initialize_data() };
if !node.is_text_node() {
let el = node.as_element().unwrap();
let mut data = el.mutate_data().unwrap();
recalc_style_at(self, traversal_data, context, el, &mut data, note_child);
}
}
fn process_postorder(&self, _style_context: &mut StyleContext<E>, node: E::ConcreteNode) {
construct_flows_at(&self.context, node);
}
fn text_node_needs_traversal(node: E::ConcreteNode, parent_data: &ElementData) -> bool {
// Text nodes never need styling. However, there are two cases they may need
// flow construction:
// (1) The child doesn't yet have layout data (the preorder traversal initializes it).
// (2) The parent element has restyle damage (so the text flow also needs fixup).
node.get_raw_data().is_none() ||
parent_data.damage != RestyleDamage::empty()
}
fn shared_context(&self) -> &SharedStyleContext {
&self.context.style_context
}
}
/// A top-down traversal.
pub trait PreorderFlowTraversal {
/// The operation to perform. Return true to continue or false to stop.
fn process(&self, flow: &mut Flow);
/// Returns true if this node should be processed and false if neither this node nor its
/// descendants should be processed.
fn should_process_subtree(&self, _flow: &mut Flow) -> bool {
true
}
/// Returns true if this node must be processed in-order. If this returns false,
/// we skip the operation for this node, but continue processing the descendants.
/// This is called *after* parent nodes are visited.
fn should_process(&self, _flow: &mut Flow) -> bool {
true
}
/// Traverses the tree in preorder.
fn traverse(&self, flow: &mut Flow) {
if !self.should_process_subtree(flow) {
return;
}
if self.should_process(flow) {
self.process(flow);
}
for kid in flow::child_iter_mut(flow) {
self.traverse(kid);
}
}
/// Traverse the Absolute flow tree in preorder.
///
/// Traverse all your direct absolute descendants, who will then traverse
/// their direct absolute descendants.
///
/// Return true if the traversal is to continue or false to stop.
fn traverse_absolute_flows(&self, flow: &mut Flow) {
if self.should_process(flow) {
self.process(flow);
}
for descendant_link in flow::mut_base(flow).abs_descendants.iter() {
self.traverse_absolute_flows(descendant_link)
}
}
}
/// A bottom-up traversal, with an optional in-order pass.
pub trait PostorderFlowTraversal {
/// The operation to perform. Return true to continue or false to stop.
fn process(&self, flow: &mut Flow);
/// Returns false if this node must be processed in-order. If this returns false, we skip the
/// operation for this node, but continue processing the ancestors. This is called *after*
/// child nodes are visited.
fn should_process(&self, _flow: &mut Flow) -> bool {
true
}
/// Traverses the tree in postorder.
fn traverse(&self, flow: &mut Flow) {
for kid in flow::child_iter_mut(flow) {
self.traverse(kid);
}
if self.should_process(flow) {
self.process(flow);
}
}
}
/// An in-order (sequential only) traversal.
pub trait InorderFlowTraversal {
/// The operation to perform. Returns the level of the tree we're at.
fn process(&mut self, flow: &mut Flow, level: u32);
/// Returns true if this node should be processed and false if neither this node nor its
/// descendants should be processed.
fn should_process_subtree(&mut self, _flow: &mut Flow) -> bool {
true
}
/// Traverses the tree in-order.
fn traverse(&mut self, flow: &mut Flow, level: u32) {
if !self.should_process_subtree(flow) {
return;
}
self.process(flow, level);
for kid in flow::child_iter_mut(flow) {
self.traverse(kid, level + 1);
}
}
}
/// A bottom-up, parallelizable traversal.
pub trait PostorderNodeMutTraversal<ConcreteThreadSafeLayoutNode: ThreadSafeLayoutNode> {
/// The operation to perform. Return true to continue or false to stop.
fn process(&mut self, node: &ConcreteThreadSafeLayoutNode);
}
/// The flow construction traversal, which builds flows for styled nodes.
#[inline]
#[allow(unsafe_code)]
fn construct_flows_at<N>(context: &LayoutContext, node: N)
where N: LayoutNode,
{
debug!("construct_flows_at: {:?}", node);
// Construct flows for this node.
{
let tnode = node.to_threadsafe();
// Always reconstruct if incremental layout is turned off.
let nonincremental_layout = opts::get().nonincremental_layout;
if nonincremental_layout || tnode.restyle_damage() != RestyleDamage::empty() ||
node.as_element().map_or(false, |el| el.has_dirty_descendants()) {
let mut flow_constructor = FlowConstructor::new(context);
if nonincremental_layout || !flow_constructor.repair_if_possible(&tnode) {
flow_constructor.process(&tnode);
debug!("Constructed flow for {:?}: {:x}",
tnode,
tnode.flow_debug_id());
}
}
tnode.mutate_layout_data().unwrap().flags.insert(::data::LayoutDataFlags::HAS_BEEN_TRAVERSED);
}
if let Some(el) = node.as_element() {
unsafe { el.unset_dirty_descendants(); }
}
}
/// The bubble-inline-sizes traversal, the first part of layout computation. This computes
/// preferred and intrinsic inline-sizes and bubbles them up the tree.
pub struct BubbleISizes<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PostorderFlowTraversal for BubbleISizes<'a> {
#[inline]
fn process(&self, flow: &mut Flow) {
flow.bubble_inline_sizes();
flow::mut_base(flow).restyle_damage.remove(ServoRestyleDamage::BUBBLE_ISIZES);
}
#[inline]
fn should_process(&self, flow: &mut Flow) -> bool {
flow::base(flow).restyle_damage.contains(ServoRestyleDamage::BUBBLE_ISIZES)
}
}
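// Editor's note: an illustrative, self-contained sketch (not part of the original
// source) of the bitflag pattern used by the restyle-damage checks above: a pass runs
// only when its flag is set, and clears that flag once the work is done. The
// `DAMAGE_*` constants here are hypothetical stand-ins for the real damage bits.
#[cfg(test)]
mod damage_flag_sketch {
    const DAMAGE_BUBBLE_ISIZES: u8 = 0b0001;
    const DAMAGE_REFLOW: u8 = 0b0010;

    #[test]
    fn flag_is_cleared_after_processing() {
        let mut damage = DAMAGE_BUBBLE_ISIZES | DAMAGE_REFLOW;
        // should_process: run only when the relevant bit is present.
        assert!(damage & DAMAGE_BUBBLE_ISIZES != 0);
        // process: do the work, then remove the bit so the pass is not repeated.
        damage &= !DAMAGE_BUBBLE_ISIZES;
        assert!(damage & DAMAGE_BUBBLE_ISIZES == 0);
        // Unrelated damage is left untouched for later traversals.
        assert!(damage & DAMAGE_REFLOW != 0);
    }
}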
/// The assign-inline-sizes traversal. In Gecko this corresponds to `Reflow`.
#[derive(Clone, Copy)]
pub struct AssignISizes<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PreorderFlowTraversal for AssignISizes<'a> {
#[inline]
fn process(&self, flow: &mut Flow) {
flow.assign_inline_sizes(self.layout_context);
}
#[inline]
fn should_process(&self, flow: &mut Flow) -> bool {
flow::base(flow).restyle_damage.intersects(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW)
}
}
/// The assign-block-sizes-and-store-overflow traversal, the last (and most expensive) part of
/// layout computation. Determines the final block-sizes for all layout objects and computes
/// positions. In Gecko this corresponds to `Reflow`.
#[derive(Clone, Copy)]
pub struct AssignBSizes<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PostorderFlowTraversal for AssignBSizes<'a> {
#[inline]
fn process(&self, flow: &mut Flow) {
// Can't do anything with anything that floats might flow through until we reach their
// inorder parent.
//
// NB: We must return without resetting the restyle bits for these, as we haven't actually
// reflowed anything!
if flow.floats_might_flow_through() {
return
}
flow.assign_block_size(self.layout_context);
}
#[inline]
fn should_process(&self, flow: &mut Flow) -> bool {
let base = flow::base(flow);
base.restyle_damage.intersects(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW) &&
// The fragmentation container is responsible for calling Flow::fragment recursively
!base.flags.contains(FlowFlags::CAN_BE_FRAGMENTED)
}
}
pub struct ComputeStackingRelativePositions<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PreorderFlowTraversal for ComputeStackingRelativePositions<'a> {
#[inline]
fn should_process_subtree(&self, flow: &mut Flow) -> bool {
flow::base(flow).restyle_damage.contains(ServoRestyleDamage::REPOSITION)
}
#[inline]
fn process(&self, flow: &mut Flow) {
flow.compute_stacking_relative_position(self.layout_context);
flow::mut_base(flow).restyle_damage.remove(ServoRestyleDamage::REPOSITION)
}
}
pub struct BuildDisplayList<'a> {
pub state: DisplayListBuildState<'a>,
}
impl<'a> BuildDisplayList<'a> {
#[inline]
pub fn traverse(&mut self, flow: &mut Flow) {
let parent_stacking_context_id = self.state.current_stacking_context_id;
self.state.current_stacking_context_id = flow::base(flow).stacking_context_id;
let parent_clipping_and_scrolling = self.state.current_clipping_and_scrolling;
self.state.current_clipping_and_scrolling = flow.clipping_and_scrolling();
flow.build_display_list(&mut self.state);
flow::mut_base(flow).restyle_damage.remove(ServoRestyleDamage::REPAINT);
for kid in flow::child_iter_mut(flow) {
self.traverse(kid);
}
self.state.current_stacking_context_id = parent_stacking_context_id;
self.state.current_clipping_and_scrolling = parent_clipping_and_scrolling;
}
}
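// Editor's note: BuildDisplayList::traverse above uses a save/recurse/restore pattern
// for per-subtree state (stacking context id, clip/scroll info). The self-contained
// sketch below shows the same pattern on a hypothetical `Printer` whose indentation is
// saved before descending into a subtree and restored on the way back out.
#[cfg(test)]
mod save_restore_sketch {
    struct Node {
        name: &'static str,
        children: Vec<Node>,
    }

    struct Printer {
        indent: usize,
        lines: Vec<String>,
    }

    impl Printer {
        fn traverse(&mut self, node: &Node) {
            let saved_indent = self.indent;      // save the parent's state
            self.indent += 2;                    // state that applies to this subtree
            self.lines.push(format!("{}{}", " ".repeat(self.indent), node.name));
            for kid in &node.children {
                self.traverse(kid);
            }
            self.indent = saved_indent;          // restore before returning to the caller
        }
    }

    #[test]
    fn indentation_is_restored() {
        let tree = Node {
            name: "root",
            children: vec![Node { name: "kid", children: vec![] }],
        };
        let mut printer = Printer { indent: 0, lines: vec![] };
        printer.traverse(&tree);
        assert_eq!(printer.indent, 0); // back to the caller's state
        assert_eq!(printer.lines, vec!["  root".to_string(), "    kid".to_string()]);
    }
}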
|
process_preorder
|
identifier_name
|
traversal.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Traversals over the DOM and flow trees, running the layout computations.
use construct::FlowConstructor;
use context::LayoutContext;
use display_list_builder::DisplayListBuildState;
use flow::{self, FlowFlags, Flow, ImmutableFlowUtils};
use script_layout_interface::wrapper_traits::{LayoutNode, ThreadSafeLayoutNode};
use servo_config::opts;
use style::context::{SharedStyleContext, StyleContext};
use style::data::ElementData;
use style::dom::{NodeInfo, TElement, TNode};
use style::selector_parser::RestyleDamage;
use style::servo::restyle_damage::ServoRestyleDamage;
use style::traversal::{DomTraversal, recalc_style_at};
use style::traversal::PerLevelTraversalData;
use wrapper::{GetRawData, LayoutNodeLayoutData};
use wrapper::ThreadSafeLayoutNodeHelpers;
pub struct RecalcStyleAndConstructFlows<'a> {
context: LayoutContext<'a>,
}
impl<'a> RecalcStyleAndConstructFlows<'a> {
pub fn layout_context(&self) -> &LayoutContext<'a> {
&self.context
}
}
impl<'a> RecalcStyleAndConstructFlows<'a> {
/// Creates a traversal context, taking ownership of the shared layout context.
pub fn new(context: LayoutContext<'a>) -> Self {
RecalcStyleAndConstructFlows {
context: context,
}
}
/// Consumes this traversal context, returning ownership of the shared layout
/// context to the caller.
pub fn destroy(self) -> LayoutContext<'a> {
self.context
}
}
#[allow(unsafe_code)]
impl<'a, E> DomTraversal<E> for RecalcStyleAndConstructFlows<'a>
where E: TElement,
E::ConcreteNode: LayoutNode,
E::FontMetricsProvider: Send,
{
fn process_preorder<F>(&self, traversal_data: &PerLevelTraversalData,
context: &mut StyleContext<E>, node: E::ConcreteNode,
note_child: F)
where F: FnMut(E::ConcreteNode)
{
// FIXME(pcwalton): Stop allocating here. Ideally this should just be
// done by the HTML parser.
unsafe { node.initialize_data() };
if !node.is_text_node() {
let el = node.as_element().unwrap();
let mut data = el.mutate_data().unwrap();
recalc_style_at(self, traversal_data, context, el, &mut data, note_child);
}
}
fn process_postorder(&self, _style_context: &mut StyleContext<E>, node: E::ConcreteNode) {
construct_flows_at(&self.context, node);
}
fn text_node_needs_traversal(node: E::ConcreteNode, parent_data: &ElementData) -> bool {
// Text nodes never need styling. However, there are two cases they may need
// flow construction:
// (1) The child doesn't yet have layout data (preorder traversal initializes it).
// (2) The parent element has restyle damage (so the text flow also needs fixup).
node.get_raw_data().is_none() ||
parent_data.damage != RestyleDamage::empty()
}
fn shared_context(&self) -> &SharedStyleContext {
&self.context.style_context
}
}
/// A top-down traversal.
pub trait PreorderFlowTraversal {
/// The operation to perform. Return true to continue or false to stop.
fn process(&self, flow: &mut Flow);
/// Returns true if this node should be processed and false if neither this node nor its
/// descendants should be processed.
fn should_process_subtree(&self, _flow: &mut Flow) -> bool {
true
}
/// Returns true if this node must be processed in-order. If this returns false,
/// we skip the operation for this node, but continue processing the descendants.
/// This is called *after* parent nodes are visited.
fn should_process(&self, _flow: &mut Flow) -> bool {
true
}
/// Traverses the tree in preorder.
fn traverse(&self, flow: &mut Flow) {
if !self.should_process_subtree(flow) {
return;
}
if self.should_process(flow) {
self.process(flow);
}
for kid in flow::child_iter_mut(flow) {
self.traverse(kid);
}
}
/// Traverse the Absolute flow tree in preorder.
///
/// Traverse all your direct absolute descendants, who will then traverse
/// their direct absolute descendants.
///
/// Return true if the traversal is to continue or false to stop.
fn traverse_absolute_flows(&self, flow: &mut Flow) {
if self.should_process(flow) {
self.process(flow);
}
for descendant_link in flow::mut_base(flow).abs_descendants.iter() {
self.traverse_absolute_flows(descendant_link)
}
}
}
/// A bottom-up traversal, with an optional in-order pass.
pub trait PostorderFlowTraversal {
/// The operation to perform. Return true to continue or false to stop.
fn process(&self, flow: &mut Flow);
/// Returns false if this node must be processed in-order. If this returns false, we skip the
/// operation for this node, but continue processing the ancestors. This is called *after*
/// child nodes are visited.
fn should_process(&self, _flow: &mut Flow) -> bool {
true
}
/// Traverses the tree in postorder.
fn traverse(&self, flow: &mut Flow) {
for kid in flow::child_iter_mut(flow) {
self.traverse(kid);
}
if self.should_process(flow) {
self.process(flow);
}
}
}
/// An in-order (sequential only) traversal.
pub trait InorderFlowTraversal {
/// The operation to perform. Returns the level of the tree we're at.
fn process(&mut self, flow: &mut Flow, level: u32);
/// Returns true if this node should be processed and false if neither this node nor its
/// descendants should be processed.
fn should_process_subtree(&mut self, _flow: &mut Flow) -> bool {
true
}
/// Traverses the tree in-order.
fn traverse(&mut self, flow: &mut Flow, level: u32) {
if !self.should_process_subtree(flow) {
return;
}
self.process(flow, level);
for kid in flow::child_iter_mut(flow) {
self.traverse(kid, level + 1);
}
}
}
/// A bottom-up, parallelizable traversal.
pub trait PostorderNodeMutTraversal<ConcreteThreadSafeLayoutNode: ThreadSafeLayoutNode> {
/// The operation to perform. Return true to continue or false to stop.
fn process(&mut self, node: &ConcreteThreadSafeLayoutNode);
}
/// The flow construction traversal, which builds flows for styled nodes.
#[inline]
#[allow(unsafe_code)]
fn construct_flows_at<N>(context: &LayoutContext, node: N)
where N: LayoutNode,
{
debug!("construct_flows_at: {:?}", node);
// Construct flows for this node.
{
let tnode = node.to_threadsafe();
// Always reconstruct if incremental layout is turned off.
let nonincremental_layout = opts::get().nonincremental_layout;
if nonincremental_layout || tnode.restyle_damage() != RestyleDamage::empty() ||
node.as_element().map_or(false, |el| el.has_dirty_descendants()) {
let mut flow_constructor = FlowConstructor::new(context);
if nonincremental_layout || !flow_constructor.repair_if_possible(&tnode) {
flow_constructor.process(&tnode);
debug!("Constructed flow for {:?}: {:x}",
tnode,
tnode.flow_debug_id());
}
}
tnode.mutate_layout_data().unwrap().flags.insert(::data::LayoutDataFlags::HAS_BEEN_TRAVERSED);
}
if let Some(el) = node.as_element() {
unsafe { el.unset_dirty_descendants(); }
}
}
/// The bubble-inline-sizes traversal, the first part of layout computation. This computes
/// preferred and intrinsic inline-sizes and bubbles them up the tree.
pub struct BubbleISizes<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PostorderFlowTraversal for BubbleISizes<'a> {
#[inline]
fn process(&self, flow: &mut Flow) {
flow.bubble_inline_sizes();
flow::mut_base(flow).restyle_damage.remove(ServoRestyleDamage::BUBBLE_ISIZES);
}
#[inline]
fn should_process(&self, flow: &mut Flow) -> bool {
flow::base(flow).restyle_damage.contains(ServoRestyleDamage::BUBBLE_ISIZES)
}
}
/// The assign-inline-sizes traversal. In Gecko this corresponds to `Reflow`.
#[derive(Clone, Copy)]
pub struct AssignISizes<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PreorderFlowTraversal for AssignISizes<'a> {
#[inline]
fn process(&self, flow: &mut Flow) {
flow.assign_inline_sizes(self.layout_context);
}
#[inline]
fn should_process(&self, flow: &mut Flow) -> bool {
flow::base(flow).restyle_damage.intersects(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW)
}
}
/// The assign-block-sizes-and-store-overflow traversal, the last (and most expensive) part of
/// layout computation. Determines the final block-sizes for all layout objects and computes
/// positions. In Gecko this corresponds to `Reflow`.
#[derive(Clone, Copy)]
pub struct AssignBSizes<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PostorderFlowTraversal for AssignBSizes<'a> {
#[inline]
fn process(&self, flow: &mut Flow) {
// Can't do anything with anything that floats might flow through until we reach their
// inorder parent.
//
// NB: We must return without resetting the restyle bits for these, as we haven't actually
// reflowed anything!
if flow.floats_might_flow_through()
|
flow.assign_block_size(self.layout_context);
}
#[inline]
fn should_process(&self, flow: &mut Flow) -> bool {
let base = flow::base(flow);
base.restyle_damage.intersects(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW) &&
// The fragmentation container is responsible for calling Flow::fragment recursively
!base.flags.contains(FlowFlags::CAN_BE_FRAGMENTED)
}
}
pub struct ComputeStackingRelativePositions<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PreorderFlowTraversal for ComputeStackingRelativePositions<'a> {
#[inline]
fn should_process_subtree(&self, flow: &mut Flow) -> bool {
flow::base(flow).restyle_damage.contains(ServoRestyleDamage::REPOSITION)
}
#[inline]
fn process(&self, flow: &mut Flow) {
flow.compute_stacking_relative_position(self.layout_context);
flow::mut_base(flow).restyle_damage.remove(ServoRestyleDamage::REPOSITION)
}
}
pub struct BuildDisplayList<'a> {
pub state: DisplayListBuildState<'a>,
}
impl<'a> BuildDisplayList<'a> {
#[inline]
pub fn traverse(&mut self, flow: &mut Flow) {
let parent_stacking_context_id = self.state.current_stacking_context_id;
self.state.current_stacking_context_id = flow::base(flow).stacking_context_id;
let parent_clipping_and_scrolling = self.state.current_clipping_and_scrolling;
self.state.current_clipping_and_scrolling = flow.clipping_and_scrolling();
flow.build_display_list(&mut self.state);
flow::mut_base(flow).restyle_damage.remove(ServoRestyleDamage::REPAINT);
for kid in flow::child_iter_mut(flow) {
self.traverse(kid);
}
self.state.current_stacking_context_id = parent_stacking_context_id;
self.state.current_clipping_and_scrolling = parent_clipping_and_scrolling;
}
}
|
{
return
}
|
conditional_block
|
traversal.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Traversals over the DOM and flow trees, running the layout computations.
use construct::FlowConstructor;
use context::LayoutContext;
use display_list_builder::DisplayListBuildState;
use flow::{self, FlowFlags, Flow, ImmutableFlowUtils};
use script_layout_interface::wrapper_traits::{LayoutNode, ThreadSafeLayoutNode};
use servo_config::opts;
use style::context::{SharedStyleContext, StyleContext};
use style::data::ElementData;
use style::dom::{NodeInfo, TElement, TNode};
use style::selector_parser::RestyleDamage;
use style::servo::restyle_damage::ServoRestyleDamage;
use style::traversal::{DomTraversal, recalc_style_at};
use style::traversal::PerLevelTraversalData;
use wrapper::{GetRawData, LayoutNodeLayoutData};
use wrapper::ThreadSafeLayoutNodeHelpers;
pub struct RecalcStyleAndConstructFlows<'a> {
context: LayoutContext<'a>,
}
impl<'a> RecalcStyleAndConstructFlows<'a> {
pub fn layout_context(&self) -> &LayoutContext<'a> {
&self.context
}
}
impl<'a> RecalcStyleAndConstructFlows<'a> {
/// Creates a traversal context, taking ownership of the shared layout context.
pub fn new(context: LayoutContext<'a>) -> Self {
RecalcStyleAndConstructFlows {
context: context,
}
}
/// Consumes this traversal context, returning ownership of the shared layout
/// context to the caller.
pub fn destroy(self) -> LayoutContext<'a> {
self.context
}
}
#[allow(unsafe_code)]
impl<'a, E> DomTraversal<E> for RecalcStyleAndConstructFlows<'a>
where E: TElement,
E::ConcreteNode: LayoutNode,
E::FontMetricsProvider: Send,
{
fn process_preorder<F>(&self, traversal_data: &PerLevelTraversalData,
context: &mut StyleContext<E>, node: E::ConcreteNode,
note_child: F)
where F: FnMut(E::ConcreteNode)
{
// FIXME(pcwalton): Stop allocating here. Ideally this should just be
// done by the HTML parser.
unsafe { node.initialize_data() };
if !node.is_text_node() {
let el = node.as_element().unwrap();
let mut data = el.mutate_data().unwrap();
recalc_style_at(self, traversal_data, context, el, &mut data, note_child);
}
}
fn process_postorder(&self, _style_context: &mut StyleContext<E>, node: E::ConcreteNode) {
construct_flows_at(&self.context, node);
}
fn text_node_needs_traversal(node: E::ConcreteNode, parent_data: &ElementData) -> bool {
// Text nodes never need styling. However, there are two cases they may need
// flow construction:
// (1) The child doesn't yet have layout data (preorder traversal initializes it).
// (2) The parent element has restyle damage (so the text flow also needs fixup).
node.get_raw_data().is_none() ||
parent_data.damage != RestyleDamage::empty()
}
fn shared_context(&self) -> &SharedStyleContext {
&self.context.style_context
}
}
/// A top-down traversal.
pub trait PreorderFlowTraversal {
/// The operation to perform. Return true to continue or false to stop.
fn process(&self, flow: &mut Flow);
/// Returns true if this node should be processed and false if neither this node nor its
/// descendants should be processed.
fn should_process_subtree(&self, _flow: &mut Flow) -> bool {
true
}
/// Returns true if this node must be processed in-order. If this returns false,
/// we skip the operation for this node, but continue processing the descendants.
/// This is called *after* parent nodes are visited.
fn should_process(&self, _flow: &mut Flow) -> bool {
true
}
/// Traverses the tree in preorder.
fn traverse(&self, flow: &mut Flow) {
if !self.should_process_subtree(flow) {
return;
}
if self.should_process(flow) {
self.process(flow);
}
for kid in flow::child_iter_mut(flow) {
self.traverse(kid);
}
}
/// Traverse the Absolute flow tree in preorder.
///
/// Traverse all your direct absolute descendants, who will then traverse
/// their direct absolute descendants.
///
/// Return true if the traversal is to continue or false to stop.
fn traverse_absolute_flows(&self, flow: &mut Flow) {
if self.should_process(flow) {
self.process(flow);
}
for descendant_link in flow::mut_base(flow).abs_descendants.iter() {
self.traverse_absolute_flows(descendant_link)
}
}
}
/// A bottom-up traversal, with an optional in-order pass.
pub trait PostorderFlowTraversal {
/// The operation to perform. Return true to continue or false to stop.
fn process(&self, flow: &mut Flow);
/// Returns false if this node must be processed in-order. If this returns false, we skip the
/// operation for this node, but continue processing the ancestors. This is called *after*
/// child nodes are visited.
fn should_process(&self, _flow: &mut Flow) -> bool {
true
}
/// Traverses the tree in postorder.
fn traverse(&self, flow: &mut Flow) {
for kid in flow::child_iter_mut(flow) {
self.traverse(kid);
}
if self.should_process(flow) {
self.process(flow);
}
}
}
/// An in-order (sequential only) traversal.
pub trait InorderFlowTraversal {
/// The operation to perform. Returns the level of the tree we're at.
fn process(&mut self, flow: &mut Flow, level: u32);
/// Returns true if this node should be processed and false if neither this node nor its
/// descendants should be processed.
fn should_process_subtree(&mut self, _flow: &mut Flow) -> bool {
true
}
/// Traverses the tree in-order.
fn traverse(&mut self, flow: &mut Flow, level: u32) {
if !self.should_process_subtree(flow) {
return;
}
self.process(flow, level);
for kid in flow::child_iter_mut(flow) {
self.traverse(kid, level + 1);
}
}
}
/// A bottom-up, parallelizable traversal.
pub trait PostorderNodeMutTraversal<ConcreteThreadSafeLayoutNode: ThreadSafeLayoutNode> {
/// The operation to perform. Return true to continue or false to stop.
fn process(&mut self, node: &ConcreteThreadSafeLayoutNode);
}
/// The flow construction traversal, which builds flows for styled nodes.
#[inline]
#[allow(unsafe_code)]
fn construct_flows_at<N>(context: &LayoutContext, node: N)
where N: LayoutNode,
{
debug!("construct_flows_at: {:?}", node);
// Construct flows for this node.
{
let tnode = node.to_threadsafe();
// Always reconstruct if incremental layout is turned off.
let nonincremental_layout = opts::get().nonincremental_layout;
if nonincremental_layout || tnode.restyle_damage() != RestyleDamage::empty() ||
node.as_element().map_or(false, |el| el.has_dirty_descendants()) {
let mut flow_constructor = FlowConstructor::new(context);
if nonincremental_layout || !flow_constructor.repair_if_possible(&tnode) {
flow_constructor.process(&tnode);
debug!("Constructed flow for {:?}: {:x}",
tnode,
tnode.flow_debug_id());
}
}
tnode.mutate_layout_data().unwrap().flags.insert(::data::LayoutDataFlags::HAS_BEEN_TRAVERSED);
}
if let Some(el) = node.as_element() {
unsafe { el.unset_dirty_descendants(); }
}
}
/// The bubble-inline-sizes traversal, the first part of layout computation. This computes
/// preferred and intrinsic inline-sizes and bubbles them up the tree.
pub struct BubbleISizes<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
|
#[inline]
fn process(&self, flow: &mut Flow) {
flow.bubble_inline_sizes();
flow::mut_base(flow).restyle_damage.remove(ServoRestyleDamage::BUBBLE_ISIZES);
}
#[inline]
fn should_process(&self, flow: &mut Flow) -> bool {
flow::base(flow).restyle_damage.contains(ServoRestyleDamage::BUBBLE_ISIZES)
}
}
/// The assign-inline-sizes traversal. In Gecko this corresponds to `Reflow`.
#[derive(Clone, Copy)]
pub struct AssignISizes<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PreorderFlowTraversal for AssignISizes<'a> {
#[inline]
fn process(&self, flow: &mut Flow) {
flow.assign_inline_sizes(self.layout_context);
}
#[inline]
fn should_process(&self, flow: &mut Flow) -> bool {
flow::base(flow).restyle_damage.intersects(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW)
}
}
/// The assign-block-sizes-and-store-overflow traversal, the last (and most expensive) part of
/// layout computation. Determines the final block-sizes for all layout objects and computes
/// positions. In Gecko this corresponds to `Reflow`.
#[derive(Clone, Copy)]
pub struct AssignBSizes<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PostorderFlowTraversal for AssignBSizes<'a> {
#[inline]
fn process(&self, flow: &mut Flow) {
// Can't do anything with anything that floats might flow through until we reach their
// inorder parent.
//
// NB: We must return without resetting the restyle bits for these, as we haven't actually
// reflowed anything!
if flow.floats_might_flow_through() {
return
}
flow.assign_block_size(self.layout_context);
}
#[inline]
fn should_process(&self, flow: &mut Flow) -> bool {
let base = flow::base(flow);
base.restyle_damage.intersects(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW) &&
// The fragmentation container is responsible for calling Flow::fragment recursively
!base.flags.contains(FlowFlags::CAN_BE_FRAGMENTED)
}
}
pub struct ComputeStackingRelativePositions<'a> {
pub layout_context: &'a LayoutContext<'a>,
}
impl<'a> PreorderFlowTraversal for ComputeStackingRelativePositions<'a> {
#[inline]
fn should_process_subtree(&self, flow: &mut Flow) -> bool {
flow::base(flow).restyle_damage.contains(ServoRestyleDamage::REPOSITION)
}
#[inline]
fn process(&self, flow: &mut Flow) {
flow.compute_stacking_relative_position(self.layout_context);
flow::mut_base(flow).restyle_damage.remove(ServoRestyleDamage::REPOSITION)
}
}
pub struct BuildDisplayList<'a> {
pub state: DisplayListBuildState<'a>,
}
impl<'a> BuildDisplayList<'a> {
#[inline]
pub fn traverse(&mut self, flow: &mut Flow) {
let parent_stacking_context_id = self.state.current_stacking_context_id;
self.state.current_stacking_context_id = flow::base(flow).stacking_context_id;
let parent_clipping_and_scrolling = self.state.current_clipping_and_scrolling;
self.state.current_clipping_and_scrolling = flow.clipping_and_scrolling();
flow.build_display_list(&mut self.state);
flow::mut_base(flow).restyle_damage.remove(ServoRestyleDamage::REPAINT);
for kid in flow::child_iter_mut(flow) {
self.traverse(kid);
}
self.state.current_stacking_context_id = parent_stacking_context_id;
self.state.current_clipping_and_scrolling = parent_clipping_and_scrolling;
}
}
|
impl<'a> PostorderFlowTraversal for BubbleISizes<'a> {
|
random_line_split
|
lib.rs
|
extern crate pcap;
use pcap::{Active, Activated, Offline, Capture};
use std::path::Path;
#[test]
fn read_packet_with_full_data() {
|
#[test]
fn read_packet_with_truncated_data() {
let mut capture = capture_from_test_file("packet_snaplen_20.pcap");
assert_eq!(capture.next().unwrap().len(), 20);
}
fn capture_from_test_file(file_name: &str) -> Capture<Offline> {
let path = Path::new("tests/data/").join(file_name);
Capture::from_file(path).unwrap()
}
#[test]
fn unify_activated() {
#![allow(dead_code)]
fn test1() -> Capture<Active> {
loop{}
}
fn test2() -> Capture<Offline> {
loop{}
}
fn maybe(a: bool) -> Capture<Activated> {
if a {
test1().into()
} else {
test2().into()
}
}
fn also_maybe(a: &mut Capture<Activated>) {
a.filter("whatever filter string, this won't be run anyway").unwrap();
}
}
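// Editor's note: an illustrative, standalone sketch (not part of pcap) of the
// typestate-unification pattern exercised by `unify_activated` above: two distinct
// capture states convert into a common "activated" state via `From`, so both branches
// of an `if` can return the same type. All names below are hypothetical.
#[cfg(test)]
mod typestate_sketch {
    struct Cap<S> { _state: S }
    struct Live;
    struct File;
    struct Either;

    impl From<Cap<Live>> for Cap<Either> {
        fn from(_: Cap<Live>) -> Cap<Either> { Cap { _state: Either } }
    }
    impl From<Cap<File>> for Cap<Either> {
        fn from(_: Cap<File>) -> Cap<Either> { Cap { _state: Either } }
    }

    // Both arms unify into the common state, just like Capture<Activated>.
    fn pick(live: bool) -> Cap<Either> {
        if live {
            Cap { _state: Live }.into()
        } else {
            Cap { _state: File }.into()
        }
    }

    #[test]
    fn both_branches_unify() {
        let _a: Cap<Either> = pick(true);
        let _b: Cap<Either> = pick(false);
    }
}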
|
let mut capture = capture_from_test_file("packet_snaplen_65535.pcap");
assert_eq!(capture.next().unwrap().len(), 98);
}
|
random_line_split
|
lib.rs
|
extern crate pcap;
use pcap::{Active, Activated, Offline, Capture};
use std::path::Path;
#[test]
fn read_packet_with_full_data() {
let mut capture = capture_from_test_file("packet_snaplen_65535.pcap");
assert_eq!(capture.next().unwrap().len(), 98);
}
#[test]
fn read_packet_with_truncated_data() {
let mut capture = capture_from_test_file("packet_snaplen_20.pcap");
assert_eq!(capture.next().unwrap().len(), 20);
}
fn
|
(file_name: &str) -> Capture<Offline> {
let path = Path::new("tests/data/").join(file_name);
Capture::from_file(path).unwrap()
}
#[test]
fn unify_activated() {
#![allow(dead_code)]
fn test1() -> Capture<Active> {
loop{}
}
fn test2() -> Capture<Offline> {
loop{}
}
fn maybe(a: bool) -> Capture<Activated> {
if a {
test1().into()
} else {
test2().into()
}
}
fn also_maybe(a: &mut Capture<Activated>) {
a.filter("whatever filter string, this won't be run anyway").unwrap();
}
}
|
capture_from_test_file
|
identifier_name
|
lib.rs
|
extern crate pcap;
use pcap::{Active, Activated, Offline, Capture};
use std::path::Path;
#[test]
fn read_packet_with_full_data() {
let mut capture = capture_from_test_file("packet_snaplen_65535.pcap");
assert_eq!(capture.next().unwrap().len(), 98);
}
#[test]
fn read_packet_with_truncated_data() {
let mut capture = capture_from_test_file("packet_snaplen_20.pcap");
assert_eq!(capture.next().unwrap().len(), 20);
}
fn capture_from_test_file(file_name: &str) -> Capture<Offline> {
let path = Path::new("tests/data/").join(file_name);
Capture::from_file(path).unwrap()
}
#[test]
fn unify_activated() {
#![allow(dead_code)]
fn test1() -> Capture<Active> {
loop{}
}
fn test2() -> Capture<Offline> {
loop{}
}
fn maybe(a: bool) -> Capture<Activated> {
if a
|
else {
test2().into()
}
}
fn also_maybe(a: &mut Capture<Activated>) {
a.filter("whatever filter string, this won't be run anyway").unwrap();
}
}
|
{
test1().into()
}
|
conditional_block
|
lib.rs
|
extern crate pcap;
use pcap::{Active, Activated, Offline, Capture};
use std::path::Path;
#[test]
fn read_packet_with_full_data() {
let mut capture = capture_from_test_file("packet_snaplen_65535.pcap");
assert_eq!(capture.next().unwrap().len(), 98);
}
#[test]
fn read_packet_with_truncated_data() {
let mut capture = capture_from_test_file("packet_snaplen_20.pcap");
assert_eq!(capture.next().unwrap().len(), 20);
}
fn capture_from_test_file(file_name: &str) -> Capture<Offline>
|
#[test]
fn unify_activated() {
#![allow(dead_code)]
fn test1() -> Capture<Active> {
loop{}
}
fn test2() -> Capture<Offline> {
loop{}
}
fn maybe(a: bool) -> Capture<Activated> {
if a {
test1().into()
} else {
test2().into()
}
}
fn also_maybe(a: &mut Capture<Activated>) {
a.filter("whatever filter string, this won't be run anyway").unwrap();
}
}
|
{
let path = Path::new("tests/data/").join(file_name);
Capture::from_file(path).unwrap()
}
|
identifier_body
|
unique-vec-res.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct r {
i: @mut int,
}
#[unsafe_destructor]
impl Drop for r {
fn drop(&self) {
unsafe {
*(self.i) = *(self.i) + 1;
}
}
}
fn f<T>(_i: ~[T], _j: ~[T]) {
}
fn main() {
let i1 = @mut 0;
let i2 = @mut 1;
let r1 = ~[~r { i: i1 }];
let r2 = ~[~r { i: i2 }];
f(copy r1, copy r2);
//~^ ERROR copying a value of non-copyable type
//~^^ ERROR copying a value of non-copyable type
info!((r2, *i1));
info!((r1, *i2));
}
|
// http://rust-lang.org/COPYRIGHT.
//
|
random_line_split
|
unique-vec-res.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct r {
i: @mut int,
}
#[unsafe_destructor]
impl Drop for r {
fn drop(&self)
|
}
fn f<T>(_i: ~[T], _j: ~[T]) {
}
fn main() {
let i1 = @mut 0;
let i2 = @mut 1;
let r1 = ~[~r { i: i1 }];
let r2 = ~[~r { i: i2 }];
f(copy r1, copy r2);
//~^ ERROR copying a value of non-copyable type
//~^^ ERROR copying a value of non-copyable type
info!((r2, *i1));
info!((r1, *i2));
}
|
{
unsafe {
*(self.i) = *(self.i) + 1;
}
}
|
identifier_body
|
unique-vec-res.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct r {
i: @mut int,
}
#[unsafe_destructor]
impl Drop for r {
fn
|
(&self) {
unsafe {
*(self.i) = *(self.i) + 1;
}
}
}
fn f<T>(_i: ~[T], _j: ~[T]) {
}
fn main() {
let i1 = @mut 0;
let i2 = @mut 1;
let r1 = ~[~r { i: i1 }];
let r2 = ~[~r { i: i2 }];
f(copy r1, copy r2);
//~^ ERROR copying a value of non-copyable type
//~^^ ERROR copying a value of non-copyable type
info!((r2, *i1));
info!((r1, *i2));
}
|
drop
|
identifier_name
|
number.rs
|
// Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Immutable numbers.
#![allow(non_uppercase_statics)]
use base::{CFAllocatorRef, CFRelease, CFRetain, CFTypeID, CFTypeRef};
use base::{TCFType, kCFAllocatorDefault};
use libc::c_void;
use std::mem;
pub type CFNumberType = u32;
// members of enum CFNumberType
// static kCFNumberSInt8Type: CFNumberType = 1;
// static kCFNumberSInt16Type: CFNumberType = 2;
// static kCFNumberSInt32Type: CFNumberType = 3;
static kCFNumberSInt64Type: CFNumberType = 4;
// static kCFNumberFloat32Type: CFNumberType = 5;
static kCFNumberFloat64Type: CFNumberType = 6;
// static kCFNumberCharType: CFNumberType = 7;
// static kCFNumberShortType: CFNumberType = 8;
// static kCFNumberIntType: CFNumberType = 9;
// static kCFNumberLongType: CFNumberType = 10;
// static kCFNumberLongLongType: CFNumberType = 11;
// static kCFNumberFloatType: CFNumberType = 12;
// static kCFNumberDoubleType: CFNumberType = 13;
// static kCFNumberCFIndexType: CFNumberType = 14;
// static kCFNumberNSIntegerType: CFNumberType = 15;
// static kCFNumberCGFloatType: CFNumberType = 16;
// static kCFNumberMaxType: CFNumberType = 16;
struct __CFNumber;
pub type CFNumberRef = *const __CFNumber;
/// An immutable numeric value.
///
/// FIXME(pcwalton): Should be a newtype struct, but that fails due to a Rust compiler bug.
pub struct CFNumber {
obj: CFNumberRef,
}
impl Drop for CFNumber {
fn drop(&mut self) {
unsafe {
CFRelease(self.as_CFTypeRef())
}
}
}
impl TCFType<CFNumberRef> for CFNumber {
#[inline]
fn as_concrete_TypeRef(&self) -> CFNumberRef {
self.obj
}
#[inline]
unsafe fn wrap_under_get_rule(reference: CFNumberRef) -> CFNumber {
let reference: CFNumberRef = mem::transmute(CFRetain(mem::transmute(reference)));
TCFType::wrap_under_create_rule(reference)
}
#[inline]
fn as_CFTypeRef(&self) -> CFTypeRef {
unsafe {
mem::transmute(self.as_concrete_TypeRef())
}
}
unsafe fn wrap_under_create_rule(obj: CFNumberRef) -> CFNumber {
CFNumber {
obj: obj,
}
}
#[inline]
fn type_id(_: Option<CFNumber>) -> CFTypeID {
unsafe {
CFNumberGetTypeID()
}
}
}
// TODO(pcwalton): Floating point.
impl ToPrimitive for CFNumber {
#[inline]
fn to_i64(&self) -> Option<i64> {
unsafe {
let mut value: i64 = 0;
let ok = CFNumberGetValue(self.obj, kCFNumberSInt64Type, mem::transmute(&mut value));
assert!(ok);
Some(value)
}
}
#[inline]
fn to_u64(&self) -> Option<u64> {
// CFNumber does not support unsigned 64-bit values.
None
}
#[inline]
fn to_f64(&self) -> Option<f64> {
unsafe {
let mut value: f64 = 0.0;
let ok = CFNumberGetValue(self.obj, kCFNumberFloat64Type, mem::transmute(&mut value));
assert!(ok);
Some(value)
}
}
}
// TODO(pcwalton): Floating point.
impl FromPrimitive for CFNumber {
#[inline]
fn from_i64(value: i64) -> Option<CFNumber> {
unsafe {
let number_ref = CFNumberCreate(kCFAllocatorDefault,
kCFNumberSInt64Type,
mem::transmute(&value));
Some(TCFType::wrap_under_create_rule(number_ref))
}
}
#[inline]
fn from_u64(_: u64) -> Option<CFNumber> {
// CFNumber does not support unsigned 64-bit values.
None
}
#[inline]
fn from_f64(value: f64) -> Option<CFNumber> {
unsafe {
let number_ref = CFNumberCreate(kCFAllocatorDefault,
|
}
}
/// A convenience function to create CFNumbers.
pub fn number(value: i64) -> CFNumber {
FromPrimitive::from_i64(value).unwrap()
}
#[link(name = "CoreFoundation", kind = "framework")]
extern {
/*
* CFNumber.h
*/
fn CFNumberCreate(allocator: CFAllocatorRef, theType: CFNumberType, valuePtr: *const c_void)
-> CFNumberRef;
//fn CFNumberGetByteSize
fn CFNumberGetValue(number: CFNumberRef, theType: CFNumberType, valuePtr: *mut c_void) -> bool;
//fn CFNumberCompare
fn CFNumberGetTypeID() -> CFTypeID;
}
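// Editor's note: an illustrative usage sketch (not part of the original file),
// assuming a macOS target where the CoreFoundation framework is linkable; it
// round-trips a value through the conversions defined above.
#[cfg(all(test, target_os = "macos"))]
mod cfnumber_usage_sketch {
    use super::{number, CFNumber};

    #[test]
    fn i64_round_trip() {
        let n: CFNumber = number(42);
        assert_eq!(n.to_i64(), Some(42));
        // Unsigned 64-bit values are intentionally unsupported by CFNumber.
        assert_eq!(n.to_u64(), None);
    }
}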
|
kCFNumberFloat64Type,
mem::transmute(&value));
Some(TCFType::wrap_under_create_rule(number_ref))
}
|
random_line_split
|
number.rs
|
// Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Immutable numbers.
#![allow(non_uppercase_statics)]
use base::{CFAllocatorRef, CFRelease, CFRetain, CFTypeID, CFTypeRef};
use base::{TCFType, kCFAllocatorDefault};
use libc::c_void;
use std::mem;
pub type CFNumberType = u32;
// members of enum CFNumberType
// static kCFNumberSInt8Type: CFNumberType = 1;
// static kCFNumberSInt16Type: CFNumberType = 2;
// static kCFNumberSInt32Type: CFNumberType = 3;
static kCFNumberSInt64Type: CFNumberType = 4;
// static kCFNumberFloat32Type: CFNumberType = 5;
static kCFNumberFloat64Type: CFNumberType = 6;
// static kCFNumberCharType: CFNumberType = 7;
// static kCFNumberShortType: CFNumberType = 8;
// static kCFNumberIntType: CFNumberType = 9;
// static kCFNumberLongType: CFNumberType = 10;
// static kCFNumberLongLongType: CFNumberType = 11;
// static kCFNumberFloatType: CFNumberType = 12;
// static kCFNumberDoubleType: CFNumberType = 13;
// static kCFNumberCFIndexType: CFNumberType = 14;
// static kCFNumberNSIntegerType: CFNumberType = 15;
// static kCFNumberCGFloatType: CFNumberType = 16;
// static kCFNumberMaxType: CFNumberType = 16;
struct __CFNumber;
pub type CFNumberRef = *const __CFNumber;
/// An immutable numeric value.
///
/// FIXME(pcwalton): Should be a newtype struct, but that fails due to a Rust compiler bug.
pub struct CFNumber {
obj: CFNumberRef,
}
impl Drop for CFNumber {
fn drop(&mut self) {
unsafe {
CFRelease(self.as_CFTypeRef())
}
}
}
impl TCFType<CFNumberRef> for CFNumber {
#[inline]
fn as_concrete_TypeRef(&self) -> CFNumberRef {
self.obj
}
#[inline]
unsafe fn wrap_under_get_rule(reference: CFNumberRef) -> CFNumber {
let reference: CFNumberRef = mem::transmute(CFRetain(mem::transmute(reference)));
TCFType::wrap_under_create_rule(reference)
}
#[inline]
fn as_CFTypeRef(&self) -> CFTypeRef {
unsafe {
mem::transmute(self.as_concrete_TypeRef())
}
}
unsafe fn
|
(obj: CFNumberRef) -> CFNumber {
CFNumber {
obj: obj,
}
}
#[inline]
fn type_id(_: Option<CFNumber>) -> CFTypeID {
unsafe {
CFNumberGetTypeID()
}
}
}
// TODO(pcwalton): Floating point.
impl ToPrimitive for CFNumber {
#[inline]
fn to_i64(&self) -> Option<i64> {
unsafe {
let mut value: i64 = 0;
let ok = CFNumberGetValue(self.obj, kCFNumberSInt64Type, mem::transmute(&mut value));
assert!(ok);
Some(value)
}
}
#[inline]
fn to_u64(&self) -> Option<u64> {
// CFNumber does not support unsigned 64-bit values.
None
}
#[inline]
fn to_f64(&self) -> Option<f64> {
unsafe {
let mut value: f64 = 0.0;
let ok = CFNumberGetValue(self.obj, kCFNumberFloat64Type, mem::transmute(&mut value));
assert!(ok);
Some(value)
}
}
}
// TODO(pcwalton): Floating point.
impl FromPrimitive for CFNumber {
#[inline]
fn from_i64(value: i64) -> Option<CFNumber> {
unsafe {
let number_ref = CFNumberCreate(kCFAllocatorDefault,
kCFNumberSInt64Type,
mem::transmute(&value));
Some(TCFType::wrap_under_create_rule(number_ref))
}
}
#[inline]
fn from_u64(_: u64) -> Option<CFNumber> {
// CFNumber does not support unsigned 64-bit values.
None
}
#[inline]
fn from_f64(value: f64) -> Option<CFNumber> {
unsafe {
let number_ref = CFNumberCreate(kCFAllocatorDefault,
kCFNumberFloat64Type,
mem::transmute(&value));
Some(TCFType::wrap_under_create_rule(number_ref))
}
}
}
/// A convenience function to create CFNumbers.
pub fn number(value: i64) -> CFNumber {
FromPrimitive::from_i64(value).unwrap()
}
#[link(name = "CoreFoundation", kind = "framework")]
extern {
/*
* CFNumber.h
*/
fn CFNumberCreate(allocator: CFAllocatorRef, theType: CFNumberType, valuePtr: *const c_void)
-> CFNumberRef;
//fn CFNumberGetByteSize
fn CFNumberGetValue(number: CFNumberRef, theType: CFNumberType, valuePtr: *mut c_void) -> bool;
//fn CFNumberCompare
fn CFNumberGetTypeID() -> CFTypeID;
}
|
wrap_under_create_rule
|
identifier_name
|
number.rs
|
// Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Immutable numbers.
#![allow(non_uppercase_statics)]
use base::{CFAllocatorRef, CFRelease, CFRetain, CFTypeID, CFTypeRef};
use base::{TCFType, kCFAllocatorDefault};
use libc::c_void;
use std::mem;
pub type CFNumberType = u32;
// members of enum CFNumberType
// static kCFNumberSInt8Type: CFNumberType = 1;
// static kCFNumberSInt16Type: CFNumberType = 2;
// static kCFNumberSInt32Type: CFNumberType = 3;
static kCFNumberSInt64Type: CFNumberType = 4;
// static kCFNumberFloat32Type: CFNumberType = 5;
static kCFNumberFloat64Type: CFNumberType = 6;
// static kCFNumberCharType: CFNumberType = 7;
// static kCFNumberShortType: CFNumberType = 8;
// static kCFNumberIntType: CFNumberType = 9;
// static kCFNumberLongType: CFNumberType = 10;
// static kCFNumberLongLongType: CFNumberType = 11;
// static kCFNumberFloatType: CFNumberType = 12;
// static kCFNumberDoubleType: CFNumberType = 13;
// static kCFNumberCFIndexType: CFNumberType = 14;
// static kCFNumberNSIntegerType: CFNumberType = 15;
// static kCFNumberCGFloatType: CFNumberType = 16;
// static kCFNumberMaxType: CFNumberType = 16;
struct __CFNumber;
pub type CFNumberRef = *const __CFNumber;
/// An immutable numeric value.
///
/// FIXME(pcwalton): Should be a newtype struct, but that fails due to a Rust compiler bug.
pub struct CFNumber {
obj: CFNumberRef,
}
impl Drop for CFNumber {
fn drop(&mut self) {
unsafe {
CFRelease(self.as_CFTypeRef())
}
}
}
impl TCFType<CFNumberRef> for CFNumber {
#[inline]
fn as_concrete_TypeRef(&self) -> CFNumberRef {
self.obj
}
#[inline]
unsafe fn wrap_under_get_rule(reference: CFNumberRef) -> CFNumber {
let reference: CFNumberRef = mem::transmute(CFRetain(mem::transmute(reference)));
TCFType::wrap_under_create_rule(reference)
}
#[inline]
fn as_CFTypeRef(&self) -> CFTypeRef {
unsafe {
mem::transmute(self.as_concrete_TypeRef())
}
}
unsafe fn wrap_under_create_rule(obj: CFNumberRef) -> CFNumber {
CFNumber {
obj: obj,
}
}
#[inline]
fn type_id(_: Option<CFNumber>) -> CFTypeID {
unsafe {
CFNumberGetTypeID()
}
}
}
// TODO(pcwalton): Floating point.
impl ToPrimitive for CFNumber {
#[inline]
fn to_i64(&self) -> Option<i64> {
unsafe {
let mut value: i64 = 0;
let ok = CFNumberGetValue(self.obj, kCFNumberSInt64Type, mem::transmute(&mut value));
assert!(ok);
Some(value)
}
}
#[inline]
fn to_u64(&self) -> Option<u64> {
// CFNumber does not support unsigned 64-bit values.
None
}
#[inline]
fn to_f64(&self) -> Option<f64> {
unsafe {
let mut value: f64 = 0.0;
let ok = CFNumberGetValue(self.obj, kCFNumberFloat64Type, mem::transmute(&mut value));
assert!(ok);
Some(value)
}
}
}
// TODO(pcwalton): Floating point.
impl FromPrimitive for CFNumber {
#[inline]
fn from_i64(value: i64) -> Option<CFNumber> {
unsafe {
let number_ref = CFNumberCreate(kCFAllocatorDefault,
kCFNumberSInt64Type,
mem::transmute(&value));
Some(TCFType::wrap_under_create_rule(number_ref))
}
}
#[inline]
fn from_u64(_: u64) -> Option<CFNumber> {
// CFNumber does not support unsigned 64-bit values.
None
}
#[inline]
fn from_f64(value: f64) -> Option<CFNumber>
|
}
/// A convenience function to create CFNumbers.
pub fn number(value: i64) -> CFNumber {
FromPrimitive::from_i64(value).unwrap()
}
#[link(name = "CoreFoundation", kind = "framework")]
extern {
/*
* CFNumber.h
*/
fn CFNumberCreate(allocator: CFAllocatorRef, theType: CFNumberType, valuePtr: *const c_void)
-> CFNumberRef;
//fn CFNumberGetByteSize
fn CFNumberGetValue(number: CFNumberRef, theType: CFNumberType, valuePtr: *mut c_void) -> bool;
//fn CFNumberCompare
fn CFNumberGetTypeID() -> CFTypeID;
}
|
{
unsafe {
let number_ref = CFNumberCreate(kCFAllocatorDefault,
kCFNumberFloat64Type,
mem::transmute(&value));
Some(TCFType::wrap_under_create_rule(number_ref))
}
}
|
identifier_body
|
windows.rs
|
use winapi::um::processenv::GetStdHandle;
use winapi::um::winbase::STD_OUTPUT_HANDLE;
use winapi::um::wincon::GetConsoleScreenBufferInfo;
use winapi::um::wincon::{CONSOLE_SCREEN_BUFFER_INFO, COORD, SMALL_RECT};
/// Query the current process's output, returning its width and height as a
/// number of characters.
///
/// # Errors
///
/// Returns `None` if the output isn't to a terminal.
///
/// # Example
///
/// To get the dimensions of your terminal window, simply use the following:
///
/// ```no_run
/// # use term_size;
/// if let Some((w, h)) = term_size::dimensions() {
/// println!("Width: {}\nHeight: {}", w, h);
/// } else {
/// println!("Unable to get term size :(")
/// }
/// ```
pub fn dimensions() -> Option<(usize, usize)> {
let null_coord = COORD { X: 0, Y: 0 };
let null_smallrect = SMALL_RECT {
Left: 0,
Top: 0,
Right: 0,
Bottom: 0,
};
let stdout_h = unsafe { GetStdHandle(STD_OUTPUT_HANDLE) };
let mut console_data = CONSOLE_SCREEN_BUFFER_INFO {
dwSize: null_coord,
dwCursorPosition: null_coord,
wAttributes: 0,
srWindow: null_smallrect,
dwMaximumWindowSize: null_coord,
};
if unsafe { GetConsoleScreenBufferInfo(stdout_h, &mut console_data) } != 0 {
Some(((console_data.srWindow.Right - console_data.srWindow.Left) as usize,
(console_data.srWindow.Bottom - console_data.srWindow.Top) as usize))
} else
|
}
/// Query the current process's output, returning its width and height as a
/// number of characters. Returns `None` if the output isn't to a terminal.
///
/// # Errors
///
/// Returns `None` if the output isn't to a terminal.
///
/// # Example
///
/// To get the dimensions of your terminal window, simply use the following:
///
/// ```no_run
/// # use term_size;
/// if let Some((w, h)) = term_size::dimensions() {
/// println!("Width: {}\nHeight: {}", w, h);
/// } else {
/// println!("Unable to get term size :(")
/// }
/// ```
pub fn dimensions_stdout() -> Option<(usize, usize)> { dimensions() }
/// This isn't implemented for Windows
///
/// # Panics
///
/// This function `panic!`s unconditionally with the `unimplemented!`
/// macro
pub fn dimensions_stdin() -> Option<(usize, usize)> { unimplemented!() }
/// This isn't implemented for Windows
///
/// # Panics
///
/// This function `panic!`s unconditionally with the `unimplemented!`
/// macro
pub fn dimensions_stderr() -> Option<(usize, usize)> { unimplemented!() }
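// Editor's note: a standalone sketch (not part of the original file) of the
// width/height arithmetic used in `dimensions()` above: the visible window is a
// rectangle of character-cell coordinates, and the size is the difference of the
// opposing edges. The `Rect` type here is a hypothetical stand-in for SMALL_RECT.
#[cfg(test)]
mod window_size_sketch {
    struct Rect { left: i16, top: i16, right: i16, bottom: i16 }

    fn size(window: &Rect) -> (usize, usize) {
        ((window.right - window.left) as usize,
         (window.bottom - window.top) as usize)
    }

    #[test]
    fn size_matches_edge_differences() {
        let window = Rect { left: 0, top: 5, right: 120, bottom: 35 };
        assert_eq!(size(&window), (120, 30));
    }
}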
|
{
None
}
|
conditional_block
|
windows.rs
|
use winapi::um::processenv::GetStdHandle;
use winapi::um::winbase::STD_OUTPUT_HANDLE;
use winapi::um::wincon::GetConsoleScreenBufferInfo;
use winapi::um::wincon::{CONSOLE_SCREEN_BUFFER_INFO, COORD, SMALL_RECT};
/// Query the current process's output, returning its width and height as a
/// number of characters.
///
/// # Errors
///
/// Returns `None` if the output isn't to a terminal.
///
/// # Example
///
/// To get the dimensions of your terminal window, simply use the following:
///
/// ```no_run
/// # use term_size;
/// if let Some((w, h)) = term_size::dimensions() {
/// println!("Width: {}\nHeight: {}", w, h);
/// } else {
/// println!("Unable to get term size :(")
/// }
/// ```
pub fn dimensions() -> Option<(usize, usize)> {
let null_coord = COORD { X: 0, Y: 0 };
let null_smallrect = SMALL_RECT {
Left: 0,
Top: 0,
Right: 0,
Bottom: 0,
};
let stdout_h = unsafe { GetStdHandle(STD_OUTPUT_HANDLE) };
let mut console_data = CONSOLE_SCREEN_BUFFER_INFO {
dwSize: null_coord,
|
srWindow: null_smallrect,
dwMaximumWindowSize: null_coord,
};
if unsafe { GetConsoleScreenBufferInfo(stdout_h, &mut console_data) } != 0 {
Some(((console_data.srWindow.Right - console_data.srWindow.Left) as usize,
(console_data.srWindow.Bottom - console_data.srWindow.Top) as usize))
} else {
None
}
}
/// Query the current process's output, returning its width and height as a
/// number of characters. Returns `None` if the output isn't to a terminal.
///
/// # Errors
///
/// Returns `None` if the output isn't to a terminal.
///
/// # Example
///
/// To get the dimensions of your terminal window, simply use the following:
///
/// ```no_run
/// # use term_size;
/// if let Some((w, h)) = term_size::dimensions() {
/// println!("Width: {}\nHeight: {}", w, h);
/// } else {
/// println!("Unable to get term size :(")
/// }
/// ```
pub fn dimensions_stdout() -> Option<(usize, usize)> { dimensions() }
/// This isn't implemented for Windows
///
/// # Panics
///
/// This function `panic!`s unconditionally with the `unimplemented!`
/// macro
pub fn dimensions_stdin() -> Option<(usize, usize)> { unimplemented!() }
/// This isn't implemented for Windows
///
/// # Panics
///
/// This function `panic!`s unconditionally with the `unimplemented!`
/// macro
pub fn dimensions_stderr() -> Option<(usize, usize)> { unimplemented!() }
|
dwCursorPosition: null_coord,
wAttributes: 0,
|
random_line_split
|
windows.rs
|
use winapi::um::processenv::GetStdHandle;
use winapi::um::winbase::STD_OUTPUT_HANDLE;
use winapi::um::wincon::GetConsoleScreenBufferInfo;
use winapi::um::wincon::{CONSOLE_SCREEN_BUFFER_INFO, COORD, SMALL_RECT};
/// Query the current process's output, returning its width and height as a
/// number of characters.
///
/// # Errors
///
/// Returns `None` if the output isn't to a terminal.
///
/// # Example
///
/// To get the dimensions of your terminal window, simply use the following:
///
/// ```no_run
/// # use term_size;
/// if let Some((w, h)) = term_size::dimensions() {
/// println!("Width: {}\nHeight: {}", w, h);
/// } else {
/// println!("Unable to get term size :(")
/// }
/// ```
pub fn dimensions() -> Option<(usize, usize)> {
let null_coord = COORD { X: 0, Y: 0 };
let null_smallrect = SMALL_RECT {
Left: 0,
Top: 0,
Right: 0,
Bottom: 0,
};
let stdout_h = unsafe { GetStdHandle(STD_OUTPUT_HANDLE) };
let mut console_data = CONSOLE_SCREEN_BUFFER_INFO {
dwSize: null_coord,
dwCursorPosition: null_coord,
wAttributes: 0,
srWindow: null_smallrect,
dwMaximumWindowSize: null_coord,
};
if unsafe { GetConsoleScreenBufferInfo(stdout_h, &mut console_data) } != 0 {
Some(((console_data.srWindow.Right - console_data.srWindow.Left) as usize,
(console_data.srWindow.Bottom - console_data.srWindow.Top) as usize))
} else {
None
}
}
/// Query the current process's output, returning its width and height as a
/// number of characters. Returns `None` if the output isn't to a terminal.
///
/// # Errors
///
/// Returns `None` if the output isn't to a terminal.
///
/// # Example
///
/// To get the dimensions of your terminal window, simply use the following:
///
/// ```no_run
/// # use term_size;
/// if let Some((w, h)) = term_size::dimensions() {
/// println!("Width: {}\nHeight: {}", w, h);
/// } else {
/// println!("Unable to get term size :(")
/// }
/// ```
pub fn
|
() -> Option<(usize, usize)> { dimensions() }
/// This isn't implemented for Windows
///
/// # Panics
///
/// This function `panic!`s unconditionally with the `unimplemented!`
/// macro
pub fn dimensions_stdin() -> Option<(usize, usize)> { unimplemented!() }
/// This isn't implemented for Windows
///
/// # Panics
///
/// This function `panic!`s unconditionally with the `unimplemented!`
/// macro
pub fn dimensions_stderr() -> Option<(usize, usize)> { unimplemented!() }
|
dimensions_stdout
|
identifier_name
|
windows.rs
|
use winapi::um::processenv::GetStdHandle;
use winapi::um::winbase::STD_OUTPUT_HANDLE;
use winapi::um::wincon::GetConsoleScreenBufferInfo;
use winapi::um::wincon::{CONSOLE_SCREEN_BUFFER_INFO, COORD, SMALL_RECT};
/// Query the current process's output, returning its width and height as a
/// number of characters.
///
/// # Errors
///
/// Returns `None` if the output isn't to a terminal.
///
/// # Example
///
/// To get the dimensions of your terminal window, simply use the following:
///
/// ```no_run
/// # use term_size;
/// if let Some((w, h)) = term_size::dimensions() {
/// println!("Width: {}\nHeight: {}", w, h);
/// } else {
/// println!("Unable to get term size :(")
/// }
/// ```
pub fn dimensions() -> Option<(usize, usize)> {
let null_coord = COORD { X: 0, Y: 0 };
let null_smallrect = SMALL_RECT {
Left: 0,
Top: 0,
Right: 0,
Bottom: 0,
};
let stdout_h = unsafe { GetStdHandle(STD_OUTPUT_HANDLE) };
let mut console_data = CONSOLE_SCREEN_BUFFER_INFO {
dwSize: null_coord,
dwCursorPosition: null_coord,
wAttributes: 0,
srWindow: null_smallrect,
dwMaximumWindowSize: null_coord,
};
if unsafe { GetConsoleScreenBufferInfo(stdout_h, &mut console_data) } != 0 {
Some(((console_data.srWindow.Right - console_data.srWindow.Left) as usize,
(console_data.srWindow.Bottom - console_data.srWindow.Top) as usize))
} else {
None
}
}
/// Query the current process's output, returning its width and height as a
/// number of characters. Returns `None` if the output isn't to a terminal.
///
/// # Errors
///
/// Returns `None` if the output isn't to a terminal.
///
/// # Example
///
/// To get the dimensions of your terminal window, simply use the following:
///
/// ```no_run
/// # use term_size;
/// if let Some((w, h)) = term_size::dimensions() {
/// println!("Width: {}\nHeight: {}", w, h);
/// } else {
/// println!("Unable to get term size :(")
/// }
/// ```
pub fn dimensions_stdout() -> Option<(usize, usize)> { dimensions() }
/// This isn't implemented for Windows
///
/// # Panics
///
/// This function `panic!`s unconditionally with the `unimplemented!`
/// macro
pub fn dimensions_stdin() -> Option<(usize, usize)> { unimplemented!() }
/// This isn't implemented for Windows
///
/// # Panics
///
/// This function `panic!`s unconditionally with the `unimplemented!`
/// macro
pub fn dimensions_stderr() -> Option<(usize, usize)>
|
{ unimplemented!() }
|
identifier_body
|
|
local_sched.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Access to the thread-local Scheduler
use prelude::*;
use ptr::mut_null;
use libc::c_void;
use cast::transmute;
use super::Scheduler;
use super::super::rtio::IoFactoryObject;
use tls = super::super::thread_local_storage;
#[cfg(test)] use super::super::uvio::UvEventLoop;
/// Give the Scheduler to thread-local storage
pub fn put(sched: ~Scheduler) {
unsafe {
let key = tls_key();
let void_sched: *mut c_void = transmute::<~Scheduler, *mut c_void>(sched);
tls::set(key, void_sched);
}
}
/// Take ownership of the Scheduler from thread-local storage
pub fn take() -> ~Scheduler {
unsafe {
let key = tls_key();
let void_sched: *mut c_void = tls::get(key);
assert!(void_sched.is_not_null());
let sched = transmute::<*mut c_void, ~Scheduler>(void_sched);
tls::set(key, mut_null());
return sched;
}
}
/// Check whether there is a thread-local Scheduler attached to the running thread
pub fn exists() -> bool {
unsafe {
match maybe_tls_key() {
Some(key) => tls::get(key).is_not_null(),
None => false
}
}
}
/// Borrow the thread-local scheduler from thread-local storage.
/// While the scheduler is borrowed it is not available in TLS.
pub fn borrow(f: &fn(&mut Scheduler)) {
let mut sched = take();
f(sched);
put(sched);
}
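// Editor's note: a self-contained sketch (not part of the original file) of the
// take/put discipline that `borrow` wraps: while the value is taken out of its slot it
// is unavailable to anyone else, and it must be put back afterwards. The `SLOT` below
// is a hypothetical stand-in for the real TLS key, written in present-day Rust.
#[cfg(test)]
mod take_put_sketch {
    use std::cell::RefCell;

    thread_local!(static SLOT: RefCell<Option<String>> = RefCell::new(None));

    fn put(value: String) { SLOT.with(|s| *s.borrow_mut() = Some(value)); }
    fn take() -> String { SLOT.with(|s| s.borrow_mut().take()).unwrap() }

    #[test]
    fn borrow_style_round_trip() {
        put("sched".to_string());
        let mut value = take();                        // slot is now empty
        SLOT.with(|s| assert!(s.borrow().is_none()));  // nobody else can see it
        value.push_str("-used");                       // "borrowed" mutation
        put(value);                                    // hand it back
        assert_eq!(take(), "sched-used");
    }
}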
/// Borrow a mutable reference to the thread-local Scheduler
///
/// # Safety Note
///
/// Because this leaves the Scheduler in thread-local storage it is possible
/// for the Scheduler pointer to be aliased
pub unsafe fn unsafe_borrow() -> &mut Scheduler {
let key = tls_key();
let mut void_sched: *mut c_void = tls::get(key);
assert!(void_sched.is_not_null());
{
let void_sched_ptr = &mut void_sched;
let sched: &mut ~Scheduler = {
transmute::<&mut *mut c_void, &mut ~Scheduler>(void_sched_ptr)
};
let sched: &mut Scheduler = &mut **sched;
return sched;
}
}
pub unsafe fn unsafe_borrow_io() -> &mut IoFactoryObject {
let sched = unsafe_borrow();
return sched.event_loop.io().unwrap();
}
fn tls_key() -> tls::Key {
maybe_tls_key().get()
}
fn maybe_tls_key() -> Option<tls::Key> {
unsafe {
let key: *mut c_void = rust_get_sched_tls_key();
let key: &mut tls::Key = transmute(key);
let key = *key;
// Check that the key has been initialized.
// NB: This is a little racy because, while the key is
// initialized under a mutex and it's assumed to be initialized
// in the Scheduler ctor by any thread that needs to use it,
// we are not accessing the key under a mutex. Threads that
// are not using the new Scheduler but still *want to check*
// whether they are running under a new Scheduler may see a 0
// value here that is in the process of being initialized in
// another thread. I think this is fine since the only action
// they could take if it was initialized would be to check the
// thread-local value and see that it's not set.
if key != 0 {
return Some(key);
} else {
return None;
}
}
}
extern {
fn rust_get_sched_tls_key() -> *mut c_void;
}
#[test]
fn thread_local_scheduler_smoke_test() {
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
let _scheduler = take();
}
#[test]
fn
|
() {
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
let _scheduler = take();
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
let _scheduler = take();
}
#[test]
fn borrow_smoke_test() {
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
unsafe {
let _scheduler = unsafe_borrow();
}
let _scheduler = take();
}
|
thread_local_scheduler_two_instances
|
identifier_name
|
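A minimal stand-alone sketch (not part of the dataset row above) of the same take/put/borrow ownership pattern that local_sched.rs implements with the runtime's raw TLS key, written against current Rust's std::thread_local!. The Scheduler struct here is a hypothetical stand-in for the real type.

use std::cell::RefCell;

struct Scheduler {
    name: String,
}

thread_local! {
    // One optional scheduler slot per thread, mirroring the single TLS key above.
    static SCHED: RefCell<Option<Box<Scheduler>>> = RefCell::new(None);
}

// Give the scheduler to thread-local storage.
fn put(sched: Box<Scheduler>) {
    SCHED.with(|slot| *slot.borrow_mut() = Some(sched));
}

// Take ownership of the scheduler back out of thread-local storage.
fn take() -> Box<Scheduler> {
    SCHED.with(|slot| slot.borrow_mut().take().expect("no scheduler in TLS"))
}

// Borrow the scheduler; while it is borrowed it is not available in TLS.
fn borrow<F: FnOnce(&mut Scheduler)>(f: F) {
    let mut sched = take();
    f(&mut *sched);
    put(sched);
}

fn main() {
    put(Box::new(Scheduler { name: "main".to_owned() }));
    borrow(|s| println!("borrowed scheduler: {}", s.name));
    let _sched = take();
}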
local_sched.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Access to the thread-local Scheduler
use prelude::*;
use ptr::mut_null;
use libc::c_void;
use cast::transmute;
use super::Scheduler;
use super::super::rtio::IoFactoryObject;
use tls = super::super::thread_local_storage;
#[cfg(test)] use super::super::uvio::UvEventLoop;
/// Give the Scheduler to thread-local storage
pub fn put(sched: ~Scheduler) {
unsafe {
let key = tls_key();
let void_sched: *mut c_void = transmute::<~Scheduler, *mut c_void>(sched);
tls::set(key, void_sched);
}
}
/// Take ownership of the Scheduler from thread-local storage
pub fn take() -> ~Scheduler {
unsafe {
let key = tls_key();
let void_sched: *mut c_void = tls::get(key);
assert!(void_sched.is_not_null());
let sched = transmute::<*mut c_void, ~Scheduler>(void_sched);
tls::set(key, mut_null());
return sched;
}
}
/// Check whether there is a thread-local Scheduler attached to the running thread
pub fn exists() -> bool {
unsafe {
match maybe_tls_key() {
Some(key) => tls::get(key).is_not_null(),
None => false
}
}
}
/// Borrow the thread-local scheduler from thread-local storage.
/// While the scheduler is borrowed it is not available in TLS.
pub fn borrow(f: &fn(&mut Scheduler))
|
/// Borrow a mutable reference to the thread-local Scheduler
///
/// # Safety Note
///
/// Because this leaves the Scheduler in thread-local storage it is possible
/// for the Scheduler pointer to be aliased
pub unsafe fn unsafe_borrow() -> &mut Scheduler {
let key = tls_key();
let mut void_sched: *mut c_void = tls::get(key);
assert!(void_sched.is_not_null());
{
let void_sched_ptr = &mut void_sched;
let sched: &mut ~Scheduler = {
transmute::<&mut *mut c_void, &mut ~Scheduler>(void_sched_ptr)
};
let sched: &mut Scheduler = &mut **sched;
return sched;
}
}
pub unsafe fn unsafe_borrow_io() -> &mut IoFactoryObject {
let sched = unsafe_borrow();
return sched.event_loop.io().unwrap();
}
fn tls_key() -> tls::Key {
maybe_tls_key().get()
}
fn maybe_tls_key() -> Option<tls::Key> {
unsafe {
let key: *mut c_void = rust_get_sched_tls_key();
let key: &mut tls::Key = transmute(key);
let key = *key;
// Check that the key has been initialized.
// NB: This is a little racy because, while the key is
// initialized under a mutex and it's assumed to be initialized
// in the Scheduler ctor by any thread that needs to use it,
// we are not accessing the key under a mutex. Threads that
// are not using the new Scheduler but still *want to check*
// whether they are running under a new Scheduler may see a 0
// value here that is in the process of being initialized in
// another thread. I think this is fine since the only action
// they could take if it was initialized would be to check the
// thread-local value and see that it's not set.
if key != 0 {
return Some(key);
} else {
return None;
}
}
}
extern {
fn rust_get_sched_tls_key() -> *mut c_void;
}
#[test]
fn thread_local_scheduler_smoke_test() {
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
let _scheduler = take();
}
#[test]
fn thread_local_scheduler_two_instances() {
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
let _scheduler = take();
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
let _scheduler = take();
}
#[test]
fn borrow_smoke_test() {
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
unsafe {
let _scheduler = unsafe_borrow();
}
let _scheduler = take();
}
|
{
let mut sched = take();
f(sched);
put(sched);
}
|
identifier_body
|
local_sched.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Access to the thread-local Scheduler
use prelude::*;
use ptr::mut_null;
use libc::c_void;
use cast::transmute;
use super::Scheduler;
use super::super::rtio::IoFactoryObject;
use tls = super::super::thread_local_storage;
#[cfg(test)] use super::super::uvio::UvEventLoop;
/// Give the Scheduler to thread-local storage
pub fn put(sched: ~Scheduler) {
unsafe {
|
}
}
/// Take ownership of the Scheduler from thread-local storage
pub fn take() -> ~Scheduler {
unsafe {
let key = tls_key();
let void_sched: *mut c_void = tls::get(key);
assert!(void_sched.is_not_null());
let sched = transmute::<*mut c_void, ~Scheduler>(void_sched);
tls::set(key, mut_null());
return sched;
}
}
/// Check whether there is a thread-local Scheduler attached to the running thread
pub fn exists() -> bool {
unsafe {
match maybe_tls_key() {
Some(key) => tls::get(key).is_not_null(),
None => false
}
}
}
/// Borrow the thread-local scheduler from thread-local storage.
/// While the scheduler is borrowed it is not available in TLS.
pub fn borrow(f: &fn(&mut Scheduler)) {
let mut sched = take();
f(sched);
put(sched);
}
/// Borrow a mutable reference to the thread-local Scheduler
///
/// # Safety Note
///
/// Because this leaves the Scheduler in thread-local storage it is possible
/// for the Scheduler pointer to be aliased
pub unsafe fn unsafe_borrow() -> &mut Scheduler {
let key = tls_key();
let mut void_sched: *mut c_void = tls::get(key);
assert!(void_sched.is_not_null());
{
let void_sched_ptr = &mut void_sched;
let sched: &mut ~Scheduler = {
transmute::<&mut *mut c_void, &mut ~Scheduler>(void_sched_ptr)
};
let sched: &mut Scheduler = &mut **sched;
return sched;
}
}
pub unsafe fn unsafe_borrow_io() -> &mut IoFactoryObject {
let sched = unsafe_borrow();
return sched.event_loop.io().unwrap();
}
fn tls_key() -> tls::Key {
maybe_tls_key().get()
}
fn maybe_tls_key() -> Option<tls::Key> {
unsafe {
let key: *mut c_void = rust_get_sched_tls_key();
let key: &mut tls::Key = transmute(key);
let key = *key;
// Check that the key has been initialized.
// NB: This is a little racy because, while the key is
// initialized under a mutex and it's assumed to be initialized
// in the Scheduler ctor by any thread that needs to use it,
// we are not accessing the key under a mutex. Threads that
// are not using the new Scheduler but still *want to check*
// whether they are running under a new Scheduler may see a 0
// value here that is in the process of being initialized in
// another thread. I think this is fine since the only action
// they could take if it was initialized would be to check the
// thread-local value and see that it's not set.
if key != 0 {
return Some(key);
} else {
return None;
}
}
}
extern {
fn rust_get_sched_tls_key() -> *mut c_void;
}
#[test]
fn thread_local_scheduler_smoke_test() {
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
let _scheduler = take();
}
#[test]
fn thread_local_scheduler_two_instances() {
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
let _scheduler = take();
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
let _scheduler = take();
}
#[test]
fn borrow_smoke_test() {
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
unsafe {
let _scheduler = unsafe_borrow();
}
let _scheduler = take();
}
|
let key = tls_key();
let void_sched: *mut c_void = transmute::<~Scheduler, *mut c_void>(sched);
tls::set(key, void_sched);
|
random_line_split
|
local_sched.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Access to the thread-local Scheduler
use prelude::*;
use ptr::mut_null;
use libc::c_void;
use cast::transmute;
use super::Scheduler;
use super::super::rtio::IoFactoryObject;
use tls = super::super::thread_local_storage;
#[cfg(test)] use super::super::uvio::UvEventLoop;
/// Give the Scheduler to thread-local storage
pub fn put(sched: ~Scheduler) {
unsafe {
let key = tls_key();
let void_sched: *mut c_void = transmute::<~Scheduler, *mut c_void>(sched);
tls::set(key, void_sched);
}
}
/// Take ownership of the Scheduler from thread-local storage
pub fn take() -> ~Scheduler {
unsafe {
let key = tls_key();
let void_sched: *mut c_void = tls::get(key);
assert!(void_sched.is_not_null());
let sched = transmute::<*mut c_void, ~Scheduler>(void_sched);
tls::set(key, mut_null());
return sched;
}
}
/// Check whether there is a thread-local Scheduler attached to the running thread
pub fn exists() -> bool {
unsafe {
match maybe_tls_key() {
Some(key) => tls::get(key).is_not_null(),
None => false
}
}
}
/// Borrow the thread-local scheduler from thread-local storage.
/// While the scheduler is borrowed it is not available in TLS.
pub fn borrow(f: &fn(&mut Scheduler)) {
let mut sched = take();
f(sched);
put(sched);
}
/// Borrow a mutable reference to the thread-local Scheduler
///
/// # Safety Note
///
/// Because this leaves the Scheduler in thread-local storage it is possible
/// for the Scheduler pointer to be aliased
pub unsafe fn unsafe_borrow() -> &mut Scheduler {
let key = tls_key();
let mut void_sched: *mut c_void = tls::get(key);
assert!(void_sched.is_not_null());
{
let void_sched_ptr = &mut void_sched;
let sched: &mut ~Scheduler = {
transmute::<&mut *mut c_void, &mut ~Scheduler>(void_sched_ptr)
};
let sched: &mut Scheduler = &mut **sched;
return sched;
}
}
pub unsafe fn unsafe_borrow_io() -> &mut IoFactoryObject {
let sched = unsafe_borrow();
return sched.event_loop.io().unwrap();
}
fn tls_key() -> tls::Key {
maybe_tls_key().get()
}
fn maybe_tls_key() -> Option<tls::Key> {
unsafe {
let key: *mut c_void = rust_get_sched_tls_key();
let key: &mut tls::Key = transmute(key);
let key = *key;
// Check that the key has been initialized.
// NB: This is a little racy because, while the key is
// initialized under a mutex and it's assumed to be initialized
// in the Scheduler ctor by any thread that needs to use it,
// we are not accessing the key under a mutex. Threads that
// are not using the new Scheduler but still *want to check*
// whether they are running under a new Scheduler may see a 0
// value here that is in the process of being initialized in
// another thread. I think this is fine since the only action
// they could take if it was initialized would be to check the
// thread-local value and see that it's not set.
if key != 0 {
return Some(key);
} else
|
}
}
extern {
fn rust_get_sched_tls_key() -> *mut c_void;
}
#[test]
fn thread_local_scheduler_smoke_test() {
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
let _scheduler = take();
}
#[test]
fn thread_local_scheduler_two_instances() {
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
let _scheduler = take();
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
let _scheduler = take();
}
#[test]
fn borrow_smoke_test() {
let scheduler = ~UvEventLoop::new_scheduler();
put(scheduler);
unsafe {
let _scheduler = unsafe_borrow();
}
let _scheduler = take();
}
|
{
return None;
}
|
conditional_block
|
remote_info.rs
|
use helpers;
use regex::Regex;
use slog::Logger;
use std::io;
use std::process::Command;
pub struct RemoteInfo {
pub is_remote: bool,
pub path: String,
pub user: String,
pub host: String,
pub port: String,
}
impl RemoteInfo {
pub fn build(remote_dir: &str, port: Option<&str>) -> Self {
#[cfg_attr(feature="clippy", allow(result_unwrap_used))]
// Unwrap is safe - hard coded string
let regex = Regex::new("([^@]+)@([^:]+):(.+)").unwrap();
if let Some(captures) = regex.captures(remote_dir) {
Self {
is_remote: true,
// Unwrap is safe - capture group exists in regex
path: captures.get(3).unwrap().as_str().to_owned(),
user: captures.get(1).unwrap().as_str().to_owned(),
host: captures.get(2).unwrap().as_str().to_owned(),
port: match port {
Some(p) => p.to_owned(),
None => "22".to_owned(),
},
}
} else {
Self {
is_remote: false,
path: remote_dir.to_owned(),
user: "".to_owned(),
host: "".to_owned(),
port: "".to_owned(),
}
}
}
pub fn base_command(&self, cmd: &str) -> Command {
if self.is_remote {
Command::new("ssh")
} else {
let mut iter = cmd.split_whitespace();
let main_cmd = iter.next().unwrap_or("");
Command::new(main_cmd)
}
}
pub fn generate_command<'a>(&self, command: &'a mut Command, cmd: &str) -> &'a mut Command {
if self.is_remote {
command
.arg("-q")
.arg(format!("{}@{}", self.user, self.host))
.arg("-p")
.arg(&self.port)
.arg("-C")
.arg(cmd)
} else {
let iter = cmd.split_whitespace();
let mut args = vec![];
for arg in iter.skip(1) {
args.push(arg)
}
command.args(&args)
}
}
|
pub fn full_path(&self) -> String {
if self.is_remote {
format!("{}@{}:{}", self.user, self.host, self.path)
} else {
self.path.clone()
}
}
pub fn full_path_trailing_slash(&self) -> String {
format!("{}/", self.full_path())
}
pub fn check_cmd_output(
&self,
log: &Logger,
cmd: &str,
wanted_output: &[String],
match_output: bool,
) -> Result<String, ()> {
match self.get_cmd_output(cmd) {
Ok(output) => {
if match_output ^ wanted_output.contains(&output) {
Err(())
} else {
Ok(output)
}
}
Err(e) => {
helpers::log_error_and_exit(log, &format!("Failed to run '{}' on remote: {}", cmd, e));
panic!(); // For compilation
}
}
}
pub fn get_cmd_output(&self, cmd: &str) -> Result<String, io::Error> {
let output = self.generate_command(&mut self.base_command(cmd), cmd)
.output()?;
Ok(String::from_utf8_lossy(&output.stdout).trim().to_owned())
}
pub fn run_cmd(&self, cmd: &str) -> bool {
match self.generate_command(&mut self.base_command(cmd), cmd)
.status() {
Ok(status) => status.success(),
Err(_) => false,
}
}
}
|
random_line_split
|
|
remote_info.rs
|
use helpers;
use regex::Regex;
use slog::Logger;
use std::io;
use std::process::Command;
pub struct RemoteInfo {
pub is_remote: bool,
pub path: String,
pub user: String,
pub host: String,
pub port: String,
}
impl RemoteInfo {
pub fn build(remote_dir: &str, port: Option<&str>) -> Self {
#[cfg_attr(feature="clippy", allow(result_unwrap_used))]
// Unwrap is safe - hard coded string
let regex = Regex::new("([^@]+)@([^:]+):(.+)").unwrap();
if let Some(captures) = regex.captures(remote_dir) {
Self {
is_remote: true,
// Unwrap is safe - capture group exists in regex
path: captures.get(3).unwrap().as_str().to_owned(),
user: captures.get(1).unwrap().as_str().to_owned(),
host: captures.get(2).unwrap().as_str().to_owned(),
port: match port {
Some(p) => p.to_owned(),
None => "22".to_owned(),
},
}
} else {
Self {
is_remote: false,
path: remote_dir.to_owned(),
user: "".to_owned(),
host: "".to_owned(),
port: "".to_owned(),
}
}
}
pub fn base_command(&self, cmd: &str) -> Command {
if self.is_remote {
Command::new("ssh")
} else {
let mut iter = cmd.split_whitespace();
let main_cmd = iter.next().unwrap_or("");
Command::new(main_cmd)
}
}
pub fn generate_command<'a>(&self, command: &'a mut Command, cmd: &str) -> &'a mut Command {
if self.is_remote {
command
.arg("-q")
.arg(format!("{}@{}", self.user, self.host))
.arg("-p")
.arg(&self.port)
.arg("-C")
.arg(cmd)
} else {
let iter = cmd.split_whitespace();
let mut args = vec![];
for arg in iter.skip(1) {
args.push(arg)
}
command.args(&args)
}
}
pub fn full_path(&self) -> String {
if self.is_remote {
format!("{}@{}:{}", self.user, self.host, self.path)
} else {
self.path.clone()
}
}
pub fn full_path_trailing_slash(&self) -> String {
format!("{}/", self.full_path())
}
pub fn check_cmd_output(
&self,
log: &Logger,
cmd: &str,
wanted_output: &[String],
match_output: bool,
) -> Result<String, ()> {
match self.get_cmd_output(cmd) {
Ok(output) => {
if match_output ^ wanted_output.contains(&output) {
Err(())
} else {
Ok(output)
}
}
Err(e) =>
|
}
}
pub fn get_cmd_output(&self, cmd: &str) -> Result<String, io::Error> {
let output = self.generate_command(&mut self.base_command(cmd), cmd)
.output()?;
Ok(String::from_utf8_lossy(&output.stdout).trim().to_owned())
}
pub fn run_cmd(&self, cmd: &str) -> bool {
match self.generate_command(&mut self.base_command(cmd), cmd)
.status() {
Ok(status) => status.success(),
Err(_) => false,
}
}
}
|
{
helpers::log_error_and_exit(log, &format!("Failed to run '{}' on remote: {}", cmd, e));
panic!(); // For compilation
}
|
conditional_block
|
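A small illustrative helper (hypothetical, not part of remote_info.rs) showing the XOR test used by check_cmd_output: the call is treated as a failure exactly when "the output was in the wanted list" disagrees with the requested match_output mode.

// match_output == true  -> the output must appear in the wanted list.
// match_output == false -> the output must NOT appear in the wanted list.
fn output_ok(match_output: bool, wanted: &[String], output: &str) -> bool {
    let contains = wanted.contains(&output.to_owned());
    !(match_output ^ contains)
}

fn main() {
    let wanted = vec!["ok".to_owned()];
    assert!(output_ok(true, &wanted, "ok"));
    assert!(!output_ok(true, &wanted, "fail"));
    assert!(output_ok(false, &wanted, "fail"));
    println!("all checks passed");
}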
remote_info.rs
|
use helpers;
use regex::Regex;
use slog::Logger;
use std::io;
use std::process::Command;
pub struct RemoteInfo {
pub is_remote: bool,
pub path: String,
pub user: String,
pub host: String,
pub port: String,
}
impl RemoteInfo {
pub fn build(remote_dir: &str, port: Option<&str>) -> Self {
#[cfg_attr(feature="clippy", allow(result_unwrap_used))]
// Unwrap is safe - hard coded string
let regex = Regex::new("([^@]+)@([^:]+):(.+)").unwrap();
if let Some(captures) = regex.captures(remote_dir) {
Self {
is_remote: true,
// Unwrap is safe - capture group exists in regex
path: captures.get(3).unwrap().as_str().to_owned(),
user: captures.get(1).unwrap().as_str().to_owned(),
host: captures.get(2).unwrap().as_str().to_owned(),
port: match port {
Some(p) => p.to_owned(),
None => "22".to_owned(),
},
}
} else {
Self {
is_remote: false,
path: remote_dir.to_owned(),
user: "".to_owned(),
host: "".to_owned(),
port: "".to_owned(),
}
}
}
pub fn base_command(&self, cmd: &str) -> Command {
if self.is_remote {
Command::new("ssh")
} else {
let mut iter = cmd.split_whitespace();
let main_cmd = iter.next().unwrap_or("");
Command::new(main_cmd)
}
}
pub fn
|
<'a>(&self, command: &'a mut Command, cmd: &str) -> &'a mut Command {
if self.is_remote {
command
.arg("-q")
.arg(format!("{}@{}", self.user, self.host))
.arg("-p")
.arg(&self.port)
.arg("-C")
.arg(cmd)
} else {
let iter = cmd.split_whitespace();
let mut args = vec![];
for arg in iter.skip(1) {
args.push(arg)
}
command.args(&args)
}
}
pub fn full_path(&self) -> String {
if self.is_remote {
format!("{}@{}:{}", self.user, self.host, self.path)
} else {
self.path.clone()
}
}
pub fn full_path_trailing_slash(&self) -> String {
format!("{}/", self.full_path())
}
pub fn check_cmd_output(
&self,
log: &Logger,
cmd: &str,
wanted_output: &[String],
match_output: bool,
) -> Result<String, ()> {
match self.get_cmd_output(cmd) {
Ok(output) => {
if match_output ^ wanted_output.contains(&output) {
Err(())
} else {
Ok(output)
}
}
Err(e) => {
helpers::log_error_and_exit(log, &format!("Failed to run '{}' on remote: {}", cmd, e));
panic!(); // For compilation
}
}
}
pub fn get_cmd_output(&self, cmd: &str) -> Result<String, io::Error> {
let output = self.generate_command(&mut self.base_command(cmd), cmd)
.output()?;
Ok(String::from_utf8_lossy(&output.stdout).trim().to_owned())
}
pub fn run_cmd(&self, cmd: &str) -> bool {
match self.generate_command(&mut self.base_command(cmd), cmd)
.status() {
Ok(status) => status.success(),
Err(_) => false,
}
}
}
|
generate_command
|
identifier_name
|
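An illustrative stand-alone sketch of the user@host:path parsing that RemoteInfo::build performs; the regex is taken from the file above, but the free function and its tuple return type are hypothetical. It assumes the regex crate is available, as in remote_info.rs.

use regex::Regex;

// Returns (is_remote, user, host, path).
fn parse_remote(remote_dir: &str) -> (bool, String, String, String) {
    let re = Regex::new("([^@]+)@([^:]+):(.+)").unwrap();
    match re.captures(remote_dir) {
        Some(c) => (
            true,
            c.get(1).unwrap().as_str().to_owned(),
            c.get(2).unwrap().as_str().to_owned(),
            c.get(3).unwrap().as_str().to_owned(),
        ),
        // No user@host: prefix means a plain local path.
        None => (false, String::new(), String::new(), remote_dir.to_owned()),
    }
}

fn main() {
    let (remote, user, host, path) = parse_remote("alice@example.com:/srv/data");
    assert!(remote);
    println!("{}@{}:{}", user, host, path);
}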
loader.rs
|
self.find_library_crate()
}
pub fn load_library_crate(&mut self) -> Library {
match self.find_library_crate() {
Some(t) => t,
None => {
self.report_load_errs();
unreachable!()
}
}
}
pub fn report_load_errs(&mut self) {
let message = if self.rejected_via_hash.len() > 0 {
format!("found possibly newer version of crate `{}`",
self.ident)
} else if self.rejected_via_triple.len() > 0 {
format!("found incorrect triple for crate `{}`", self.ident)
} else {
format!("can't find crate for `{}`", self.ident)
};
let message = match self.root {
&None => message,
&Some(ref r) => format!("{} which `{}` depends on",
message, r.ident)
};
self.sess.span_err(self.span, message);
let mismatches = self.rejected_via_triple.iter();
if self.rejected_via_triple.len() > 0 {
self.sess.span_note(self.span, format!("expected triple of {}", self.triple));
for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}, triple {}: {}",
self.ident, i+1, got, path.display()));
}
}
if self.rejected_via_hash.len() > 0 {
self.sess.span_note(self.span, "perhaps this crate needs \
to be recompiled?");
let mismatches = self.rejected_via_hash.iter();
for (i, &CrateMismatch{ ref path,.. }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
self.ident, i+1, path.display()));
}
match self.root {
&None => {}
&Some(ref r) => {
for (i, path) in r.paths().iter().enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
r.ident, i+1, path.display()));
}
}
}
}
self.sess.abort_if_errors();
}
fn find_library_crate(&mut self) -> Option<Library> {
let (dyprefix, dysuffix) = self.dylibname();
// want: crate_name.dir_part() + prefix + crate_name.file_part + "-"
let dylib_prefix = format!("{}{}-", dyprefix, self.crate_id.name);
let rlib_prefix = format!("lib{}-", self.crate_id.name);
let mut candidates = HashMap::new();
// First, find all possible candidate rlibs and dylibs purely based on
// the name of the files themselves. We're trying to match against an
// exact crate_id and possibly an exact hash.
//
// During this step, we can filter all found libraries based on the
// name and id found in the crate id (we ignore the path portion for
// filename matching), as well as the exact hash (if specified). If we
// end up having many candidates, we must look at the metadata to
// perform exact matches against hashes/crate ids. Note that opening up
// the metadata is where we do an exact match against the full contents
// of the crate id (path/name/id).
//
// The goal of this step is to look at as little metadata as possible.
self.filesearch.search(|path| {
let file = match path.filename_str() {
None => return FileDoesntMatch,
Some(file) => file,
};
if file.starts_with(rlib_prefix) && file.ends_with(".rlib") {
info!("rlib candidate: {}", path.display());
match self.try_match(file, rlib_prefix, ".rlib") {
Some(hash) => {
info!("rlib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (ref mut rlibs, _) = *slot;
rlibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("rlib rejected");
FileDoesntMatch
}
}
} else if file.starts_with(dylib_prefix) && file.ends_with(dysuffix){
info!("dylib candidate: {}", path.display());
match self.try_match(file, dylib_prefix, dysuffix) {
Some(hash) => {
info!("dylib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (_, ref mut dylibs) = *slot;
dylibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("dylib rejected");
FileDoesntMatch
}
}
} else {
FileDoesntMatch
}
});
// We have now collected all known libraries into a set of candidates
// keyed off the filename hash listed. For each filename, we also have a
// list of rlibs/dylibs that apply. Here, we map each of these lists
// (per hash), to a Library candidate for returning.
//
// A Library candidate is created if the metadata for the set of
// libraries corresponds to the crate id and hash criteria that this
// search is being performed for.
let mut libraries = Vec::new();
for (_hash, (rlibs, dylibs)) in candidates.move_iter() {
let mut metadata = None;
let rlib = self.extract_one(rlibs, "rlib", &mut metadata);
let dylib = self.extract_one(dylibs, "dylib", &mut metadata);
match metadata {
Some(metadata) => {
libraries.push(Library {
dylib: dylib,
rlib: rlib,
metadata: metadata,
})
}
None => {}
}
}
// Having now translated all relevant found hashes into libraries, see
// what we've got and figure out if we found multiple candidates for
// libraries or not.
match libraries.len() {
0 => None,
1 => Some(libraries.move_iter().next().unwrap()),
_ => {
self.sess.span_err(self.span,
format!("multiple matching crates for `{}`",
self.crate_id.name));
self.sess.note("candidates:");
for lib in libraries.iter() {
match lib.dylib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
match lib.rlib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
let data = lib.metadata.as_slice();
let crate_id = decoder::get_crate_id(data);
note_crateid_attr(self.sess.diagnostic(), &crate_id);
}
None
}
}
}
// Attempts to match the requested version of a library against the file
// specified. The prefix/suffix are specified (disambiguates between
// rlib/dylib).
//
// The return value is `None` if `file` doesn't look like a rust-generated
// library, or if a specific version was requested and it doesn't match the
// apparent file's version.
//
// If everything checks out, then `Some(hash)` is returned where `hash` is
// the listed hash in the filename itself.
fn try_match(&self, file: &str, prefix: &str, suffix: &str) -> Option<StrBuf>{
let middle = file.slice(prefix.len(), file.len() - suffix.len());
debug!("matching -- {}, middle: {}", file, middle);
let mut parts = middle.splitn('-', 1);
let hash = match parts.next() { Some(h) => h, None => return None };
debug!("matching -- {}, hash: {} (want {})", file, hash, self.id_hash);
let vers = match parts.next() { Some(v) => v, None => return None };
debug!("matching -- {}, vers: {} (want {})", file, vers,
self.crate_id.version);
match self.crate_id.version {
Some(ref version) if version.as_slice() != vers => return None,
Some(..) => {} // check the hash
// hash is irrelevant, no version specified
None => return Some(hash.to_strbuf())
}
debug!("matching -- {}, vers ok", file);
// hashes in filenames are prefixes of the "true hash"
if self.id_hash == hash.as_slice() {
debug!("matching -- {}, hash ok", file);
Some(hash.to_strbuf())
} else {
None
}
}
// Attempts to extract *one* library from the set `m`. If the set has no
// elements, `None` is returned. If the set has more than one element, then
// the errors and notes are emitted about the set of libraries.
//
// With only one library in the set, this function will extract it, and then
// read the metadata from it if `*slot` is `None`. If the metadata couldn't
// be read, it is assumed that the file isn't a valid rust library (no
// errors are emitted).
fn extract_one(&mut self, m: HashSet<Path>, flavor: &str,
slot: &mut Option<MetadataBlob>) -> Option<Path> {
let mut ret = None::<Path>;
let mut error = 0;
if slot.is_some() {
// FIXME(#10786): for an optimization, we only read one of the
// library's metadata sections. In theory we should
// read both, but reading dylib metadata is quite
// slow.
if m.len() == 0 {
return None
} else if m.len() == 1 {
return Some(m.move_iter().next().unwrap())
}
}
for lib in m.move_iter() {
info!("{} reading metadata from: {}", flavor, lib.display());
let metadata = match get_metadata_section(self.os, &lib) {
Ok(blob) => {
if self.crate_matches(blob.as_slice(), &lib) {
blob
} else {
info!("metadata mismatch");
continue
}
}
Err(_) => {
info!("no metadata found");
continue
}
};
if ret.is_some() {
self.sess.span_err(self.span,
format!("multiple {} candidates for `{}` \
found", flavor, self.crate_id.name));
self.sess.span_note(self.span,
format!(r"candidate \#1: {}",
ret.get_ref().display()));
error = 1;
ret = None;
}
if error > 0 {
error += 1;
self.sess.span_note(self.span,
format!(r"candidate \#{}: {}", error,
lib.display()));
continue
}
*slot = Some(metadata);
ret = Some(lib);
}
return if error > 0 {None} else {ret}
}
fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> bool {
match decoder::maybe_get_crate_id(crate_data) {
Some(ref id) if self.crate_id.matches(id) => {}
_ => { info!("Rejecting via crate_id"); return false }
}
let hash = match decoder::maybe_get_crate_hash(crate_data) {
Some(hash) => hash, None => {
info!("Rejecting via lack of crate hash");
return false;
}
};
let triple = decoder::get_crate_triple(crate_data);
if triple.as_slice() != self.triple {
info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
self.rejected_via_triple.push(CrateMismatch {
path: libpath.clone(),
got: triple.to_strbuf()
});
return false;
}
match self.hash {
None => true,
Some(myhash) => {
if *myhash != hash {
info!("Rejecting via hash: expected {} got {}", *myhash, hash);
self.rejected_via_hash.push(CrateMismatch {
path: libpath.clone(),
got: myhash.as_str().to_strbuf()
});
false
} else {
true
}
}
}
}
// Returns the corresponding (prefix, suffix) that files need to have for
// dynamic libraries
fn dylibname(&self) -> (&'static str, &'static str) {
match self.os {
OsWin32 => (WIN32_DLL_PREFIX, WIN32_DLL_SUFFIX),
OsMacos => (MACOS_DLL_PREFIX, MACOS_DLL_SUFFIX),
OsLinux => (LINUX_DLL_PREFIX, LINUX_DLL_SUFFIX),
OsAndroid => (ANDROID_DLL_PREFIX, ANDROID_DLL_SUFFIX),
OsFreebsd => (FREEBSD_DLL_PREFIX, FREEBSD_DLL_SUFFIX),
}
}
}
pub fn note_crateid_attr(diag: &SpanHandler, crateid: &CrateId) {
diag.handler().note(format!("crate_id: {}", crateid.to_str()));
}
impl ArchiveMetadata {
fn new(ar: ArchiveRO) -> Option<ArchiveMetadata> {
let data: &'static [u8] = {
let data = match ar.read(METADATA_FILENAME) {
Some(data) => data,
None => {
debug!("didn't find '{}' in the archive", METADATA_FILENAME);
return None;
}
};
// This data is actually a pointer inside of the archive itself, but
// we essentially want to cache it because the lookup inside the
// archive is a fairly expensive operation (and it's queried for
// *very* frequently). For this reason, we transmute it to the
// static lifetime to put into the struct. Note that the buffer is
// never actually handed out with a static lifetime, but rather the
// buffer is loaned with the lifetime of this containing object.
// Hence, we're guaranteed that the buffer will never be used after
// this object is dead, so this is a safe operation to transmute and
// store the data as a static buffer.
unsafe { mem::transmute(data) }
};
Some(ArchiveMetadata {
archive: ar,
data: data,
})
}
pub fn as_slice<'a>(&'a self) -> &'a [u8] { self.data }
}
|
let start = time::precise_time_ns();
let ret = get_metadata_section_imp(os, filename);
info!("reading {} => {}ms", filename.filename_display(),
(time::precise_time_ns() - start) / 1000000);
return ret;
}
fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, StrBuf> {
if !filename.exists() {
return Err(format_strbuf!("no such file: '{}'", filename.display()));
}
if filename.filename_str().unwrap().ends_with(".rlib") {
// Use ArchiveRO for speed here, it's backed by LLVM and uses mmap
// internally to read the file. We also avoid even using a memcpy by
// just keeping the archive along while the metadata is in use.
let archive = match ArchiveRO::open(filename) {
|
// Just a small wrapper to time how long reading metadata takes.
fn get_metadata_section(os: Os, filename: &Path) -> Result<MetadataBlob, StrBuf> {
|
random_line_split
|
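A stand-alone sketch, in current Rust, of the filename parsing that Loader::try_match performs above: strip the known prefix and suffix, then split what remains into a hash and a version at the first '-'. The function signature and the example filename are illustrative only.

fn try_match(file: &str, prefix: &str, suffix: &str) -> Option<(String, String)> {
    if !file.starts_with(prefix) || !file.ends_with(suffix)
        || file.len() < prefix.len() + suffix.len()
    {
        return None;
    }
    // The middle of the filename holds "<hash>-<version>".
    let middle = &file[prefix.len()..file.len() - suffix.len()];
    let mut parts = middle.splitn(2, '-');
    let hash = parts.next()?.to_owned();
    let vers = parts.next()?.to_owned();
    Some((hash, vers))
}

fn main() {
    let parsed = try_match("libstd-abc123-0.11.rlib", "libstd-", ".rlib");
    assert_eq!(parsed, Some(("abc123".to_owned(), "0.11".to_owned())));
}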
loader.rs
|
self.find_library_crate()
}
pub fn load_library_crate(&mut self) -> Library {
match self.find_library_crate() {
Some(t) => t,
None => {
self.report_load_errs();
unreachable!()
}
}
}
pub fn report_load_errs(&mut self) {
let message = if self.rejected_via_hash.len() > 0 {
format!("found possibly newer version of crate `{}`",
self.ident)
} else if self.rejected_via_triple.len() > 0 {
format!("found incorrect triple for crate `{}`", self.ident)
} else {
format!("can't find crate for `{}`", self.ident)
};
let message = match self.root {
&None => message,
&Some(ref r) => format!("{} which `{}` depends on",
message, r.ident)
};
self.sess.span_err(self.span, message);
let mismatches = self.rejected_via_triple.iter();
if self.rejected_via_triple.len() > 0 {
self.sess.span_note(self.span, format!("expected triple of {}", self.triple));
for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}, triple {}: {}",
self.ident, i+1, got, path.display()));
}
}
if self.rejected_via_hash.len() > 0 {
self.sess.span_note(self.span, "perhaps this crate needs \
to be recompiled?");
let mismatches = self.rejected_via_hash.iter();
for (i, &CrateMismatch{ ref path,.. }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
self.ident, i+1, path.display()));
}
match self.root {
&None => {}
&Some(ref r) => {
for (i, path) in r.paths().iter().enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
r.ident, i+1, path.display()));
}
}
}
}
self.sess.abort_if_errors();
}
fn find_library_crate(&mut self) -> Option<Library> {
let (dyprefix, dysuffix) = self.dylibname();
// want: crate_name.dir_part() + prefix + crate_name.file_part + "-"
let dylib_prefix = format!("{}{}-", dyprefix, self.crate_id.name);
let rlib_prefix = format!("lib{}-", self.crate_id.name);
let mut candidates = HashMap::new();
// First, find all possible candidate rlibs and dylibs purely based on
// the name of the files themselves. We're trying to match against an
// exact crate_id and possibly an exact hash.
//
// During this step, we can filter all found libraries based on the
// name and id found in the crate id (we ignore the path portion for
// filename matching), as well as the exact hash (if specified). If we
// end up having many candidates, we must look at the metadata to
// perform exact matches against hashes/crate ids. Note that opening up
// the metadata is where we do an exact match against the full contents
// of the crate id (path/name/id).
//
// The goal of this step is to look at as little metadata as possible.
self.filesearch.search(|path| {
let file = match path.filename_str() {
None => return FileDoesntMatch,
Some(file) => file,
};
if file.starts_with(rlib_prefix) && file.ends_with(".rlib") {
info!("rlib candidate: {}", path.display());
match self.try_match(file, rlib_prefix, ".rlib") {
Some(hash) => {
info!("rlib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (ref mut rlibs, _) = *slot;
rlibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("rlib rejected");
FileDoesntMatch
}
}
} else if file.starts_with(dylib_prefix) && file.ends_with(dysuffix){
info!("dylib candidate: {}", path.display());
match self.try_match(file, dylib_prefix, dysuffix) {
Some(hash) => {
info!("dylib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (_, ref mut dylibs) = *slot;
dylibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("dylib rejected");
FileDoesntMatch
}
}
} else {
FileDoesntMatch
}
});
// We have now collected all known libraries into a set of candidates
// keyed off the filename hash listed. For each filename, we also have a
// list of rlibs/dylibs that apply. Here, we map each of these lists
// (per hash), to a Library candidate for returning.
//
// A Library candidate is created if the metadata for the set of
// libraries corresponds to the crate id and hash criteria that this
// search is being performed for.
let mut libraries = Vec::new();
for (_hash, (rlibs, dylibs)) in candidates.move_iter() {
let mut metadata = None;
let rlib = self.extract_one(rlibs, "rlib", &mut metadata);
let dylib = self.extract_one(dylibs, "dylib", &mut metadata);
match metadata {
Some(metadata) => {
libraries.push(Library {
dylib: dylib,
rlib: rlib,
metadata: metadata,
})
}
None => {}
}
}
// Having now translated all relevant found hashes into libraries, see
// what we've got and figure out if we found multiple candidates for
// libraries or not.
match libraries.len() {
0 => None,
1 => Some(libraries.move_iter().next().unwrap()),
_ => {
self.sess.span_err(self.span,
format!("multiple matching crates for `{}`",
self.crate_id.name));
self.sess.note("candidates:");
for lib in libraries.iter() {
match lib.dylib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
match lib.rlib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
let data = lib.metadata.as_slice();
let crate_id = decoder::get_crate_id(data);
note_crateid_attr(self.sess.diagnostic(), &crate_id);
}
None
}
}
}
// Attempts to match the requested version of a library against the file
// specified. The prefix/suffix are specified (disambiguates between
// rlib/dylib).
//
// The return value is `None` if `file` doesn't look like a rust-generated
// library, or if a specific version was requested and it doesn't match the
// apparent file's version.
//
// If everything checks out, then `Some(hash)` is returned where `hash` is
// the listed hash in the filename itself.
fn try_match(&self, file: &str, prefix: &str, suffix: &str) -> Option<StrBuf>{
let middle = file.slice(prefix.len(), file.len() - suffix.len());
debug!("matching -- {}, middle: {}", file, middle);
let mut parts = middle.splitn('-', 1);
let hash = match parts.next() { Some(h) => h, None => return None };
debug!("matching -- {}, hash: {} (want {})", file, hash, self.id_hash);
let vers = match parts.next() { Some(v) => v, None => return None };
debug!("matching -- {}, vers: {} (want {})", file, vers,
self.crate_id.version);
match self.crate_id.version {
Some(ref version) if version.as_slice() != vers => return None,
Some(..) => {} // check the hash
// hash is irrelevant, no version specified
None => return Some(hash.to_strbuf())
}
debug!("matching -- {}, vers ok", file);
// hashes in filenames are prefixes of the "true hash"
if self.id_hash == hash.as_slice() {
debug!("matching -- {}, hash ok", file);
Some(hash.to_strbuf())
} else {
None
}
}
// Attempts to extract *one* library from the set `m`. If the set has no
// elements, `None` is returned. If the set has more than one element, then
// the errors and notes are emitted about the set of libraries.
//
// With only one library in the set, this function will extract it, and then
// read the metadata from it if `*slot` is `None`. If the metadata couldn't
// be read, it is assumed that the file isn't a valid rust library (no
// errors are emitted).
fn extract_one(&mut self, m: HashSet<Path>, flavor: &str,
slot: &mut Option<MetadataBlob>) -> Option<Path> {
let mut ret = None::<Path>;
let mut error = 0;
if slot.is_some() {
// FIXME(#10786): for an optimization, we only read one of the
// library's metadata sections. In theory we should
// read both, but reading dylib metadata is quite
// slow.
if m.len() == 0 {
return None
} else if m.len() == 1 {
return Some(m.move_iter().next().unwrap())
}
}
for lib in m.move_iter() {
info!("{} reading metadata from: {}", flavor, lib.display());
let metadata = match get_metadata_section(self.os, &lib) {
Ok(blob) => {
if self.crate_matches(blob.as_slice(), &lib) {
blob
} else {
info!("metadata mismatch");
continue
}
}
Err(_) => {
info!("no metadata found");
continue
}
};
if ret.is_some() {
self.sess.span_err(self.span,
format!("multiple {} candidates for `{}` \
found", flavor, self.crate_id.name));
self.sess.span_note(self.span,
format!(r"candidate \#1: {}",
ret.get_ref().display()));
error = 1;
ret = None;
}
if error > 0 {
error += 1;
self.sess.span_note(self.span,
format!(r"candidate \#{}: {}", error,
lib.display()));
continue
}
*slot = Some(metadata);
ret = Some(lib);
}
return if error > 0 {None} else {ret}
}
fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> bool {
match decoder::maybe_get_crate_id(crate_data) {
Some(ref id) if self.crate_id.matches(id) => {}
_ => { info!("Rejecting via crate_id"); return false }
}
let hash = match decoder::maybe_get_crate_hash(crate_data) {
Some(hash) => hash, None => {
info!("Rejecting via lack of crate hash");
return false;
}
};
let triple = decoder::get_crate_triple(crate_data);
if triple.as_slice() != self.triple {
info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
self.rejected_via_triple.push(CrateMismatch {
path: libpath.clone(),
got: triple.to_strbuf()
});
return false;
}
match self.hash {
None => true,
Some(myhash) => {
if *myhash != hash {
info!("Rejecting via hash: expected {} got {}", *myhash, hash);
self.rejected_via_hash.push(CrateMismatch {
path: libpath.clone(),
got: myhash.as_str().to_strbuf()
});
false
} else {
true
}
}
}
}
// Returns the corresponding (prefix, suffix) that files need to have for
// dynamic libraries
fn dylibname(&self) -> (&'static str, &'static str) {
match self.os {
OsWin32 => (WIN32_DLL_PREFIX, WIN32_DLL_SUFFIX),
OsMacos => (MACOS_DLL_PREFIX, MACOS_DLL_SUFFIX),
OsLinux => (LINUX_DLL_PREFIX, LINUX_DLL_SUFFIX),
OsAndroid => (ANDROID_DLL_PREFIX, ANDROID_DLL_SUFFIX),
OsFreebsd => (FREEBSD_DLL_PREFIX, FREEBSD_DLL_SUFFIX),
}
}
}
pub fn note_crateid_attr(diag: &SpanHandler, crateid: &CrateId)
|
impl ArchiveMetadata {
fn new(ar: ArchiveRO) -> Option<ArchiveMetadata> {
let data: &'static [u8] = {
let data = match ar.read(METADATA_FILENAME) {
Some(data) => data,
None => {
debug!("didn't find '{}' in the archive", METADATA_FILENAME);
return None;
}
};
// This data is actually a pointer inside of the archive itself, but
// we essentially want to cache it because the lookup inside the
// archive is a fairly expensive operation (and it's queried for
// *very* frequently). For this reason, we transmute it to the
// static lifetime to put into the struct. Note that the buffer is
// never actually handed out with a static lifetime, but rather the
// buffer is loaned with the lifetime of this containing object.
// Hence, we're guaranteed that the buffer will never be used after
// this object is dead, so this is a safe operation to transmute and
// store the data as a static buffer.
unsafe { mem::transmute(data) }
};
Some(ArchiveMetadata {
archive: ar,
data: data,
})
}
pub fn as_slice<'a>(&'a self) -> &'a [u8] { self.data }
}
// Just a small wrapper to time how long reading metadata takes.
fn get_metadata_section(os: Os, filename: &Path) -> Result<MetadataBlob, StrBuf> {
let start = time::precise_time_ns();
let ret = get_metadata_section_imp(os, filename);
info!("reading {} => {}ms", filename.filename_display(),
(time::precise_time_ns() - start) / 1000000);
return ret;
}
fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, StrBuf> {
if !filename.exists() {
return Err(format_strbuf!("no such file: '{}'", filename.display()));
}
if filename.filename_str().unwrap().ends_with(".rlib") {
// Use ArchiveRO for speed here, it's backed by LLVM and uses mmap
// internally to read the file. We also avoid even using a memcpy by
// just keeping the archive along while the metadata is in use.
let archive = match ArchiveRO::open(filename) {
|
{
diag.handler().note(format!("crate_id: {}", crateid.to_str()));
}
|
identifier_body
|
loader.rs
|
self.find_library_crate()
}
pub fn load_library_crate(&mut self) -> Library {
match self.find_library_crate() {
Some(t) => t,
None => {
self.report_load_errs();
unreachable!()
}
}
}
pub fn report_load_errs(&mut self) {
let message = if self.rejected_via_hash.len() > 0 {
format!("found possibly newer version of crate `{}`",
self.ident)
} else if self.rejected_via_triple.len() > 0 {
format!("found incorrect triple for crate `{}`", self.ident)
} else {
format!("can't find crate for `{}`", self.ident)
};
let message = match self.root {
&None => message,
&Some(ref r) => format!("{} which `{}` depends on",
message, r.ident)
};
self.sess.span_err(self.span, message);
let mismatches = self.rejected_via_triple.iter();
if self.rejected_via_triple.len() > 0 {
self.sess.span_note(self.span, format!("expected triple of {}", self.triple));
for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}, triple {}: {}",
self.ident, i+1, got, path.display()));
}
}
if self.rejected_via_hash.len() > 0 {
self.sess.span_note(self.span, "perhaps this crate needs \
to be recompiled?");
let mismatches = self.rejected_via_hash.iter();
for (i, &CrateMismatch{ ref path,.. }) in mismatches.enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
self.ident, i+1, path.display()));
}
match self.root {
&None => {}
&Some(ref r) => {
for (i, path) in r.paths().iter().enumerate() {
self.sess.fileline_note(self.span,
format!("crate `{}` path \\#{}: {}",
r.ident, i+1, path.display()));
}
}
}
}
self.sess.abort_if_errors();
}
fn find_library_crate(&mut self) -> Option<Library> {
let (dyprefix, dysuffix) = self.dylibname();
// want: crate_name.dir_part() + prefix + crate_name.file_part + "-"
let dylib_prefix = format!("{}{}-", dyprefix, self.crate_id.name);
let rlib_prefix = format!("lib{}-", self.crate_id.name);
let mut candidates = HashMap::new();
// First, find all possible candidate rlibs and dylibs purely based on
// the name of the files themselves. We're trying to match against an
// exact crate_id and possibly an exact hash.
//
// During this step, we can filter all found libraries based on the
// name and id found in the crate id (we ignore the path portion for
// filename matching), as well as the exact hash (if specified). If we
// end up having many candidates, we must look at the metadata to
// perform exact matches against hashes/crate ids. Note that opening up
// the metadata is where we do an exact match against the full contents
// of the crate id (path/name/id).
//
// The goal of this step is to look at as little metadata as possible.
self.filesearch.search(|path| {
let file = match path.filename_str() {
None => return FileDoesntMatch,
Some(file) => file,
};
if file.starts_with(rlib_prefix) && file.ends_with(".rlib") {
info!("rlib candidate: {}", path.display());
match self.try_match(file, rlib_prefix, ".rlib") {
Some(hash) => {
info!("rlib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (ref mut rlibs, _) = *slot;
rlibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("rlib rejected");
FileDoesntMatch
}
}
} else if file.starts_with(dylib_prefix) && file.ends_with(dysuffix)
|
else {
FileDoesntMatch
}
});
// We have now collected all known libraries into a set of candidates
// keyed off the filename hash listed. For each filename, we also have a
// list of rlibs/dylibs that apply. Here, we map each of these lists
// (per hash), to a Library candidate for returning.
//
// A Library candidate is created if the metadata for the set of
// libraries corresponds to the crate id and hash criteria that this
// search is being performed for.
let mut libraries = Vec::new();
for (_hash, (rlibs, dylibs)) in candidates.move_iter() {
let mut metadata = None;
let rlib = self.extract_one(rlibs, "rlib", &mut metadata);
let dylib = self.extract_one(dylibs, "dylib", &mut metadata);
match metadata {
Some(metadata) => {
libraries.push(Library {
dylib: dylib,
rlib: rlib,
metadata: metadata,
})
}
None => {}
}
}
// Having now translated all relevant found hashes into libraries, see
// what we've got and figure out if we found multiple candidates for
// libraries or not.
match libraries.len() {
0 => None,
1 => Some(libraries.move_iter().next().unwrap()),
_ => {
self.sess.span_err(self.span,
format!("multiple matching crates for `{}`",
self.crate_id.name));
self.sess.note("candidates:");
for lib in libraries.iter() {
match lib.dylib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
match lib.rlib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
let data = lib.metadata.as_slice();
let crate_id = decoder::get_crate_id(data);
note_crateid_attr(self.sess.diagnostic(), &crate_id);
}
None
}
}
}
// Attempts to match the requested version of a library against the file
// specified. The prefix/suffix are specified (disambiguates between
// rlib/dylib).
//
// The return value is `None` if `file` doesn't look like a rust-generated
// library, or if a specific version was requested and it doesn't match the
// apparent file's version.
//
// If everything checks out, then `Some(hash)` is returned where `hash` is
// the listed hash in the filename itself.
fn try_match(&self, file: &str, prefix: &str, suffix: &str) -> Option<StrBuf>{
let middle = file.slice(prefix.len(), file.len() - suffix.len());
debug!("matching -- {}, middle: {}", file, middle);
let mut parts = middle.splitn('-', 1);
let hash = match parts.next() { Some(h) => h, None => return None };
debug!("matching -- {}, hash: {} (want {})", file, hash, self.id_hash);
let vers = match parts.next() { Some(v) => v, None => return None };
debug!("matching -- {}, vers: {} (want {})", file, vers,
self.crate_id.version);
match self.crate_id.version {
Some(ref version) if version.as_slice() != vers => return None,
Some(..) => {} // check the hash
// hash is irrelevant, no version specified
None => return Some(hash.to_strbuf())
}
debug!("matching -- {}, vers ok", file);
// hashes in filenames are prefixes of the "true hash"
if self.id_hash == hash.as_slice() {
debug!("matching -- {}, hash ok", file);
Some(hash.to_strbuf())
} else {
None
}
}
// Attempts to extract *one* library from the set `m`. If the set has no
// elements, `None` is returned. If the set has more than one element, then
// the errors and notes are emitted about the set of libraries.
//
// With only one library in the set, this function will extract it, and then
// read the metadata from it if `*slot` is `None`. If the metadata couldn't
// be read, it is assumed that the file isn't a valid rust library (no
// errors are emitted).
fn extract_one(&mut self, m: HashSet<Path>, flavor: &str,
slot: &mut Option<MetadataBlob>) -> Option<Path> {
let mut ret = None::<Path>;
let mut error = 0;
if slot.is_some() {
// FIXME(#10786): for an optimization, we only read one of the
// library's metadata sections. In theory we should
// read both, but reading dylib metadata is quite
// slow.
if m.len() == 0 {
return None
} else if m.len() == 1 {
return Some(m.move_iter().next().unwrap())
}
}
for lib in m.move_iter() {
info!("{} reading metadata from: {}", flavor, lib.display());
let metadata = match get_metadata_section(self.os, &lib) {
Ok(blob) => {
if self.crate_matches(blob.as_slice(), &lib) {
blob
} else {
info!("metadata mismatch");
continue
}
}
Err(_) => {
info!("no metadata found");
continue
}
};
if ret.is_some() {
self.sess.span_err(self.span,
format!("multiple {} candidates for `{}` \
found", flavor, self.crate_id.name));
self.sess.span_note(self.span,
format!(r"candidate \#1: {}",
ret.get_ref().display()));
error = 1;
ret = None;
}
if error > 0 {
error += 1;
self.sess.span_note(self.span,
format!(r"candidate \#{}: {}", error,
lib.display()));
continue
}
*slot = Some(metadata);
ret = Some(lib);
}
return if error > 0 {None} else {ret}
}
fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> bool {
match decoder::maybe_get_crate_id(crate_data) {
Some(ref id) if self.crate_id.matches(id) => {}
_ => { info!("Rejecting via crate_id"); return false }
}
let hash = match decoder::maybe_get_crate_hash(crate_data) {
Some(hash) => hash, None => {
info!("Rejecting via lack of crate hash");
return false;
}
};
let triple = decoder::get_crate_triple(crate_data);
if triple.as_slice() != self.triple {
info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
self.rejected_via_triple.push(CrateMismatch {
path: libpath.clone(),
got: triple.to_strbuf()
});
return false;
}
match self.hash {
None => true,
Some(myhash) => {
if *myhash != hash {
info!("Rejecting via hash: expected {} got {}", *myhash, hash);
self.rejected_via_hash.push(CrateMismatch {
path: libpath.clone(),
got: myhash.as_str().to_strbuf()
});
false
} else {
true
}
}
}
}
// Returns the corresponding (prefix, suffix) that files need to have for
// dynamic libraries
fn dylibname(&self) -> (&'static str, &'static str) {
match self.os {
OsWin32 => (WIN32_DLL_PREFIX, WIN32_DLL_SUFFIX),
OsMacos => (MACOS_DLL_PREFIX, MACOS_DLL_SUFFIX),
OsLinux => (LINUX_DLL_PREFIX, LINUX_DLL_SUFFIX),
OsAndroid => (ANDROID_DLL_PREFIX, ANDROID_DLL_SUFFIX),
OsFreebsd => (FREEBSD_DLL_PREFIX, FREEBSD_DLL_SUFFIX),
}
}
}
pub fn note_crateid_attr(diag: &SpanHandler, crateid: &CrateId) {
diag.handler().note(format!("crate_id: {}", crateid.to_str()));
}
impl ArchiveMetadata {
fn new(ar: ArchiveRO) -> Option<ArchiveMetadata> {
let data: &'static [u8] = {
let data = match ar.read(METADATA_FILENAME) {
Some(data) => data,
None => {
debug!("didn't find '{}' in the archive", METADATA_FILENAME);
return None;
}
};
// This data is actually a pointer inside of the archive itself, but
// we essentially want to cache it because the lookup inside the
// archive is a fairly expensive operation (and it's queried for
// *very* frequently). For this reason, we transmute it to the
// static lifetime to put into the struct. Note that the buffer is
// never actually handed out with a static lifetime, but rather the
// buffer is loaned with the lifetime of this containing object.
// Hence, we're guaranteed that the buffer will never be used after
// this object is dead, so this is a safe operation to transmute and
// store the data as a static buffer.
unsafe { mem::transmute(data) }
};
Some(ArchiveMetadata {
archive: ar,
data: data,
})
}
pub fn as_slice<'a>(&'a self) -> &'a [u8] { self.data }
}
// Just a small wrapper to time how long reading metadata takes.
fn get_metadata_section(os: Os, filename: &Path) -> Result<MetadataBlob, StrBuf> {
let start = time::precise_time_ns();
let ret = get_metadata_section_imp(os, filename);
info!("reading {} => {}ms", filename.filename_display(),
(time::precise_time_ns() - start) / 1000000);
return ret;
}
fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, StrBuf> {
if !filename.exists() {
return Err(format_strbuf!("no such file: '{}'", filename.display()));
}
if filename.filename_str().unwrap().ends_with(".rlib") {
// Use ArchiveRO for speed here, it's backed by LLVM and uses mmap
// internally to read the file. We also avoid even using a memcpy by
// just keeping the archive along while the metadata is in use.
let archive = match ArchiveRO::open(filename) {
|
{
info!("dylib candidate: {}", path.display());
match self.try_match(file, dylib_prefix, dysuffix) {
Some(hash) => {
info!("dylib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (_, ref mut dylibs) = *slot;
dylibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("dylib rejected");
FileDoesntMatch
}
}
}
|
conditional_block
|
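A minimal sketch of the candidate bookkeeping inside find_library_crate above: rlib and dylib paths are grouped per filename hash before any metadata is read, so the expensive metadata check runs once per hash rather than once per file. The hash and paths below are made-up values for illustration.

use std::collections::{HashMap, HashSet};
use std::path::PathBuf;

fn main() {
    // hash -> (rlib candidates, dylib candidates)
    let mut candidates: HashMap<String, (HashSet<PathBuf>, HashSet<PathBuf>)> = HashMap::new();

    // Hypothetical search results: (hash parsed from the filename, is_rlib, path).
    let found = [
        ("abc123", true, "lib/libfoo-abc123-0.1.rlib"),
        ("abc123", false, "lib/libfoo-abc123-0.1.so"),
    ];

    for &(hash, is_rlib, path) in found.iter() {
        let slot = candidates.entry(hash.to_owned()).or_default();
        if is_rlib {
            slot.0.insert(PathBuf::from(path));
        } else {
            slot.1.insert(PathBuf::from(path));
        }
    }

    for (hash, (rlibs, dylibs)) in &candidates {
        println!("{}: {} rlib(s), {} dylib(s)", hash, rlibs.len(), dylibs.len());
    }
}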
loader.rs
|
}
None => {
info!("rlib rejected");
FileDoesntMatch
}
}
} else if file.starts_with(dylib_prefix) && file.ends_with(dysuffix){
info!("dylib candidate: {}", path.display());
match self.try_match(file, dylib_prefix, dysuffix) {
Some(hash) => {
info!("dylib accepted, hash: {}", hash);
let slot = candidates.find_or_insert_with(hash, |_| {
(HashSet::new(), HashSet::new())
});
let (_, ref mut dylibs) = *slot;
dylibs.insert(fs::realpath(path).unwrap());
FileMatches
}
None => {
info!("dylib rejected");
FileDoesntMatch
}
}
} else {
FileDoesntMatch
}
});
// We have now collected all known libraries into a set of candidates
// keyed of the filename hash listed. For each filename, we also have a
// list of rlibs/dylibs that apply. Here, we map each of these lists
// (per hash), to a Library candidate for returning.
//
// A Library candidate is created if the metadata for the set of
// libraries corresponds to the crate id and hash criteria that this
// search is being performed for.
let mut libraries = Vec::new();
for (_hash, (rlibs, dylibs)) in candidates.move_iter() {
let mut metadata = None;
let rlib = self.extract_one(rlibs, "rlib", &mut metadata);
let dylib = self.extract_one(dylibs, "dylib", &mut metadata);
match metadata {
Some(metadata) => {
libraries.push(Library {
dylib: dylib,
rlib: rlib,
metadata: metadata,
})
}
None => {}
}
}
// Having now translated all relevant found hashes into libraries, see
// what we've got and figure out if we found multiple candidates for
// libraries or not.
match libraries.len() {
0 => None,
1 => Some(libraries.move_iter().next().unwrap()),
_ => {
self.sess.span_err(self.span,
format!("multiple matching crates for `{}`",
self.crate_id.name));
self.sess.note("candidates:");
for lib in libraries.iter() {
match lib.dylib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
match lib.rlib {
Some(ref p) => {
self.sess.note(format!("path: {}", p.display()));
}
None => {}
}
let data = lib.metadata.as_slice();
let crate_id = decoder::get_crate_id(data);
note_crateid_attr(self.sess.diagnostic(), &crate_id);
}
None
}
}
}
// Attempts to match the requested version of a library against the file
// specified. The prefix/suffix are specified (disambiguates between
// rlib/dylib).
//
// The return value is `None` if `file` doesn't look like a rust-generated
// library, or if a specific version was requested and it doesn't match the
// apparent file's version.
//
// If everything checks out, then `Some(hash)` is returned where `hash` is
// the listed hash in the filename itself.
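    // For example (hypothetical filename): matching "libfoo-1a2b3c4d-0.1.0.so"
    // against the prefix "libfoo-" and suffix ".so" leaves the middle
    // "1a2b3c4d-0.1.0", which splits into the hash "1a2b3c4d" and the version
    // "0.1.0".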
fn try_match(&self, file: &str, prefix: &str, suffix: &str) -> Option<StrBuf>{
let middle = file.slice(prefix.len(), file.len() - suffix.len());
debug!("matching -- {}, middle: {}", file, middle);
let mut parts = middle.splitn('-', 1);
let hash = match parts.next() { Some(h) => h, None => return None };
debug!("matching -- {}, hash: {} (want {})", file, hash, self.id_hash);
let vers = match parts.next() { Some(v) => v, None => return None };
debug!("matching -- {}, vers: {} (want {})", file, vers,
self.crate_id.version);
match self.crate_id.version {
            Some(ref version) if version.as_slice() != vers => return None,
Some(..) => {} // check the hash
// hash is irrelevant, no version specified
None => return Some(hash.to_strbuf())
}
debug!("matching -- {}, vers ok", file);
// hashes in filenames are prefixes of the "true hash"
if self.id_hash == hash.as_slice() {
debug!("matching -- {}, hash ok", file);
Some(hash.to_strbuf())
} else {
None
}
}
// Attempts to extract *one* library from the set `m`. If the set has no
// elements, `None` is returned. If the set has more than one element, then
// the errors and notes are emitted about the set of libraries.
//
// With only one library in the set, this function will extract it, and then
// read the metadata from it if `*slot` is `None`. If the metadata couldn't
// be read, it is assumed that the file isn't a valid rust library (no
// errors are emitted).
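    // For example, if two distinct paths both contain a readable rlib for the
    // requested crate, the second one found triggers the "multiple candidates"
    // error below instead of being silently ignored.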
fn extract_one(&mut self, m: HashSet<Path>, flavor: &str,
slot: &mut Option<MetadataBlob>) -> Option<Path> {
let mut ret = None::<Path>;
let mut error = 0;
if slot.is_some() {
// FIXME(#10786): for an optimization, we only read one of the
// library's metadata sections. In theory we should
// read both, but reading dylib metadata is quite
// slow.
if m.len() == 0 {
return None
} else if m.len() == 1 {
return Some(m.move_iter().next().unwrap())
}
}
for lib in m.move_iter() {
info!("{} reading metadata from: {}", flavor, lib.display());
let metadata = match get_metadata_section(self.os, &lib) {
Ok(blob) => {
if self.crate_matches(blob.as_slice(), &lib) {
blob
} else {
info!("metadata mismatch");
continue
}
}
Err(_) => {
info!("no metadata found");
continue
}
};
if ret.is_some() {
self.sess.span_err(self.span,
format!("multiple {} candidates for `{}` \
found", flavor, self.crate_id.name));
self.sess.span_note(self.span,
format!(r"candidate \#1: {}",
ret.get_ref().display()));
error = 1;
ret = None;
}
if error > 0 {
error += 1;
self.sess.span_note(self.span,
format!(r"candidate \#{}: {}", error,
lib.display()));
continue
}
*slot = Some(metadata);
ret = Some(lib);
}
return if error > 0 {None} else {ret}
}
fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> bool {
match decoder::maybe_get_crate_id(crate_data) {
Some(ref id) if self.crate_id.matches(id) => {}
_ => { info!("Rejecting via crate_id"); return false }
}
let hash = match decoder::maybe_get_crate_hash(crate_data) {
Some(hash) => hash, None => {
info!("Rejecting via lack of crate hash");
return false;
}
};
let triple = decoder::get_crate_triple(crate_data);
        if triple.as_slice() != self.triple {
info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
self.rejected_via_triple.push(CrateMismatch {
path: libpath.clone(),
got: triple.to_strbuf()
});
return false;
}
match self.hash {
None => true,
Some(myhash) => {
                if *myhash != hash {
info!("Rejecting via hash: expected {} got {}", *myhash, hash);
self.rejected_via_hash.push(CrateMismatch {
path: libpath.clone(),
got: myhash.as_str().to_strbuf()
});
false
} else {
true
}
}
}
}
// Returns the corresponding (prefix, suffix) that files need to have for
// dynamic libraries
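    // (On Linux, for instance, these are presumably "lib" and ".so".)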
fn dylibname(&self) -> (&'static str, &'static str) {
match self.os {
OsWin32 => (WIN32_DLL_PREFIX, WIN32_DLL_SUFFIX),
OsMacos => (MACOS_DLL_PREFIX, MACOS_DLL_SUFFIX),
OsLinux => (LINUX_DLL_PREFIX, LINUX_DLL_SUFFIX),
OsAndroid => (ANDROID_DLL_PREFIX, ANDROID_DLL_SUFFIX),
OsFreebsd => (FREEBSD_DLL_PREFIX, FREEBSD_DLL_SUFFIX),
}
}
}
pub fn note_crateid_attr(diag: &SpanHandler, crateid: &CrateId) {
diag.handler().note(format!("crate_id: {}", crateid.to_str()));
}
impl ArchiveMetadata {
fn new(ar: ArchiveRO) -> Option<ArchiveMetadata> {
let data: &'static [u8] = {
let data = match ar.read(METADATA_FILENAME) {
Some(data) => data,
None => {
debug!("didn't find '{}' in the archive", METADATA_FILENAME);
return None;
}
};
// This data is actually a pointer inside of the archive itself, but
// we essentially want to cache it because the lookup inside the
// archive is a fairly expensive operation (and it's queried for
// *very* frequently). For this reason, we transmute it to the
// static lifetime to put into the struct. Note that the buffer is
// never actually handed out with a static lifetime, but rather the
// buffer is loaned with the lifetime of this containing object.
// Hence, we're guaranteed that the buffer will never be used after
// this object is dead, so this is a safe operation to transmute and
// store the data as a static buffer.
unsafe { mem::transmute(data) }
};
Some(ArchiveMetadata {
archive: ar,
data: data,
})
}
pub fn as_slice<'a>(&'a self) -> &'a [u8] { self.data }
}
// Just a small wrapper to time how long reading metadata takes.
fn get_metadata_section(os: Os, filename: &Path) -> Result<MetadataBlob, StrBuf> {
let start = time::precise_time_ns();
let ret = get_metadata_section_imp(os, filename);
info!("reading {} => {}ms", filename.filename_display(),
(time::precise_time_ns() - start) / 1000000);
return ret;
}
fn get_metadata_section_imp(os: Os, filename: &Path) -> Result<MetadataBlob, StrBuf> {
    if !filename.exists() {
return Err(format_strbuf!("no such file: '{}'", filename.display()));
}
if filename.filename_str().unwrap().ends_with(".rlib") {
// Use ArchiveRO for speed here, it's backed by LLVM and uses mmap
// internally to read the file. We also avoid even using a memcpy by
// just keeping the archive along while the metadata is in use.
let archive = match ArchiveRO::open(filename) {
Some(ar) => ar,
None => {
debug!("llvm didn't like `{}`", filename.display());
return Err(format_strbuf!("failed to read rlib metadata: \
'{}'",
filename.display()));
}
};
return match ArchiveMetadata::new(archive).map(|ar| MetadataArchive(ar)) {
None => {
return Err((format_strbuf!("failed to read rlib metadata: \
'{}'",
filename.display())))
}
Some(blob) => return Ok(blob)
}
}
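    // Otherwise this is a dylib or object file: locate the metadata section by
    // name, verify the encoding-version stamp at its start, then inflate the
    // compressed metadata that follows.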
unsafe {
let mb = filename.with_c_str(|buf| {
llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf)
});
if mb as int == 0 {
return Err(format_strbuf!("error reading library: '{}'",
filename.display()))
}
let of = match ObjectFile::new(mb) {
Some(of) => of,
_ => {
return Err((format_strbuf!("provided path not an object \
file: '{}'",
filename.display())))
}
};
let si = mk_section_iter(of.llof);
while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
let mut name_buf = ptr::null();
let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf);
let name = str::raw::from_buf_len(name_buf as *u8, name_len as uint);
debug!("get_metadata_section: name {}", name);
if read_meta_section_name(os) == name {
let cbuf = llvm::LLVMGetSectionContents(si.llsi);
let csz = llvm::LLVMGetSectionSize(si.llsi) as uint;
let mut found =
Err(format_strbuf!("metadata not found: '{}'",
filename.display()));
let cvbuf: *u8 = mem::transmute(cbuf);
let vlen = encoder::metadata_encoding_version.len();
debug!("checking {} bytes of metadata-version stamp",
vlen);
let minsz = cmp::min(vlen, csz);
let version_ok = slice::raw::buf_as_slice(cvbuf, minsz,
|buf0| buf0 == encoder::metadata_encoding_version);
                if !version_ok {
return Err((format_strbuf!("incompatible metadata \
version found: '{}'",
filename.display())));
}
let cvbuf1 = cvbuf.offset(vlen as int);
debug!("inflating {} bytes of compressed metadata",
csz - vlen);
slice::raw::buf_as_slice(cvbuf1, csz-vlen, |bytes| {
match flate::inflate_bytes(bytes) {
Some(inflated) => found = Ok(MetadataVec(inflated)),
None => {
found =
Err(format_strbuf!("failed to decompress \
metadata for: '{}'",
filename.display()))
}
}
});
if found.is_ok() {
return found;
}
}
llvm::LLVMMoveToNextSection(si.llsi);
}
return Err(format_strbuf!("metadata not found: '{}'",
filename.display()));
}
}
pub fn
|
meta_section_name
|
identifier_name
|
|
conf.rs
|
// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
//! Schema for configuration files, exact encoding json/xml to be worked out.
use crate::io::gpio::{PinMode, PullMode};
use serde::Deserialize;
/// Configuration of a particular GPIO pin.
#[derive(Deserialize, Clone, Debug)]
pub struct PinConfiguration {
/// The user-visible name of the GPIO pin.
pub name: String,
/// The input/output mode of the GPIO pin.
pub mode: Option<PinMode>,
/// The default/initial level of the pin (true means high).
pub level: Option<bool>,
/// Whether the pin has pullup/down resistor enabled.
pub pull_mode: Option<PullMode>,
/// Name of a pin defined by the transport (or a lower level
/// PinConfiguration).
pub alias_of: Option<String>,
}
/// Configuration of a particular GPIO pin.
#[derive(Deserialize, Clone, Debug)]
pub struct StrappingConfiguration {
/// The user-visible name of the strapping combination.
pub name: String,
    /// List of GPIO pin configurations (the alias_of field should not be used in these).
#[serde(default)]
pub pins: Vec<PinConfiguration>,
}
/// Parity configuration for UART communication.
#[derive(Deserialize, Clone, Debug)]
pub enum UartParity {
None,
Even,
Odd,
Mark,
Space,
}
/// Stop bits configuration for UART communication.
#[derive(Deserialize, Clone, Debug)]
pub enum UartStopBits {
Stop1,
Stop1_5,
Stop2,
}
/// Configuration of a particular UART port.
#[derive(Deserialize, Clone, Debug)]
pub struct UartConfiguration {
/// The user-visible name of the UART.
pub name: String,
/// Data communication rate in bits/second.
pub baudrate: Option<u32>,
/// Parity configuration for UART communication.
pub parity: Option<UartParity>,
/// Stop bits configuration for UART communication.
pub stopbits: Option<UartStopBits>,
/// Name of the UART as defined by the transport.
pub alias_of: Option<String>,
}
/// Configuration of a particular SPI controller port.
#[derive(Deserialize, Clone, Debug)]
pub struct SpiConfiguration {
/// The user-visible name of the SPI controller port.
pub name: String,
/// Name of the SPI controller as defined by the transport.
pub alias_of: Option<String>,
}
/// Representation of the complete and unresolved content of a single
/// configuration file.
#[derive(Deserialize, Clone, Debug)]
pub struct
|
{
/// Optional specification of transport backend, for which this
/// configuration applies (to be implemented).
pub interface: Option<String>,
/// List of names of other configuration files to include recursively.
#[serde(default)]
pub includes: Vec<String>,
/// List of GPIO pin configurations.
#[serde(default)]
pub pins: Vec<PinConfiguration>,
/// List of named sets of additional GPIO pin configurations (pullup/pulldown).
#[serde(default)]
pub strappings: Vec<StrappingConfiguration>,
/// List of UART configurations.
#[serde(default)]
pub uarts: Vec<UartConfiguration>,
}
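// A minimal sketch of what a configuration might look like once an encoding is
// settled (illustrative only; the field names mirror the structs above, but the
// "verilator" interface, the "PushPull" pin mode, and the concrete values are
// made-up examples):
//
// {
//   "interface": "verilator",
//   "pins": [ { "name": "RESET", "mode": "PushPull", "level": true } ],
//   "uarts": [ { "name": "console", "baudrate": 115200 } ]
// }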
|
ConfigurationFile
|
identifier_name
|
conf.rs
|
// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
//! Schema for configuration files, exact encoding json/xml to be worked out.
use crate::io::gpio::{PinMode, PullMode};
use serde::Deserialize;
/// Configuration of a particular GPIO pin.
#[derive(Deserialize, Clone, Debug)]
pub struct PinConfiguration {
/// The user-visible name of the GPIO pin.
pub name: String,
/// The input/output mode of the GPIO pin.
pub mode: Option<PinMode>,
/// The default/initial level of the pin (true means high).
pub level: Option<bool>,
/// Whether the pin has pullup/down resistor enabled.
pub pull_mode: Option<PullMode>,
/// Name of a pin defined by the transport (or a lower level
/// PinConfiguration).
pub alias_of: Option<String>,
}
|
pub struct StrappingConfiguration {
/// The user-visible name of the strapping combination.
pub name: String,
    /// List of GPIO pin configurations (the alias_of field should not be used in these).
#[serde(default)]
pub pins: Vec<PinConfiguration>,
}
/// Parity configuration for UART communication.
#[derive(Deserialize, Clone, Debug)]
pub enum UartParity {
None,
Even,
Odd,
Mark,
Space,
}
/// Stop bits configuration for UART communication.
#[derive(Deserialize, Clone, Debug)]
pub enum UartStopBits {
Stop1,
Stop1_5,
Stop2,
}
/// Configuration of a particular UART port.
#[derive(Deserialize, Clone, Debug)]
pub struct UartConfiguration {
/// The user-visible name of the UART.
pub name: String,
/// Data communication rate in bits/second.
pub baudrate: Option<u32>,
/// Parity configuration for UART communication.
pub parity: Option<UartParity>,
/// Stop bits configuration for UART communication.
pub stopbits: Option<UartStopBits>,
/// Name of the UART as defined by the transport.
pub alias_of: Option<String>,
}
/// Configuration of a particular SPI controller port.
#[derive(Deserialize, Clone, Debug)]
pub struct SpiConfiguration {
/// The user-visible name of the SPI controller port.
pub name: String,
/// Name of the SPI controller as defined by the transport.
pub alias_of: Option<String>,
}
/// Representation of the complete and unresolved content of a single
/// configuration file.
#[derive(Deserialize, Clone, Debug)]
pub struct ConfigurationFile {
/// Optional specification of transport backend, for which this
/// configuration applies (to be implemented).
pub interface: Option<String>,
/// List of names of other configuration files to include recursively.
#[serde(default)]
pub includes: Vec<String>,
/// List of GPIO pin configurations.
#[serde(default)]
pub pins: Vec<PinConfiguration>,
/// List of named sets of additional GPIO pin configurations (pullup/pulldown).
#[serde(default)]
pub strappings: Vec<StrappingConfiguration>,
/// List of UART configurations.
#[serde(default)]
pub uarts: Vec<UartConfiguration>,
}
|
/// Configuration of a particular GPIO pin.
#[derive(Deserialize, Clone, Debug)]
|
random_line_split
|
util.rs
|
use std::fs::File;
use std::io::{self, Read};
use std::path::Path;
use std::process::{Command, Stdio};
pub trait SplitEOL {
fn split_eol(&self) -> Vec<String>;
}
impl SplitEOL for String {
fn split_eol(&self) -> Vec<String> {
if self.trim() == "" {
Vec::new()
} else {
self.split("\n").map(ToOwned::to_owned).collect()
}
}
}
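// For instance, "a\nb".to_string().split_eol() yields vec!["a", "b"], while a
// string that is empty or all whitespace yields an empty Vec.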
pub fn communicate(name: &str, args: &[&str]) -> Result<(String, String, i32), io::Error> {
use std::process::*;
|
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output()?;
let stdout = String::from_utf8_lossy(&output.stdout).into_owned();
    let stderr = String::from_utf8_lossy(&output.stderr).into_owned();
let status = output.status
.code()
.ok_or(io::Error::new(io::ErrorKind::Other,
"The process was terminated by a signal"))?;
Ok((stdout, stderr, status))
}
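// Example usage (assuming an `ls` binary exists on the host):
//   let (stdout, _stderr, status) = communicate("ls", &["-l"])?;
//   println!("exit status {}: {}", status, stdout);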
pub fn get_lines(name: &str, args: &[&str]) -> Result<Vec<String>, io::Error> {
communicate(name, args).map(|(stdout, _, _)| stdout.split_eol())
}
pub fn read_content<P: AsRef<Path>>(path: P) -> io::Result<String> {
let mut buf = String::new();
File::open(path)
.and_then(|mut f| f.read_to_string(&mut buf))
.and(Ok(buf.trim().to_owned()))
}
pub fn wait_exec(cmd: &str, args: &[&str], curr_dir: Option<&Path>) -> Result<i32, io::Error> {
let mut command = Command::new(cmd);
command.args(args)
.stdin(Stdio::inherit())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit());
if let Some(curr_dir) = curr_dir {
command.current_dir(curr_dir);
}
let mut child = command.spawn()?;
child.wait()
.and_then(|st| st.code().ok_or(io::Error::new(io::ErrorKind::Other, "")))
}
|
let output = Command::new(name).args(args)
.stdin(Stdio::null())
|
random_line_split
|
util.rs
|
use std::fs::File;
use std::io::{self, Read};
use std::path::Path;
use std::process::{Command, Stdio};
pub trait SplitEOL {
fn split_eol(&self) -> Vec<String>;
}
impl SplitEOL for String {
fn split_eol(&self) -> Vec<String> {
if self.trim() == "" {
Vec::new()
} else {
self.split("\n").map(ToOwned::to_owned).collect()
}
}
}
pub fn
|
(name: &str, args: &[&str]) -> Result<(String, String, i32), io::Error> {
use std::process::*;
let output = Command::new(name).args(args)
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output()?;
let stdout = String::from_utf8_lossy(&output.stdout).into_owned();
    let stderr = String::from_utf8_lossy(&output.stderr).into_owned();
let status = output.status
.code()
.ok_or(io::Error::new(io::ErrorKind::Other,
"The process was terminated by a signal"))?;
Ok((stdout, stderr, status))
}
pub fn get_lines(name: &str, args: &[&str]) -> Result<Vec<String>, io::Error> {
communicate(name, args).map(|(stdout, _, _)| stdout.split_eol())
}
pub fn read_content<P: AsRef<Path>>(path: P) -> io::Result<String> {
let mut buf = String::new();
File::open(path)
.and_then(|mut f| f.read_to_string(&mut buf))
.and(Ok(buf.trim().to_owned()))
}
pub fn wait_exec(cmd: &str, args: &[&str], curr_dir: Option<&Path>) -> Result<i32, io::Error> {
let mut command = Command::new(cmd);
command.args(args)
.stdin(Stdio::inherit())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit());
if let Some(curr_dir) = curr_dir {
command.current_dir(curr_dir);
}
let mut child = command.spawn()?;
child.wait()
.and_then(|st| st.code().ok_or(io::Error::new(io::ErrorKind::Other, "")))
}
|
communicate
|
identifier_name
|
util.rs
|
use std::fs::File;
use std::io::{self, Read};
use std::path::Path;
use std::process::{Command, Stdio};
pub trait SplitEOL {
fn split_eol(&self) -> Vec<String>;
}
impl SplitEOL for String {
fn split_eol(&self) -> Vec<String> {
if self.trim() == "" {
Vec::new()
} else {
self.split("\n").map(ToOwned::to_owned).collect()
}
}
}
pub fn communicate(name: &str, args: &[&str]) -> Result<(String, String, i32), io::Error>
|
pub fn get_lines(name: &str, args: &[&str]) -> Result<Vec<String>, io::Error> {
communicate(name, args).map(|(stdout, _, _)| stdout.split_eol())
}
pub fn read_content<P: AsRef<Path>>(path: P) -> io::Result<String> {
let mut buf = String::new();
File::open(path)
.and_then(|mut f| f.read_to_string(&mut buf))
.and(Ok(buf.trim().to_owned()))
}
pub fn wait_exec(cmd: &str, args: &[&str], curr_dir: Option<&Path>) -> Result<i32, io::Error> {
let mut command = Command::new(cmd);
command.args(args)
.stdin(Stdio::inherit())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit());
if let Some(curr_dir) = curr_dir {
command.current_dir(curr_dir);
}
let mut child = command.spawn()?;
child.wait()
.and_then(|st| st.code().ok_or(io::Error::new(io::ErrorKind::Other, "")))
}
|
{
use std::process::*;
let output = Command::new(name).args(args)
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output()?;
let stdout = String::from_utf8_lossy(&output.stdout).into_owned();
    let stderr = String::from_utf8_lossy(&output.stderr).into_owned();
let status = output.status
.code()
.ok_or(io::Error::new(io::ErrorKind::Other,
"The process was terminated by a signal"))?;
Ok((stdout, stderr, status))
}
|
identifier_body
|
util.rs
|
use std::fs::File;
use std::io::{self, Read};
use std::path::Path;
use std::process::{Command, Stdio};
pub trait SplitEOL {
fn split_eol(&self) -> Vec<String>;
}
impl SplitEOL for String {
fn split_eol(&self) -> Vec<String> {
if self.trim() == ""
|
else {
self.split("\n").map(ToOwned::to_owned).collect()
}
}
}
pub fn communicate(name: &str, args: &[&str]) -> Result<(String, String, i32), io::Error> {
use std::process::*;
let output = Command::new(name).args(args)
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.output()?;
let stdout = String::from_utf8_lossy(&output.stdout).into_owned();
    let stderr = String::from_utf8_lossy(&output.stderr).into_owned();
let status = output.status
.code()
.ok_or(io::Error::new(io::ErrorKind::Other,
"The process was terminated by a signal"))?;
Ok((stdout, stderr, status))
}
pub fn get_lines(name: &str, args: &[&str]) -> Result<Vec<String>, io::Error> {
communicate(name, args).map(|(stdout, _, _)| stdout.split_eol())
}
pub fn read_content<P: AsRef<Path>>(path: P) -> io::Result<String> {
let mut buf = String::new();
File::open(path)
.and_then(|mut f| f.read_to_string(&mut buf))
.and(Ok(buf.trim().to_owned()))
}
pub fn wait_exec(cmd: &str, args: &[&str], curr_dir: Option<&Path>) -> Result<i32, io::Error> {
let mut command = Command::new(cmd);
command.args(args)
.stdin(Stdio::inherit())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit());
if let Some(curr_dir) = curr_dir {
command.current_dir(curr_dir);
}
let mut child = command.spawn()?;
child.wait()
.and_then(|st| st.code().ok_or(io::Error::new(io::ErrorKind::Other, "")))
}
|
{
Vec::new()
}
|
conditional_block
|
packed-struct-with-destructor.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
// ignore-android: FIXME(#10381)
// compile-flags:-g
// debugger:set print pretty off
// debugger:rbreak zzz
// debugger:run
// debugger:finish
// debugger:print packed
// check:$1 = {x = 123, y = 234, z = 345}
// debugger:print packedInPacked
// check:$2 = {a = 1111, b = {x = 2222, y = 3333, z = 4444}, c = 5555, d = {x = 6666, y = 7777, z = 8888}}
// debugger:print packedInUnpacked
// check:$3 = {a = -1111, b = {x = -2222, y = -3333, z = -4444}, c = -5555, d = {x = -6666, y = -7777, z = -8888}}
// debugger:print unpackedInPacked
// check:$4 = {a = 987, b = {x = 876, y = 765, z = 654}, c = {x = 543, y = 432, z = 321}, d = 210}
// debugger:print packedInPackedWithDrop
// check:$5 = {a = 11, b = {x = 22, y = 33, z = 44}, c = 55, d = {x = 66, y = 77, z = 88}}
// debugger:print packedInUnpackedWithDrop
// check:$6 = {a = -11, b = {x = -22, y = -33, z = -44}, c = -55, d = {x = -66, y = -77, z = -88}}
// debugger:print unpackedInPackedWithDrop
// check:$7 = {a = 98, b = {x = 87, y = 76, z = 65}, c = {x = 54, y = 43, z = 32}, d = 21}
// debugger:print deeplyNested
// check:$8 = {a = {a = 1, b = {x = 2, y = 3, z = 4}, c = 5, d = {x = 6, y = 7, z = 8}}, b = {a = 9, b = {x = 10, y = 11, z = 12}, c = {x = 13, y = 14, z = 15}, d = 16}, c = {a = 17, b = {x = 18, y = 19, z = 20}, c = 21, d = {x = 22, y = 23, z = 24}}, d = {a = 25, b = {x = 26, y = 27, z = 28}, c = 29, d = {x = 30, y = 31, z = 32}}, e = {a = 33, b = {x = 34, y = 35, z = 36}, c = {x = 37, y = 38, z = 39}, d = 40}, f = {a = 41, b = {x = 42, y = 43, z = 44}, c = 45, d = {x = 46, y = 47, z = 48}}}
#[allow(unused_variable)];
#[packed]
struct Packed {
x: i16,
y: i32,
z: i64
}
impl Drop for Packed {
fn drop(&mut self) {}
}
#[packed]
struct PackedInPacked {
a: i32,
b: Packed,
c: i64,
d: Packed
}
struct PackedInUnpacked {
a: i32,
b: Packed,
c: i64,
d: Packed
}
struct
|
{
x: i64,
y: i32,
z: i16
}
impl Drop for Unpacked {
fn drop(&mut self) {}
}
#[packed]
struct UnpackedInPacked {
a: i16,
b: Unpacked,
c: Unpacked,
d: i64
}
#[packed]
struct PackedInPackedWithDrop {
a: i32,
b: Packed,
c: i64,
d: Packed
}
impl Drop for PackedInPackedWithDrop {
fn drop(&mut self) {}
}
struct PackedInUnpackedWithDrop {
a: i32,
b: Packed,
c: i64,
d: Packed
}
impl Drop for PackedInUnpackedWithDrop {
fn drop(&mut self) {}
}
#[packed]
struct UnpackedInPackedWithDrop {
a: i16,
b: Unpacked,
c: Unpacked,
d: i64
}
impl Drop for UnpackedInPackedWithDrop {
fn drop(&mut self) {}
}
struct DeeplyNested {
a: PackedInPacked,
b: UnpackedInPackedWithDrop,
c: PackedInUnpacked,
d: PackedInUnpackedWithDrop,
e: UnpackedInPacked,
f: PackedInPackedWithDrop
}
fn main() {
let packed = Packed { x: 123, y: 234, z: 345 };
let packedInPacked = PackedInPacked {
a: 1111,
b: Packed { x: 2222, y: 3333, z: 4444 },
c: 5555,
d: Packed { x: 6666, y: 7777, z: 8888 }
};
let packedInUnpacked = PackedInUnpacked {
a: -1111,
b: Packed { x: -2222, y: -3333, z: -4444 },
c: -5555,
d: Packed { x: -6666, y: -7777, z: -8888 }
};
let unpackedInPacked = UnpackedInPacked {
a: 987,
b: Unpacked { x: 876, y: 765, z: 654 },
c: Unpacked { x: 543, y: 432, z: 321 },
d: 210
};
let packedInPackedWithDrop = PackedInPackedWithDrop {
a: 11,
b: Packed { x: 22, y: 33, z: 44 },
c: 55,
d: Packed { x: 66, y: 77, z: 88 }
};
let packedInUnpackedWithDrop = PackedInUnpackedWithDrop {
a: -11,
b: Packed { x: -22, y: -33, z: -44 },
c: -55,
d: Packed { x: -66, y: -77, z: -88 }
};
let unpackedInPackedWithDrop = UnpackedInPackedWithDrop {
a: 98,
b: Unpacked { x: 87, y: 76, z: 65 },
c: Unpacked { x: 54, y: 43, z: 32 },
d: 21
};
let deeplyNested = DeeplyNested {
a: PackedInPacked {
a: 1,
b: Packed { x: 2, y: 3, z: 4 },
c: 5,
d: Packed { x: 6, y: 7, z: 8 }
},
b: UnpackedInPackedWithDrop {
a: 9,
b: Unpacked { x: 10, y: 11, z: 12 },
c: Unpacked { x: 13, y: 14, z: 15 },
d: 16
},
c: PackedInUnpacked {
a: 17,
b: Packed { x: 18, y: 19, z: 20 },
c: 21,
d: Packed { x: 22, y: 23, z: 24 }
},
d: PackedInUnpackedWithDrop {
a: 25,
b: Packed { x: 26, y: 27, z: 28 },
c: 29,
d: Packed { x: 30, y: 31, z: 32 }
},
e: UnpackedInPacked {
a: 33,
b: Unpacked { x: 34, y: 35, z: 36 },
c: Unpacked { x: 37, y: 38, z: 39 },
d: 40
},
f: PackedInPackedWithDrop {
a: 41,
b: Packed { x: 42, y: 43, z: 44 },
c: 45,
d: Packed { x: 46, y: 47, z: 48 }
}
};
zzz();
}
fn zzz() {()}
|
Unpacked
|
identifier_name
|
packed-struct-with-destructor.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
// ignore-tidy-linelength
// ignore-android: FIXME(#10381)
// compile-flags:-g
// debugger:set print pretty off
// debugger:rbreak zzz
// debugger:run
// debugger:finish
// debugger:print packed
// check:$1 = {x = 123, y = 234, z = 345}
// debugger:print packedInPacked
// check:$2 = {a = 1111, b = {x = 2222, y = 3333, z = 4444}, c = 5555, d = {x = 6666, y = 7777, z = 8888}}
// debugger:print packedInUnpacked
// check:$3 = {a = -1111, b = {x = -2222, y = -3333, z = -4444}, c = -5555, d = {x = -6666, y = -7777, z = -8888}}
// debugger:print unpackedInPacked
// check:$4 = {a = 987, b = {x = 876, y = 765, z = 654}, c = {x = 543, y = 432, z = 321}, d = 210}
// debugger:print packedInPackedWithDrop
// check:$5 = {a = 11, b = {x = 22, y = 33, z = 44}, c = 55, d = {x = 66, y = 77, z = 88}}
// debugger:print packedInUnpackedWithDrop
// check:$6 = {a = -11, b = {x = -22, y = -33, z = -44}, c = -55, d = {x = -66, y = -77, z = -88}}
// debugger:print unpackedInPackedWithDrop
// check:$7 = {a = 98, b = {x = 87, y = 76, z = 65}, c = {x = 54, y = 43, z = 32}, d = 21}
// debugger:print deeplyNested
// check:$8 = {a = {a = 1, b = {x = 2, y = 3, z = 4}, c = 5, d = {x = 6, y = 7, z = 8}}, b = {a = 9, b = {x = 10, y = 11, z = 12}, c = {x = 13, y = 14, z = 15}, d = 16}, c = {a = 17, b = {x = 18, y = 19, z = 20}, c = 21, d = {x = 22, y = 23, z = 24}}, d = {a = 25, b = {x = 26, y = 27, z = 28}, c = 29, d = {x = 30, y = 31, z = 32}}, e = {a = 33, b = {x = 34, y = 35, z = 36}, c = {x = 37, y = 38, z = 39}, d = 40}, f = {a = 41, b = {x = 42, y = 43, z = 44}, c = 45, d = {x = 46, y = 47, z = 48}}}
#[allow(unused_variable)];
#[packed]
struct Packed {
x: i16,
y: i32,
z: i64
}
impl Drop for Packed {
fn drop(&mut self) {}
}
#[packed]
struct PackedInPacked {
a: i32,
b: Packed,
c: i64,
d: Packed
}
struct PackedInUnpacked {
a: i32,
b: Packed,
c: i64,
d: Packed
}
struct Unpacked {
x: i64,
y: i32,
z: i16
}
impl Drop for Unpacked {
fn drop(&mut self) {}
}
#[packed]
struct UnpackedInPacked {
a: i16,
b: Unpacked,
c: Unpacked,
d: i64
}
#[packed]
struct PackedInPackedWithDrop {
a: i32,
b: Packed,
c: i64,
d: Packed
}
impl Drop for PackedInPackedWithDrop {
fn drop(&mut self) {}
}
struct PackedInUnpackedWithDrop {
a: i32,
b: Packed,
c: i64,
d: Packed
}
impl Drop for PackedInUnpackedWithDrop {
fn drop(&mut self) {}
}
#[packed]
struct UnpackedInPackedWithDrop {
a: i16,
b: Unpacked,
c: Unpacked,
d: i64
}
impl Drop for UnpackedInPackedWithDrop {
fn drop(&mut self) {}
}
struct DeeplyNested {
a: PackedInPacked,
b: UnpackedInPackedWithDrop,
c: PackedInUnpacked,
d: PackedInUnpackedWithDrop,
e: UnpackedInPacked,
f: PackedInPackedWithDrop
}
fn main() {
let packed = Packed { x: 123, y: 234, z: 345 };
let packedInPacked = PackedInPacked {
a: 1111,
b: Packed { x: 2222, y: 3333, z: 4444 },
c: 5555,
d: Packed { x: 6666, y: 7777, z: 8888 }
};
let packedInUnpacked = PackedInUnpacked {
a: -1111,
b: Packed { x: -2222, y: -3333, z: -4444 },
c: -5555,
d: Packed { x: -6666, y: -7777, z: -8888 }
};
let unpackedInPacked = UnpackedInPacked {
a: 987,
b: Unpacked { x: 876, y: 765, z: 654 },
c: Unpacked { x: 543, y: 432, z: 321 },
d: 210
};
let packedInPackedWithDrop = PackedInPackedWithDrop {
a: 11,
b: Packed { x: 22, y: 33, z: 44 },
c: 55,
d: Packed { x: 66, y: 77, z: 88 }
};
let packedInUnpackedWithDrop = PackedInUnpackedWithDrop {
a: -11,
b: Packed { x: -22, y: -33, z: -44 },
c: -55,
d: Packed { x: -66, y: -77, z: -88 }
};
let unpackedInPackedWithDrop = UnpackedInPackedWithDrop {
a: 98,
b: Unpacked { x: 87, y: 76, z: 65 },
c: Unpacked { x: 54, y: 43, z: 32 },
d: 21
};
let deeplyNested = DeeplyNested {
a: PackedInPacked {
a: 1,
b: Packed { x: 2, y: 3, z: 4 },
c: 5,
d: Packed { x: 6, y: 7, z: 8 }
},
b: UnpackedInPackedWithDrop {
a: 9,
b: Unpacked { x: 10, y: 11, z: 12 },
c: Unpacked { x: 13, y: 14, z: 15 },
d: 16
},
c: PackedInUnpacked {
a: 17,
b: Packed { x: 18, y: 19, z: 20 },
c: 21,
d: Packed { x: 22, y: 23, z: 24 }
},
d: PackedInUnpackedWithDrop {
a: 25,
b: Packed { x: 26, y: 27, z: 28 },
c: 29,
d: Packed { x: 30, y: 31, z: 32 }
},
e: UnpackedInPacked {
a: 33,
b: Unpacked { x: 34, y: 35, z: 36 },
c: Unpacked { x: 37, y: 38, z: 39 },
d: 40
},
f: PackedInPackedWithDrop {
a: 41,
b: Packed { x: 42, y: 43, z: 44 },
c: 45,
d: Packed { x: 46, y: 47, z: 48 }
}
};
zzz();
}
fn zzz() {()}
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
random_line_split
|