file_name
large_stringlengths 4
69
| prefix
large_stringlengths 0
26.7k
| suffix
large_stringlengths 0
24.8k
| middle
large_stringlengths 0
2.12k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
wasm_testsuite.rs
|
use cranelift_codegen::isa;
use cranelift_codegen::print_errors::pretty_verifier_error;
use cranelift_codegen::settings::{self, Flags};
use cranelift_codegen::verifier;
use cranelift_wasm::{translate_module, DummyEnvironment, ReturnMode};
use std::fs;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::path::Path;
use std::str::FromStr;
use target_lexicon::triple;
use wabt::wat2wasm;
#[test]
fn testsuite() {
let mut paths: Vec<_> = fs::read_dir("../wasmtests")
.unwrap()
.map(|r| r.unwrap())
.filter(|p| {
// Ignore files starting with `.`, which could be editor temporary files
if let Some(stem) = p.path().file_stem() {
if let Some(stemstr) = stem.to_str() {
return!stemstr.starts_with('.');
}
}
false
})
.collect();
paths.sort_by_key(|dir| dir.path());
let flags = Flags::new(settings::builder());
for path in paths {
let path = path.path();
handle_module(&path, &flags, ReturnMode::NormalReturns);
}
}
|
#[test]
fn use_fallthrough_return() {
let flags = Flags::new(settings::builder());
handle_module(
Path::new("../wasmtests/use_fallthrough_return.wat"),
&flags,
ReturnMode::FallthroughReturn,
);
}
fn read_file(path: &Path) -> io::Result<Vec<u8>> {
let mut buf: Vec<u8> = Vec::new();
let mut file = File::open(path)?;
file.read_to_end(&mut buf)?;
Ok(buf)
}
fn handle_module(path: &Path, flags: &Flags, return_mode: ReturnMode) {
let data = match path.extension() {
None => {
panic!("the file extension is not wasm or wat");
}
Some(ext) => match ext.to_str() {
Some("wasm") => read_file(path).expect("error reading wasm file"),
Some("wat") => {
let wat = read_file(path).expect("error reading wat file");
match wat2wasm(&wat) {
Ok(wasm) => wasm,
Err(e) => {
panic!("error converting wat to wasm: {:?}", e);
}
}
}
None | Some(&_) => panic!("the file extension for {:?} is not wasm or wat", path),
},
};
let triple = triple!("riscv64");
let isa = isa::lookup(triple).unwrap().finish(flags.clone());
let mut dummy_environ = DummyEnvironment::new(isa.frontend_config(), return_mode);
translate_module(&data, &mut dummy_environ).unwrap();
for func in dummy_environ.info.function_bodies.values() {
verifier::verify_function(func, &*isa)
.map_err(|errors| panic!(pretty_verifier_error(func, Some(&*isa), None, errors)))
.unwrap();
}
}
|
random_line_split
|
|
build.rs
|
extern crate symbiosis;
use std::path::Path;
use std::fs::{File, create_dir_all};
use std::io::Read;
use std::default::Default;
use symbiosis::TemplateGroup;
use symbiosis::rust::{self, Rust};
use symbiosis::javascript::{self, JavaScript};
fn
|
() {
let out_dir = std::env::var("OUT_DIR").unwrap();
let rust_dest = Path::new(&out_dir).join("symbiosis/");
if let Err(e) = create_dir_all(&rust_dest) {
panic!("failed to create Symbiosis output directory: {}", e);
}
let js_dest = Path::new("res");
let mut templates = TemplateGroup::new();
if let Err(e) = templates.parse_directory("templates/shared") {
panic!("failed to precompile templates/shared: {}", e);
}
let js = JavaScript {
namespace: Some("templates"),
..Default::default()
};
let rust = Rust {..Default::default() };
if let Err(e) = File::create(js_dest.join("templates.js")).map_err(|e| javascript::Error::Io(e)).and_then(|mut file| templates.emit_code(&mut file, &js)) {
panic!("failed to create res/templates.js: {}", e);
}
let mut source = String::new();
if let Err(e) = File::open("templates/Document.html").and_then(|mut f| f.read_to_string(&mut source)) {
panic!("failed to read templates/Document.html: {}", e);
}
if let Err(e) = templates.parse_string("Document".into(), source) {
panic!("failed to parse templates/Document.html: {}", e);
}
if let Err(e) = File::create(rust_dest.join("templates.rs")).map_err(|e| rust::Error::Io(e)).and_then(|mut file| templates.emit_code(&mut file, &rust)) {
panic!("failed to create symbiosis/templates.rs: {}", e);
}
}
|
main
|
identifier_name
|
build.rs
|
extern crate symbiosis;
use std::path::Path;
use std::fs::{File, create_dir_all};
use std::io::Read;
use std::default::Default;
use symbiosis::TemplateGroup;
use symbiosis::rust::{self, Rust};
use symbiosis::javascript::{self, JavaScript};
fn main() {
let out_dir = std::env::var("OUT_DIR").unwrap();
let rust_dest = Path::new(&out_dir).join("symbiosis/");
if let Err(e) = create_dir_all(&rust_dest) {
panic!("failed to create Symbiosis output directory: {}", e);
}
let js_dest = Path::new("res");
let mut templates = TemplateGroup::new();
if let Err(e) = templates.parse_directory("templates/shared")
|
let js = JavaScript {
namespace: Some("templates"),
..Default::default()
};
let rust = Rust {..Default::default() };
if let Err(e) = File::create(js_dest.join("templates.js")).map_err(|e| javascript::Error::Io(e)).and_then(|mut file| templates.emit_code(&mut file, &js)) {
panic!("failed to create res/templates.js: {}", e);
}
let mut source = String::new();
if let Err(e) = File::open("templates/Document.html").and_then(|mut f| f.read_to_string(&mut source)) {
panic!("failed to read templates/Document.html: {}", e);
}
if let Err(e) = templates.parse_string("Document".into(), source) {
panic!("failed to parse templates/Document.html: {}", e);
}
if let Err(e) = File::create(rust_dest.join("templates.rs")).map_err(|e| rust::Error::Io(e)).and_then(|mut file| templates.emit_code(&mut file, &rust)) {
panic!("failed to create symbiosis/templates.rs: {}", e);
}
}
|
{
panic!("failed to precompile templates/shared: {}", e);
}
|
conditional_block
|
build.rs
|
extern crate symbiosis;
use std::path::Path;
use std::fs::{File, create_dir_all};
use std::io::Read;
use std::default::Default;
use symbiosis::TemplateGroup;
use symbiosis::rust::{self, Rust};
use symbiosis::javascript::{self, JavaScript};
fn main() {
|
let rust_dest = Path::new(&out_dir).join("symbiosis/");
if let Err(e) = create_dir_all(&rust_dest) {
panic!("failed to create Symbiosis output directory: {}", e);
}
let js_dest = Path::new("res");
let mut templates = TemplateGroup::new();
if let Err(e) = templates.parse_directory("templates/shared") {
panic!("failed to precompile templates/shared: {}", e);
}
let js = JavaScript {
namespace: Some("templates"),
..Default::default()
};
let rust = Rust {..Default::default() };
if let Err(e) = File::create(js_dest.join("templates.js")).map_err(|e| javascript::Error::Io(e)).and_then(|mut file| templates.emit_code(&mut file, &js)) {
panic!("failed to create res/templates.js: {}", e);
}
let mut source = String::new();
if let Err(e) = File::open("templates/Document.html").and_then(|mut f| f.read_to_string(&mut source)) {
panic!("failed to read templates/Document.html: {}", e);
}
if let Err(e) = templates.parse_string("Document".into(), source) {
panic!("failed to parse templates/Document.html: {}", e);
}
if let Err(e) = File::create(rust_dest.join("templates.rs")).map_err(|e| rust::Error::Io(e)).and_then(|mut file| templates.emit_code(&mut file, &rust)) {
panic!("failed to create symbiosis/templates.rs: {}", e);
}
}
|
let out_dir = std::env::var("OUT_DIR").unwrap();
|
random_line_split
|
build.rs
|
extern crate symbiosis;
use std::path::Path;
use std::fs::{File, create_dir_all};
use std::io::Read;
use std::default::Default;
use symbiosis::TemplateGroup;
use symbiosis::rust::{self, Rust};
use symbiosis::javascript::{self, JavaScript};
fn main()
|
if let Err(e) = File::create(js_dest.join("templates.js")).map_err(|e| javascript::Error::Io(e)).and_then(|mut file| templates.emit_code(&mut file, &js)) {
panic!("failed to create res/templates.js: {}", e);
}
let mut source = String::new();
if let Err(e) = File::open("templates/Document.html").and_then(|mut f| f.read_to_string(&mut source)) {
panic!("failed to read templates/Document.html: {}", e);
}
if let Err(e) = templates.parse_string("Document".into(), source) {
panic!("failed to parse templates/Document.html: {}", e);
}
if let Err(e) = File::create(rust_dest.join("templates.rs")).map_err(|e| rust::Error::Io(e)).and_then(|mut file| templates.emit_code(&mut file, &rust)) {
panic!("failed to create symbiosis/templates.rs: {}", e);
}
}
|
{
let out_dir = std::env::var("OUT_DIR").unwrap();
let rust_dest = Path::new(&out_dir).join("symbiosis/");
if let Err(e) = create_dir_all(&rust_dest) {
panic!("failed to create Symbiosis output directory: {}", e);
}
let js_dest = Path::new("res");
let mut templates = TemplateGroup::new();
if let Err(e) = templates.parse_directory("templates/shared") {
panic!("failed to precompile templates/shared: {}", e);
}
let js = JavaScript {
namespace: Some("templates"),
..Default::default()
};
let rust = Rust { ..Default::default() };
|
identifier_body
|
diagnostic.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! LLVM diagnostic reports.
pub use self::OptimizationDiagnosticKind::*;
pub use self::Diagnostic::*;
use libc::{c_char, c_uint};
use std::ptr;
use {ValueRef, TwineRef, DebugLocRef, DiagnosticInfoRef};
#[derive(Copy)]
pub enum OptimizationDiagnosticKind {
OptimizationRemark,
OptimizationMissed,
OptimizationAnalysis,
OptimizationFailure,
}
impl OptimizationDiagnosticKind {
pub fn describe(self) -> &'static str {
match self {
OptimizationRemark => "remark",
OptimizationMissed => "missed",
OptimizationAnalysis => "analysis",
OptimizationFailure => "failure",
}
}
}
#[allow(raw_pointer_derive)]
#[derive(Copy)]
pub struct OptimizationDiagnostic {
pub kind: OptimizationDiagnosticKind,
pub pass_name: *const c_char,
pub function: ValueRef,
pub debug_loc: DebugLocRef,
pub message: TwineRef,
}
impl OptimizationDiagnostic {
unsafe fn unpack(kind: OptimizationDiagnosticKind, di: DiagnosticInfoRef)
-> OptimizationDiagnostic {
let mut opt = OptimizationDiagnostic {
kind: kind,
pass_name: ptr::null(),
function: ptr::null_mut(),
debug_loc: ptr::null_mut(),
message: ptr::null_mut(),
};
super::LLVMUnpackOptimizationDiagnostic(di,
&mut opt.pass_name,
&mut opt.function,
&mut opt.debug_loc,
&mut opt.message);
opt
}
}
pub struct InlineAsmDiagnostic {
pub cookie: c_uint,
pub message: TwineRef,
pub instruction: ValueRef,
}
impl Copy for InlineAsmDiagnostic {}
impl InlineAsmDiagnostic {
unsafe fn unpack(di: DiagnosticInfoRef)
-> InlineAsmDiagnostic {
let mut opt = InlineAsmDiagnostic {
cookie: 0,
message: ptr::null_mut(),
instruction: ptr::null_mut(),
};
super::LLVMUnpackInlineAsmDiagnostic(di,
&mut opt.cookie,
&mut opt.message,
&mut opt.instruction);
opt
}
}
#[derive(Copy)]
pub enum Diagnostic {
Optimization(OptimizationDiagnostic),
InlineAsm(InlineAsmDiagnostic),
/// LLVM has other types that we do not wrap here.
UnknownDiagnostic(DiagnosticInfoRef),
}
impl Diagnostic {
pub unsafe fn
|
(di: DiagnosticInfoRef) -> Diagnostic {
let kind = super::LLVMGetDiagInfoKind(di);
match kind {
super::DK_InlineAsm
=> InlineAsm(InlineAsmDiagnostic::unpack(di)),
super::DK_OptimizationRemark
=> Optimization(OptimizationDiagnostic::unpack(OptimizationRemark, di)),
super::DK_OptimizationRemarkMissed
=> Optimization(OptimizationDiagnostic::unpack(OptimizationMissed, di)),
super::DK_OptimizationRemarkAnalysis
=> Optimization(OptimizationDiagnostic::unpack(OptimizationAnalysis, di)),
super::DK_OptimizationFailure
=> Optimization(OptimizationDiagnostic::unpack(OptimizationFailure, di)),
_ => UnknownDiagnostic(di)
}
}
}
|
unpack
|
identifier_name
|
diagnostic.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! LLVM diagnostic reports.
pub use self::OptimizationDiagnosticKind::*;
pub use self::Diagnostic::*;
use libc::{c_char, c_uint};
use std::ptr;
use {ValueRef, TwineRef, DebugLocRef, DiagnosticInfoRef};
#[derive(Copy)]
pub enum OptimizationDiagnosticKind {
OptimizationRemark,
OptimizationMissed,
OptimizationAnalysis,
OptimizationFailure,
}
impl OptimizationDiagnosticKind {
pub fn describe(self) -> &'static str {
match self {
OptimizationRemark => "remark",
OptimizationMissed => "missed",
OptimizationAnalysis => "analysis",
OptimizationFailure => "failure",
}
}
}
#[allow(raw_pointer_derive)]
#[derive(Copy)]
pub struct OptimizationDiagnostic {
pub kind: OptimizationDiagnosticKind,
pub pass_name: *const c_char,
pub function: ValueRef,
pub debug_loc: DebugLocRef,
pub message: TwineRef,
}
impl OptimizationDiagnostic {
unsafe fn unpack(kind: OptimizationDiagnosticKind, di: DiagnosticInfoRef)
-> OptimizationDiagnostic {
let mut opt = OptimizationDiagnostic {
|
message: ptr::null_mut(),
};
super::LLVMUnpackOptimizationDiagnostic(di,
&mut opt.pass_name,
&mut opt.function,
&mut opt.debug_loc,
&mut opt.message);
opt
}
}
pub struct InlineAsmDiagnostic {
pub cookie: c_uint,
pub message: TwineRef,
pub instruction: ValueRef,
}
impl Copy for InlineAsmDiagnostic {}
impl InlineAsmDiagnostic {
unsafe fn unpack(di: DiagnosticInfoRef)
-> InlineAsmDiagnostic {
let mut opt = InlineAsmDiagnostic {
cookie: 0,
message: ptr::null_mut(),
instruction: ptr::null_mut(),
};
super::LLVMUnpackInlineAsmDiagnostic(di,
&mut opt.cookie,
&mut opt.message,
&mut opt.instruction);
opt
}
}
#[derive(Copy)]
pub enum Diagnostic {
Optimization(OptimizationDiagnostic),
InlineAsm(InlineAsmDiagnostic),
/// LLVM has other types that we do not wrap here.
UnknownDiagnostic(DiagnosticInfoRef),
}
impl Diagnostic {
pub unsafe fn unpack(di: DiagnosticInfoRef) -> Diagnostic {
let kind = super::LLVMGetDiagInfoKind(di);
match kind {
super::DK_InlineAsm
=> InlineAsm(InlineAsmDiagnostic::unpack(di)),
super::DK_OptimizationRemark
=> Optimization(OptimizationDiagnostic::unpack(OptimizationRemark, di)),
super::DK_OptimizationRemarkMissed
=> Optimization(OptimizationDiagnostic::unpack(OptimizationMissed, di)),
super::DK_OptimizationRemarkAnalysis
=> Optimization(OptimizationDiagnostic::unpack(OptimizationAnalysis, di)),
super::DK_OptimizationFailure
=> Optimization(OptimizationDiagnostic::unpack(OptimizationFailure, di)),
_ => UnknownDiagnostic(di)
}
}
}
|
kind: kind,
pass_name: ptr::null(),
function: ptr::null_mut(),
debug_loc: ptr::null_mut(),
|
random_line_split
|
htmlvideoelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::document_loader::{LoadBlocker, LoadType};
use crate::dom::attr::Attr;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::HTMLVideoElementBinding::HTMLVideoElementMethods;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::document::Document;
use crate::dom::element::{AttributeMutation, Element};
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlmediaelement::{HTMLMediaElement, ReadyState};
use crate::dom::node::{document_from_node, window_from_node, Node};
use crate::dom::performanceresourcetiming::InitiatorType;
use crate::dom::virtualmethods::VirtualMethods;
use crate::fetch::FetchCanceller;
use crate::image_listener::{generate_cache_listener_for_element, ImageCacheListener};
use crate::network_listener::{self, NetworkListener, PreInvoke, ResourceTimingListener};
use dom_struct::dom_struct;
use euclid::default::Size2D;
use html5ever::{LocalName, Prefix};
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use net_traits::image_cache::{
ImageCache, ImageCacheResult, ImageOrMetadataAvailable, ImageResponse, PendingImageId,
UsePlaceholder,
};
use net_traits::request::{CredentialsMode, Destination, RequestBuilder};
use net_traits::{
CoreResourceMsg, FetchChannels, FetchMetadata, FetchResponseListener, FetchResponseMsg,
};
use net_traits::{NetworkError, ResourceFetchTiming, ResourceTimingType};
use servo_media::player::video::VideoFrame;
use servo_url::ServoUrl;
use std::cell::Cell;
use std::sync::{Arc, Mutex};
const DEFAULT_WIDTH: u32 = 300;
const DEFAULT_HEIGHT: u32 = 150;
#[dom_struct]
pub struct HTMLVideoElement {
htmlmediaelement: HTMLMediaElement,
/// https://html.spec.whatwg.org/multipage/#dom-video-videowidth
video_width: Cell<u32>,
/// https://html.spec.whatwg.org/multipage/#dom-video-videoheight
video_height: Cell<u32>,
/// Incremented whenever tasks associated with this element are cancelled.
generation_id: Cell<u32>,
/// Poster frame fetch request canceller.
poster_frame_canceller: DomRefCell<FetchCanceller>,
/// Load event blocker. Will block the load event while the poster frame
/// is being fetched.
load_blocker: DomRefCell<Option<LoadBlocker>>,
/// A copy of the last frame
#[ignore_malloc_size_of = "VideoFrame"]
last_frame: DomRefCell<Option<VideoFrame>>,
}
impl HTMLVideoElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLVideoElement {
HTMLVideoElement {
htmlmediaelement: HTMLMediaElement::new_inherited(local_name, prefix, document),
video_width: Cell::new(DEFAULT_WIDTH),
video_height: Cell::new(DEFAULT_HEIGHT),
generation_id: Cell::new(0),
poster_frame_canceller: DomRefCell::new(Default::default()),
load_blocker: Default::default(),
last_frame: Default::default(),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLVideoElement> {
Node::reflect_node(
Box::new(HTMLVideoElement::new_inherited(
local_name, prefix, document,
)),
document,
)
}
pub fn get_video_width(&self) -> u32
|
pub fn set_video_width(&self, width: u32) {
self.video_width.set(width);
}
pub fn get_video_height(&self) -> u32 {
self.video_height.get()
}
pub fn set_video_height(&self, height: u32) {
self.video_height.set(height);
}
pub fn allow_load_event(&self) {
LoadBlocker::terminate(&mut *self.load_blocker.borrow_mut());
}
pub fn get_current_frame_data(&self) -> Option<(Option<ipc::IpcSharedMemory>, Size2D<u32>)> {
let frame = self.htmlmediaelement.get_current_frame();
if frame.is_some() {
*self.last_frame.borrow_mut() = frame;
}
match self.last_frame.borrow().as_ref() {
Some(frame) => {
let size = Size2D::new(frame.get_width() as u32, frame.get_height() as u32);
if!frame.is_gl_texture() {
let data = Some(ipc::IpcSharedMemory::from_bytes(&frame.get_data()));
Some((data, size))
} else {
// XXX(victor): here we only have the GL texture ID.
Some((None, size))
}
},
None => None,
}
}
/// https://html.spec.whatwg.org/multipage/#poster-frame
fn fetch_poster_frame(&self, poster_url: &str) {
// Step 1.
let cancel_receiver = self.poster_frame_canceller.borrow_mut().initialize();
self.generation_id.set(self.generation_id.get() + 1);
// Step 2.
if poster_url.is_empty() {
return;
}
// Step 3.
let poster_url = match document_from_node(self).url().join(&poster_url) {
Ok(url) => url,
Err(_) => return,
};
// Step 4.
// We use the image cache for poster frames so we save as much
// network activity as possible.
let window = window_from_node(self);
let image_cache = window.image_cache();
let sender = generate_cache_listener_for_element(self);
let cache_result = image_cache.track_image(
poster_url.clone(),
window.origin().immutable().clone(),
None,
sender,
UsePlaceholder::No,
);
match cache_result {
ImageCacheResult::Available(ImageOrMetadataAvailable::ImageAvailable {
image,
url,
..
}) => {
self.process_image_response(ImageResponse::Loaded(image, url));
},
ImageCacheResult::ReadyForRequest(id) => {
self.do_fetch_poster_frame(poster_url, id, cancel_receiver)
},
_ => (),
}
}
/// https://html.spec.whatwg.org/multipage/#poster-frame
fn do_fetch_poster_frame(
&self,
poster_url: ServoUrl,
id: PendingImageId,
cancel_receiver: ipc::IpcReceiver<()>,
) {
// Continuation of step 4.
let document = document_from_node(self);
let request = RequestBuilder::new(poster_url.clone(), document.global().get_referrer())
.destination(Destination::Image)
.credentials_mode(CredentialsMode::Include)
.use_url_credentials(true)
.origin(document.origin().immutable().clone())
.pipeline_id(Some(document.global().pipeline_id()));
// Step 5.
// This delay must be independent from the ones created by HTMLMediaElement during
// its media load algorithm, otherwise a code like
// <video poster="poster.png"></video>
// (which triggers no media load algorithm unless a explicit call to.load() is done)
// will block the document's load event forever.
let mut blocker = self.load_blocker.borrow_mut();
LoadBlocker::terminate(&mut *blocker);
*blocker = Some(LoadBlocker::new(
&document_from_node(self),
LoadType::Image(poster_url.clone()),
));
let window = window_from_node(self);
let context = Arc::new(Mutex::new(PosterFrameFetchContext::new(
self, poster_url, id,
)));
let (action_sender, action_receiver) = ipc::channel().unwrap();
let (task_source, canceller) = window
.task_manager()
.networking_task_source_with_canceller();
let listener = NetworkListener {
context,
task_source,
canceller: Some(canceller),
};
ROUTER.add_route(
action_receiver.to_opaque(),
Box::new(move |message| {
listener.notify_fetch(message.to().unwrap());
}),
);
let global = self.global();
global
.core_resource_thread()
.send(CoreResourceMsg::Fetch(
request,
FetchChannels::ResponseMsg(action_sender, Some(cancel_receiver)),
))
.unwrap();
}
}
impl HTMLVideoElementMethods for HTMLVideoElement {
// https://html.spec.whatwg.org/multipage/#dom-video-videowidth
fn VideoWidth(&self) -> u32 {
if self.htmlmediaelement.get_ready_state() == ReadyState::HaveNothing {
return 0;
}
self.video_width.get()
}
// https://html.spec.whatwg.org/multipage/#dom-video-videoheight
fn VideoHeight(&self) -> u32 {
if self.htmlmediaelement.get_ready_state() == ReadyState::HaveNothing {
return 0;
}
self.video_height.get()
}
// https://html.spec.whatwg.org/multipage/#dom-video-poster
make_getter!(Poster, "poster");
// https://html.spec.whatwg.org/multipage/#dom-video-poster
make_setter!(SetPoster, "poster");
// For testing purposes only. This is not an event from
// https://html.spec.whatwg.org/multipage/#dom-video-poster
event_handler!(postershown, GetOnpostershown, SetOnpostershown);
}
impl VirtualMethods for HTMLVideoElement {
fn super_type(&self) -> Option<&dyn VirtualMethods> {
Some(self.upcast::<HTMLMediaElement>() as &dyn VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
if let Some(new_value) = mutation.new_value(attr) {
match attr.local_name() {
&local_name!("poster") => {
self.fetch_poster_frame(&new_value);
},
_ => (),
};
}
}
}
impl ImageCacheListener for HTMLVideoElement {
fn generation_id(&self) -> u32 {
self.generation_id.get()
}
fn process_image_response(&self, response: ImageResponse) {
self.htmlmediaelement.process_poster_response(response);
}
}
struct PosterFrameFetchContext {
/// Reference to the script thread image cache.
image_cache: Arc<dyn ImageCache>,
/// The element that initiated the request.
elem: Trusted<HTMLVideoElement>,
/// The cache ID for this request.
id: PendingImageId,
/// True if this response is invalid and should be ignored.
cancelled: bool,
/// Timing data for this resource
resource_timing: ResourceFetchTiming,
/// Url for the resource
url: ServoUrl,
}
impl FetchResponseListener for PosterFrameFetchContext {
fn process_request_body(&mut self) {}
fn process_request_eof(&mut self) {}
fn process_response(&mut self, metadata: Result<FetchMetadata, NetworkError>) {
self.image_cache
.notify_pending_response(self.id, FetchResponseMsg::ProcessResponse(metadata.clone()));
let metadata = metadata.ok().map(|meta| match meta {
FetchMetadata::Unfiltered(m) => m,
FetchMetadata::Filtered { unsafe_,.. } => unsafe_,
});
let status_is_ok = metadata
.as_ref()
.and_then(|m| m.status.as_ref())
.map_or(true, |s| s.0 >= 200 && s.0 < 300);
if!status_is_ok {
self.cancelled = true;
self.elem
.root()
.poster_frame_canceller
.borrow_mut()
.cancel();
}
}
fn process_response_chunk(&mut self, payload: Vec<u8>) {
if self.cancelled {
// An error was received previously, skip processing the payload.
return;
}
self.image_cache
.notify_pending_response(self.id, FetchResponseMsg::ProcessResponseChunk(payload));
}
fn process_response_eof(&mut self, response: Result<ResourceFetchTiming, NetworkError>) {
self.elem.root().allow_load_event();
self.image_cache
.notify_pending_response(self.id, FetchResponseMsg::ProcessResponseEOF(response));
}
fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming {
&mut self.resource_timing
}
fn resource_timing(&self) -> &ResourceFetchTiming {
&self.resource_timing
}
fn submit_resource_timing(&mut self) {
network_listener::submit_timing(self)
}
}
impl ResourceTimingListener for PosterFrameFetchContext {
fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
let initiator_type = InitiatorType::LocalName(
self.elem
.root()
.upcast::<Element>()
.local_name()
.to_string(),
);
(initiator_type, self.url.clone())
}
fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
document_from_node(&*self.elem.root()).global()
}
}
impl PreInvoke for PosterFrameFetchContext {
fn should_invoke(&self) -> bool {
true
}
}
impl PosterFrameFetchContext {
fn new(elem: &HTMLVideoElement, url: ServoUrl, id: PendingImageId) -> PosterFrameFetchContext {
let window = window_from_node(elem);
PosterFrameFetchContext {
image_cache: window.image_cache(),
elem: Trusted::new(elem),
id,
cancelled: false,
resource_timing: ResourceFetchTiming::new(ResourceTimingType::Resource),
url,
}
}
}
|
{
self.video_width.get()
}
|
identifier_body
|
htmlvideoelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::document_loader::{LoadBlocker, LoadType};
use crate::dom::attr::Attr;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::HTMLVideoElementBinding::HTMLVideoElementMethods;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::document::Document;
use crate::dom::element::{AttributeMutation, Element};
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlmediaelement::{HTMLMediaElement, ReadyState};
use crate::dom::node::{document_from_node, window_from_node, Node};
use crate::dom::performanceresourcetiming::InitiatorType;
use crate::dom::virtualmethods::VirtualMethods;
use crate::fetch::FetchCanceller;
use crate::image_listener::{generate_cache_listener_for_element, ImageCacheListener};
use crate::network_listener::{self, NetworkListener, PreInvoke, ResourceTimingListener};
use dom_struct::dom_struct;
use euclid::default::Size2D;
use html5ever::{LocalName, Prefix};
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use net_traits::image_cache::{
ImageCache, ImageCacheResult, ImageOrMetadataAvailable, ImageResponse, PendingImageId,
UsePlaceholder,
};
use net_traits::request::{CredentialsMode, Destination, RequestBuilder};
use net_traits::{
CoreResourceMsg, FetchChannels, FetchMetadata, FetchResponseListener, FetchResponseMsg,
};
use net_traits::{NetworkError, ResourceFetchTiming, ResourceTimingType};
use servo_media::player::video::VideoFrame;
use servo_url::ServoUrl;
use std::cell::Cell;
use std::sync::{Arc, Mutex};
const DEFAULT_WIDTH: u32 = 300;
const DEFAULT_HEIGHT: u32 = 150;
#[dom_struct]
pub struct HTMLVideoElement {
htmlmediaelement: HTMLMediaElement,
/// https://html.spec.whatwg.org/multipage/#dom-video-videowidth
video_width: Cell<u32>,
/// https://html.spec.whatwg.org/multipage/#dom-video-videoheight
video_height: Cell<u32>,
/// Incremented whenever tasks associated with this element are cancelled.
generation_id: Cell<u32>,
/// Poster frame fetch request canceller.
poster_frame_canceller: DomRefCell<FetchCanceller>,
/// Load event blocker. Will block the load event while the poster frame
/// is being fetched.
load_blocker: DomRefCell<Option<LoadBlocker>>,
/// A copy of the last frame
#[ignore_malloc_size_of = "VideoFrame"]
last_frame: DomRefCell<Option<VideoFrame>>,
}
impl HTMLVideoElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLVideoElement {
HTMLVideoElement {
htmlmediaelement: HTMLMediaElement::new_inherited(local_name, prefix, document),
video_width: Cell::new(DEFAULT_WIDTH),
video_height: Cell::new(DEFAULT_HEIGHT),
generation_id: Cell::new(0),
poster_frame_canceller: DomRefCell::new(Default::default()),
load_blocker: Default::default(),
last_frame: Default::default(),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLVideoElement> {
Node::reflect_node(
Box::new(HTMLVideoElement::new_inherited(
local_name, prefix, document,
)),
document,
)
}
pub fn get_video_width(&self) -> u32 {
self.video_width.get()
}
pub fn set_video_width(&self, width: u32) {
self.video_width.set(width);
}
pub fn get_video_height(&self) -> u32 {
self.video_height.get()
}
pub fn set_video_height(&self, height: u32) {
self.video_height.set(height);
}
pub fn allow_load_event(&self) {
LoadBlocker::terminate(&mut *self.load_blocker.borrow_mut());
}
pub fn get_current_frame_data(&self) -> Option<(Option<ipc::IpcSharedMemory>, Size2D<u32>)> {
let frame = self.htmlmediaelement.get_current_frame();
if frame.is_some() {
*self.last_frame.borrow_mut() = frame;
}
match self.last_frame.borrow().as_ref() {
Some(frame) => {
let size = Size2D::new(frame.get_width() as u32, frame.get_height() as u32);
if!frame.is_gl_texture() {
let data = Some(ipc::IpcSharedMemory::from_bytes(&frame.get_data()));
Some((data, size))
} else {
// XXX(victor): here we only have the GL texture ID.
Some((None, size))
}
},
None => None,
}
}
/// https://html.spec.whatwg.org/multipage/#poster-frame
fn fetch_poster_frame(&self, poster_url: &str) {
// Step 1.
let cancel_receiver = self.poster_frame_canceller.borrow_mut().initialize();
self.generation_id.set(self.generation_id.get() + 1);
// Step 2.
if poster_url.is_empty() {
return;
}
// Step 3.
let poster_url = match document_from_node(self).url().join(&poster_url) {
Ok(url) => url,
Err(_) => return,
};
// Step 4.
// We use the image cache for poster frames so we save as much
// network activity as possible.
let window = window_from_node(self);
let image_cache = window.image_cache();
let sender = generate_cache_listener_for_element(self);
let cache_result = image_cache.track_image(
poster_url.clone(),
window.origin().immutable().clone(),
None,
sender,
UsePlaceholder::No,
);
match cache_result {
ImageCacheResult::Available(ImageOrMetadataAvailable::ImageAvailable {
image,
url,
..
}) => {
self.process_image_response(ImageResponse::Loaded(image, url));
},
ImageCacheResult::ReadyForRequest(id) => {
self.do_fetch_poster_frame(poster_url, id, cancel_receiver)
},
_ => (),
}
}
/// https://html.spec.whatwg.org/multipage/#poster-frame
fn do_fetch_poster_frame(
&self,
poster_url: ServoUrl,
id: PendingImageId,
cancel_receiver: ipc::IpcReceiver<()>,
) {
// Continuation of step 4.
let document = document_from_node(self);
let request = RequestBuilder::new(poster_url.clone(), document.global().get_referrer())
.destination(Destination::Image)
.credentials_mode(CredentialsMode::Include)
.use_url_credentials(true)
.origin(document.origin().immutable().clone())
.pipeline_id(Some(document.global().pipeline_id()));
// Step 5.
// This delay must be independent from the ones created by HTMLMediaElement during
// its media load algorithm, otherwise a code like
// <video poster="poster.png"></video>
// (which triggers no media load algorithm unless a explicit call to.load() is done)
// will block the document's load event forever.
let mut blocker = self.load_blocker.borrow_mut();
LoadBlocker::terminate(&mut *blocker);
*blocker = Some(LoadBlocker::new(
&document_from_node(self),
LoadType::Image(poster_url.clone()),
));
let window = window_from_node(self);
let context = Arc::new(Mutex::new(PosterFrameFetchContext::new(
self, poster_url, id,
)));
let (action_sender, action_receiver) = ipc::channel().unwrap();
let (task_source, canceller) = window
.task_manager()
.networking_task_source_with_canceller();
let listener = NetworkListener {
context,
task_source,
canceller: Some(canceller),
};
ROUTER.add_route(
action_receiver.to_opaque(),
Box::new(move |message| {
listener.notify_fetch(message.to().unwrap());
}),
);
let global = self.global();
global
.core_resource_thread()
.send(CoreResourceMsg::Fetch(
request,
FetchChannels::ResponseMsg(action_sender, Some(cancel_receiver)),
))
.unwrap();
}
}
impl HTMLVideoElementMethods for HTMLVideoElement {
// https://html.spec.whatwg.org/multipage/#dom-video-videowidth
fn VideoWidth(&self) -> u32 {
if self.htmlmediaelement.get_ready_state() == ReadyState::HaveNothing {
return 0;
}
self.video_width.get()
}
// https://html.spec.whatwg.org/multipage/#dom-video-videoheight
fn VideoHeight(&self) -> u32 {
if self.htmlmediaelement.get_ready_state() == ReadyState::HaveNothing {
return 0;
}
self.video_height.get()
}
// https://html.spec.whatwg.org/multipage/#dom-video-poster
make_getter!(Poster, "poster");
// https://html.spec.whatwg.org/multipage/#dom-video-poster
make_setter!(SetPoster, "poster");
// For testing purposes only. This is not an event from
// https://html.spec.whatwg.org/multipage/#dom-video-poster
event_handler!(postershown, GetOnpostershown, SetOnpostershown);
}
impl VirtualMethods for HTMLVideoElement {
fn super_type(&self) -> Option<&dyn VirtualMethods> {
Some(self.upcast::<HTMLMediaElement>() as &dyn VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
if let Some(new_value) = mutation.new_value(attr) {
match attr.local_name() {
&local_name!("poster") => {
self.fetch_poster_frame(&new_value);
},
_ => (),
};
}
}
}
impl ImageCacheListener for HTMLVideoElement {
fn generation_id(&self) -> u32 {
self.generation_id.get()
}
fn process_image_response(&self, response: ImageResponse) {
self.htmlmediaelement.process_poster_response(response);
}
}
struct PosterFrameFetchContext {
/// Reference to the script thread image cache.
image_cache: Arc<dyn ImageCache>,
/// The element that initiated the request.
elem: Trusted<HTMLVideoElement>,
/// The cache ID for this request.
id: PendingImageId,
/// True if this response is invalid and should be ignored.
cancelled: bool,
/// Timing data for this resource
resource_timing: ResourceFetchTiming,
/// Url for the resource
url: ServoUrl,
}
impl FetchResponseListener for PosterFrameFetchContext {
fn process_request_body(&mut self) {}
fn process_request_eof(&mut self) {}
fn process_response(&mut self, metadata: Result<FetchMetadata, NetworkError>) {
self.image_cache
.notify_pending_response(self.id, FetchResponseMsg::ProcessResponse(metadata.clone()));
let metadata = metadata.ok().map(|meta| match meta {
FetchMetadata::Unfiltered(m) => m,
FetchMetadata::Filtered { unsafe_,.. } => unsafe_,
});
let status_is_ok = metadata
.as_ref()
.and_then(|m| m.status.as_ref())
.map_or(true, |s| s.0 >= 200 && s.0 < 300);
if!status_is_ok {
self.cancelled = true;
self.elem
.root()
.poster_frame_canceller
.borrow_mut()
.cancel();
}
}
fn
|
(&mut self, payload: Vec<u8>) {
if self.cancelled {
// An error was received previously, skip processing the payload.
return;
}
self.image_cache
.notify_pending_response(self.id, FetchResponseMsg::ProcessResponseChunk(payload));
}
fn process_response_eof(&mut self, response: Result<ResourceFetchTiming, NetworkError>) {
self.elem.root().allow_load_event();
self.image_cache
.notify_pending_response(self.id, FetchResponseMsg::ProcessResponseEOF(response));
}
fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming {
&mut self.resource_timing
}
fn resource_timing(&self) -> &ResourceFetchTiming {
&self.resource_timing
}
fn submit_resource_timing(&mut self) {
network_listener::submit_timing(self)
}
}
impl ResourceTimingListener for PosterFrameFetchContext {
fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
let initiator_type = InitiatorType::LocalName(
self.elem
.root()
.upcast::<Element>()
.local_name()
.to_string(),
);
(initiator_type, self.url.clone())
}
fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
document_from_node(&*self.elem.root()).global()
}
}
impl PreInvoke for PosterFrameFetchContext {
fn should_invoke(&self) -> bool {
true
}
}
impl PosterFrameFetchContext {
fn new(elem: &HTMLVideoElement, url: ServoUrl, id: PendingImageId) -> PosterFrameFetchContext {
let window = window_from_node(elem);
PosterFrameFetchContext {
image_cache: window.image_cache(),
elem: Trusted::new(elem),
id,
cancelled: false,
resource_timing: ResourceFetchTiming::new(ResourceTimingType::Resource),
url,
}
}
}
|
process_response_chunk
|
identifier_name
|
htmlvideoelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::document_loader::{LoadBlocker, LoadType};
use crate::dom::attr::Attr;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::HTMLVideoElementBinding::HTMLVideoElementMethods;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::document::Document;
use crate::dom::element::{AttributeMutation, Element};
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlmediaelement::{HTMLMediaElement, ReadyState};
use crate::dom::node::{document_from_node, window_from_node, Node};
use crate::dom::performanceresourcetiming::InitiatorType;
use crate::dom::virtualmethods::VirtualMethods;
use crate::fetch::FetchCanceller;
use crate::image_listener::{generate_cache_listener_for_element, ImageCacheListener};
use crate::network_listener::{self, NetworkListener, PreInvoke, ResourceTimingListener};
use dom_struct::dom_struct;
use euclid::default::Size2D;
use html5ever::{LocalName, Prefix};
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use net_traits::image_cache::{
ImageCache, ImageCacheResult, ImageOrMetadataAvailable, ImageResponse, PendingImageId,
UsePlaceholder,
};
use net_traits::request::{CredentialsMode, Destination, RequestBuilder};
use net_traits::{
CoreResourceMsg, FetchChannels, FetchMetadata, FetchResponseListener, FetchResponseMsg,
};
use net_traits::{NetworkError, ResourceFetchTiming, ResourceTimingType};
use servo_media::player::video::VideoFrame;
use servo_url::ServoUrl;
use std::cell::Cell;
use std::sync::{Arc, Mutex};
const DEFAULT_WIDTH: u32 = 300;
const DEFAULT_HEIGHT: u32 = 150;
#[dom_struct]
pub struct HTMLVideoElement {
htmlmediaelement: HTMLMediaElement,
/// https://html.spec.whatwg.org/multipage/#dom-video-videowidth
video_width: Cell<u32>,
/// https://html.spec.whatwg.org/multipage/#dom-video-videoheight
video_height: Cell<u32>,
/// Incremented whenever tasks associated with this element are cancelled.
generation_id: Cell<u32>,
/// Poster frame fetch request canceller.
poster_frame_canceller: DomRefCell<FetchCanceller>,
/// Load event blocker. Will block the load event while the poster frame
/// is being fetched.
load_blocker: DomRefCell<Option<LoadBlocker>>,
/// A copy of the last frame
#[ignore_malloc_size_of = "VideoFrame"]
last_frame: DomRefCell<Option<VideoFrame>>,
}
impl HTMLVideoElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLVideoElement {
HTMLVideoElement {
htmlmediaelement: HTMLMediaElement::new_inherited(local_name, prefix, document),
video_width: Cell::new(DEFAULT_WIDTH),
video_height: Cell::new(DEFAULT_HEIGHT),
generation_id: Cell::new(0),
poster_frame_canceller: DomRefCell::new(Default::default()),
load_blocker: Default::default(),
last_frame: Default::default(),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLVideoElement> {
Node::reflect_node(
Box::new(HTMLVideoElement::new_inherited(
local_name, prefix, document,
)),
document,
)
}
pub fn get_video_width(&self) -> u32 {
self.video_width.get()
}
pub fn set_video_width(&self, width: u32) {
self.video_width.set(width);
}
pub fn get_video_height(&self) -> u32 {
self.video_height.get()
}
pub fn set_video_height(&self, height: u32) {
self.video_height.set(height);
}
pub fn allow_load_event(&self) {
LoadBlocker::terminate(&mut *self.load_blocker.borrow_mut());
}
pub fn get_current_frame_data(&self) -> Option<(Option<ipc::IpcSharedMemory>, Size2D<u32>)> {
let frame = self.htmlmediaelement.get_current_frame();
if frame.is_some() {
*self.last_frame.borrow_mut() = frame;
}
match self.last_frame.borrow().as_ref() {
Some(frame) => {
let size = Size2D::new(frame.get_width() as u32, frame.get_height() as u32);
if!frame.is_gl_texture() {
let data = Some(ipc::IpcSharedMemory::from_bytes(&frame.get_data()));
Some((data, size))
} else {
// XXX(victor): here we only have the GL texture ID.
Some((None, size))
}
},
None => None,
}
}
/// https://html.spec.whatwg.org/multipage/#poster-frame
fn fetch_poster_frame(&self, poster_url: &str) {
// Step 1.
let cancel_receiver = self.poster_frame_canceller.borrow_mut().initialize();
self.generation_id.set(self.generation_id.get() + 1);
// Step 2.
if poster_url.is_empty() {
return;
}
// Step 3.
let poster_url = match document_from_node(self).url().join(&poster_url) {
Ok(url) => url,
Err(_) => return,
};
// Step 4.
// We use the image cache for poster frames so we save as much
// network activity as possible.
let window = window_from_node(self);
let image_cache = window.image_cache();
let sender = generate_cache_listener_for_element(self);
let cache_result = image_cache.track_image(
poster_url.clone(),
window.origin().immutable().clone(),
None,
sender,
UsePlaceholder::No,
);
match cache_result {
ImageCacheResult::Available(ImageOrMetadataAvailable::ImageAvailable {
image,
url,
..
}) => {
self.process_image_response(ImageResponse::Loaded(image, url));
},
ImageCacheResult::ReadyForRequest(id) => {
self.do_fetch_poster_frame(poster_url, id, cancel_receiver)
},
_ => (),
}
}
/// https://html.spec.whatwg.org/multipage/#poster-frame
fn do_fetch_poster_frame(
&self,
poster_url: ServoUrl,
id: PendingImageId,
cancel_receiver: ipc::IpcReceiver<()>,
) {
// Continuation of step 4.
let document = document_from_node(self);
let request = RequestBuilder::new(poster_url.clone(), document.global().get_referrer())
.destination(Destination::Image)
.credentials_mode(CredentialsMode::Include)
.use_url_credentials(true)
.origin(document.origin().immutable().clone())
.pipeline_id(Some(document.global().pipeline_id()));
// Step 5.
// This delay must be independent from the ones created by HTMLMediaElement during
// its media load algorithm, otherwise a code like
// <video poster="poster.png"></video>
// (which triggers no media load algorithm unless a explicit call to.load() is done)
// will block the document's load event forever.
let mut blocker = self.load_blocker.borrow_mut();
LoadBlocker::terminate(&mut *blocker);
*blocker = Some(LoadBlocker::new(
&document_from_node(self),
LoadType::Image(poster_url.clone()),
));
let window = window_from_node(self);
let context = Arc::new(Mutex::new(PosterFrameFetchContext::new(
self, poster_url, id,
)));
let (action_sender, action_receiver) = ipc::channel().unwrap();
let (task_source, canceller) = window
.task_manager()
.networking_task_source_with_canceller();
let listener = NetworkListener {
context,
task_source,
canceller: Some(canceller),
};
ROUTER.add_route(
action_receiver.to_opaque(),
Box::new(move |message| {
listener.notify_fetch(message.to().unwrap());
}),
);
let global = self.global();
global
.core_resource_thread()
.send(CoreResourceMsg::Fetch(
request,
FetchChannels::ResponseMsg(action_sender, Some(cancel_receiver)),
))
.unwrap();
}
}
impl HTMLVideoElementMethods for HTMLVideoElement {
// https://html.spec.whatwg.org/multipage/#dom-video-videowidth
fn VideoWidth(&self) -> u32 {
|
if self.htmlmediaelement.get_ready_state() == ReadyState::HaveNothing {
return 0;
}
self.video_width.get()
}
// https://html.spec.whatwg.org/multipage/#dom-video-videoheight
fn VideoHeight(&self) -> u32 {
if self.htmlmediaelement.get_ready_state() == ReadyState::HaveNothing {
return 0;
}
self.video_height.get()
}
// https://html.spec.whatwg.org/multipage/#dom-video-poster
make_getter!(Poster, "poster");
// https://html.spec.whatwg.org/multipage/#dom-video-poster
make_setter!(SetPoster, "poster");
// For testing purposes only. This is not an event from
// https://html.spec.whatwg.org/multipage/#dom-video-poster
event_handler!(postershown, GetOnpostershown, SetOnpostershown);
}
impl VirtualMethods for HTMLVideoElement {
fn super_type(&self) -> Option<&dyn VirtualMethods> {
Some(self.upcast::<HTMLMediaElement>() as &dyn VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
if let Some(new_value) = mutation.new_value(attr) {
match attr.local_name() {
&local_name!("poster") => {
self.fetch_poster_frame(&new_value);
},
_ => (),
};
}
}
}
impl ImageCacheListener for HTMLVideoElement {
fn generation_id(&self) -> u32 {
self.generation_id.get()
}
fn process_image_response(&self, response: ImageResponse) {
self.htmlmediaelement.process_poster_response(response);
}
}
struct PosterFrameFetchContext {
/// Reference to the script thread image cache.
image_cache: Arc<dyn ImageCache>,
/// The element that initiated the request.
elem: Trusted<HTMLVideoElement>,
/// The cache ID for this request.
id: PendingImageId,
/// True if this response is invalid and should be ignored.
cancelled: bool,
/// Timing data for this resource
resource_timing: ResourceFetchTiming,
/// Url for the resource
url: ServoUrl,
}
impl FetchResponseListener for PosterFrameFetchContext {
fn process_request_body(&mut self) {}
fn process_request_eof(&mut self) {}
fn process_response(&mut self, metadata: Result<FetchMetadata, NetworkError>) {
self.image_cache
.notify_pending_response(self.id, FetchResponseMsg::ProcessResponse(metadata.clone()));
let metadata = metadata.ok().map(|meta| match meta {
FetchMetadata::Unfiltered(m) => m,
FetchMetadata::Filtered { unsafe_,.. } => unsafe_,
});
let status_is_ok = metadata
.as_ref()
.and_then(|m| m.status.as_ref())
.map_or(true, |s| s.0 >= 200 && s.0 < 300);
if!status_is_ok {
self.cancelled = true;
self.elem
.root()
.poster_frame_canceller
.borrow_mut()
.cancel();
}
}
fn process_response_chunk(&mut self, payload: Vec<u8>) {
if self.cancelled {
// An error was received previously, skip processing the payload.
return;
}
self.image_cache
.notify_pending_response(self.id, FetchResponseMsg::ProcessResponseChunk(payload));
}
fn process_response_eof(&mut self, response: Result<ResourceFetchTiming, NetworkError>) {
self.elem.root().allow_load_event();
self.image_cache
.notify_pending_response(self.id, FetchResponseMsg::ProcessResponseEOF(response));
}
fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming {
&mut self.resource_timing
}
fn resource_timing(&self) -> &ResourceFetchTiming {
&self.resource_timing
}
fn submit_resource_timing(&mut self) {
network_listener::submit_timing(self)
}
}
impl ResourceTimingListener for PosterFrameFetchContext {
fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
let initiator_type = InitiatorType::LocalName(
self.elem
.root()
.upcast::<Element>()
.local_name()
.to_string(),
);
(initiator_type, self.url.clone())
}
fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
document_from_node(&*self.elem.root()).global()
}
}
impl PreInvoke for PosterFrameFetchContext {
fn should_invoke(&self) -> bool {
true
}
}
impl PosterFrameFetchContext {
fn new(elem: &HTMLVideoElement, url: ServoUrl, id: PendingImageId) -> PosterFrameFetchContext {
let window = window_from_node(elem);
PosterFrameFetchContext {
image_cache: window.image_cache(),
elem: Trusted::new(elem),
id,
cancelled: false,
resource_timing: ResourceFetchTiming::new(ResourceTimingType::Resource),
url,
}
}
}
|
random_line_split
|
|
cargo_output_metadata.rs
|
use crate::core::compiler::{CompileKind, CompileTarget, RustcTargetData};
use crate::core::dependency::DepKind;
use crate::core::resolver::{HasDevUnits, Resolve, ResolveOpts};
use crate::core::{Dependency, InternedString, Package, PackageId, Workspace};
use crate::ops::{self, Packages};
use crate::util::CargoResult;
use cargo_platform::Platform;
use serde::Serialize;
use std::collections::HashMap;
use std::path::PathBuf;
const VERSION: u32 = 1;
pub struct OutputMetadataOptions {
pub features: Vec<String>,
pub no_default_features: bool,
pub all_features: bool,
pub no_deps: bool,
pub version: u32,
pub filter_platform: Option<String>,
}
/// Loads the manifest, resolves the dependencies of the package to the concrete
/// used versions - considering overrides - and writes all dependencies in a JSON
/// format to stdout.
pub fn output_metadata(ws: &Workspace<'_>, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
if opt.version!= VERSION {
anyhow::bail!(
"metadata version {} not supported, only {} is currently supported",
opt.version,
VERSION
);
}
let (packages, resolve) = if opt.no_deps {
let packages = ws.members().cloned().collect();
(packages, None)
} else {
let (packages, resolve) = build_resolve_graph(ws, opt)?;
(packages, Some(resolve))
};
Ok(ExportInfo {
packages,
workspace_members: ws.members().map(|pkg| pkg.package_id()).collect(),
resolve,
target_directory: ws.target_dir().into_path_unlocked(),
version: VERSION,
workspace_root: ws.root().to_path_buf(),
})
}
/// This is the structure that is serialized and displayed to the user.
///
/// See cargo-metadata.adoc for detailed documentation of the format.
#[derive(Serialize)]
pub struct ExportInfo {
packages: Vec<Package>,
workspace_members: Vec<PackageId>,
resolve: Option<MetadataResolve>,
target_directory: PathBuf,
version: u32,
workspace_root: PathBuf,
}
#[derive(Serialize)]
struct MetadataResolve {
nodes: Vec<MetadataResolveNode>,
root: Option<PackageId>,
}
#[derive(Serialize)]
struct MetadataResolveNode {
id: PackageId,
dependencies: Vec<PackageId>,
deps: Vec<Dep>,
features: Vec<InternedString>,
}
#[derive(Serialize)]
struct Dep {
name: String,
pkg: PackageId,
dep_kinds: Vec<DepKindInfo>,
}
#[derive(Serialize, PartialEq, Eq, PartialOrd, Ord)]
struct DepKindInfo {
kind: DepKind,
target: Option<Platform>,
}
impl From<&Dependency> for DepKindInfo {
fn from(dep: &Dependency) -> DepKindInfo {
DepKindInfo {
kind: dep.kind(),
target: dep.platform().cloned(),
}
}
}
/// Builds the resolve graph as it will be displayed to the user.
fn build_resolve_graph(
ws: &Workspace<'_>,
metadata_opts: &OutputMetadataOptions,
) -> CargoResult<(Vec<Package>, MetadataResolve)> {
// TODO: Without --filter-platform, features are being resolved for `host` only.
// How should this work?
let requested_kind = match &metadata_opts.filter_platform {
Some(t) => CompileKind::Target(CompileTarget::new(t)?),
None => CompileKind::Host,
};
let target_data = RustcTargetData::new(ws, requested_kind)?;
// Resolve entire workspace.
let specs = Packages::All.to_package_id_specs(ws)?;
let resolve_opts = ResolveOpts::new(
/*dev_deps*/ true,
&metadata_opts.features,
metadata_opts.all_features,
!metadata_opts.no_default_features,
);
let ws_resolve = ops::resolve_ws_with_opts(
ws,
&target_data,
requested_kind,
&resolve_opts,
&specs,
HasDevUnits::Yes,
)?;
// Download all Packages. This is needed to serialize the information
// for every package. In theory this could honor target filtering,
// but that would be somewhat complex.
let mut package_map: HashMap<PackageId, Package> = ws_resolve
.pkg_set
.get_many(ws_resolve.pkg_set.package_ids())?
.into_iter()
.map(|pkg| (pkg.package_id(), pkg.clone()))
.collect();
// Start from the workspace roots, and recurse through filling out the
// map, filtering targets as necessary.
let mut node_map = HashMap::new();
for member_pkg in ws.members() {
build_resolve_graph_r(
&mut node_map,
member_pkg.package_id(),
&ws_resolve.targeted_resolve,
&package_map,
&target_data,
requested_kind,
);
}
// Get a Vec of Packages.
let actual_packages = package_map
.drain()
.filter_map(|(pkg_id, pkg)| node_map.get(&pkg_id).map(|_| pkg))
.collect();
let mr = MetadataResolve {
nodes: node_map.drain().map(|(_pkg_id, node)| node).collect(),
root: ws.current_opt().map(|pkg| pkg.package_id()),
};
Ok((actual_packages, mr))
}
fn build_resolve_graph_r(
node_map: &mut HashMap<PackageId, MetadataResolveNode>,
pkg_id: PackageId,
resolve: &Resolve,
package_map: &HashMap<PackageId, Package>,
target_data: &RustcTargetData,
requested_kind: CompileKind,
) {
if node_map.contains_key(&pkg_id) {
return;
}
let features = resolve.features(pkg_id).to_vec();
let deps: Vec<Dep> = resolve
.deps(pkg_id)
.filter(|(_dep_id, deps)| match requested_kind {
CompileKind::Target(_) => deps
.iter()
.any(|dep| target_data.dep_platform_activated(dep, requested_kind)),
// No --filter-platform is interpreted as "all platforms".
CompileKind::Host => true,
})
.filter_map(|(dep_id, deps)| {
let dep_kinds: Vec<_> = deps.iter().map(DepKindInfo::from).collect();
package_map
.get(&dep_id)
.and_then(|pkg| pkg.targets().iter().find(|t| t.is_lib()))
.and_then(|lib_target| resolve.extern_crate_name(pkg_id, dep_id, lib_target).ok())
.map(|name| Dep {
name,
pkg: dep_id,
dep_kinds,
})
})
.collect();
let dumb_deps: Vec<PackageId> = deps.iter().map(|dep| dep.pkg).collect();
let to_visit = dumb_deps.clone();
let node = MetadataResolveNode {
id: pkg_id,
dependencies: dumb_deps,
deps,
features,
};
node_map.insert(pkg_id, node);
for dep_id in to_visit {
build_resolve_graph_r(
node_map,
dep_id,
resolve,
|
requested_kind,
);
}
}
|
package_map,
target_data,
|
random_line_split
|
cargo_output_metadata.rs
|
use crate::core::compiler::{CompileKind, CompileTarget, RustcTargetData};
use crate::core::dependency::DepKind;
use crate::core::resolver::{HasDevUnits, Resolve, ResolveOpts};
use crate::core::{Dependency, InternedString, Package, PackageId, Workspace};
use crate::ops::{self, Packages};
use crate::util::CargoResult;
use cargo_platform::Platform;
use serde::Serialize;
use std::collections::HashMap;
use std::path::PathBuf;
const VERSION: u32 = 1;
pub struct OutputMetadataOptions {
pub features: Vec<String>,
pub no_default_features: bool,
pub all_features: bool,
pub no_deps: bool,
pub version: u32,
pub filter_platform: Option<String>,
}
/// Loads the manifest, resolves the dependencies of the package to the concrete
/// used versions - considering overrides - and writes all dependencies in a JSON
/// format to stdout.
pub fn output_metadata(ws: &Workspace<'_>, opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
if opt.version!= VERSION {
anyhow::bail!(
"metadata version {} not supported, only {} is currently supported",
opt.version,
VERSION
);
}
let (packages, resolve) = if opt.no_deps {
let packages = ws.members().cloned().collect();
(packages, None)
} else {
let (packages, resolve) = build_resolve_graph(ws, opt)?;
(packages, Some(resolve))
};
Ok(ExportInfo {
packages,
workspace_members: ws.members().map(|pkg| pkg.package_id()).collect(),
resolve,
target_directory: ws.target_dir().into_path_unlocked(),
version: VERSION,
workspace_root: ws.root().to_path_buf(),
})
}
/// This is the structure that is serialized and displayed to the user.
///
/// See cargo-metadata.adoc for detailed documentation of the format.
#[derive(Serialize)]
pub struct ExportInfo {
packages: Vec<Package>,
workspace_members: Vec<PackageId>,
resolve: Option<MetadataResolve>,
target_directory: PathBuf,
version: u32,
workspace_root: PathBuf,
}
#[derive(Serialize)]
struct
|
{
nodes: Vec<MetadataResolveNode>,
root: Option<PackageId>,
}
#[derive(Serialize)]
struct MetadataResolveNode {
id: PackageId,
dependencies: Vec<PackageId>,
deps: Vec<Dep>,
features: Vec<InternedString>,
}
#[derive(Serialize)]
struct Dep {
name: String,
pkg: PackageId,
dep_kinds: Vec<DepKindInfo>,
}
#[derive(Serialize, PartialEq, Eq, PartialOrd, Ord)]
struct DepKindInfo {
kind: DepKind,
target: Option<Platform>,
}
impl From<&Dependency> for DepKindInfo {
fn from(dep: &Dependency) -> DepKindInfo {
DepKindInfo {
kind: dep.kind(),
target: dep.platform().cloned(),
}
}
}
/// Builds the resolve graph as it will be displayed to the user.
fn build_resolve_graph(
ws: &Workspace<'_>,
metadata_opts: &OutputMetadataOptions,
) -> CargoResult<(Vec<Package>, MetadataResolve)> {
// TODO: Without --filter-platform, features are being resolved for `host` only.
// How should this work?
let requested_kind = match &metadata_opts.filter_platform {
Some(t) => CompileKind::Target(CompileTarget::new(t)?),
None => CompileKind::Host,
};
let target_data = RustcTargetData::new(ws, requested_kind)?;
// Resolve entire workspace.
let specs = Packages::All.to_package_id_specs(ws)?;
let resolve_opts = ResolveOpts::new(
/*dev_deps*/ true,
&metadata_opts.features,
metadata_opts.all_features,
!metadata_opts.no_default_features,
);
let ws_resolve = ops::resolve_ws_with_opts(
ws,
&target_data,
requested_kind,
&resolve_opts,
&specs,
HasDevUnits::Yes,
)?;
// Download all Packages. This is needed to serialize the information
// for every package. In theory this could honor target filtering,
// but that would be somewhat complex.
let mut package_map: HashMap<PackageId, Package> = ws_resolve
.pkg_set
.get_many(ws_resolve.pkg_set.package_ids())?
.into_iter()
.map(|pkg| (pkg.package_id(), pkg.clone()))
.collect();
// Start from the workspace roots, and recurse through filling out the
// map, filtering targets as necessary.
let mut node_map = HashMap::new();
for member_pkg in ws.members() {
build_resolve_graph_r(
&mut node_map,
member_pkg.package_id(),
&ws_resolve.targeted_resolve,
&package_map,
&target_data,
requested_kind,
);
}
// Get a Vec of Packages.
let actual_packages = package_map
.drain()
.filter_map(|(pkg_id, pkg)| node_map.get(&pkg_id).map(|_| pkg))
.collect();
let mr = MetadataResolve {
nodes: node_map.drain().map(|(_pkg_id, node)| node).collect(),
root: ws.current_opt().map(|pkg| pkg.package_id()),
};
Ok((actual_packages, mr))
}
fn build_resolve_graph_r(
node_map: &mut HashMap<PackageId, MetadataResolveNode>,
pkg_id: PackageId,
resolve: &Resolve,
package_map: &HashMap<PackageId, Package>,
target_data: &RustcTargetData,
requested_kind: CompileKind,
) {
if node_map.contains_key(&pkg_id) {
return;
}
let features = resolve.features(pkg_id).to_vec();
let deps: Vec<Dep> = resolve
.deps(pkg_id)
.filter(|(_dep_id, deps)| match requested_kind {
CompileKind::Target(_) => deps
.iter()
.any(|dep| target_data.dep_platform_activated(dep, requested_kind)),
// No --filter-platform is interpreted as "all platforms".
CompileKind::Host => true,
})
.filter_map(|(dep_id, deps)| {
let dep_kinds: Vec<_> = deps.iter().map(DepKindInfo::from).collect();
package_map
.get(&dep_id)
.and_then(|pkg| pkg.targets().iter().find(|t| t.is_lib()))
.and_then(|lib_target| resolve.extern_crate_name(pkg_id, dep_id, lib_target).ok())
.map(|name| Dep {
name,
pkg: dep_id,
dep_kinds,
})
})
.collect();
let dumb_deps: Vec<PackageId> = deps.iter().map(|dep| dep.pkg).collect();
let to_visit = dumb_deps.clone();
let node = MetadataResolveNode {
id: pkg_id,
dependencies: dumb_deps,
deps,
features,
};
node_map.insert(pkg_id, node);
for dep_id in to_visit {
build_resolve_graph_r(
node_map,
dep_id,
resolve,
package_map,
target_data,
requested_kind,
);
}
}
|
MetadataResolve
|
identifier_name
|
htmlcanvaselement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::{CanvasMsg, FromLayoutMsg};
use dom::attr::Attr;
use dom::attr::AttrValue;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding::CanvasRenderingContext2DMethods;
use dom::bindings::codegen::Bindings::HTMLCanvasElementBinding;
use dom::bindings::codegen::Bindings::HTMLCanvasElementBinding::HTMLCanvasElementMethods;
use dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLContextAttributes;
use dom::bindings::codegen::UnionTypes::CanvasRenderingContext2DOrWebGLRenderingContext;
use dom::bindings::error::{Error, Fallible};
use dom::bindings::global::GlobalRef;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{HeapGCValue, JS, LayoutJS, Root};
use dom::bindings::num::Finite;
use dom::bindings::reflector::Reflectable;
use dom::canvasrenderingcontext2d::{CanvasRenderingContext2D, LayoutCanvasRenderingContext2DHelpers};
use dom::document::Document;
use dom::element::{AttributeMutation, Element, RawLayoutElementHelpers};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, window_from_node};
use dom::virtualmethods::VirtualMethods;
use dom::webglrenderingcontext::{LayoutCanvasWebGLRenderingContextHelpers, WebGLRenderingContext};
use euclid::size::Size2D;
use image::ColorType;
use image::png::PNGEncoder;
use ipc_channel::ipc::{self, IpcSender};
use js::jsapi::{HandleValue, JSContext};
use offscreen_gl_context::GLContextAttributes;
use rustc_serialize::base64::{STANDARD, ToBase64};
use std::iter::repeat;
use string_cache::Atom;
use util::str::DOMString;
const DEFAULT_WIDTH: u32 = 300;
const DEFAULT_HEIGHT: u32 = 150;
#[must_root]
#[derive(JSTraceable, Clone, HeapSizeOf)]
pub enum CanvasContext {
Context2d(JS<CanvasRenderingContext2D>),
WebGL(JS<WebGLRenderingContext>),
}
impl HeapGCValue for CanvasContext {}
#[dom_struct]
pub struct HTMLCanvasElement {
htmlelement: HTMLElement,
context: DOMRefCell<Option<CanvasContext>>,
}
impl PartialEq for HTMLCanvasElement {
fn eq(&self, other: &HTMLCanvasElement) -> bool {
self as *const HTMLCanvasElement == &*other
}
}
impl HTMLCanvasElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLCanvasElement {
HTMLCanvasElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document),
context: DOMRefCell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLCanvasElement> {
let element = HTMLCanvasElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLCanvasElementBinding::Wrap)
}
fn recreate_contexts(&self) {
let size = self.get_size();
if let Some(ref context) = *self.context.borrow() {
match *context {
CanvasContext::Context2d(ref context) => context.recreate(size),
CanvasContext::WebGL(ref context) => context.recreate(size),
}
}
}
pub fn get_size(&self) -> Size2D<i32> {
Size2D::new(self.Width() as i32, self.Height() as i32)
}
}
pub struct HTMLCanvasData {
pub renderer_id: Option<usize>,
pub ipc_renderer: Option<IpcSender<CanvasMsg>>,
pub width: u32,
pub height: u32,
}
pub trait LayoutHTMLCanvasElementHelpers {
fn data(&self) -> HTMLCanvasData;
}
impl LayoutHTMLCanvasElementHelpers for LayoutJS<HTMLCanvasElement> {
#[allow(unsafe_code)]
fn data(&self) -> HTMLCanvasData {
unsafe {
let canvas = &*self.unsafe_get();
let (renderer_id, ipc_renderer) = match canvas.context.borrow_for_layout().as_ref() {
Some(&CanvasContext::Context2d(ref context)) => {
let context = context.to_layout();
(Some(context.get_renderer_id()), Some(context.get_ipc_renderer()))
},
Some(&CanvasContext::WebGL(ref context)) => {
let context = context.to_layout();
(Some(context.get_renderer_id()), Some(context.get_ipc_renderer()))
},
None => (None, None),
};
let width_attr = canvas.upcast::<Element>().get_attr_for_layout(&ns!(""), &atom!(width));
let height_attr = canvas.upcast::<Element>().get_attr_for_layout(&ns!(""), &atom!(height));
HTMLCanvasData {
renderer_id: renderer_id,
ipc_renderer: ipc_renderer,
width: width_attr.map_or(DEFAULT_WIDTH, |val| val.as_uint()),
height: height_attr.map_or(DEFAULT_HEIGHT, |val| val.as_uint()),
}
}
}
}
impl HTMLCanvasElement {
pub fn ipc_renderer(&self) -> Option<IpcSender<CanvasMsg>> {
self.context.borrow().as_ref().map(|context| {
match *context {
CanvasContext::Context2d(ref context) => context.ipc_renderer(),
CanvasContext::WebGL(ref context) => context.ipc_renderer(),
}
})
}
pub fn get_or_init_2d_context(&self) -> Option<Root<CanvasRenderingContext2D>> {
if self.context.borrow().is_none() {
let window = window_from_node(self);
let size = self.get_size();
let context = CanvasRenderingContext2D::new(GlobalRef::Window(window.r()), self, size);
*self.context.borrow_mut() = Some(CanvasContext::Context2d(JS::from_rooted(&context)));
}
match *self.context.borrow().as_ref().unwrap() {
CanvasContext::Context2d(ref context) => Some(Root::from_ref(&*context)),
_ => None,
}
}
#[allow(unsafe_code)]
pub fn get_or_init_webgl_context(&self,
cx: *mut JSContext,
attrs: Option<HandleValue>) -> Option<Root<WebGLRenderingContext>> {
if self.context.borrow().is_none() {
let window = window_from_node(self);
let size = self.get_size();
let attrs = if let Some(webgl_attributes) = attrs {
if let Ok(ref attrs) = unsafe { WebGLContextAttributes::new(cx, webgl_attributes) } {
From::from(attrs)
} else {
debug!("Unexpected error on conversion of WebGLContextAttributes");
return None;
}
} else {
GLContextAttributes::default()
};
let maybe_ctx = WebGLRenderingContext::new(GlobalRef::Window(window.r()), self, size, attrs);
*self.context.borrow_mut() = maybe_ctx.map( |ctx| CanvasContext::WebGL(JS::from_rooted(&ctx)));
}
if let Some(CanvasContext::WebGL(ref context)) = *self.context.borrow() {
Some(Root::from_ref(&*context))
} else {
None
}
}
pub fn is_valid(&self) -> bool {
self.Height()!= 0 && self.Width()!= 0
}
pub fn fetch_all_data(&self) -> Option<(Vec<u8>, Size2D<i32>)> {
let size = self.get_size();
if size.width == 0 || size.height == 0 {
return None
}
let data = if let Some(renderer) = self.ipc_renderer() {
let (sender, receiver) = ipc::channel().unwrap();
let msg = CanvasMsg::FromLayout(FromLayoutMsg::SendPixelContents(sender));
renderer.send(msg).unwrap();
receiver.recv().unwrap().to_vec()
} else {
repeat(0xffu8).take((size.height as usize) * (size.width as usize) * 4).collect()
};
Some((data, size))
}
}
impl HTMLCanvasElementMethods for HTMLCanvasElement {
// https://html.spec.whatwg.org/multipage/#dom-canvas-width
make_uint_getter!(Width, "width", DEFAULT_WIDTH);
// https://html.spec.whatwg.org/multipage/#dom-canvas-width
make_uint_setter!(SetWidth, "width", DEFAULT_WIDTH);
// https://html.spec.whatwg.org/multipage/#dom-canvas-height
make_uint_getter!(Height, "height", DEFAULT_HEIGHT);
// https://html.spec.whatwg.org/multipage/#dom-canvas-height
make_uint_setter!(SetHeight, "height", DEFAULT_HEIGHT);
// https://html.spec.whatwg.org/multipage/#dom-canvas-getcontext
fn GetContext(&self,
cx: *mut JSContext,
id: DOMString,
attributes: Vec<HandleValue>)
-> Option<CanvasRenderingContext2DOrWebGLRenderingContext> {
match &*id {
"2d" => {
self.get_or_init_2d_context()
.map(CanvasRenderingContext2DOrWebGLRenderingContext::eCanvasRenderingContext2D)
}
"webgl" | "experimental-webgl" => {
self.get_or_init_webgl_context(cx, attributes.get(0).map(|p| *p))
.map(CanvasRenderingContext2DOrWebGLRenderingContext::eWebGLRenderingContext)
}
_ => None
}
}
// https://html.spec.whatwg.org/multipage/#dom-canvas-todataurl
fn ToDataURL(&self,
_context: *mut JSContext,
_mime_type: Option<DOMString>,
_arguments: Vec<HandleValue>) -> Fallible<DOMString> {
// Step 1: Check the origin-clean flag (should be set in fillText/strokeText
// and currently unimplemented)
// Step 2.
if self.Width() == 0 || self.Height() == 0 {
return Ok(DOMString::from("data:,"));
}
// Step 3.
if let Some(CanvasContext::Context2d(ref context)) = *self.context.borrow() {
let window = window_from_node(self);
let image_data = try!(context.GetImageData(Finite::wrap(0f64), Finite::wrap(0f64),
Finite::wrap(self.Width() as f64),
Finite::wrap(self.Height() as f64)));
let raw_data = image_data.get_data_array(&GlobalRef::Window(window.r()));
// Only handle image/png for now.
let mime_type = "image/png";
let mut encoded = Vec::new();
{
let encoder: PNGEncoder<&mut Vec<u8>> = PNGEncoder::new(&mut encoded);
encoder.encode(&raw_data, self.Width(), self.Height(), ColorType::RGBA(8)).unwrap();
}
let encoded = encoded.to_base64(STANDARD);
Ok(DOMString::from(format!("data:{};base64,{}", mime_type, encoded)))
} else {
Err(Error::NotSupported)
}
}
}
impl VirtualMethods for HTMLCanvasElement {
fn super_type(&self) -> Option<&VirtualMethods>
|
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match attr.local_name() {
&atom!(width) | &atom!(height) => self.recreate_contexts(),
_ => (),
};
}
fn parse_plain_attribute(&self, name: &Atom, value: DOMString) -> AttrValue {
match name {
&atom!("width") => AttrValue::from_u32(value, DEFAULT_WIDTH),
&atom!("height") => AttrValue::from_u32(value, DEFAULT_HEIGHT),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
}
impl<'a> From<&'a WebGLContextAttributes> for GLContextAttributes {
fn from(attrs: &'a WebGLContextAttributes) -> GLContextAttributes {
GLContextAttributes {
alpha: attrs.alpha,
depth: attrs.depth,
stencil: attrs.stencil,
antialias: attrs.antialias,
premultiplied_alpha: attrs.premultipliedAlpha,
preserve_drawing_buffer: attrs.preserveDrawingBuffer,
}
}
}
pub mod utils {
use dom::window::Window;
use ipc_channel::ipc;
use net_traits::image_cache_task::{ImageCacheChan, ImageResponse};
use url::Url;
pub fn request_image_from_cache(window: &Window, url: Url) -> ImageResponse {
let image_cache = window.image_cache_task();
let (response_chan, response_port) = ipc::channel().unwrap();
image_cache.request_image(url, ImageCacheChan(response_chan), None);
let result = response_port.recv().unwrap();
result.image_response
}
}
|
{
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
|
identifier_body
|
htmlcanvaselement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::{CanvasMsg, FromLayoutMsg};
use dom::attr::Attr;
use dom::attr::AttrValue;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding::CanvasRenderingContext2DMethods;
use dom::bindings::codegen::Bindings::HTMLCanvasElementBinding;
use dom::bindings::codegen::Bindings::HTMLCanvasElementBinding::HTMLCanvasElementMethods;
use dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLContextAttributes;
use dom::bindings::codegen::UnionTypes::CanvasRenderingContext2DOrWebGLRenderingContext;
use dom::bindings::error::{Error, Fallible};
use dom::bindings::global::GlobalRef;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{HeapGCValue, JS, LayoutJS, Root};
use dom::bindings::num::Finite;
use dom::bindings::reflector::Reflectable;
use dom::canvasrenderingcontext2d::{CanvasRenderingContext2D, LayoutCanvasRenderingContext2DHelpers};
use dom::document::Document;
use dom::element::{AttributeMutation, Element, RawLayoutElementHelpers};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, window_from_node};
use dom::virtualmethods::VirtualMethods;
use dom::webglrenderingcontext::{LayoutCanvasWebGLRenderingContextHelpers, WebGLRenderingContext};
use euclid::size::Size2D;
use image::ColorType;
use image::png::PNGEncoder;
use ipc_channel::ipc::{self, IpcSender};
use js::jsapi::{HandleValue, JSContext};
use offscreen_gl_context::GLContextAttributes;
use rustc_serialize::base64::{STANDARD, ToBase64};
use std::iter::repeat;
use string_cache::Atom;
use util::str::DOMString;
const DEFAULT_WIDTH: u32 = 300;
const DEFAULT_HEIGHT: u32 = 150;
#[must_root]
#[derive(JSTraceable, Clone, HeapSizeOf)]
pub enum CanvasContext {
Context2d(JS<CanvasRenderingContext2D>),
WebGL(JS<WebGLRenderingContext>),
}
impl HeapGCValue for CanvasContext {}
#[dom_struct]
pub struct HTMLCanvasElement {
htmlelement: HTMLElement,
context: DOMRefCell<Option<CanvasContext>>,
}
impl PartialEq for HTMLCanvasElement {
fn eq(&self, other: &HTMLCanvasElement) -> bool {
self as *const HTMLCanvasElement == &*other
}
}
impl HTMLCanvasElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLCanvasElement {
HTMLCanvasElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document),
context: DOMRefCell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLCanvasElement> {
let element = HTMLCanvasElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLCanvasElementBinding::Wrap)
}
fn recreate_contexts(&self) {
let size = self.get_size();
if let Some(ref context) = *self.context.borrow() {
match *context {
CanvasContext::Context2d(ref context) => context.recreate(size),
CanvasContext::WebGL(ref context) => context.recreate(size),
}
}
}
pub fn get_size(&self) -> Size2D<i32> {
Size2D::new(self.Width() as i32, self.Height() as i32)
}
}
pub struct
|
{
pub renderer_id: Option<usize>,
pub ipc_renderer: Option<IpcSender<CanvasMsg>>,
pub width: u32,
pub height: u32,
}
pub trait LayoutHTMLCanvasElementHelpers {
fn data(&self) -> HTMLCanvasData;
}
impl LayoutHTMLCanvasElementHelpers for LayoutJS<HTMLCanvasElement> {
#[allow(unsafe_code)]
fn data(&self) -> HTMLCanvasData {
unsafe {
let canvas = &*self.unsafe_get();
let (renderer_id, ipc_renderer) = match canvas.context.borrow_for_layout().as_ref() {
Some(&CanvasContext::Context2d(ref context)) => {
let context = context.to_layout();
(Some(context.get_renderer_id()), Some(context.get_ipc_renderer()))
},
Some(&CanvasContext::WebGL(ref context)) => {
let context = context.to_layout();
(Some(context.get_renderer_id()), Some(context.get_ipc_renderer()))
},
None => (None, None),
};
let width_attr = canvas.upcast::<Element>().get_attr_for_layout(&ns!(""), &atom!(width));
let height_attr = canvas.upcast::<Element>().get_attr_for_layout(&ns!(""), &atom!(height));
HTMLCanvasData {
renderer_id: renderer_id,
ipc_renderer: ipc_renderer,
width: width_attr.map_or(DEFAULT_WIDTH, |val| val.as_uint()),
height: height_attr.map_or(DEFAULT_HEIGHT, |val| val.as_uint()),
}
}
}
}
impl HTMLCanvasElement {
pub fn ipc_renderer(&self) -> Option<IpcSender<CanvasMsg>> {
self.context.borrow().as_ref().map(|context| {
match *context {
CanvasContext::Context2d(ref context) => context.ipc_renderer(),
CanvasContext::WebGL(ref context) => context.ipc_renderer(),
}
})
}
pub fn get_or_init_2d_context(&self) -> Option<Root<CanvasRenderingContext2D>> {
if self.context.borrow().is_none() {
let window = window_from_node(self);
let size = self.get_size();
let context = CanvasRenderingContext2D::new(GlobalRef::Window(window.r()), self, size);
*self.context.borrow_mut() = Some(CanvasContext::Context2d(JS::from_rooted(&context)));
}
match *self.context.borrow().as_ref().unwrap() {
CanvasContext::Context2d(ref context) => Some(Root::from_ref(&*context)),
_ => None,
}
}
#[allow(unsafe_code)]
pub fn get_or_init_webgl_context(&self,
cx: *mut JSContext,
attrs: Option<HandleValue>) -> Option<Root<WebGLRenderingContext>> {
if self.context.borrow().is_none() {
let window = window_from_node(self);
let size = self.get_size();
let attrs = if let Some(webgl_attributes) = attrs {
if let Ok(ref attrs) = unsafe { WebGLContextAttributes::new(cx, webgl_attributes) } {
From::from(attrs)
} else {
debug!("Unexpected error on conversion of WebGLContextAttributes");
return None;
}
} else {
GLContextAttributes::default()
};
let maybe_ctx = WebGLRenderingContext::new(GlobalRef::Window(window.r()), self, size, attrs);
*self.context.borrow_mut() = maybe_ctx.map( |ctx| CanvasContext::WebGL(JS::from_rooted(&ctx)));
}
if let Some(CanvasContext::WebGL(ref context)) = *self.context.borrow() {
Some(Root::from_ref(&*context))
} else {
None
}
}
pub fn is_valid(&self) -> bool {
self.Height()!= 0 && self.Width()!= 0
}
pub fn fetch_all_data(&self) -> Option<(Vec<u8>, Size2D<i32>)> {
let size = self.get_size();
if size.width == 0 || size.height == 0 {
return None
}
let data = if let Some(renderer) = self.ipc_renderer() {
let (sender, receiver) = ipc::channel().unwrap();
let msg = CanvasMsg::FromLayout(FromLayoutMsg::SendPixelContents(sender));
renderer.send(msg).unwrap();
receiver.recv().unwrap().to_vec()
} else {
repeat(0xffu8).take((size.height as usize) * (size.width as usize) * 4).collect()
};
Some((data, size))
}
}
impl HTMLCanvasElementMethods for HTMLCanvasElement {
// https://html.spec.whatwg.org/multipage/#dom-canvas-width
make_uint_getter!(Width, "width", DEFAULT_WIDTH);
// https://html.spec.whatwg.org/multipage/#dom-canvas-width
make_uint_setter!(SetWidth, "width", DEFAULT_WIDTH);
// https://html.spec.whatwg.org/multipage/#dom-canvas-height
make_uint_getter!(Height, "height", DEFAULT_HEIGHT);
// https://html.spec.whatwg.org/multipage/#dom-canvas-height
make_uint_setter!(SetHeight, "height", DEFAULT_HEIGHT);
// https://html.spec.whatwg.org/multipage/#dom-canvas-getcontext
fn GetContext(&self,
cx: *mut JSContext,
id: DOMString,
attributes: Vec<HandleValue>)
-> Option<CanvasRenderingContext2DOrWebGLRenderingContext> {
match &*id {
"2d" => {
self.get_or_init_2d_context()
.map(CanvasRenderingContext2DOrWebGLRenderingContext::eCanvasRenderingContext2D)
}
"webgl" | "experimental-webgl" => {
self.get_or_init_webgl_context(cx, attributes.get(0).map(|p| *p))
.map(CanvasRenderingContext2DOrWebGLRenderingContext::eWebGLRenderingContext)
}
_ => None
}
}
// https://html.spec.whatwg.org/multipage/#dom-canvas-todataurl
fn ToDataURL(&self,
_context: *mut JSContext,
_mime_type: Option<DOMString>,
_arguments: Vec<HandleValue>) -> Fallible<DOMString> {
// Step 1: Check the origin-clean flag (should be set in fillText/strokeText
// and currently unimplemented)
// Step 2.
if self.Width() == 0 || self.Height() == 0 {
return Ok(DOMString::from("data:,"));
}
// Step 3.
if let Some(CanvasContext::Context2d(ref context)) = *self.context.borrow() {
let window = window_from_node(self);
let image_data = try!(context.GetImageData(Finite::wrap(0f64), Finite::wrap(0f64),
Finite::wrap(self.Width() as f64),
Finite::wrap(self.Height() as f64)));
let raw_data = image_data.get_data_array(&GlobalRef::Window(window.r()));
// Only handle image/png for now.
let mime_type = "image/png";
let mut encoded = Vec::new();
{
let encoder: PNGEncoder<&mut Vec<u8>> = PNGEncoder::new(&mut encoded);
encoder.encode(&raw_data, self.Width(), self.Height(), ColorType::RGBA(8)).unwrap();
}
let encoded = encoded.to_base64(STANDARD);
Ok(DOMString::from(format!("data:{};base64,{}", mime_type, encoded)))
} else {
Err(Error::NotSupported)
}
}
}
impl VirtualMethods for HTMLCanvasElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match attr.local_name() {
&atom!(width) | &atom!(height) => self.recreate_contexts(),
_ => (),
};
}
fn parse_plain_attribute(&self, name: &Atom, value: DOMString) -> AttrValue {
match name {
&atom!("width") => AttrValue::from_u32(value, DEFAULT_WIDTH),
&atom!("height") => AttrValue::from_u32(value, DEFAULT_HEIGHT),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
}
impl<'a> From<&'a WebGLContextAttributes> for GLContextAttributes {
fn from(attrs: &'a WebGLContextAttributes) -> GLContextAttributes {
GLContextAttributes {
alpha: attrs.alpha,
depth: attrs.depth,
stencil: attrs.stencil,
antialias: attrs.antialias,
premultiplied_alpha: attrs.premultipliedAlpha,
preserve_drawing_buffer: attrs.preserveDrawingBuffer,
}
}
}
pub mod utils {
use dom::window::Window;
use ipc_channel::ipc;
use net_traits::image_cache_task::{ImageCacheChan, ImageResponse};
use url::Url;
pub fn request_image_from_cache(window: &Window, url: Url) -> ImageResponse {
let image_cache = window.image_cache_task();
let (response_chan, response_port) = ipc::channel().unwrap();
image_cache.request_image(url, ImageCacheChan(response_chan), None);
let result = response_port.recv().unwrap();
result.image_response
}
}
|
HTMLCanvasData
|
identifier_name
|
htmlcanvaselement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::{CanvasMsg, FromLayoutMsg};
use dom::attr::Attr;
use dom::attr::AttrValue;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding::CanvasRenderingContext2DMethods;
use dom::bindings::codegen::Bindings::HTMLCanvasElementBinding;
use dom::bindings::codegen::Bindings::HTMLCanvasElementBinding::HTMLCanvasElementMethods;
use dom::bindings::codegen::Bindings::WebGLRenderingContextBinding::WebGLContextAttributes;
use dom::bindings::codegen::UnionTypes::CanvasRenderingContext2DOrWebGLRenderingContext;
use dom::bindings::error::{Error, Fallible};
use dom::bindings::global::GlobalRef;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{HeapGCValue, JS, LayoutJS, Root};
use dom::bindings::num::Finite;
use dom::bindings::reflector::Reflectable;
use dom::canvasrenderingcontext2d::{CanvasRenderingContext2D, LayoutCanvasRenderingContext2DHelpers};
use dom::document::Document;
use dom::element::{AttributeMutation, Element, RawLayoutElementHelpers};
use dom::htmlelement::HTMLElement;
use dom::node::{Node, window_from_node};
use dom::virtualmethods::VirtualMethods;
use dom::webglrenderingcontext::{LayoutCanvasWebGLRenderingContextHelpers, WebGLRenderingContext};
use euclid::size::Size2D;
use image::ColorType;
|
use ipc_channel::ipc::{self, IpcSender};
use js::jsapi::{HandleValue, JSContext};
use offscreen_gl_context::GLContextAttributes;
use rustc_serialize::base64::{STANDARD, ToBase64};
use std::iter::repeat;
use string_cache::Atom;
use util::str::DOMString;
const DEFAULT_WIDTH: u32 = 300;
const DEFAULT_HEIGHT: u32 = 150;
#[must_root]
#[derive(JSTraceable, Clone, HeapSizeOf)]
pub enum CanvasContext {
Context2d(JS<CanvasRenderingContext2D>),
WebGL(JS<WebGLRenderingContext>),
}
impl HeapGCValue for CanvasContext {}
#[dom_struct]
pub struct HTMLCanvasElement {
htmlelement: HTMLElement,
context: DOMRefCell<Option<CanvasContext>>,
}
impl PartialEq for HTMLCanvasElement {
fn eq(&self, other: &HTMLCanvasElement) -> bool {
self as *const HTMLCanvasElement == &*other
}
}
impl HTMLCanvasElement {
fn new_inherited(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> HTMLCanvasElement {
HTMLCanvasElement {
htmlelement: HTMLElement::new_inherited(localName, prefix, document),
context: DOMRefCell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLCanvasElement> {
let element = HTMLCanvasElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLCanvasElementBinding::Wrap)
}
fn recreate_contexts(&self) {
let size = self.get_size();
if let Some(ref context) = *self.context.borrow() {
match *context {
CanvasContext::Context2d(ref context) => context.recreate(size),
CanvasContext::WebGL(ref context) => context.recreate(size),
}
}
}
pub fn get_size(&self) -> Size2D<i32> {
Size2D::new(self.Width() as i32, self.Height() as i32)
}
}
pub struct HTMLCanvasData {
pub renderer_id: Option<usize>,
pub ipc_renderer: Option<IpcSender<CanvasMsg>>,
pub width: u32,
pub height: u32,
}
pub trait LayoutHTMLCanvasElementHelpers {
fn data(&self) -> HTMLCanvasData;
}
impl LayoutHTMLCanvasElementHelpers for LayoutJS<HTMLCanvasElement> {
#[allow(unsafe_code)]
fn data(&self) -> HTMLCanvasData {
unsafe {
let canvas = &*self.unsafe_get();
let (renderer_id, ipc_renderer) = match canvas.context.borrow_for_layout().as_ref() {
Some(&CanvasContext::Context2d(ref context)) => {
let context = context.to_layout();
(Some(context.get_renderer_id()), Some(context.get_ipc_renderer()))
},
Some(&CanvasContext::WebGL(ref context)) => {
let context = context.to_layout();
(Some(context.get_renderer_id()), Some(context.get_ipc_renderer()))
},
None => (None, None),
};
let width_attr = canvas.upcast::<Element>().get_attr_for_layout(&ns!(""), &atom!(width));
let height_attr = canvas.upcast::<Element>().get_attr_for_layout(&ns!(""), &atom!(height));
HTMLCanvasData {
renderer_id: renderer_id,
ipc_renderer: ipc_renderer,
width: width_attr.map_or(DEFAULT_WIDTH, |val| val.as_uint()),
height: height_attr.map_or(DEFAULT_HEIGHT, |val| val.as_uint()),
}
}
}
}
impl HTMLCanvasElement {
pub fn ipc_renderer(&self) -> Option<IpcSender<CanvasMsg>> {
self.context.borrow().as_ref().map(|context| {
match *context {
CanvasContext::Context2d(ref context) => context.ipc_renderer(),
CanvasContext::WebGL(ref context) => context.ipc_renderer(),
}
})
}
pub fn get_or_init_2d_context(&self) -> Option<Root<CanvasRenderingContext2D>> {
if self.context.borrow().is_none() {
let window = window_from_node(self);
let size = self.get_size();
let context = CanvasRenderingContext2D::new(GlobalRef::Window(window.r()), self, size);
*self.context.borrow_mut() = Some(CanvasContext::Context2d(JS::from_rooted(&context)));
}
match *self.context.borrow().as_ref().unwrap() {
CanvasContext::Context2d(ref context) => Some(Root::from_ref(&*context)),
_ => None,
}
}
#[allow(unsafe_code)]
pub fn get_or_init_webgl_context(&self,
cx: *mut JSContext,
attrs: Option<HandleValue>) -> Option<Root<WebGLRenderingContext>> {
if self.context.borrow().is_none() {
let window = window_from_node(self);
let size = self.get_size();
let attrs = if let Some(webgl_attributes) = attrs {
if let Ok(ref attrs) = unsafe { WebGLContextAttributes::new(cx, webgl_attributes) } {
From::from(attrs)
} else {
debug!("Unexpected error on conversion of WebGLContextAttributes");
return None;
}
} else {
GLContextAttributes::default()
};
let maybe_ctx = WebGLRenderingContext::new(GlobalRef::Window(window.r()), self, size, attrs);
*self.context.borrow_mut() = maybe_ctx.map( |ctx| CanvasContext::WebGL(JS::from_rooted(&ctx)));
}
if let Some(CanvasContext::WebGL(ref context)) = *self.context.borrow() {
Some(Root::from_ref(&*context))
} else {
None
}
}
pub fn is_valid(&self) -> bool {
self.Height()!= 0 && self.Width()!= 0
}
pub fn fetch_all_data(&self) -> Option<(Vec<u8>, Size2D<i32>)> {
let size = self.get_size();
if size.width == 0 || size.height == 0 {
return None
}
let data = if let Some(renderer) = self.ipc_renderer() {
let (sender, receiver) = ipc::channel().unwrap();
let msg = CanvasMsg::FromLayout(FromLayoutMsg::SendPixelContents(sender));
renderer.send(msg).unwrap();
receiver.recv().unwrap().to_vec()
} else {
repeat(0xffu8).take((size.height as usize) * (size.width as usize) * 4).collect()
};
Some((data, size))
}
}
impl HTMLCanvasElementMethods for HTMLCanvasElement {
// https://html.spec.whatwg.org/multipage/#dom-canvas-width
make_uint_getter!(Width, "width", DEFAULT_WIDTH);
// https://html.spec.whatwg.org/multipage/#dom-canvas-width
make_uint_setter!(SetWidth, "width", DEFAULT_WIDTH);
// https://html.spec.whatwg.org/multipage/#dom-canvas-height
make_uint_getter!(Height, "height", DEFAULT_HEIGHT);
// https://html.spec.whatwg.org/multipage/#dom-canvas-height
make_uint_setter!(SetHeight, "height", DEFAULT_HEIGHT);
// https://html.spec.whatwg.org/multipage/#dom-canvas-getcontext
fn GetContext(&self,
cx: *mut JSContext,
id: DOMString,
attributes: Vec<HandleValue>)
-> Option<CanvasRenderingContext2DOrWebGLRenderingContext> {
match &*id {
"2d" => {
self.get_or_init_2d_context()
.map(CanvasRenderingContext2DOrWebGLRenderingContext::eCanvasRenderingContext2D)
}
"webgl" | "experimental-webgl" => {
self.get_or_init_webgl_context(cx, attributes.get(0).map(|p| *p))
.map(CanvasRenderingContext2DOrWebGLRenderingContext::eWebGLRenderingContext)
}
_ => None
}
}
// https://html.spec.whatwg.org/multipage/#dom-canvas-todataurl
fn ToDataURL(&self,
_context: *mut JSContext,
_mime_type: Option<DOMString>,
_arguments: Vec<HandleValue>) -> Fallible<DOMString> {
// Step 1: Check the origin-clean flag (should be set in fillText/strokeText
// and currently unimplemented)
// Step 2.
if self.Width() == 0 || self.Height() == 0 {
return Ok(DOMString::from("data:,"));
}
// Step 3.
if let Some(CanvasContext::Context2d(ref context)) = *self.context.borrow() {
let window = window_from_node(self);
let image_data = try!(context.GetImageData(Finite::wrap(0f64), Finite::wrap(0f64),
Finite::wrap(self.Width() as f64),
Finite::wrap(self.Height() as f64)));
let raw_data = image_data.get_data_array(&GlobalRef::Window(window.r()));
// Only handle image/png for now.
let mime_type = "image/png";
let mut encoded = Vec::new();
{
let encoder: PNGEncoder<&mut Vec<u8>> = PNGEncoder::new(&mut encoded);
encoder.encode(&raw_data, self.Width(), self.Height(), ColorType::RGBA(8)).unwrap();
}
let encoded = encoded.to_base64(STANDARD);
Ok(DOMString::from(format!("data:{};base64,{}", mime_type, encoded)))
} else {
Err(Error::NotSupported)
}
}
}
impl VirtualMethods for HTMLCanvasElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match attr.local_name() {
&atom!(width) | &atom!(height) => self.recreate_contexts(),
_ => (),
};
}
fn parse_plain_attribute(&self, name: &Atom, value: DOMString) -> AttrValue {
match name {
&atom!("width") => AttrValue::from_u32(value, DEFAULT_WIDTH),
&atom!("height") => AttrValue::from_u32(value, DEFAULT_HEIGHT),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
}
impl<'a> From<&'a WebGLContextAttributes> for GLContextAttributes {
fn from(attrs: &'a WebGLContextAttributes) -> GLContextAttributes {
GLContextAttributes {
alpha: attrs.alpha,
depth: attrs.depth,
stencil: attrs.stencil,
antialias: attrs.antialias,
premultiplied_alpha: attrs.premultipliedAlpha,
preserve_drawing_buffer: attrs.preserveDrawingBuffer,
}
}
}
pub mod utils {
use dom::window::Window;
use ipc_channel::ipc;
use net_traits::image_cache_task::{ImageCacheChan, ImageResponse};
use url::Url;
pub fn request_image_from_cache(window: &Window, url: Url) -> ImageResponse {
let image_cache = window.image_cache_task();
let (response_chan, response_port) = ipc::channel().unwrap();
image_cache.request_image(url, ImageCacheChan(response_chan), None);
let result = response_port.recv().unwrap();
result.image_response
}
}
|
use image::png::PNGEncoder;
|
random_line_split
|
manifest.rs
|
// Copyright (c) 2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::str::FromStr;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use base64;
use clap::ArgMatches;
use hcore::package::{PackageArchive, PackageIdent};
use common::ui::UI;
use export_docker::{Result, DockerImage};
use manifestjson::ManifestJson;
use service_bind::ServiceBind;
use topology::Topology;
/// Represents a Kubernetes manifest.
#[derive(Debug, Clone)]
pub struct
|
{
/// The identifier of the Habitat package
pub pkg_ident: PackageIdent,
/// Name of the Kubernetes resource.
pub metadata_name: String,
/// The docker image.
pub image: String,
/// The number of desired instances in the service group.
pub count: u64,
/// The relationship of a service with peers in the same service group.
pub service_topology: Topology,
/// The logical group of services in the service group.
pub service_group: Option<String>,
/// The config file content (in base64 encoded format).
pub config: Option<String>,
/// The name of the Kubernetes secret that contains the ring key, which encrypts the
/// communication between Habitat supervisors.
pub ring_secret_name: Option<String>,
/// Any binds, as `ServiceBind` instances.
pub binds: Vec<ServiceBind>,
}
impl Manifest {
///
/// Create a Manifest instance from command-line arguments passed as [`clap::ArgMatches`].
///
/// [`clap::ArgMatches`]: https://kbknapp.github.io/clap-rs/clap/struct.ArgMatches.html
pub fn new_from_cli_matches(
_ui: &mut UI,
matches: &ArgMatches,
image: Option<DockerImage>,
) -> Result<Self> {
let count = matches.value_of("COUNT").unwrap_or("1").parse()?;
let topology: Topology = matches
.value_of("TOPOLOGY")
.unwrap_or("standalone")
.parse()
.unwrap_or(Default::default());
let group = matches.value_of("GROUP").map(|s| s.to_string());
let config_file = matches.value_of("CONFIG");
let ring_secret_name = matches.value_of("RING_SECRET_NAME").map(|s| s.to_string());
// clap ensures that we do have the mandatory args so unwrap() is fine here
let pkg_ident_str = matches.value_of("PKG_IDENT_OR_ARTIFACT").expect(
"No package specified",
);
let pkg_ident = if Path::new(pkg_ident_str).is_file() {
// We're going to use the `$pkg_origin/$pkg_name`, fuzzy form of a package
// identifier to ensure that update strategies will work if desired
PackageArchive::new(pkg_ident_str).ident()?
} else {
PackageIdent::from_str(pkg_ident_str)?
};
let version_suffix = match pkg_ident.version {
Some(ref v) => {
pkg_ident
.release
.as_ref()
.map(|r| format!("{}-{}", v, r))
.unwrap_or(v.to_string())
}
None => "latest".to_owned(),
};
let name = matches
.value_of("K8S_NAME")
.map(|s| s.to_string())
.unwrap_or_else(|| format!("{}-{}", pkg_ident.name, version_suffix));
let image_name = match matches.value_of("IMAGE_NAME") {
Some(i) => i.to_string(),
None => {
let (image_name, tag) = match image {
Some(i) => {
(
i.name().to_owned(),
i.tags().get(0).cloned().unwrap_or_else(
|| "latest".to_owned(),
),
)
}
None => {
(
format!("{}/{}", pkg_ident.origin, pkg_ident.name),
version_suffix,
)
}
};
format!("{}:{}", image_name, tag)
}
};
let binds = ServiceBind::from_args(&matches)?;
let config = match config_file {
None => None,
Some(name) => {
let mut contents = String::new();
File::open(name)?.read_to_string(&mut contents)?;
Some(base64::encode(&format!("{}", contents)))
}
};
Ok(Manifest {
pkg_ident: pkg_ident,
metadata_name: name,
image: image_name,
count: count,
service_topology: topology,
service_group: group,
config: config,
ring_secret_name: ring_secret_name,
binds: binds,
})
}
/// Generates the manifest as a string and writes it to `write`.
pub fn generate(&mut self, write: &mut Write) -> Result<()> {
let out: String = ManifestJson::new(&self).into();
write.write(out.as_bytes())?;
Ok(())
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // Golden-file test: a fully populated Manifest (config + ring secret,
    // no binds) must render exactly the fixture YAML.
    #[test]
    fn test_manifest_generation() {
        let mut m = Manifest {
            pkg_ident: PackageIdent::from_str("core/nginx").unwrap(),
            metadata_name: "nginx-latest".to_owned(),
            image: "core/nginx:latest".to_owned(),
            count: 3,
            service_topology: Default::default(),
            service_group: Some("group1".to_owned()),
            config: Some(base64::encode(&format!("{}", "port = 4444"))),
            ring_secret_name: Some("deltaechofoxtrot".to_owned()),
            binds: vec![],
        };
        let expected = include_str!("../tests/KubernetesManifestTest.yaml");
        // Render into an in-memory buffer and compare against the fixture.
        let mut o = vec![];
        m.generate(&mut o).unwrap();
        let out = String::from_utf8(o).unwrap();
        assert_eq!(out, expected);
    }
    // Same golden-file check, but exercising the bind-rendering path
    // (one bind, no config).
    #[test]
    fn test_manifest_generation_binds() {
        let mut m = Manifest {
            pkg_ident: PackageIdent::from_str("core/nginx").unwrap(),
            metadata_name: "nginx-latest".to_owned(),
            image: "core/nginx:latest".to_owned(),
            count: 3,
            service_topology: Default::default(),
            service_group: Some("group1".to_owned()),
            config: None,
            ring_secret_name: Some("deltaechofoxtrot".to_owned()),
            binds: vec!["name1:service1.group1".parse().unwrap()],
        };
        let expected = include_str!("../tests/KubernetesManifestTestBinds.yaml");
        let mut o = vec![];
        m.generate(&mut o).unwrap();
        let out = String::from_utf8(o).unwrap();
        assert_eq!(out, expected);
    }
}
|
Manifest
|
identifier_name
|
manifest.rs
|
// Copyright (c) 2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::str::FromStr;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use base64;
use clap::ArgMatches;
use hcore::package::{PackageArchive, PackageIdent};
use common::ui::UI;
use export_docker::{Result, DockerImage};
use manifestjson::ManifestJson;
use service_bind::ServiceBind;
use topology::Topology;
/// Represents a Kubernetes manifest.
#[derive(Debug, Clone)]
pub struct Manifest {
    /// The identifier of the Habitat package
    pub pkg_ident: PackageIdent,
    /// Name of the Kubernetes resource.
    pub metadata_name: String,
    /// The docker image.
    pub image: String,
    /// The number of desired instances in the service group.
    pub count: u64,
    /// The relationship of a service with peers in the same service group.
    pub service_topology: Topology,
    /// The logical group of services in the service group.
    pub service_group: Option<String>,
    /// The config file content (in base64 encoded format).
    /// `None` when no config file was supplied on the command line.
    pub config: Option<String>,
    /// The name of the Kubernetes secret that contains the ring key, which encrypts the
    /// communication between Habitat supervisors.
    pub ring_secret_name: Option<String>,
    /// Any binds, as `ServiceBind` instances.
    pub binds: Vec<ServiceBind>,
}
impl Manifest {
///
/// Create a Manifest instance from command-line arguments passed as [`clap::ArgMatches`].
///
/// [`clap::ArgMatches`]: https://kbknapp.github.io/clap-rs/clap/struct.ArgMatches.html
pub fn new_from_cli_matches(
_ui: &mut UI,
matches: &ArgMatches,
image: Option<DockerImage>,
) -> Result<Self> {
let count = matches.value_of("COUNT").unwrap_or("1").parse()?;
let topology: Topology = matches
.value_of("TOPOLOGY")
.unwrap_or("standalone")
.parse()
.unwrap_or(Default::default());
let group = matches.value_of("GROUP").map(|s| s.to_string());
let config_file = matches.value_of("CONFIG");
let ring_secret_name = matches.value_of("RING_SECRET_NAME").map(|s| s.to_string());
// clap ensures that we do have the mandatory args so unwrap() is fine here
let pkg_ident_str = matches.value_of("PKG_IDENT_OR_ARTIFACT").expect(
"No package specified",
);
let pkg_ident = if Path::new(pkg_ident_str).is_file() {
// We're going to use the `$pkg_origin/$pkg_name`, fuzzy form of a package
// identifier to ensure that update strategies will work if desired
PackageArchive::new(pkg_ident_str).ident()?
} else {
PackageIdent::from_str(pkg_ident_str)?
};
let version_suffix = match pkg_ident.version {
Some(ref v) => {
pkg_ident
.release
.as_ref()
.map(|r| format!("{}-{}", v, r))
.unwrap_or(v.to_string())
}
None => "latest".to_owned(),
};
let name = matches
.value_of("K8S_NAME")
.map(|s| s.to_string())
.unwrap_or_else(|| format!("{}-{}", pkg_ident.name, version_suffix));
let image_name = match matches.value_of("IMAGE_NAME") {
Some(i) => i.to_string(),
None => {
let (image_name, tag) = match image {
Some(i) => {
(
i.name().to_owned(),
i.tags().get(0).cloned().unwrap_or_else(
|| "latest".to_owned(),
),
)
}
None => {
(
format!("{}/{}", pkg_ident.origin, pkg_ident.name),
version_suffix,
)
}
};
format!("{}:{}", image_name, tag)
}
};
let binds = ServiceBind::from_args(&matches)?;
let config = match config_file {
None => None,
Some(name) => {
let mut contents = String::new();
File::open(name)?.read_to_string(&mut contents)?;
Some(base64::encode(&format!("{}", contents)))
}
};
Ok(Manifest {
pkg_ident: pkg_ident,
metadata_name: name,
image: image_name,
count: count,
service_topology: topology,
service_group: group,
config: config,
ring_secret_name: ring_secret_name,
binds: binds,
})
}
/// Generates the manifest as a string and writes it to `write`.
pub fn generate(&mut self, write: &mut Write) -> Result<()> {
let out: String = ManifestJson::new(&self).into();
write.write(out.as_bytes())?;
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_manifest_generation() {
let mut m = Manifest {
pkg_ident: PackageIdent::from_str("core/nginx").unwrap(),
metadata_name: "nginx-latest".to_owned(),
image: "core/nginx:latest".to_owned(),
count: 3,
service_topology: Default::default(),
service_group: Some("group1".to_owned()),
config: Some(base64::encode(&format!("{}", "port = 4444"))),
ring_secret_name: Some("deltaechofoxtrot".to_owned()),
binds: vec![],
};
let expected = include_str!("../tests/KubernetesManifestTest.yaml");
let mut o = vec![];
m.generate(&mut o).unwrap();
let out = String::from_utf8(o).unwrap();
|
}
#[test]
fn test_manifest_generation_binds() {
let mut m = Manifest {
pkg_ident: PackageIdent::from_str("core/nginx").unwrap(),
metadata_name: "nginx-latest".to_owned(),
image: "core/nginx:latest".to_owned(),
count: 3,
service_topology: Default::default(),
service_group: Some("group1".to_owned()),
config: None,
ring_secret_name: Some("deltaechofoxtrot".to_owned()),
binds: vec!["name1:service1.group1".parse().unwrap()],
};
let expected = include_str!("../tests/KubernetesManifestTestBinds.yaml");
let mut o = vec![];
m.generate(&mut o).unwrap();
let out = String::from_utf8(o).unwrap();
assert_eq!(out, expected);
}
}
|
assert_eq!(out, expected);
|
random_line_split
|
manifest.rs
|
// Copyright (c) 2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::str::FromStr;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use base64;
use clap::ArgMatches;
use hcore::package::{PackageArchive, PackageIdent};
use common::ui::UI;
use export_docker::{Result, DockerImage};
use manifestjson::ManifestJson;
use service_bind::ServiceBind;
use topology::Topology;
/// Represents a Kubernetes manifest.
#[derive(Debug, Clone)]
pub struct Manifest {
/// The identifier of the Habitat package
pub pkg_ident: PackageIdent,
/// Name of the Kubernetes resource.
pub metadata_name: String,
/// The docker image.
pub image: String,
/// The number of desired instances in the service group.
pub count: u64,
/// The relationship of a service with peers in the same service group.
pub service_topology: Topology,
/// The logical group of services in the service group.
pub service_group: Option<String>,
/// The config file content (in base64 encoded format).
pub config: Option<String>,
/// The name of the Kubernetes secret that contains the ring key, which encrypts the
/// communication between Habitat supervisors.
pub ring_secret_name: Option<String>,
/// Any binds, as `ServiceBind` instances.
pub binds: Vec<ServiceBind>,
}
impl Manifest {
///
/// Create a Manifest instance from command-line arguments passed as [`clap::ArgMatches`].
///
/// [`clap::ArgMatches`]: https://kbknapp.github.io/clap-rs/clap/struct.ArgMatches.html
pub fn new_from_cli_matches(
_ui: &mut UI,
matches: &ArgMatches,
image: Option<DockerImage>,
) -> Result<Self>
|
};
let version_suffix = match pkg_ident.version {
Some(ref v) => {
pkg_ident
.release
.as_ref()
.map(|r| format!("{}-{}", v, r))
.unwrap_or(v.to_string())
}
None => "latest".to_owned(),
};
let name = matches
.value_of("K8S_NAME")
.map(|s| s.to_string())
.unwrap_or_else(|| format!("{}-{}", pkg_ident.name, version_suffix));
let image_name = match matches.value_of("IMAGE_NAME") {
Some(i) => i.to_string(),
None => {
let (image_name, tag) = match image {
Some(i) => {
(
i.name().to_owned(),
i.tags().get(0).cloned().unwrap_or_else(
|| "latest".to_owned(),
),
)
}
None => {
(
format!("{}/{}", pkg_ident.origin, pkg_ident.name),
version_suffix,
)
}
};
format!("{}:{}", image_name, tag)
}
};
let binds = ServiceBind::from_args(&matches)?;
let config = match config_file {
None => None,
Some(name) => {
let mut contents = String::new();
File::open(name)?.read_to_string(&mut contents)?;
Some(base64::encode(&format!("{}", contents)))
}
};
Ok(Manifest {
pkg_ident: pkg_ident,
metadata_name: name,
image: image_name,
count: count,
service_topology: topology,
service_group: group,
config: config,
ring_secret_name: ring_secret_name,
binds: binds,
})
}
/// Generates the manifest as a string and writes it to `write`.
pub fn generate(&mut self, write: &mut Write) -> Result<()> {
let out: String = ManifestJson::new(&self).into();
write.write(out.as_bytes())?;
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_manifest_generation() {
let mut m = Manifest {
pkg_ident: PackageIdent::from_str("core/nginx").unwrap(),
metadata_name: "nginx-latest".to_owned(),
image: "core/nginx:latest".to_owned(),
count: 3,
service_topology: Default::default(),
service_group: Some("group1".to_owned()),
config: Some(base64::encode(&format!("{}", "port = 4444"))),
ring_secret_name: Some("deltaechofoxtrot".to_owned()),
binds: vec![],
};
let expected = include_str!("../tests/KubernetesManifestTest.yaml");
let mut o = vec![];
m.generate(&mut o).unwrap();
let out = String::from_utf8(o).unwrap();
assert_eq!(out, expected);
}
#[test]
fn test_manifest_generation_binds() {
let mut m = Manifest {
pkg_ident: PackageIdent::from_str("core/nginx").unwrap(),
metadata_name: "nginx-latest".to_owned(),
image: "core/nginx:latest".to_owned(),
count: 3,
service_topology: Default::default(),
service_group: Some("group1".to_owned()),
config: None,
ring_secret_name: Some("deltaechofoxtrot".to_owned()),
binds: vec!["name1:service1.group1".parse().unwrap()],
};
let expected = include_str!("../tests/KubernetesManifestTestBinds.yaml");
let mut o = vec![];
m.generate(&mut o).unwrap();
let out = String::from_utf8(o).unwrap();
assert_eq!(out, expected);
}
}
|
{
let count = matches.value_of("COUNT").unwrap_or("1").parse()?;
let topology: Topology = matches
.value_of("TOPOLOGY")
.unwrap_or("standalone")
.parse()
.unwrap_or(Default::default());
let group = matches.value_of("GROUP").map(|s| s.to_string());
let config_file = matches.value_of("CONFIG");
let ring_secret_name = matches.value_of("RING_SECRET_NAME").map(|s| s.to_string());
// clap ensures that we do have the mandatory args so unwrap() is fine here
let pkg_ident_str = matches.value_of("PKG_IDENT_OR_ARTIFACT").expect(
"No package specified",
);
let pkg_ident = if Path::new(pkg_ident_str).is_file() {
// We're going to use the `$pkg_origin/$pkg_name`, fuzzy form of a package
// identifier to ensure that update strategies will work if desired
PackageArchive::new(pkg_ident_str).ident()?
} else {
PackageIdent::from_str(pkg_ident_str)?
|
identifier_body
|
struct.rs
|
// First attempt: No explicit lifetimes
// Error! Compiler needs explicit lifetime
//struct Singleton {
//one: &mut i32,
//}
// TODO ^ Try uncommenting this struct
// Second attempt: Add lifetimes to all the references
struct
|
<'a, 'b> {
one: &'a mut i32,
two: &'b mut i32,
}
fn main() {
    // Let's say that `one` has lifetime `o`
    let mut one = 1;
    {
        // And that `two` has lifetime `t`
        // `two` has a shorter (and different) lifetime than `one` (`'t < 'o`)
        let mut two = 2;
        println!("Before: ({}, {})", one, two);
        // `Pair` gets specialized for `'a = 'o` and `'b = 't`
        let pair = Pair { one: &mut one, two: &mut two };
        // Writing through the mutable references swaps the two values.
        *pair.one = 2;
        *pair.two = 1;
        println!("After: ({}, {})", pair.one, pair.two);
    }
}
|
Pair
|
identifier_name
|
struct.rs
|
// First attempt: No explicit lifetimes
// Error! Compiler needs explicit lifetime
//struct Singleton {
//one: &mut i32,
//}
// TODO ^ Try uncommenting this struct
// Second attempt: Add lifetimes to all the references
// `Pair` holds two mutable borrows; giving each its own lifetime parameter
// lets the two references come from scopes of different extents.
struct Pair<'a, 'b> {
    one: &'a mut i32, // must live at least as long as 'a
    two: &'b mut i32, // must live at least as long as 'b
}
fn main() {
// Let's say that `one` has lifetime `o`
let mut one = 1;
{
// And that `two` has lifetime `t`
// `two` has a shorter (and different) lifetime than `one` (`'t < 'o`)
let mut two = 2;
println!("Before: ({}, {})", one, two);
// `Pair` gets specialized for `'a = 'o` and `'b = 't`
let pair = Pair { one: &mut one, two: &mut two };
*pair.one = 2;
*pair.two = 1;
println!("After: ({}, {})", pair.one, pair.two);
}
|
}
|
random_line_split
|
|
render.rs
|
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{CardTemplate, NoteType, NoteTypeKind};
use crate::{
card::{Card, CardID},
collection::Collection,
err::{AnkiError, Result},
i18n::{I18n, TR},
notes::{Note, NoteID},
template::{field_is_empty, render_card, ParsedTemplate, RenderedNode},
};
use std::{borrow::Cow, collections::HashMap};
pub struct RenderCardOutput {
pub qnodes: Vec<RenderedNode>,
pub anodes: Vec<RenderedNode>,
}
impl Collection {
/// Render an existing card saved in the database.
pub fn render_existing_card(&mut self, cid: CardID, browser: bool) -> Result<RenderCardOutput> {
let card = self
.storage
.get_card(cid)?
.ok_or_else(|| AnkiError::invalid_input("no such card"))?;
let note = self
.storage
.get_note(card.note_id)?
.ok_or_else(|| AnkiError::invalid_input("no such note"))?;
let nt = self
.get_notetype(note.notetype_id)?
.ok_or_else(|| AnkiError::invalid_input("no such notetype"))?;
let template = match nt.config.kind() {
NoteTypeKind::Normal => nt.templates.get(card.template_idx as usize),
NoteTypeKind::Cloze => nt.templates.get(0),
}
.ok_or_else(|| AnkiError::invalid_input("missing template"))?;
self.render_card_inner(¬e, &card, &nt, template, browser)
}
/// Render a card that may not yet have been added.
/// The provided ordinal will be used if the template has not yet been saved.
/// If fill_empty is set, note will be mutated.
pub fn render_uncommitted_card(
&mut self,
note: &mut Note,
template: &CardTemplate,
card_ord: u16,
fill_empty: bool,
) -> Result<RenderCardOutput> {
let card = self.existing_or_synthesized_card(note.id, template.ord, card_ord)?;
let nt = self
.get_notetype(note.notetype_id)?
.ok_or_else(|| AnkiError::invalid_input("no such notetype"))?;
if fill_empty {
fill_empty_fields(note, &template.config.q_format, &nt, &self.i18n);
}
self.render_card_inner(note, &card, &nt, template, false)
}
fn existing_or_synthesized_card(
&self,
nid: NoteID,
template_ord: Option<u32>,
card_ord: u16,
) -> Result<Card>
|
fn render_card_inner(
&mut self,
note: &Note,
card: &Card,
nt: &NoteType,
template: &CardTemplate,
browser: bool,
) -> Result<RenderCardOutput> {
let mut field_map = note.fields_map(&nt.fields);
let card_num;
self.add_special_fields(&mut field_map, note, card, &nt, template)?;
// due to lifetime restrictions we need to add card number here
card_num = format!("c{}", card.template_idx + 1);
field_map.entry(&card_num).or_insert_with(|| "1".into());
let (qfmt, afmt) = if browser {
(
template.question_format_for_browser(),
template.answer_format_for_browser(),
)
} else {
(
template.config.q_format.as_str(),
template.config.a_format.as_str(),
)
};
let (qnodes, anodes) = render_card(
qfmt,
afmt,
&field_map,
card.template_idx,
nt.is_cloze(),
&self.i18n,
)?;
Ok(RenderCardOutput { qnodes, anodes })
}
// Add special fields if they don't clobber note fields
fn add_special_fields(
&mut self,
map: &mut HashMap<&str, Cow<str>>,
note: &Note,
card: &Card,
nt: &NoteType,
template: &CardTemplate,
) -> Result<()> {
let tags = note.tags.join(" ");
map.entry("Tags").or_insert_with(|| tags.into());
map.entry("Type").or_insert_with(|| nt.name.clone().into());
let deck_name: Cow<str> = self
.get_deck(if card.original_deck_id.0 > 0 {
card.original_deck_id
} else {
card.deck_id
})?
.map(|d| d.human_name().into())
.unwrap_or_else(|| "(Deck)".into());
let subdeck_name = deck_name.rsplit("::").next().unwrap();
map.entry("Subdeck")
.or_insert_with(|| subdeck_name.to_string().into());
map.entry("Deck")
.or_insert_with(|| deck_name.to_string().into());
map.entry("CardFlag")
.or_insert_with(|| flag_name(card.flags).into());
map.entry("Card")
.or_insert_with(|| template.name.clone().into());
Ok(())
}
}
/// Map a card flag number (1-4) to its name ("flag1".."flag4"); any other
/// value yields the empty string.
fn flag_name(n: u8) -> &'static str {
    const FLAGS: [&str; 4] = ["flag1", "flag2", "flag3", "flag4"];
    n.checked_sub(1)
        .and_then(|idx| FLAGS.get(idx as usize))
        .copied()
        .unwrap_or("")
}
/// Replace each empty note field with placeholder text: the sample cloze
/// string for fields referenced by a cloze deletion in the question
/// template, otherwise the field name in parentheses. Does nothing when the
/// question template fails to parse.
fn fill_empty_fields(note: &mut Note, qfmt: &str, nt: &NoteType, i18n: &I18n) {
    let tmpl = match ParsedTemplate::from_text(qfmt) {
        Ok(t) => t,
        Err(_) => return,
    };
    let cloze_fields = tmpl.cloze_fields();
    for (val, field) in note.fields.iter_mut().zip(nt.fields.iter()) {
        if !field_is_empty(val) {
            continue;
        }
        *val = if cloze_fields.contains(&field.name.as_str()) {
            i18n.tr(TR::CardTemplatesSampleCloze).into()
        } else {
            format!("({})", field.name)
        };
    }
}
|
{
// fetch existing card
if let Some(ord) = template_ord {
if let Some(card) = self.storage.get_card_by_ordinal(nid, ord as u16)? {
return Ok(card);
}
}
// no existing card; synthesize one
Ok(Card {
template_idx: card_ord,
..Default::default()
})
}
|
identifier_body
|
render.rs
|
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{CardTemplate, NoteType, NoteTypeKind};
use crate::{
card::{Card, CardID},
collection::Collection,
err::{AnkiError, Result},
i18n::{I18n, TR},
notes::{Note, NoteID},
template::{field_is_empty, render_card, ParsedTemplate, RenderedNode},
};
use std::{borrow::Cow, collections::HashMap};
pub struct RenderCardOutput {
pub qnodes: Vec<RenderedNode>,
pub anodes: Vec<RenderedNode>,
}
impl Collection {
/// Render an existing card saved in the database.
pub fn render_existing_card(&mut self, cid: CardID, browser: bool) -> Result<RenderCardOutput> {
let card = self
.storage
.get_card(cid)?
.ok_or_else(|| AnkiError::invalid_input("no such card"))?;
let note = self
.storage
.get_note(card.note_id)?
.ok_or_else(|| AnkiError::invalid_input("no such note"))?;
let nt = self
.get_notetype(note.notetype_id)?
.ok_or_else(|| AnkiError::invalid_input("no such notetype"))?;
let template = match nt.config.kind() {
NoteTypeKind::Normal => nt.templates.get(card.template_idx as usize),
NoteTypeKind::Cloze => nt.templates.get(0),
}
.ok_or_else(|| AnkiError::invalid_input("missing template"))?;
self.render_card_inner(¬e, &card, &nt, template, browser)
}
/// Render a card that may not yet have been added.
/// The provided ordinal will be used if the template has not yet been saved.
/// If fill_empty is set, note will be mutated.
pub fn render_uncommitted_card(
&mut self,
note: &mut Note,
template: &CardTemplate,
card_ord: u16,
fill_empty: bool,
) -> Result<RenderCardOutput> {
let card = self.existing_or_synthesized_card(note.id, template.ord, card_ord)?;
let nt = self
.get_notetype(note.notetype_id)?
.ok_or_else(|| AnkiError::invalid_input("no such notetype"))?;
if fill_empty {
fill_empty_fields(note, &template.config.q_format, &nt, &self.i18n);
}
self.render_card_inner(note, &card, &nt, template, false)
}
fn existing_or_synthesized_card(
&self,
nid: NoteID,
template_ord: Option<u32>,
card_ord: u16,
) -> Result<Card> {
// fetch existing card
if let Some(ord) = template_ord {
if let Some(card) = self.storage.get_card_by_ordinal(nid, ord as u16)? {
return Ok(card);
}
}
// no existing card; synthesize one
Ok(Card {
template_idx: card_ord,
..Default::default()
})
}
fn render_card_inner(
&mut self,
note: &Note,
card: &Card,
nt: &NoteType,
template: &CardTemplate,
browser: bool,
) -> Result<RenderCardOutput> {
let mut field_map = note.fields_map(&nt.fields);
let card_num;
self.add_special_fields(&mut field_map, note, card, &nt, template)?;
// due to lifetime restrictions we need to add card number here
card_num = format!("c{}", card.template_idx + 1);
field_map.entry(&card_num).or_insert_with(|| "1".into());
let (qfmt, afmt) = if browser {
(
template.question_format_for_browser(),
template.answer_format_for_browser(),
)
} else {
(
template.config.q_format.as_str(),
template.config.a_format.as_str(),
)
};
let (qnodes, anodes) = render_card(
qfmt,
afmt,
&field_map,
card.template_idx,
nt.is_cloze(),
&self.i18n,
)?;
Ok(RenderCardOutput { qnodes, anodes })
}
// Add special fields if they don't clobber note fields
fn add_special_fields(
&mut self,
map: &mut HashMap<&str, Cow<str>>,
note: &Note,
card: &Card,
nt: &NoteType,
template: &CardTemplate,
) -> Result<()> {
let tags = note.tags.join(" ");
map.entry("Tags").or_insert_with(|| tags.into());
map.entry("Type").or_insert_with(|| nt.name.clone().into());
let deck_name: Cow<str> = self
.get_deck(if card.original_deck_id.0 > 0 {
card.original_deck_id
} else {
card.deck_id
})?
.map(|d| d.human_name().into())
.unwrap_or_else(|| "(Deck)".into());
let subdeck_name = deck_name.rsplit("::").next().unwrap();
map.entry("Subdeck")
.or_insert_with(|| subdeck_name.to_string().into());
map.entry("Deck")
.or_insert_with(|| deck_name.to_string().into());
map.entry("CardFlag")
.or_insert_with(|| flag_name(card.flags).into());
map.entry("Card")
.or_insert_with(|| template.name.clone().into());
Ok(())
}
}
fn flag_name(n: u8) -> &'static str {
match n {
1 => "flag1",
2 => "flag2",
3 => "flag3",
4 => "flag4",
_ => "",
}
}
fn fill_empty_fields(note: &mut Note, qfmt: &str, nt: &NoteType, i18n: &I18n) {
if let Ok(tmpl) = ParsedTemplate::from_text(qfmt) {
let cloze_fields = tmpl.cloze_fields();
for (val, field) in note.fields.iter_mut().zip(nt.fields.iter()) {
if field_is_empty(val) {
if cloze_fields.contains(&field.name.as_str()) {
*val = i18n.tr(TR::CardTemplatesSampleCloze).into();
} else
|
}
}
}
}
|
{
*val = format!("({})", field.name);
}
|
conditional_block
|
render.rs
|
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{CardTemplate, NoteType, NoteTypeKind};
use crate::{
card::{Card, CardID},
collection::Collection,
err::{AnkiError, Result},
i18n::{I18n, TR},
notes::{Note, NoteID},
template::{field_is_empty, render_card, ParsedTemplate, RenderedNode},
};
use std::{borrow::Cow, collections::HashMap};
pub struct RenderCardOutput {
pub qnodes: Vec<RenderedNode>,
pub anodes: Vec<RenderedNode>,
}
impl Collection {
/// Render an existing card saved in the database.
pub fn render_existing_card(&mut self, cid: CardID, browser: bool) -> Result<RenderCardOutput> {
let card = self
.storage
.get_card(cid)?
.ok_or_else(|| AnkiError::invalid_input("no such card"))?;
let note = self
.storage
.get_note(card.note_id)?
.ok_or_else(|| AnkiError::invalid_input("no such note"))?;
let nt = self
.get_notetype(note.notetype_id)?
.ok_or_else(|| AnkiError::invalid_input("no such notetype"))?;
let template = match nt.config.kind() {
NoteTypeKind::Normal => nt.templates.get(card.template_idx as usize),
NoteTypeKind::Cloze => nt.templates.get(0),
}
.ok_or_else(|| AnkiError::invalid_input("missing template"))?;
self.render_card_inner(¬e, &card, &nt, template, browser)
}
/// Render a card that may not yet have been added.
/// The provided ordinal will be used if the template has not yet been saved.
/// If fill_empty is set, note will be mutated.
pub fn render_uncommitted_card(
&mut self,
note: &mut Note,
template: &CardTemplate,
card_ord: u16,
fill_empty: bool,
) -> Result<RenderCardOutput> {
let card = self.existing_or_synthesized_card(note.id, template.ord, card_ord)?;
let nt = self
.get_notetype(note.notetype_id)?
.ok_or_else(|| AnkiError::invalid_input("no such notetype"))?;
if fill_empty {
fill_empty_fields(note, &template.config.q_format, &nt, &self.i18n);
}
self.render_card_inner(note, &card, &nt, template, false)
}
fn existing_or_synthesized_card(
&self,
nid: NoteID,
|
if let Some(ord) = template_ord {
if let Some(card) = self.storage.get_card_by_ordinal(nid, ord as u16)? {
return Ok(card);
}
}
// no existing card; synthesize one
Ok(Card {
template_idx: card_ord,
..Default::default()
})
}
fn render_card_inner(
&mut self,
note: &Note,
card: &Card,
nt: &NoteType,
template: &CardTemplate,
browser: bool,
) -> Result<RenderCardOutput> {
let mut field_map = note.fields_map(&nt.fields);
let card_num;
self.add_special_fields(&mut field_map, note, card, &nt, template)?;
// due to lifetime restrictions we need to add card number here
card_num = format!("c{}", card.template_idx + 1);
field_map.entry(&card_num).or_insert_with(|| "1".into());
let (qfmt, afmt) = if browser {
(
template.question_format_for_browser(),
template.answer_format_for_browser(),
)
} else {
(
template.config.q_format.as_str(),
template.config.a_format.as_str(),
)
};
let (qnodes, anodes) = render_card(
qfmt,
afmt,
&field_map,
card.template_idx,
nt.is_cloze(),
&self.i18n,
)?;
Ok(RenderCardOutput { qnodes, anodes })
}
// Add special fields if they don't clobber note fields
fn add_special_fields(
&mut self,
map: &mut HashMap<&str, Cow<str>>,
note: &Note,
card: &Card,
nt: &NoteType,
template: &CardTemplate,
) -> Result<()> {
let tags = note.tags.join(" ");
map.entry("Tags").or_insert_with(|| tags.into());
map.entry("Type").or_insert_with(|| nt.name.clone().into());
let deck_name: Cow<str> = self
.get_deck(if card.original_deck_id.0 > 0 {
card.original_deck_id
} else {
card.deck_id
})?
.map(|d| d.human_name().into())
.unwrap_or_else(|| "(Deck)".into());
let subdeck_name = deck_name.rsplit("::").next().unwrap();
map.entry("Subdeck")
.or_insert_with(|| subdeck_name.to_string().into());
map.entry("Deck")
.or_insert_with(|| deck_name.to_string().into());
map.entry("CardFlag")
.or_insert_with(|| flag_name(card.flags).into());
map.entry("Card")
.or_insert_with(|| template.name.clone().into());
Ok(())
}
}
fn flag_name(n: u8) -> &'static str {
match n {
1 => "flag1",
2 => "flag2",
3 => "flag3",
4 => "flag4",
_ => "",
}
}
fn fill_empty_fields(note: &mut Note, qfmt: &str, nt: &NoteType, i18n: &I18n) {
if let Ok(tmpl) = ParsedTemplate::from_text(qfmt) {
let cloze_fields = tmpl.cloze_fields();
for (val, field) in note.fields.iter_mut().zip(nt.fields.iter()) {
if field_is_empty(val) {
if cloze_fields.contains(&field.name.as_str()) {
*val = i18n.tr(TR::CardTemplatesSampleCloze).into();
} else {
*val = format!("({})", field.name);
}
}
}
}
}
|
template_ord: Option<u32>,
card_ord: u16,
) -> Result<Card> {
// fetch existing card
|
random_line_split
|
render.rs
|
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{CardTemplate, NoteType, NoteTypeKind};
use crate::{
card::{Card, CardID},
collection::Collection,
err::{AnkiError, Result},
i18n::{I18n, TR},
notes::{Note, NoteID},
template::{field_is_empty, render_card, ParsedTemplate, RenderedNode},
};
use std::{borrow::Cow, collections::HashMap};
pub struct RenderCardOutput {
pub qnodes: Vec<RenderedNode>,
pub anodes: Vec<RenderedNode>,
}
impl Collection {
/// Render an existing card saved in the database.
pub fn render_existing_card(&mut self, cid: CardID, browser: bool) -> Result<RenderCardOutput> {
let card = self
.storage
.get_card(cid)?
.ok_or_else(|| AnkiError::invalid_input("no such card"))?;
let note = self
.storage
.get_note(card.note_id)?
.ok_or_else(|| AnkiError::invalid_input("no such note"))?;
let nt = self
.get_notetype(note.notetype_id)?
.ok_or_else(|| AnkiError::invalid_input("no such notetype"))?;
let template = match nt.config.kind() {
NoteTypeKind::Normal => nt.templates.get(card.template_idx as usize),
NoteTypeKind::Cloze => nt.templates.get(0),
}
.ok_or_else(|| AnkiError::invalid_input("missing template"))?;
self.render_card_inner(¬e, &card, &nt, template, browser)
}
    /// Render a card that may not yet have been added.
    /// The provided ordinal will be used if the template has not yet been saved.
    /// If fill_empty is set, note will be mutated.
    pub fn render_uncommitted_card(
        &mut self,
        note: &mut Note,
        template: &CardTemplate,
        card_ord: u16,
        fill_empty: bool,
    ) -> Result<RenderCardOutput> {
        // Reuse the stored card for this note/template when it exists;
        // otherwise synthesize a transient card carrying `card_ord`.
        let card = self.existing_or_synthesized_card(note.id, template.ord, card_ord)?;
        let nt = self
            .get_notetype(note.notetype_id)?
            .ok_or_else(|| AnkiError::invalid_input("no such notetype"))?;
        if fill_empty {
            // Mutates `note`: blank fields receive placeholder text so every
            // field referenced by the question template renders as non-empty.
            fill_empty_fields(note, &template.config.q_format, &nt, &self.i18n);
        }
        // Uncommitted cards are always rendered with browser mode off.
        self.render_card_inner(note, &card, &nt, template, false)
    }
fn existing_or_synthesized_card(
&self,
nid: NoteID,
template_ord: Option<u32>,
card_ord: u16,
) -> Result<Card> {
// fetch existing card
if let Some(ord) = template_ord {
if let Some(card) = self.storage.get_card_by_ordinal(nid, ord as u16)? {
return Ok(card);
}
}
// no existing card; synthesize one
Ok(Card {
template_idx: card_ord,
..Default::default()
})
}
    /// Build the field map for `note`, merge in the special fields, then
    /// render the question and answer templates for one card.
    fn render_card_inner(
        &mut self,
        note: &Note,
        card: &Card,
        nt: &NoteType,
        template: &CardTemplate,
        browser: bool,
    ) -> Result<RenderCardOutput> {
        let mut field_map = note.fields_map(&nt.fields);
        // Declared before the insertions below so it outlives the borrow
        // taken by `field_map.entry(&card_num)`.
        let card_num;
        self.add_special_fields(&mut field_map, note, card, &nt, template)?;
        // due to lifetime restrictions we need to add card number here
        card_num = format!("c{}", card.template_idx + 1);
        field_map.entry(&card_num).or_insert_with(|| "1".into());
        // Browser mode may substitute alternate template formats.
        let (qfmt, afmt) = if browser {
            (
                template.question_format_for_browser(),
                template.answer_format_for_browser(),
            )
        } else {
            (
                template.config.q_format.as_str(),
                template.config.a_format.as_str(),
            )
        };
        let (qnodes, anodes) = render_card(
            qfmt,
            afmt,
            &field_map,
            card.template_idx,
            nt.is_cloze(),
            &self.i18n,
        )?;
        Ok(RenderCardOutput { qnodes, anodes })
    }
    // Add special fields if they don't clobber note fields
    //
    // Inserts Tags/Type/Deck/Subdeck/CardFlag/Card entries into `map`; the
    // entry API leaves any key the note already defines untouched.
    fn add_special_fields(
        &mut self,
        map: &mut HashMap<&str, Cow<str>>,
        note: &Note,
        card: &Card,
        nt: &NoteType,
        template: &CardTemplate,
    ) -> Result<()> {
        let tags = note.tags.join(" ");
        map.entry("Tags").or_insert_with(|| tags.into());
        map.entry("Type").or_insert_with(|| nt.name.clone().into());
        // When original_deck_id is set, prefer it over the current deck —
        // presumably the card's home deck while it sits in a filtered deck;
        // confirm against the scheduler code. Missing deck -> "(Deck)".
        let deck_name: Cow<str> = self
            .get_deck(if card.original_deck_id.0 > 0 {
                card.original_deck_id
            } else {
                card.deck_id
            })?
            .map(|d| d.human_name().into())
            .unwrap_or_else(|| "(Deck)".into());
        // Last component of a "parent::child" deck name; for a name without
        // "::" this is the full name, so next() cannot return None.
        let subdeck_name = deck_name.rsplit("::").next().unwrap();
        map.entry("Subdeck")
            .or_insert_with(|| subdeck_name.to_string().into());
        map.entry("Deck")
            .or_insert_with(|| deck_name.to_string().into());
        map.entry("CardFlag")
            .or_insert_with(|| flag_name(card.flags).into());
        map.entry("Card")
            .or_insert_with(|| template.name.clone().into());
        Ok(())
    }
}
fn
|
(n: u8) -> &'static str {
match n {
1 => "flag1",
2 => "flag2",
3 => "flag3",
4 => "flag4",
_ => "",
}
}
fn fill_empty_fields(note: &mut Note, qfmt: &str, nt: &NoteType, i18n: &I18n) {
if let Ok(tmpl) = ParsedTemplate::from_text(qfmt) {
let cloze_fields = tmpl.cloze_fields();
for (val, field) in note.fields.iter_mut().zip(nt.fields.iter()) {
if field_is_empty(val) {
if cloze_fields.contains(&field.name.as_str()) {
*val = i18n.tr(TR::CardTemplatesSampleCloze).into();
} else {
*val = format!("({})", field.name);
}
}
}
}
}
|
flag_name
|
identifier_name
|
mod.rs
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
//! COSE_Key functionality.
use crate::{
cbor::value::Value,
common::AsCborValue,
iana,
iana::EnumI64,
util::{to_cbor_array, ValueTryAs},
Algorithm, CoseError, Label, Result,
};
use alloc::{collections::BTreeSet, vec, vec::Vec};
#[cfg(test)]
mod tests;
/// Key type.
pub type KeyType = crate::RegisteredLabel<iana::KeyType>;
impl Default for KeyType {
fn default() -> Self {
KeyType::Assigned(iana::KeyType::Reserved)
}
}
/// Key operation.
pub type KeyOperation = crate::RegisteredLabel<iana::KeyOperation>;
/// A collection of [`CoseKey`] objects.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct CoseKeySet(pub Vec<CoseKey>);
impl crate::CborSerializable for CoseKeySet {}
impl AsCborValue for CoseKeySet {
fn from_cbor_value(value: Value) -> Result<Self> {
Ok(Self(
value.try_as_array_then_convert(CoseKey::from_cbor_value)?,
))
}
fn to_cbor_value(self) -> Result<Value> {
to_cbor_array(self.0)
}
}
/// Structure representing a cryptographic key.
///
/// ```cddl
/// COSE_Key = {
/// 1 => tstr / int, ; kty
/// ? 2 => bstr, ; kid
/// ? 3 => tstr / int, ; alg
/// ? 4 => [+ (tstr / int) ], ; key_ops
/// ? 5 => bstr, ; Base IV
/// * label => values
/// }
/// ```
#[derive(Clone, Debug, Default, PartialEq)]
pub struct CoseKey {
/// Key type identification.
pub kty: KeyType,
/// Key identification.
pub key_id: Vec<u8>,
/// Key use restriction to this algorithm.
pub alg: Option<Algorithm>,
/// Restrict set of possible operations.
pub key_ops: BTreeSet<KeyOperation>,
/// Base IV to be xor-ed with partial IVs.
pub base_iv: Vec<u8>,
/// Any additional parameter (label,value) pairs. If duplicate labels are present,
/// CBOR-encoding will fail.
pub params: Vec<(Label, Value)>,
}
impl crate::CborSerializable for CoseKey {}
const KTY: Label = Label::Int(iana::KeyParameter::Kty as i64);
const KID: Label = Label::Int(iana::KeyParameter::Kid as i64);
const ALG: Label = Label::Int(iana::KeyParameter::Alg as i64);
const KEY_OPS: Label = Label::Int(iana::KeyParameter::KeyOps as i64);
const BASE_IV: Label = Label::Int(iana::KeyParameter::BaseIv as i64);
impl AsCborValue for CoseKey {
fn from_cbor_value(value: Value) -> Result<Self> {
let m = value.try_as_map()?;
let mut key = Self::default();
let mut seen = BTreeSet::new();
for (l, value) in m.into_iter() {
// The `ciborium` CBOR library does not police duplicate map keys.
// RFC 8152 section 14 requires that COSE does police duplicates, so do it here.
let label = Label::from_cbor_value(l)?;
if seen.contains(&label) {
return Err(CoseError::DuplicateMapKey);
}
seen.insert(label.clone());
match label {
KTY => key.kty = KeyType::from_cbor_value(value)?,
KID => {
key.key_id = value.try_as_nonempty_bytes()?;
}
ALG => key.alg = Some(Algorithm::from_cbor_value(value)?),
KEY_OPS => {
let key_ops = value.try_as_array()?;
for key_op in key_ops.into_iter() {
if!key.key_ops.insert(KeyOperation::from_cbor_value(key_op)?) {
return Err(CoseError::UnexpectedItem(
"repeated array entry",
"unique array label",
));
}
}
if key.key_ops.is_empty() {
return Err(CoseError::UnexpectedItem("empty array", "non-empty array"));
}
}
BASE_IV => {
key.base_iv = value.try_as_nonempty_bytes()?;
}
label => key.params.push((label, value)),
}
}
// Check that key type has been set.
if key.kty == KeyType::Assigned(iana::KeyType::Reserved) {
return Err(CoseError::UnexpectedItem(
"no kty label",
"mandatory kty label",
));
}
Ok(key)
}
fn to_cbor_value(self) -> Result<Value> {
let mut map: Vec<(Value, Value)> = vec![(KTY.to_cbor_value()?, self.kty.to_cbor_value()?)];
if!self.key_id.is_empty() {
map.push((KID.to_cbor_value()?, Value::Bytes(self.key_id)));
}
if let Some(alg) = self.alg {
map.push((ALG.to_cbor_value()?, alg.to_cbor_value()?));
}
if!self.key_ops.is_empty() {
map.push((KEY_OPS.to_cbor_value()?, to_cbor_array(self.key_ops)?));
}
if!self.base_iv.is_empty() {
map.push((BASE_IV.to_cbor_value()?, Value::Bytes(self.base_iv)));
}
let mut seen = BTreeSet::new();
for (label, value) in self.params {
if seen.contains(&label) {
return Err(CoseError::DuplicateMapKey);
}
seen.insert(label.clone());
map.push((label.to_cbor_value()?, value));
}
Ok(Value::Map(map))
}
}
/// Builder for [`CoseKey`] objects.
#[derive(Debug, Default)]
pub struct CoseKeyBuilder(CoseKey);
impl CoseKeyBuilder {
builder! {CoseKey}
builder_set! {key_id: Vec<u8>}
builder_set! {base_iv: Vec<u8>}
/// Constructor for an elliptic curve public key specified by `x` and `y` coordinates.
pub fn new_ec2_pub_key(curve: iana::EllipticCurve, x: Vec<u8>, y: Vec<u8>) -> Self {
Self(CoseKey {
kty: KeyType::Assigned(iana::KeyType::EC2),
params: vec![
(
Label::Int(iana::Ec2KeyParameter::Crv as i64),
Value::from(curve as u64),
),
(Label::Int(iana::Ec2KeyParameter::X as i64), Value::Bytes(x)),
(Label::Int(iana::Ec2KeyParameter::Y as i64), Value::Bytes(y)),
],
..Default::default()
})
}
/// Constructor for an elliptic curve public key specified by `x` coordinate plus sign of `y`
/// coordinate.
pub fn new_ec2_pub_key_y_sign(curve: iana::EllipticCurve, x: Vec<u8>, y_sign: bool) -> Self {
Self(CoseKey {
kty: KeyType::Assigned(iana::KeyType::EC2),
params: vec![
(
Label::Int(iana::Ec2KeyParameter::Crv as i64),
Value::from(curve as u64),
),
(Label::Int(iana::Ec2KeyParameter::X as i64), Value::Bytes(x)),
(
Label::Int(iana::Ec2KeyParameter::Y as i64),
Value::Bool(y_sign),
),
],
..Default::default()
})
}
/// Constructor for an elliptic curve private key specified by `d`, together with public `x` and
/// `y` coordinates.
pub fn new_ec2_priv_key(
curve: iana::EllipticCurve,
x: Vec<u8>,
y: Vec<u8>,
d: Vec<u8>,
) -> Self {
let mut builder = Self::new_ec2_pub_key(curve, x, y);
builder
.0
.params
.push((Label::Int(iana::Ec2KeyParameter::D as i64), Value::Bytes(d)));
builder
}
/// Constructor for a symmetric key specified by `k`.
pub fn new_symmetric_key(k: Vec<u8>) -> Self {
Self(CoseKey {
kty: KeyType::Assigned(iana::KeyType::Symmetric),
params: vec![(
Label::Int(iana::SymmetricKeyParameter::K as i64),
Value::Bytes(k),
)],
..Default::default()
})
}
/// Set the algorithm.
#[must_use]
pub fn algorithm(mut self, alg: iana::Algorithm) -> Self {
self.0.alg = Some(Algorithm::Assigned(alg));
self
}
/// Add a key operation.
#[must_use]
pub fn add_key_op(mut self, op: iana::KeyOperation) -> Self {
self.0.key_ops.insert(KeyOperation::Assigned(op));
self
}
/// Set a parameter value.
///
/// # Panics
///
/// This function will panic if it used to set a parameter label from the [`iana::KeyParameter`]
/// range.
#[must_use]
pub fn param(mut self, label: i64, value: Value) -> Self
|
}
|
{
if iana::KeyParameter::from_i64(label).is_some() {
panic!("param() method used to set KeyParameter"); // safe: invalid input
}
self.0.params.push((Label::Int(label), value));
self
}
|
identifier_body
|
mod.rs
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
|
cbor::value::Value,
common::AsCborValue,
iana,
iana::EnumI64,
util::{to_cbor_array, ValueTryAs},
Algorithm, CoseError, Label, Result,
};
use alloc::{collections::BTreeSet, vec, vec::Vec};
#[cfg(test)]
mod tests;
/// Key type.
pub type KeyType = crate::RegisteredLabel<iana::KeyType>;
impl Default for KeyType {
fn default() -> Self {
KeyType::Assigned(iana::KeyType::Reserved)
}
}
/// Key operation.
pub type KeyOperation = crate::RegisteredLabel<iana::KeyOperation>;
/// A collection of [`CoseKey`] objects.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct CoseKeySet(pub Vec<CoseKey>);
impl crate::CborSerializable for CoseKeySet {}
impl AsCborValue for CoseKeySet {
fn from_cbor_value(value: Value) -> Result<Self> {
Ok(Self(
value.try_as_array_then_convert(CoseKey::from_cbor_value)?,
))
}
fn to_cbor_value(self) -> Result<Value> {
to_cbor_array(self.0)
}
}
/// Structure representing a cryptographic key.
///
/// ```cddl
/// COSE_Key = {
/// 1 => tstr / int, ; kty
/// ? 2 => bstr, ; kid
/// ? 3 => tstr / int, ; alg
/// ? 4 => [+ (tstr / int) ], ; key_ops
/// ? 5 => bstr, ; Base IV
/// * label => values
/// }
/// ```
#[derive(Clone, Debug, Default, PartialEq)]
pub struct CoseKey {
/// Key type identification.
pub kty: KeyType,
/// Key identification.
pub key_id: Vec<u8>,
/// Key use restriction to this algorithm.
pub alg: Option<Algorithm>,
/// Restrict set of possible operations.
pub key_ops: BTreeSet<KeyOperation>,
/// Base IV to be xor-ed with partial IVs.
pub base_iv: Vec<u8>,
/// Any additional parameter (label,value) pairs. If duplicate labels are present,
/// CBOR-encoding will fail.
pub params: Vec<(Label, Value)>,
}
impl crate::CborSerializable for CoseKey {}
const KTY: Label = Label::Int(iana::KeyParameter::Kty as i64);
const KID: Label = Label::Int(iana::KeyParameter::Kid as i64);
const ALG: Label = Label::Int(iana::KeyParameter::Alg as i64);
const KEY_OPS: Label = Label::Int(iana::KeyParameter::KeyOps as i64);
const BASE_IV: Label = Label::Int(iana::KeyParameter::BaseIv as i64);
impl AsCborValue for CoseKey {
fn from_cbor_value(value: Value) -> Result<Self> {
let m = value.try_as_map()?;
let mut key = Self::default();
let mut seen = BTreeSet::new();
for (l, value) in m.into_iter() {
// The `ciborium` CBOR library does not police duplicate map keys.
// RFC 8152 section 14 requires that COSE does police duplicates, so do it here.
let label = Label::from_cbor_value(l)?;
if seen.contains(&label) {
return Err(CoseError::DuplicateMapKey);
}
seen.insert(label.clone());
match label {
KTY => key.kty = KeyType::from_cbor_value(value)?,
KID => {
key.key_id = value.try_as_nonempty_bytes()?;
}
ALG => key.alg = Some(Algorithm::from_cbor_value(value)?),
KEY_OPS => {
let key_ops = value.try_as_array()?;
for key_op in key_ops.into_iter() {
if!key.key_ops.insert(KeyOperation::from_cbor_value(key_op)?) {
return Err(CoseError::UnexpectedItem(
"repeated array entry",
"unique array label",
));
}
}
if key.key_ops.is_empty() {
return Err(CoseError::UnexpectedItem("empty array", "non-empty array"));
}
}
BASE_IV => {
key.base_iv = value.try_as_nonempty_bytes()?;
}
label => key.params.push((label, value)),
}
}
// Check that key type has been set.
if key.kty == KeyType::Assigned(iana::KeyType::Reserved) {
return Err(CoseError::UnexpectedItem(
"no kty label",
"mandatory kty label",
));
}
Ok(key)
}
fn to_cbor_value(self) -> Result<Value> {
let mut map: Vec<(Value, Value)> = vec![(KTY.to_cbor_value()?, self.kty.to_cbor_value()?)];
if!self.key_id.is_empty() {
map.push((KID.to_cbor_value()?, Value::Bytes(self.key_id)));
}
if let Some(alg) = self.alg {
map.push((ALG.to_cbor_value()?, alg.to_cbor_value()?));
}
if!self.key_ops.is_empty() {
map.push((KEY_OPS.to_cbor_value()?, to_cbor_array(self.key_ops)?));
}
if!self.base_iv.is_empty() {
map.push((BASE_IV.to_cbor_value()?, Value::Bytes(self.base_iv)));
}
let mut seen = BTreeSet::new();
for (label, value) in self.params {
if seen.contains(&label) {
return Err(CoseError::DuplicateMapKey);
}
seen.insert(label.clone());
map.push((label.to_cbor_value()?, value));
}
Ok(Value::Map(map))
}
}
/// Builder for [`CoseKey`] objects.
#[derive(Debug, Default)]
pub struct CoseKeyBuilder(CoseKey);
impl CoseKeyBuilder {
builder! {CoseKey}
builder_set! {key_id: Vec<u8>}
builder_set! {base_iv: Vec<u8>}
/// Constructor for an elliptic curve public key specified by `x` and `y` coordinates.
pub fn new_ec2_pub_key(curve: iana::EllipticCurve, x: Vec<u8>, y: Vec<u8>) -> Self {
Self(CoseKey {
kty: KeyType::Assigned(iana::KeyType::EC2),
params: vec![
(
Label::Int(iana::Ec2KeyParameter::Crv as i64),
Value::from(curve as u64),
),
(Label::Int(iana::Ec2KeyParameter::X as i64), Value::Bytes(x)),
(Label::Int(iana::Ec2KeyParameter::Y as i64), Value::Bytes(y)),
],
..Default::default()
})
}
/// Constructor for an elliptic curve public key specified by `x` coordinate plus sign of `y`
/// coordinate.
pub fn new_ec2_pub_key_y_sign(curve: iana::EllipticCurve, x: Vec<u8>, y_sign: bool) -> Self {
Self(CoseKey {
kty: KeyType::Assigned(iana::KeyType::EC2),
params: vec![
(
Label::Int(iana::Ec2KeyParameter::Crv as i64),
Value::from(curve as u64),
),
(Label::Int(iana::Ec2KeyParameter::X as i64), Value::Bytes(x)),
(
Label::Int(iana::Ec2KeyParameter::Y as i64),
Value::Bool(y_sign),
),
],
..Default::default()
})
}
/// Constructor for an elliptic curve private key specified by `d`, together with public `x` and
/// `y` coordinates.
pub fn new_ec2_priv_key(
curve: iana::EllipticCurve,
x: Vec<u8>,
y: Vec<u8>,
d: Vec<u8>,
) -> Self {
let mut builder = Self::new_ec2_pub_key(curve, x, y);
builder
.0
.params
.push((Label::Int(iana::Ec2KeyParameter::D as i64), Value::Bytes(d)));
builder
}
/// Constructor for a symmetric key specified by `k`.
pub fn new_symmetric_key(k: Vec<u8>) -> Self {
Self(CoseKey {
kty: KeyType::Assigned(iana::KeyType::Symmetric),
params: vec![(
Label::Int(iana::SymmetricKeyParameter::K as i64),
Value::Bytes(k),
)],
..Default::default()
})
}
/// Set the algorithm.
#[must_use]
pub fn algorithm(mut self, alg: iana::Algorithm) -> Self {
self.0.alg = Some(Algorithm::Assigned(alg));
self
}
/// Add a key operation.
#[must_use]
pub fn add_key_op(mut self, op: iana::KeyOperation) -> Self {
self.0.key_ops.insert(KeyOperation::Assigned(op));
self
}
/// Set a parameter value.
///
/// # Panics
///
/// This function will panic if it used to set a parameter label from the [`iana::KeyParameter`]
/// range.
#[must_use]
pub fn param(mut self, label: i64, value: Value) -> Self {
if iana::KeyParameter::from_i64(label).is_some() {
panic!("param() method used to set KeyParameter"); // safe: invalid input
}
self.0.params.push((Label::Int(label), value));
self
}
}
|
//! COSE_Key functionality.
use crate::{
|
random_line_split
|
mod.rs
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
//! COSE_Key functionality.
use crate::{
cbor::value::Value,
common::AsCborValue,
iana,
iana::EnumI64,
util::{to_cbor_array, ValueTryAs},
Algorithm, CoseError, Label, Result,
};
use alloc::{collections::BTreeSet, vec, vec::Vec};
#[cfg(test)]
mod tests;
/// Key type.
pub type KeyType = crate::RegisteredLabel<iana::KeyType>;
impl Default for KeyType {
fn default() -> Self {
KeyType::Assigned(iana::KeyType::Reserved)
}
}
/// Key operation.
pub type KeyOperation = crate::RegisteredLabel<iana::KeyOperation>;
/// A collection of [`CoseKey`] objects.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct CoseKeySet(pub Vec<CoseKey>);
impl crate::CborSerializable for CoseKeySet {}
impl AsCborValue for CoseKeySet {
fn from_cbor_value(value: Value) -> Result<Self> {
Ok(Self(
value.try_as_array_then_convert(CoseKey::from_cbor_value)?,
))
}
fn to_cbor_value(self) -> Result<Value> {
to_cbor_array(self.0)
}
}
/// Structure representing a cryptographic key.
///
/// ```cddl
/// COSE_Key = {
/// 1 => tstr / int, ; kty
/// ? 2 => bstr, ; kid
/// ? 3 => tstr / int, ; alg
/// ? 4 => [+ (tstr / int) ], ; key_ops
/// ? 5 => bstr, ; Base IV
/// * label => values
/// }
/// ```
#[derive(Clone, Debug, Default, PartialEq)]
pub struct CoseKey {
/// Key type identification.
pub kty: KeyType,
/// Key identification.
pub key_id: Vec<u8>,
/// Key use restriction to this algorithm.
pub alg: Option<Algorithm>,
/// Restrict set of possible operations.
pub key_ops: BTreeSet<KeyOperation>,
/// Base IV to be xor-ed with partial IVs.
pub base_iv: Vec<u8>,
/// Any additional parameter (label,value) pairs. If duplicate labels are present,
/// CBOR-encoding will fail.
pub params: Vec<(Label, Value)>,
}
impl crate::CborSerializable for CoseKey {}
const KTY: Label = Label::Int(iana::KeyParameter::Kty as i64);
const KID: Label = Label::Int(iana::KeyParameter::Kid as i64);
const ALG: Label = Label::Int(iana::KeyParameter::Alg as i64);
const KEY_OPS: Label = Label::Int(iana::KeyParameter::KeyOps as i64);
const BASE_IV: Label = Label::Int(iana::KeyParameter::BaseIv as i64);
impl AsCborValue for CoseKey {
fn from_cbor_value(value: Value) -> Result<Self> {
let m = value.try_as_map()?;
let mut key = Self::default();
let mut seen = BTreeSet::new();
for (l, value) in m.into_iter() {
// The `ciborium` CBOR library does not police duplicate map keys.
// RFC 8152 section 14 requires that COSE does police duplicates, so do it here.
let label = Label::from_cbor_value(l)?;
if seen.contains(&label) {
return Err(CoseError::DuplicateMapKey);
}
seen.insert(label.clone());
match label {
KTY => key.kty = KeyType::from_cbor_value(value)?,
KID => {
key.key_id = value.try_as_nonempty_bytes()?;
}
ALG => key.alg = Some(Algorithm::from_cbor_value(value)?),
KEY_OPS => {
let key_ops = value.try_as_array()?;
for key_op in key_ops.into_iter() {
if!key.key_ops.insert(KeyOperation::from_cbor_value(key_op)?) {
return Err(CoseError::UnexpectedItem(
"repeated array entry",
"unique array label",
));
}
}
if key.key_ops.is_empty() {
return Err(CoseError::UnexpectedItem("empty array", "non-empty array"));
}
}
BASE_IV => {
key.base_iv = value.try_as_nonempty_bytes()?;
}
label => key.params.push((label, value)),
}
}
// Check that key type has been set.
if key.kty == KeyType::Assigned(iana::KeyType::Reserved) {
return Err(CoseError::UnexpectedItem(
"no kty label",
"mandatory kty label",
));
}
Ok(key)
}
fn
|
(self) -> Result<Value> {
let mut map: Vec<(Value, Value)> = vec![(KTY.to_cbor_value()?, self.kty.to_cbor_value()?)];
if!self.key_id.is_empty() {
map.push((KID.to_cbor_value()?, Value::Bytes(self.key_id)));
}
if let Some(alg) = self.alg {
map.push((ALG.to_cbor_value()?, alg.to_cbor_value()?));
}
if!self.key_ops.is_empty() {
map.push((KEY_OPS.to_cbor_value()?, to_cbor_array(self.key_ops)?));
}
if!self.base_iv.is_empty() {
map.push((BASE_IV.to_cbor_value()?, Value::Bytes(self.base_iv)));
}
let mut seen = BTreeSet::new();
for (label, value) in self.params {
if seen.contains(&label) {
return Err(CoseError::DuplicateMapKey);
}
seen.insert(label.clone());
map.push((label.to_cbor_value()?, value));
}
Ok(Value::Map(map))
}
}
/// Builder for [`CoseKey`] objects.
#[derive(Debug, Default)]
pub struct CoseKeyBuilder(CoseKey);
impl CoseKeyBuilder {
builder! {CoseKey}
builder_set! {key_id: Vec<u8>}
builder_set! {base_iv: Vec<u8>}
/// Constructor for an elliptic curve public key specified by `x` and `y` coordinates.
pub fn new_ec2_pub_key(curve: iana::EllipticCurve, x: Vec<u8>, y: Vec<u8>) -> Self {
Self(CoseKey {
kty: KeyType::Assigned(iana::KeyType::EC2),
params: vec![
(
Label::Int(iana::Ec2KeyParameter::Crv as i64),
Value::from(curve as u64),
),
(Label::Int(iana::Ec2KeyParameter::X as i64), Value::Bytes(x)),
(Label::Int(iana::Ec2KeyParameter::Y as i64), Value::Bytes(y)),
],
..Default::default()
})
}
/// Constructor for an elliptic curve public key specified by `x` coordinate plus sign of `y`
/// coordinate.
pub fn new_ec2_pub_key_y_sign(curve: iana::EllipticCurve, x: Vec<u8>, y_sign: bool) -> Self {
Self(CoseKey {
kty: KeyType::Assigned(iana::KeyType::EC2),
params: vec![
(
Label::Int(iana::Ec2KeyParameter::Crv as i64),
Value::from(curve as u64),
),
(Label::Int(iana::Ec2KeyParameter::X as i64), Value::Bytes(x)),
(
Label::Int(iana::Ec2KeyParameter::Y as i64),
Value::Bool(y_sign),
),
],
..Default::default()
})
}
/// Constructor for an elliptic curve private key specified by `d`, together with public `x` and
/// `y` coordinates.
pub fn new_ec2_priv_key(
curve: iana::EllipticCurve,
x: Vec<u8>,
y: Vec<u8>,
d: Vec<u8>,
) -> Self {
let mut builder = Self::new_ec2_pub_key(curve, x, y);
builder
.0
.params
.push((Label::Int(iana::Ec2KeyParameter::D as i64), Value::Bytes(d)));
builder
}
/// Constructor for a symmetric key specified by `k`.
pub fn new_symmetric_key(k: Vec<u8>) -> Self {
Self(CoseKey {
kty: KeyType::Assigned(iana::KeyType::Symmetric),
params: vec![(
Label::Int(iana::SymmetricKeyParameter::K as i64),
Value::Bytes(k),
)],
..Default::default()
})
}
/// Set the algorithm.
#[must_use]
pub fn algorithm(mut self, alg: iana::Algorithm) -> Self {
self.0.alg = Some(Algorithm::Assigned(alg));
self
}
/// Add a key operation.
#[must_use]
pub fn add_key_op(mut self, op: iana::KeyOperation) -> Self {
self.0.key_ops.insert(KeyOperation::Assigned(op));
self
}
/// Set a parameter value.
///
/// # Panics
///
/// This function will panic if it used to set a parameter label from the [`iana::KeyParameter`]
/// range.
#[must_use]
pub fn param(mut self, label: i64, value: Value) -> Self {
if iana::KeyParameter::from_i64(label).is_some() {
panic!("param() method used to set KeyParameter"); // safe: invalid input
}
self.0.params.push((Label::Int(label), value));
self
}
}
|
to_cbor_value
|
identifier_name
|
keyframes.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Keyframes: https://drafts.csswg.org/css-animations/#keyframes
#![deny(missing_docs)]
use cssparser::{AtRuleParser, Parser, QualifiedRuleParser, RuleListParser};
use cssparser::{DeclarationListParser, DeclarationParser, parse_one_rule};
use parking_lot::RwLock;
use parser::{ParserContext, ParserContextExtraData, log_css_error};
use properties::{Importance, PropertyDeclaration, PropertyDeclarationBlock, PropertyId};
use properties::{PropertyDeclarationId, LonghandId, ParsedDeclaration};
use properties::LonghandIdSet;
use properties::animated_properties::TransitionProperty;
use properties::longhands::transition_timing_function::single_value::SpecifiedValue as SpecifiedTimingFunction;
use std::fmt;
use std::sync::Arc;
use style_traits::ToCss;
use stylesheets::{MemoryHoleReporter, Stylesheet};
/// A number from 0 to 1, indicating the percentage of the animation when this
/// keyframe should run.
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframePercentage(pub f32);
impl ::std::cmp::Ord for KeyframePercentage {
#[inline]
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
// We know we have a number from 0 to 1, so unwrap() here is safe.
self.0.partial_cmp(&other.0).unwrap()
}
}
impl ::std::cmp::Eq for KeyframePercentage { }
impl ToCss for KeyframePercentage {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
write!(dest, "{}%", self.0 * 100.0)
}
}
impl KeyframePercentage {
/// Trivially constructs a new `KeyframePercentage`.
#[inline]
pub fn new(value: f32) -> KeyframePercentage {
debug_assert!(value >= 0. && value <= 1.);
KeyframePercentage(value)
}
fn parse(input: &mut Parser) -> Result<KeyframePercentage, ()> {
let percentage = if input.try(|input| input.expect_ident_matching("from")).is_ok() {
KeyframePercentage::new(0.)
} else if input.try(|input| input.expect_ident_matching("to")).is_ok() {
KeyframePercentage::new(1.)
} else {
let percentage = try!(input.expect_percentage());
if percentage >= 0. && percentage <= 1. {
KeyframePercentage::new(percentage)
} else {
return Err(());
}
};
Ok(percentage)
}
}
/// A keyframes selector is a list of percentages or from/to symbols, which are
/// converted at parse time to percentages.
#[derive(Debug, PartialEq)]
pub struct KeyframeSelector(Vec<KeyframePercentage>);
impl KeyframeSelector {
/// Return the list of percentages this selector contains.
#[inline]
pub fn percentages(&self) -> &[KeyframePercentage] {
&self.0
}
/// A dummy public function so we can write a unit test for this.
pub fn new_for_unit_testing(percentages: Vec<KeyframePercentage>) -> KeyframeSelector {
KeyframeSelector(percentages)
}
/// Parse a keyframe selector from CSS input.
pub fn parse(input: &mut Parser) -> Result<Self, ()> {
input.parse_comma_separated(KeyframePercentage::parse)
.map(KeyframeSelector)
}
}
/// A keyframe.
#[derive(Debug)]
pub struct Keyframe {
/// The selector this keyframe was specified from.
pub selector: KeyframeSelector,
/// The declaration block that was declared inside this keyframe.
///
/// Note that `!important` rules in keyframes don't apply, but we keep this
/// `Arc` just for convenience.
pub block: Arc<RwLock<PropertyDeclarationBlock>>,
}
impl ToCss for Keyframe {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
let mut iter = self.selector.percentages().iter();
try!(iter.next().unwrap().to_css(dest));
for percentage in iter {
try!(write!(dest, ", "));
try!(percentage.to_css(dest));
}
try!(dest.write_str(" { "));
try!(self.block.read().to_css(dest));
try!(dest.write_str(" }"));
Ok(())
}
}
impl Keyframe {
/// Parse a CSS keyframe.
pub fn parse(css: &str,
parent_stylesheet: &Stylesheet,
extra_data: ParserContextExtraData)
-> Result<Arc<RwLock<Self>>, ()> {
let error_reporter = MemoryHoleReporter;
let context = ParserContext::new_with_extra_data(parent_stylesheet.origin,
&parent_stylesheet.base_url,
&error_reporter,
extra_data);
let mut input = Parser::new(css);
let mut rule_parser = KeyframeListParser {
context: &context,
};
parse_one_rule(&mut input, &mut rule_parser)
}
}
/// A keyframes step value. This can be a synthetised keyframes animation, that
/// is, one autogenerated from the current computed values, or a list of
/// declarations to apply.
///
/// TODO: Find a better name for this?
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum KeyframesStepValue {
/// A step formed by a declaration block specified by the CSS.
Declarations {
/// The declaration block per se.
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
block: Arc<RwLock<PropertyDeclarationBlock>>
},
/// A synthetic step computed from the current computed values at the time
/// of the animation.
ComputedValues,
}
/// A single step from a keyframe animation.
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesStep {
/// The percentage of the animation duration when this step starts.
pub start_percentage: KeyframePercentage,
/// Declarations that will determine the final style during the step, or
/// `ComputedValues` if this is an autogenerated step.
pub value: KeyframesStepValue,
/// Wether a animation-timing-function declaration exists in the list of
/// declarations.
///
/// This is used to know when to override the keyframe animation style.
pub declared_timing_function: bool,
}
impl KeyframesStep {
#[inline]
fn new(percentage: KeyframePercentage,
value: KeyframesStepValue) -> Self {
let declared_timing_function = match value {
KeyframesStepValue::Declarations { ref block } => {
block.read().declarations().iter().any(|&(ref prop_decl, _)| {
match *prop_decl {
PropertyDeclaration::AnimationTimingFunction(..) => true,
_ => false,
}
})
}
_ => false,
};
KeyframesStep {
start_percentage: percentage,
value: value,
declared_timing_function: declared_timing_function,
}
}
/// Return specified TransitionTimingFunction if this KeyframesSteps has 'animation-timing-function'.
pub fn get_animation_timing_function(&self) -> Option<SpecifiedTimingFunction> {
if!self.declared_timing_function {
return None;
}
match self.value {
KeyframesStepValue::Declarations { ref block } => {
let guard = block.read();
let &(ref declaration, _) =
guard.get(PropertyDeclarationId::Longhand(LonghandId::AnimationTimingFunction)).unwrap();
match *declaration {
PropertyDeclaration::AnimationTimingFunction(ref value) => {
// Use the first value.
Some(value.0[0])
},
PropertyDeclaration::CSSWideKeyword(..) => None,
PropertyDeclaration::WithVariables(..) => None,
_ => panic!(),
}
|
},
KeyframesStepValue::ComputedValues => {
panic!("Shouldn't happen to set animation-timing-function in missing keyframes")
},
}
}
}
/// This structure represents a list of animation steps computed from the list
/// of keyframes, in order.
///
/// It only takes into account animable properties.
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesAnimation {
/// The difference steps of the animation.
pub steps: Vec<KeyframesStep>,
/// The properties that change in this animation.
pub properties_changed: Vec<TransitionProperty>,
}
/// Get all the animated properties in a keyframes animation.
fn get_animated_properties(keyframes: &[Arc<RwLock<Keyframe>>]) -> Vec<TransitionProperty> {
let mut ret = vec![];
let mut seen = LonghandIdSet::new();
// NB: declarations are already deduplicated, so we don't have to check for
// it here.
for keyframe in keyframes {
let keyframe = keyframe.read();
for &(ref declaration, importance) in keyframe.block.read().declarations().iter() {
assert!(!importance.important());
if let Some(property) = TransitionProperty::from_declaration(declaration) {
if!seen.has_transition_property_bit(&property) {
ret.push(property);
seen.set_transition_property_bit(&property);
}
}
}
}
ret
}
impl KeyframesAnimation {
/// Create a keyframes animation from a given list of keyframes.
///
/// This will return a keyframe animation with empty steps and
/// properties_changed if the list of keyframes is empty, or there are no
// animated properties obtained from the keyframes.
///
/// Otherwise, this will compute and sort the steps used for the animation,
/// and return the animation object.
pub fn from_keyframes(keyframes: &[Arc<RwLock<Keyframe>>]) -> Self {
let mut result = KeyframesAnimation {
steps: vec![],
properties_changed: vec![],
};
if keyframes.is_empty() {
return result;
}
result.properties_changed = get_animated_properties(keyframes);
if result.properties_changed.is_empty() {
return result;
}
for keyframe in keyframes {
let keyframe = keyframe.read();
for percentage in keyframe.selector.0.iter() {
result.steps.push(KeyframesStep::new(*percentage, KeyframesStepValue::Declarations {
block: keyframe.block.clone(),
}));
}
}
// Sort by the start percentage, so we can easily find a frame.
result.steps.sort_by_key(|step| step.start_percentage);
// Prepend autogenerated keyframes if appropriate.
if result.steps[0].start_percentage.0!= 0. {
result.steps.insert(0, KeyframesStep::new(KeyframePercentage::new(0.),
KeyframesStepValue::ComputedValues));
}
if result.steps.last().unwrap().start_percentage.0!= 1. {
result.steps.push(KeyframesStep::new(KeyframePercentage::new(1.),
KeyframesStepValue::ComputedValues));
}
result
}
}
/// Parses a keyframes list, like:
/// 0%, 50% {
/// width: 50%;
/// }
///
/// 40%, 60%, 100% {
/// width: 100%;
/// }
struct KeyframeListParser<'a> {
context: &'a ParserContext<'a>,
}
/// Parses a keyframe list from CSS input.
pub fn parse_keyframe_list(context: &ParserContext, input: &mut Parser) -> Vec<Arc<RwLock<Keyframe>>> {
RuleListParser::new_for_nested_rule(input, KeyframeListParser { context: context })
.filter_map(Result::ok)
.collect()
}
enum Void {}
impl<'a> AtRuleParser for KeyframeListParser<'a> {
type Prelude = Void;
type AtRule = Arc<RwLock<Keyframe>>;
}
impl<'a> QualifiedRuleParser for KeyframeListParser<'a> {
type Prelude = KeyframeSelector;
type QualifiedRule = Arc<RwLock<Keyframe>>;
fn parse_prelude(&mut self, input: &mut Parser) -> Result<Self::Prelude, ()> {
let start = input.position();
match KeyframeSelector::parse(input) {
Ok(sel) => Ok(sel),
Err(()) => {
let message = format!("Invalid keyframe rule: '{}'", input.slice_from(start));
log_css_error(input, start, &message, self.context);
Err(())
}
}
}
fn parse_block(&mut self, prelude: Self::Prelude, input: &mut Parser)
-> Result<Self::QualifiedRule, ()> {
let parser = KeyframeDeclarationParser {
context: self.context,
};
let mut iter = DeclarationListParser::new(input, parser);
let mut block = PropertyDeclarationBlock::new();
while let Some(declaration) = iter.next() {
match declaration {
Ok(parsed) => parsed.expand(|d| block.push(d, Importance::Normal)),
Err(range) => {
let pos = range.start;
let message = format!("Unsupported keyframe property declaration: '{}'",
iter.input.slice(range));
log_css_error(iter.input, pos, &*message, self.context);
}
}
// `parse_important` is not called here, `!important` is not allowed in keyframe blocks.
}
Ok(Arc::new(RwLock::new(Keyframe {
selector: prelude,
block: Arc::new(RwLock::new(block)),
})))
}
}
struct KeyframeDeclarationParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
}
/// Default methods reject all at rules.
impl<'a, 'b> AtRuleParser for KeyframeDeclarationParser<'a, 'b> {
type Prelude = ();
type AtRule = ParsedDeclaration;
}
impl<'a, 'b> DeclarationParser for KeyframeDeclarationParser<'a, 'b> {
type Declaration = ParsedDeclaration;
fn parse_value(&mut self, name: &str, input: &mut Parser) -> Result<ParsedDeclaration, ()> {
let id = try!(PropertyId::parse(name.into()));
match ParsedDeclaration::parse(id, self.context, input, true) {
Ok(parsed) => {
// In case there is still unparsed text in the declaration, we should roll back.
if!input.is_exhausted() {
Err(())
} else {
Ok(parsed)
}
}
Err(_) => Err(())
}
}
}
|
random_line_split
|
|
keyframes.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Keyframes: https://drafts.csswg.org/css-animations/#keyframes
#![deny(missing_docs)]
use cssparser::{AtRuleParser, Parser, QualifiedRuleParser, RuleListParser};
use cssparser::{DeclarationListParser, DeclarationParser, parse_one_rule};
use parking_lot::RwLock;
use parser::{ParserContext, ParserContextExtraData, log_css_error};
use properties::{Importance, PropertyDeclaration, PropertyDeclarationBlock, PropertyId};
use properties::{PropertyDeclarationId, LonghandId, ParsedDeclaration};
use properties::LonghandIdSet;
use properties::animated_properties::TransitionProperty;
use properties::longhands::transition_timing_function::single_value::SpecifiedValue as SpecifiedTimingFunction;
use std::fmt;
use std::sync::Arc;
use style_traits::ToCss;
use stylesheets::{MemoryHoleReporter, Stylesheet};
/// A number from 0 to 1, indicating the percentage of the animation when this
/// keyframe should run.
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframePercentage(pub f32);
impl ::std::cmp::Ord for KeyframePercentage {
#[inline]
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
// We know we have a number from 0 to 1, so unwrap() here is safe.
self.0.partial_cmp(&other.0).unwrap()
}
}
impl ::std::cmp::Eq for KeyframePercentage { }
impl ToCss for KeyframePercentage {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
write!(dest, "{}%", self.0 * 100.0)
}
}
impl KeyframePercentage {
/// Trivially constructs a new `KeyframePercentage`.
#[inline]
pub fn new(value: f32) -> KeyframePercentage {
debug_assert!(value >= 0. && value <= 1.);
KeyframePercentage(value)
}
fn parse(input: &mut Parser) -> Result<KeyframePercentage, ()> {
let percentage = if input.try(|input| input.expect_ident_matching("from")).is_ok() {
KeyframePercentage::new(0.)
} else if input.try(|input| input.expect_ident_matching("to")).is_ok() {
KeyframePercentage::new(1.)
} else {
let percentage = try!(input.expect_percentage());
if percentage >= 0. && percentage <= 1. {
KeyframePercentage::new(percentage)
} else {
return Err(());
}
};
Ok(percentage)
}
}
/// A keyframes selector is a list of percentages or from/to symbols, which are
/// converted at parse time to percentages.
#[derive(Debug, PartialEq)]
pub struct KeyframeSelector(Vec<KeyframePercentage>);
impl KeyframeSelector {
/// Return the list of percentages this selector contains.
#[inline]
pub fn percentages(&self) -> &[KeyframePercentage] {
&self.0
}
/// A dummy public function so we can write a unit test for this.
pub fn new_for_unit_testing(percentages: Vec<KeyframePercentage>) -> KeyframeSelector {
KeyframeSelector(percentages)
}
/// Parse a keyframe selector from CSS input.
pub fn parse(input: &mut Parser) -> Result<Self, ()> {
input.parse_comma_separated(KeyframePercentage::parse)
.map(KeyframeSelector)
}
}
/// A keyframe.
#[derive(Debug)]
pub struct Keyframe {
/// The selector this keyframe was specified from.
pub selector: KeyframeSelector,
/// The declaration block that was declared inside this keyframe.
///
/// Note that `!important` rules in keyframes don't apply, but we keep this
/// `Arc` just for convenience.
pub block: Arc<RwLock<PropertyDeclarationBlock>>,
}
impl ToCss for Keyframe {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
let mut iter = self.selector.percentages().iter();
try!(iter.next().unwrap().to_css(dest));
for percentage in iter {
try!(write!(dest, ", "));
try!(percentage.to_css(dest));
}
try!(dest.write_str(" { "));
try!(self.block.read().to_css(dest));
try!(dest.write_str(" }"));
Ok(())
}
}
impl Keyframe {
/// Parse a CSS keyframe.
pub fn parse(css: &str,
parent_stylesheet: &Stylesheet,
extra_data: ParserContextExtraData)
-> Result<Arc<RwLock<Self>>, ()> {
let error_reporter = MemoryHoleReporter;
let context = ParserContext::new_with_extra_data(parent_stylesheet.origin,
&parent_stylesheet.base_url,
&error_reporter,
extra_data);
let mut input = Parser::new(css);
let mut rule_parser = KeyframeListParser {
context: &context,
};
parse_one_rule(&mut input, &mut rule_parser)
}
}
/// A keyframes step value. This can be a synthetised keyframes animation, that
/// is, one autogenerated from the current computed values, or a list of
/// declarations to apply.
///
/// TODO: Find a better name for this?
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum KeyframesStepValue {
/// A step formed by a declaration block specified by the CSS.
Declarations {
/// The declaration block per se.
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
block: Arc<RwLock<PropertyDeclarationBlock>>
},
/// A synthetic step computed from the current computed values at the time
/// of the animation.
ComputedValues,
}
/// A single step from a keyframe animation.
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesStep {
/// The percentage of the animation duration when this step starts.
pub start_percentage: KeyframePercentage,
/// Declarations that will determine the final style during the step, or
/// `ComputedValues` if this is an autogenerated step.
pub value: KeyframesStepValue,
/// Wether a animation-timing-function declaration exists in the list of
/// declarations.
///
/// This is used to know when to override the keyframe animation style.
pub declared_timing_function: bool,
}
impl KeyframesStep {
#[inline]
fn new(percentage: KeyframePercentage,
value: KeyframesStepValue) -> Self {
let declared_timing_function = match value {
KeyframesStepValue::Declarations { ref block } => {
block.read().declarations().iter().any(|&(ref prop_decl, _)| {
match *prop_decl {
PropertyDeclaration::AnimationTimingFunction(..) => true,
_ => false,
}
})
}
_ => false,
};
KeyframesStep {
start_percentage: percentage,
value: value,
declared_timing_function: declared_timing_function,
}
}
/// Return specified TransitionTimingFunction if this KeyframesSteps has 'animation-timing-function'.
pub fn get_animation_timing_function(&self) -> Option<SpecifiedTimingFunction>
|
panic!("Shouldn't happen to set animation-timing-function in missing keyframes")
},
}
}
}
/// This structure represents a list of animation steps computed from the list
/// of keyframes, in order.
///
/// It only takes into account animable properties.
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesAnimation {
/// The difference steps of the animation.
pub steps: Vec<KeyframesStep>,
/// The properties that change in this animation.
pub properties_changed: Vec<TransitionProperty>,
}
/// Get all the animated properties in a keyframes animation.
fn get_animated_properties(keyframes: &[Arc<RwLock<Keyframe>>]) -> Vec<TransitionProperty> {
let mut ret = vec![];
let mut seen = LonghandIdSet::new();
// NB: declarations are already deduplicated, so we don't have to check for
// it here.
for keyframe in keyframes {
let keyframe = keyframe.read();
for &(ref declaration, importance) in keyframe.block.read().declarations().iter() {
assert!(!importance.important());
if let Some(property) = TransitionProperty::from_declaration(declaration) {
if!seen.has_transition_property_bit(&property) {
ret.push(property);
seen.set_transition_property_bit(&property);
}
}
}
}
ret
}
impl KeyframesAnimation {
/// Create a keyframes animation from a given list of keyframes.
///
/// This will return a keyframe animation with empty steps and
/// properties_changed if the list of keyframes is empty, or there are no
// animated properties obtained from the keyframes.
///
/// Otherwise, this will compute and sort the steps used for the animation,
/// and return the animation object.
pub fn from_keyframes(keyframes: &[Arc<RwLock<Keyframe>>]) -> Self {
let mut result = KeyframesAnimation {
steps: vec![],
properties_changed: vec![],
};
if keyframes.is_empty() {
return result;
}
result.properties_changed = get_animated_properties(keyframes);
if result.properties_changed.is_empty() {
return result;
}
for keyframe in keyframes {
let keyframe = keyframe.read();
for percentage in keyframe.selector.0.iter() {
result.steps.push(KeyframesStep::new(*percentage, KeyframesStepValue::Declarations {
block: keyframe.block.clone(),
}));
}
}
// Sort by the start percentage, so we can easily find a frame.
result.steps.sort_by_key(|step| step.start_percentage);
// Prepend autogenerated keyframes if appropriate.
if result.steps[0].start_percentage.0!= 0. {
result.steps.insert(0, KeyframesStep::new(KeyframePercentage::new(0.),
KeyframesStepValue::ComputedValues));
}
if result.steps.last().unwrap().start_percentage.0!= 1. {
result.steps.push(KeyframesStep::new(KeyframePercentage::new(1.),
KeyframesStepValue::ComputedValues));
}
result
}
}
/// Parses a keyframes list, like:
/// 0%, 50% {
/// width: 50%;
/// }
///
/// 40%, 60%, 100% {
/// width: 100%;
/// }
struct KeyframeListParser<'a> {
context: &'a ParserContext<'a>,
}
/// Parses a keyframe list from CSS input.
pub fn parse_keyframe_list(context: &ParserContext, input: &mut Parser) -> Vec<Arc<RwLock<Keyframe>>> {
RuleListParser::new_for_nested_rule(input, KeyframeListParser { context: context })
.filter_map(Result::ok)
.collect()
}
enum Void {}
impl<'a> AtRuleParser for KeyframeListParser<'a> {
type Prelude = Void;
type AtRule = Arc<RwLock<Keyframe>>;
}
impl<'a> QualifiedRuleParser for KeyframeListParser<'a> {
type Prelude = KeyframeSelector;
type QualifiedRule = Arc<RwLock<Keyframe>>;
fn parse_prelude(&mut self, input: &mut Parser) -> Result<Self::Prelude, ()> {
let start = input.position();
match KeyframeSelector::parse(input) {
Ok(sel) => Ok(sel),
Err(()) => {
let message = format!("Invalid keyframe rule: '{}'", input.slice_from(start));
log_css_error(input, start, &message, self.context);
Err(())
}
}
}
fn parse_block(&mut self, prelude: Self::Prelude, input: &mut Parser)
-> Result<Self::QualifiedRule, ()> {
let parser = KeyframeDeclarationParser {
context: self.context,
};
let mut iter = DeclarationListParser::new(input, parser);
let mut block = PropertyDeclarationBlock::new();
while let Some(declaration) = iter.next() {
match declaration {
Ok(parsed) => parsed.expand(|d| block.push(d, Importance::Normal)),
Err(range) => {
let pos = range.start;
let message = format!("Unsupported keyframe property declaration: '{}'",
iter.input.slice(range));
log_css_error(iter.input, pos, &*message, self.context);
}
}
// `parse_important` is not called here, `!important` is not allowed in keyframe blocks.
}
Ok(Arc::new(RwLock::new(Keyframe {
selector: prelude,
block: Arc::new(RwLock::new(block)),
})))
}
}
struct KeyframeDeclarationParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
}
/// Default methods reject all at rules.
impl<'a, 'b> AtRuleParser for KeyframeDeclarationParser<'a, 'b> {
type Prelude = ();
type AtRule = ParsedDeclaration;
}
impl<'a, 'b> DeclarationParser for KeyframeDeclarationParser<'a, 'b> {
type Declaration = ParsedDeclaration;
fn parse_value(&mut self, name: &str, input: &mut Parser) -> Result<ParsedDeclaration, ()> {
let id = try!(PropertyId::parse(name.into()));
match ParsedDeclaration::parse(id, self.context, input, true) {
Ok(parsed) => {
// In case there is still unparsed text in the declaration, we should roll back.
if!input.is_exhausted() {
Err(())
} else {
Ok(parsed)
}
}
Err(_) => Err(())
}
}
}
|
{
if !self.declared_timing_function {
return None;
}
match self.value {
KeyframesStepValue::Declarations { ref block } => {
let guard = block.read();
let &(ref declaration, _) =
guard.get(PropertyDeclarationId::Longhand(LonghandId::AnimationTimingFunction)).unwrap();
match *declaration {
PropertyDeclaration::AnimationTimingFunction(ref value) => {
// Use the first value.
Some(value.0[0])
},
PropertyDeclaration::CSSWideKeyword(..) => None,
PropertyDeclaration::WithVariables(..) => None,
_ => panic!(),
}
},
KeyframesStepValue::ComputedValues => {
|
identifier_body
|
keyframes.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Keyframes: https://drafts.csswg.org/css-animations/#keyframes
#![deny(missing_docs)]
use cssparser::{AtRuleParser, Parser, QualifiedRuleParser, RuleListParser};
use cssparser::{DeclarationListParser, DeclarationParser, parse_one_rule};
use parking_lot::RwLock;
use parser::{ParserContext, ParserContextExtraData, log_css_error};
use properties::{Importance, PropertyDeclaration, PropertyDeclarationBlock, PropertyId};
use properties::{PropertyDeclarationId, LonghandId, ParsedDeclaration};
use properties::LonghandIdSet;
use properties::animated_properties::TransitionProperty;
use properties::longhands::transition_timing_function::single_value::SpecifiedValue as SpecifiedTimingFunction;
use std::fmt;
use std::sync::Arc;
use style_traits::ToCss;
use stylesheets::{MemoryHoleReporter, Stylesheet};
/// A number from 0 to 1, indicating the percentage of the animation when this
/// keyframe should run.
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframePercentage(pub f32);
impl ::std::cmp::Ord for KeyframePercentage {
#[inline]
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
// We know we have a number from 0 to 1, so unwrap() here is safe.
self.0.partial_cmp(&other.0).unwrap()
}
}
impl ::std::cmp::Eq for KeyframePercentage { }
impl ToCss for KeyframePercentage {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
write!(dest, "{}%", self.0 * 100.0)
}
}
impl KeyframePercentage {
/// Trivially constructs a new `KeyframePercentage`.
#[inline]
pub fn new(value: f32) -> KeyframePercentage {
debug_assert!(value >= 0. && value <= 1.);
KeyframePercentage(value)
}
fn parse(input: &mut Parser) -> Result<KeyframePercentage, ()> {
let percentage = if input.try(|input| input.expect_ident_matching("from")).is_ok() {
KeyframePercentage::new(0.)
} else if input.try(|input| input.expect_ident_matching("to")).is_ok() {
KeyframePercentage::new(1.)
} else {
let percentage = try!(input.expect_percentage());
if percentage >= 0. && percentage <= 1. {
KeyframePercentage::new(percentage)
} else {
return Err(());
}
};
Ok(percentage)
}
}
/// A keyframes selector is a list of percentages or from/to symbols, which are
/// converted at parse time to percentages.
#[derive(Debug, PartialEq)]
pub struct KeyframeSelector(Vec<KeyframePercentage>);
impl KeyframeSelector {
/// Return the list of percentages this selector contains.
#[inline]
pub fn percentages(&self) -> &[KeyframePercentage] {
&self.0
}
/// A dummy public function so we can write a unit test for this.
pub fn new_for_unit_testing(percentages: Vec<KeyframePercentage>) -> KeyframeSelector {
KeyframeSelector(percentages)
}
/// Parse a keyframe selector from CSS input.
pub fn parse(input: &mut Parser) -> Result<Self, ()> {
input.parse_comma_separated(KeyframePercentage::parse)
.map(KeyframeSelector)
}
}
/// A keyframe.
#[derive(Debug)]
pub struct Keyframe {
/// The selector this keyframe was specified from.
pub selector: KeyframeSelector,
/// The declaration block that was declared inside this keyframe.
///
/// Note that `!important` rules in keyframes don't apply, but we keep this
/// `Arc` just for convenience.
pub block: Arc<RwLock<PropertyDeclarationBlock>>,
}
impl ToCss for Keyframe {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
let mut iter = self.selector.percentages().iter();
try!(iter.next().unwrap().to_css(dest));
for percentage in iter {
try!(write!(dest, ", "));
try!(percentage.to_css(dest));
}
try!(dest.write_str(" { "));
try!(self.block.read().to_css(dest));
try!(dest.write_str(" }"));
Ok(())
}
}
impl Keyframe {
/// Parse a CSS keyframe.
pub fn parse(css: &str,
parent_stylesheet: &Stylesheet,
extra_data: ParserContextExtraData)
-> Result<Arc<RwLock<Self>>, ()> {
let error_reporter = MemoryHoleReporter;
let context = ParserContext::new_with_extra_data(parent_stylesheet.origin,
&parent_stylesheet.base_url,
&error_reporter,
extra_data);
let mut input = Parser::new(css);
let mut rule_parser = KeyframeListParser {
context: &context,
};
parse_one_rule(&mut input, &mut rule_parser)
}
}
/// A keyframes step value. This can be a synthetised keyframes animation, that
/// is, one autogenerated from the current computed values, or a list of
/// declarations to apply.
///
/// TODO: Find a better name for this?
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum KeyframesStepValue {
/// A step formed by a declaration block specified by the CSS.
Declarations {
/// The declaration block per se.
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
block: Arc<RwLock<PropertyDeclarationBlock>>
},
/// A synthetic step computed from the current computed values at the time
/// of the animation.
ComputedValues,
}
/// A single step from a keyframe animation.
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesStep {
/// The percentage of the animation duration when this step starts.
pub start_percentage: KeyframePercentage,
/// Declarations that will determine the final style during the step, or
/// `ComputedValues` if this is an autogenerated step.
pub value: KeyframesStepValue,
/// Wether a animation-timing-function declaration exists in the list of
/// declarations.
///
/// This is used to know when to override the keyframe animation style.
pub declared_timing_function: bool,
}
impl KeyframesStep {
#[inline]
fn new(percentage: KeyframePercentage,
value: KeyframesStepValue) -> Self {
let declared_timing_function = match value {
KeyframesStepValue::Declarations { ref block } => {
block.read().declarations().iter().any(|&(ref prop_decl, _)| {
match *prop_decl {
PropertyDeclaration::AnimationTimingFunction(..) => true,
_ => false,
}
})
}
_ => false,
};
KeyframesStep {
start_percentage: percentage,
value: value,
declared_timing_function: declared_timing_function,
}
}
/// Return specified TransitionTimingFunction if this KeyframesSteps has 'animation-timing-function'.
pub fn get_animation_timing_function(&self) -> Option<SpecifiedTimingFunction> {
if!self.declared_timing_function {
return None;
}
match self.value {
KeyframesStepValue::Declarations { ref block } => {
let guard = block.read();
let &(ref declaration, _) =
guard.get(PropertyDeclarationId::Longhand(LonghandId::AnimationTimingFunction)).unwrap();
match *declaration {
PropertyDeclaration::AnimationTimingFunction(ref value) => {
// Use the first value.
Some(value.0[0])
},
PropertyDeclaration::CSSWideKeyword(..) => None,
PropertyDeclaration::WithVariables(..) => None,
_ => panic!(),
}
},
KeyframesStepValue::ComputedValues => {
panic!("Shouldn't happen to set animation-timing-function in missing keyframes")
},
}
}
}
/// This structure represents a list of animation steps computed from the list
/// of keyframes, in order.
///
/// It only takes into account animable properties.
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesAnimation {
/// The difference steps of the animation.
pub steps: Vec<KeyframesStep>,
/// The properties that change in this animation.
pub properties_changed: Vec<TransitionProperty>,
}
/// Get all the animated properties in a keyframes animation.
fn get_animated_properties(keyframes: &[Arc<RwLock<Keyframe>>]) -> Vec<TransitionProperty> {
let mut ret = vec![];
let mut seen = LonghandIdSet::new();
// NB: declarations are already deduplicated, so we don't have to check for
// it here.
for keyframe in keyframes {
let keyframe = keyframe.read();
for &(ref declaration, importance) in keyframe.block.read().declarations().iter() {
assert!(!importance.important());
if let Some(property) = TransitionProperty::from_declaration(declaration) {
if!seen.has_transition_property_bit(&property) {
ret.push(property);
seen.set_transition_property_bit(&property);
}
}
}
}
ret
}
impl KeyframesAnimation {
/// Create a keyframes animation from a given list of keyframes.
///
/// This will return a keyframe animation with empty steps and
/// properties_changed if the list of keyframes is empty, or there are no
// animated properties obtained from the keyframes.
///
/// Otherwise, this will compute and sort the steps used for the animation,
/// and return the animation object.
pub fn from_keyframes(keyframes: &[Arc<RwLock<Keyframe>>]) -> Self {
let mut result = KeyframesAnimation {
steps: vec![],
properties_changed: vec![],
};
if keyframes.is_empty() {
return result;
}
result.properties_changed = get_animated_properties(keyframes);
if result.properties_changed.is_empty() {
return result;
}
for keyframe in keyframes {
let keyframe = keyframe.read();
for percentage in keyframe.selector.0.iter() {
result.steps.push(KeyframesStep::new(*percentage, KeyframesStepValue::Declarations {
block: keyframe.block.clone(),
}));
}
}
// Sort by the start percentage, so we can easily find a frame.
result.steps.sort_by_key(|step| step.start_percentage);
// Prepend autogenerated keyframes if appropriate.
if result.steps[0].start_percentage.0!= 0. {
result.steps.insert(0, KeyframesStep::new(KeyframePercentage::new(0.),
KeyframesStepValue::ComputedValues));
}
if result.steps.last().unwrap().start_percentage.0!= 1. {
result.steps.push(KeyframesStep::new(KeyframePercentage::new(1.),
KeyframesStepValue::ComputedValues));
}
result
}
}
/// Parses a keyframes list, like:
/// 0%, 50% {
/// width: 50%;
/// }
///
/// 40%, 60%, 100% {
/// width: 100%;
/// }
struct KeyframeListParser<'a> {
context: &'a ParserContext<'a>,
}
/// Parses a keyframe list from CSS input.
pub fn parse_keyframe_list(context: &ParserContext, input: &mut Parser) -> Vec<Arc<RwLock<Keyframe>>> {
RuleListParser::new_for_nested_rule(input, KeyframeListParser { context: context })
.filter_map(Result::ok)
.collect()
}
enum
|
{}
impl<'a> AtRuleParser for KeyframeListParser<'a> {
type Prelude = Void;
type AtRule = Arc<RwLock<Keyframe>>;
}
impl<'a> QualifiedRuleParser for KeyframeListParser<'a> {
type Prelude = KeyframeSelector;
type QualifiedRule = Arc<RwLock<Keyframe>>;
fn parse_prelude(&mut self, input: &mut Parser) -> Result<Self::Prelude, ()> {
let start = input.position();
match KeyframeSelector::parse(input) {
Ok(sel) => Ok(sel),
Err(()) => {
let message = format!("Invalid keyframe rule: '{}'", input.slice_from(start));
log_css_error(input, start, &message, self.context);
Err(())
}
}
}
fn parse_block(&mut self, prelude: Self::Prelude, input: &mut Parser)
-> Result<Self::QualifiedRule, ()> {
let parser = KeyframeDeclarationParser {
context: self.context,
};
let mut iter = DeclarationListParser::new(input, parser);
let mut block = PropertyDeclarationBlock::new();
while let Some(declaration) = iter.next() {
match declaration {
Ok(parsed) => parsed.expand(|d| block.push(d, Importance::Normal)),
Err(range) => {
let pos = range.start;
let message = format!("Unsupported keyframe property declaration: '{}'",
iter.input.slice(range));
log_css_error(iter.input, pos, &*message, self.context);
}
}
// `parse_important` is not called here, `!important` is not allowed in keyframe blocks.
}
Ok(Arc::new(RwLock::new(Keyframe {
selector: prelude,
block: Arc::new(RwLock::new(block)),
})))
}
}
struct KeyframeDeclarationParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
}
/// Default methods reject all at rules.
impl<'a, 'b> AtRuleParser for KeyframeDeclarationParser<'a, 'b> {
type Prelude = ();
type AtRule = ParsedDeclaration;
}
impl<'a, 'b> DeclarationParser for KeyframeDeclarationParser<'a, 'b> {
type Declaration = ParsedDeclaration;
fn parse_value(&mut self, name: &str, input: &mut Parser) -> Result<ParsedDeclaration, ()> {
let id = try!(PropertyId::parse(name.into()));
match ParsedDeclaration::parse(id, self.context, input, true) {
Ok(parsed) => {
// In case there is still unparsed text in the declaration, we should roll back.
if!input.is_exhausted() {
Err(())
} else {
Ok(parsed)
}
}
Err(_) => Err(())
}
}
}
|
Void
|
identifier_name
|
clear.rs
|
use fbo::{self, ValidatedAttachments};
use context::Context;
use ContextExt;
use Rect;
use QueryExt;
use draw_parameters::TimeElapsedQuery;
use Api;
use version::Version;
use gl;
pub fn clear(context: &Context, framebuffer: Option<&ValidatedAttachments>,
rect: Option<&Rect>, color: Option<(f32, f32, f32, f32)>, color_srgb: bool,
depth: Option<f32>, stencil: Option<i32>)
{
unsafe {
let mut ctxt = context.make_current();
let fbo_id = fbo::FramebuffersContainer::get_framebuffer_for_drawing(&mut ctxt, framebuffer);
fbo::bind_framebuffer(&mut ctxt, fbo_id, true, false);
if ctxt.state.enabled_rasterizer_discard {
ctxt.gl.Disable(gl::RASTERIZER_DISCARD);
ctxt.state.enabled_rasterizer_discard = false;
}
if ctxt.state.color_mask!= (1, 1, 1, 1) {
ctxt.state.color_mask = (1, 1, 1, 1);
ctxt.gl.ColorMask(1, 1, 1, 1);
}
if ctxt.version >= &Version(Api::Gl, 3, 0) || ctxt.extensions.gl_arb_framebuffer_srgb ||
ctxt.extensions.gl_ext_framebuffer_srgb || ctxt.extensions.gl_ext_srgb_write_control
{
if!color_srgb &&!ctxt.state.enabled_framebuffer_srgb {
ctxt.gl.Enable(gl::FRAMEBUFFER_SRGB);
ctxt.state.enabled_framebuffer_srgb = true;
} else if color_srgb && ctxt.state.enabled_framebuffer_srgb {
ctxt.gl.Disable(gl::FRAMEBUFFER_SRGB);
ctxt.state.enabled_framebuffer_srgb = false;
}
}
TimeElapsedQuery::end_conditional_render(&mut ctxt);
if let Some(rect) = rect {
let rect = (rect.left as gl::types::GLint, rect.bottom as gl::types::GLint,
rect.width as gl::types::GLsizei, rect.height as gl::types::GLsizei);
if ctxt.state.scissor!= Some(rect) {
ctxt.gl.Scissor(rect.0, rect.1, rect.2, rect.3);
ctxt.state.scissor = Some(rect);
}
if!ctxt.state.enabled_scissor_test {
ctxt.gl.Enable(gl::SCISSOR_TEST);
ctxt.state.enabled_scissor_test = true;
}
} else {
if ctxt.state.enabled_scissor_test {
ctxt.gl.Disable(gl::SCISSOR_TEST);
ctxt.state.enabled_scissor_test = false;
}
}
let mut flags = 0;
if let Some(color) = color {
let color = (color.0 as gl::types::GLclampf, color.1 as gl::types::GLclampf,
color.2 as gl::types::GLclampf, color.3 as gl::types::GLclampf);
flags |= gl::COLOR_BUFFER_BIT;
if ctxt.state.clear_color!= color {
ctxt.gl.ClearColor(color.0, color.1, color.2, color.3);
ctxt.state.clear_color = color;
}
}
if let Some(depth) = depth
|
}
}
if let Some(stencil) = stencil {
let stencil = stencil as gl::types::GLint;
flags |= gl::STENCIL_BUFFER_BIT;
if ctxt.state.clear_stencil!= stencil {
ctxt.gl.ClearStencil(stencil);
ctxt.state.clear_stencil = stencil;
}
}
ctxt.gl.Clear(flags);
}
}
|
{
let depth = depth as gl::types::GLclampf;
flags |= gl::DEPTH_BUFFER_BIT;
if ctxt.state.clear_depth != depth {
if ctxt.version >= &Version(Api::Gl, 1, 0) {
ctxt.gl.ClearDepth(depth as gl::types::GLclampd);
} else if ctxt.version >= &Version(Api::GlEs, 2, 0) {
ctxt.gl.ClearDepthf(depth);
} else {
unreachable!();
}
ctxt.state.clear_depth = depth;
}
if !ctxt.state.depth_mask {
ctxt.gl.DepthMask(gl::TRUE);
ctxt.state.depth_mask = true;
|
conditional_block
|
clear.rs
|
use fbo::{self, ValidatedAttachments};
use context::Context;
use ContextExt;
use Rect;
use QueryExt;
use draw_parameters::TimeElapsedQuery;
use Api;
use version::Version;
use gl;
pub fn clear(context: &Context, framebuffer: Option<&ValidatedAttachments>,
rect: Option<&Rect>, color: Option<(f32, f32, f32, f32)>, color_srgb: bool,
depth: Option<f32>, stencil: Option<i32>)
{
unsafe {
let mut ctxt = context.make_current();
let fbo_id = fbo::FramebuffersContainer::get_framebuffer_for_drawing(&mut ctxt, framebuffer);
fbo::bind_framebuffer(&mut ctxt, fbo_id, true, false);
if ctxt.state.enabled_rasterizer_discard {
ctxt.gl.Disable(gl::RASTERIZER_DISCARD);
ctxt.state.enabled_rasterizer_discard = false;
}
if ctxt.state.color_mask!= (1, 1, 1, 1) {
ctxt.state.color_mask = (1, 1, 1, 1);
ctxt.gl.ColorMask(1, 1, 1, 1);
}
if ctxt.version >= &Version(Api::Gl, 3, 0) || ctxt.extensions.gl_arb_framebuffer_srgb ||
ctxt.extensions.gl_ext_framebuffer_srgb || ctxt.extensions.gl_ext_srgb_write_control
{
if!color_srgb &&!ctxt.state.enabled_framebuffer_srgb {
ctxt.gl.Enable(gl::FRAMEBUFFER_SRGB);
ctxt.state.enabled_framebuffer_srgb = true;
} else if color_srgb && ctxt.state.enabled_framebuffer_srgb {
ctxt.gl.Disable(gl::FRAMEBUFFER_SRGB);
ctxt.state.enabled_framebuffer_srgb = false;
}
}
TimeElapsedQuery::end_conditional_render(&mut ctxt);
if let Some(rect) = rect {
let rect = (rect.left as gl::types::GLint, rect.bottom as gl::types::GLint,
rect.width as gl::types::GLsizei, rect.height as gl::types::GLsizei);
if ctxt.state.scissor!= Some(rect) {
ctxt.gl.Scissor(rect.0, rect.1, rect.2, rect.3);
ctxt.state.scissor = Some(rect);
}
if!ctxt.state.enabled_scissor_test {
ctxt.gl.Enable(gl::SCISSOR_TEST);
ctxt.state.enabled_scissor_test = true;
}
} else {
if ctxt.state.enabled_scissor_test {
ctxt.gl.Disable(gl::SCISSOR_TEST);
ctxt.state.enabled_scissor_test = false;
}
}
let mut flags = 0;
if let Some(color) = color {
let color = (color.0 as gl::types::GLclampf, color.1 as gl::types::GLclampf,
color.2 as gl::types::GLclampf, color.3 as gl::types::GLclampf);
flags |= gl::COLOR_BUFFER_BIT;
if ctxt.state.clear_color!= color {
ctxt.gl.ClearColor(color.0, color.1, color.2, color.3);
ctxt.state.clear_color = color;
}
}
if let Some(depth) = depth {
let depth = depth as gl::types::GLclampf;
flags |= gl::DEPTH_BUFFER_BIT;
if ctxt.state.clear_depth!= depth {
if ctxt.version >= &Version(Api::Gl, 1, 0) {
ctxt.gl.ClearDepth(depth as gl::types::GLclampd);
} else if ctxt.version >= &Version(Api::GlEs, 2, 0) {
ctxt.gl.ClearDepthf(depth);
} else {
unreachable!();
}
ctxt.state.clear_depth = depth;
}
if!ctxt.state.depth_mask {
ctxt.gl.DepthMask(gl::TRUE);
ctxt.state.depth_mask = true;
}
}
|
flags |= gl::STENCIL_BUFFER_BIT;
if ctxt.state.clear_stencil!= stencil {
ctxt.gl.ClearStencil(stencil);
ctxt.state.clear_stencil = stencil;
}
}
ctxt.gl.Clear(flags);
}
}
|
if let Some(stencil) = stencil {
let stencil = stencil as gl::types::GLint;
|
random_line_split
|
clear.rs
|
use fbo::{self, ValidatedAttachments};
use context::Context;
use ContextExt;
use Rect;
use QueryExt;
use draw_parameters::TimeElapsedQuery;
use Api;
use version::Version;
use gl;
pub fn
|
(context: &Context, framebuffer: Option<&ValidatedAttachments>,
rect: Option<&Rect>, color: Option<(f32, f32, f32, f32)>, color_srgb: bool,
depth: Option<f32>, stencil: Option<i32>)
{
unsafe {
let mut ctxt = context.make_current();
let fbo_id = fbo::FramebuffersContainer::get_framebuffer_for_drawing(&mut ctxt, framebuffer);
fbo::bind_framebuffer(&mut ctxt, fbo_id, true, false);
if ctxt.state.enabled_rasterizer_discard {
ctxt.gl.Disable(gl::RASTERIZER_DISCARD);
ctxt.state.enabled_rasterizer_discard = false;
}
if ctxt.state.color_mask!= (1, 1, 1, 1) {
ctxt.state.color_mask = (1, 1, 1, 1);
ctxt.gl.ColorMask(1, 1, 1, 1);
}
if ctxt.version >= &Version(Api::Gl, 3, 0) || ctxt.extensions.gl_arb_framebuffer_srgb ||
ctxt.extensions.gl_ext_framebuffer_srgb || ctxt.extensions.gl_ext_srgb_write_control
{
if!color_srgb &&!ctxt.state.enabled_framebuffer_srgb {
ctxt.gl.Enable(gl::FRAMEBUFFER_SRGB);
ctxt.state.enabled_framebuffer_srgb = true;
} else if color_srgb && ctxt.state.enabled_framebuffer_srgb {
ctxt.gl.Disable(gl::FRAMEBUFFER_SRGB);
ctxt.state.enabled_framebuffer_srgb = false;
}
}
TimeElapsedQuery::end_conditional_render(&mut ctxt);
if let Some(rect) = rect {
let rect = (rect.left as gl::types::GLint, rect.bottom as gl::types::GLint,
rect.width as gl::types::GLsizei, rect.height as gl::types::GLsizei);
if ctxt.state.scissor!= Some(rect) {
ctxt.gl.Scissor(rect.0, rect.1, rect.2, rect.3);
ctxt.state.scissor = Some(rect);
}
if!ctxt.state.enabled_scissor_test {
ctxt.gl.Enable(gl::SCISSOR_TEST);
ctxt.state.enabled_scissor_test = true;
}
} else {
if ctxt.state.enabled_scissor_test {
ctxt.gl.Disable(gl::SCISSOR_TEST);
ctxt.state.enabled_scissor_test = false;
}
}
let mut flags = 0;
if let Some(color) = color {
let color = (color.0 as gl::types::GLclampf, color.1 as gl::types::GLclampf,
color.2 as gl::types::GLclampf, color.3 as gl::types::GLclampf);
flags |= gl::COLOR_BUFFER_BIT;
if ctxt.state.clear_color!= color {
ctxt.gl.ClearColor(color.0, color.1, color.2, color.3);
ctxt.state.clear_color = color;
}
}
if let Some(depth) = depth {
let depth = depth as gl::types::GLclampf;
flags |= gl::DEPTH_BUFFER_BIT;
if ctxt.state.clear_depth!= depth {
if ctxt.version >= &Version(Api::Gl, 1, 0) {
ctxt.gl.ClearDepth(depth as gl::types::GLclampd);
} else if ctxt.version >= &Version(Api::GlEs, 2, 0) {
ctxt.gl.ClearDepthf(depth);
} else {
unreachable!();
}
ctxt.state.clear_depth = depth;
}
if!ctxt.state.depth_mask {
ctxt.gl.DepthMask(gl::TRUE);
ctxt.state.depth_mask = true;
}
}
if let Some(stencil) = stencil {
let stencil = stencil as gl::types::GLint;
flags |= gl::STENCIL_BUFFER_BIT;
if ctxt.state.clear_stencil!= stencil {
ctxt.gl.ClearStencil(stencil);
ctxt.state.clear_stencil = stencil;
}
}
ctxt.gl.Clear(flags);
}
}
|
clear
|
identifier_name
|
clear.rs
|
use fbo::{self, ValidatedAttachments};
use context::Context;
use ContextExt;
use Rect;
use QueryExt;
use draw_parameters::TimeElapsedQuery;
use Api;
use version::Version;
use gl;
pub fn clear(context: &Context, framebuffer: Option<&ValidatedAttachments>,
rect: Option<&Rect>, color: Option<(f32, f32, f32, f32)>, color_srgb: bool,
depth: Option<f32>, stencil: Option<i32>)
|
if!color_srgb &&!ctxt.state.enabled_framebuffer_srgb {
ctxt.gl.Enable(gl::FRAMEBUFFER_SRGB);
ctxt.state.enabled_framebuffer_srgb = true;
} else if color_srgb && ctxt.state.enabled_framebuffer_srgb {
ctxt.gl.Disable(gl::FRAMEBUFFER_SRGB);
ctxt.state.enabled_framebuffer_srgb = false;
}
}
TimeElapsedQuery::end_conditional_render(&mut ctxt);
if let Some(rect) = rect {
let rect = (rect.left as gl::types::GLint, rect.bottom as gl::types::GLint,
rect.width as gl::types::GLsizei, rect.height as gl::types::GLsizei);
if ctxt.state.scissor!= Some(rect) {
ctxt.gl.Scissor(rect.0, rect.1, rect.2, rect.3);
ctxt.state.scissor = Some(rect);
}
if!ctxt.state.enabled_scissor_test {
ctxt.gl.Enable(gl::SCISSOR_TEST);
ctxt.state.enabled_scissor_test = true;
}
} else {
if ctxt.state.enabled_scissor_test {
ctxt.gl.Disable(gl::SCISSOR_TEST);
ctxt.state.enabled_scissor_test = false;
}
}
let mut flags = 0;
if let Some(color) = color {
let color = (color.0 as gl::types::GLclampf, color.1 as gl::types::GLclampf,
color.2 as gl::types::GLclampf, color.3 as gl::types::GLclampf);
flags |= gl::COLOR_BUFFER_BIT;
if ctxt.state.clear_color!= color {
ctxt.gl.ClearColor(color.0, color.1, color.2, color.3);
ctxt.state.clear_color = color;
}
}
if let Some(depth) = depth {
let depth = depth as gl::types::GLclampf;
flags |= gl::DEPTH_BUFFER_BIT;
if ctxt.state.clear_depth!= depth {
if ctxt.version >= &Version(Api::Gl, 1, 0) {
ctxt.gl.ClearDepth(depth as gl::types::GLclampd);
} else if ctxt.version >= &Version(Api::GlEs, 2, 0) {
ctxt.gl.ClearDepthf(depth);
} else {
unreachable!();
}
ctxt.state.clear_depth = depth;
}
if!ctxt.state.depth_mask {
ctxt.gl.DepthMask(gl::TRUE);
ctxt.state.depth_mask = true;
}
}
if let Some(stencil) = stencil {
let stencil = stencil as gl::types::GLint;
flags |= gl::STENCIL_BUFFER_BIT;
if ctxt.state.clear_stencil!= stencil {
ctxt.gl.ClearStencil(stencil);
ctxt.state.clear_stencil = stencil;
}
}
ctxt.gl.Clear(flags);
}
}
|
{
unsafe {
let mut ctxt = context.make_current();
let fbo_id = fbo::FramebuffersContainer::get_framebuffer_for_drawing(&mut ctxt, framebuffer);
fbo::bind_framebuffer(&mut ctxt, fbo_id, true, false);
if ctxt.state.enabled_rasterizer_discard {
ctxt.gl.Disable(gl::RASTERIZER_DISCARD);
ctxt.state.enabled_rasterizer_discard = false;
}
if ctxt.state.color_mask != (1, 1, 1, 1) {
ctxt.state.color_mask = (1, 1, 1, 1);
ctxt.gl.ColorMask(1, 1, 1, 1);
}
if ctxt.version >= &Version(Api::Gl, 3, 0) || ctxt.extensions.gl_arb_framebuffer_srgb ||
ctxt.extensions.gl_ext_framebuffer_srgb || ctxt.extensions.gl_ext_srgb_write_control
{
|
identifier_body
|
account_db.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! DB backend wrapper for Account trie
use util::*;
use rlp::NULL_RLP;
static NULL_RLP_STATIC: [u8; 1] = [0x80; 1];
// combines a key with an address hash to ensure uniqueness.
// leaves the first 96 bits untouched in order to support partial key lookup.
#[inline]
fn combine_key<'a>(address_hash: &'a H256, key: &'a H256) -> H256 {
let mut dst = key.clone();
{
let last_src: &[u8] = &*address_hash;
let last_dst: &mut [u8] = &mut *dst;
for (k, a) in last_dst[12..].iter_mut().zip(&last_src[12..]) {
*k ^= *a
}
}
dst
}
/// A factory for different kinds of account dbs.
#[derive(Debug, Clone)]
pub enum Factory {
/// Mangle hashes based on address.
Mangled,
/// Don't mangle hashes.
Plain,
}
impl Default for Factory {
fn default() -> Self { Factory::Mangled }
}
impl Factory {
/// Create a read-only accountdb.
/// This will panic when write operations are called.
pub fn readonly<'db>(&self, db: &'db HashDB, address_hash: H256) -> Box<HashDB + 'db> {
match *self {
Factory::Mangled => Box::new(AccountDB::from_hash(db, address_hash)),
Factory::Plain => Box::new(Wrapping(db)),
}
}
/// Create a new mutable hashdb.
pub fn create<'db>(&self, db: &'db mut HashDB, address_hash: H256) -> Box<HashDB + 'db> {
match *self {
Factory::Mangled => Box::new(AccountDBMut::from_hash(db, address_hash)),
Factory::Plain => Box::new(WrappingMut(db)),
}
}
}
// TODO: introduce HashDBMut?
/// DB backend wrapper for Account trie
/// Transforms trie node keys for the database
pub struct AccountDB<'db> {
db: &'db HashDB,
address_hash: H256,
}
impl<'db> AccountDB<'db> {
/// Create a new AccountDB from an address.
pub fn new(db: &'db HashDB, address: &Address) -> Self {
Self::from_hash(db, address.sha3())
}
/// Create a new AcountDB from an address' hash.
pub fn from_hash(db: &'db HashDB, address_hash: H256) -> Self {
AccountDB {
db: db,
address_hash: address_hash,
}
}
}
impl<'db> HashDB for AccountDB<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.db.get(&combine_key(&self.address_hash, key))
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.db.contains(&combine_key(&self.address_hash, key))
}
fn insert(&mut self, _value: &[u8]) -> H256 {
unimplemented!()
}
fn emplace(&mut self, _key: H256, _value: DBValue) {
unimplemented!()
}
fn remove(&mut self, _key: &H256) {
unimplemented!()
}
}
/// DB backend wrapper for Account trie
pub struct AccountDBMut<'db> {
db: &'db mut HashDB,
address_hash: H256,
}
impl<'db> AccountDBMut<'db> {
/// Create a new AccountDB from an address.
pub fn new(db: &'db mut HashDB, address: &Address) -> Self {
Self::from_hash(db, address.sha3())
}
/// Create a new AcountDB from an address' hash.
pub fn from_hash(db: &'db mut HashDB, address_hash: H256) -> Self {
AccountDBMut {
db: db,
address_hash: address_hash,
}
}
#[allow(dead_code)]
pub fn immutable(&'db self) -> AccountDB<'db> {
AccountDB { db: self.db, address_hash: self.address_hash.clone() }
}
}
impl<'db> HashDB for AccountDBMut<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.db.get(&combine_key(&self.address_hash, key))
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.db.contains(&combine_key(&self.address_hash, key))
}
fn insert(&mut self, value: &[u8]) -> H256 {
if value == &NULL_RLP {
return SHA3_NULL_RLP.clone();
}
let k = value.sha3();
let ak = combine_key(&self.address_hash, &k);
self.db.emplace(ak, DBValue::from_slice(value));
k
}
fn emplace(&mut self, key: H256, value: DBValue) {
if key == SHA3_NULL_RLP {
return;
}
let key = combine_key(&self.address_hash, &key);
self.db.emplace(key, value)
}
fn remove(&mut self, key: &H256) {
if key == &SHA3_NULL_RLP {
return;
}
let key = combine_key(&self.address_hash, key);
self.db.remove(&key)
}
}
struct Wrapping<'db>(&'db HashDB);
impl<'db> HashDB for Wrapping<'db> {
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.0.get(key)
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.0.contains(key)
}
fn insert(&mut self, _value: &[u8]) -> H256 {
unimplemented!()
}
fn emplace(&mut self, _key: H256, _value: DBValue) {
unimplemented!()
}
fn remove(&mut self, _key: &H256) {
unimplemented!()
}
}
struct WrappingMut<'db>(&'db mut HashDB);
impl<'db> HashDB for WrappingMut<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn
|
(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.0.get(key)
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.0.contains(key)
}
fn insert(&mut self, value: &[u8]) -> H256 {
if value == &NULL_RLP {
return SHA3_NULL_RLP.clone();
}
self.0.insert(value)
}
fn emplace(&mut self, key: H256, value: DBValue) {
if key == SHA3_NULL_RLP {
return;
}
self.0.emplace(key, value)
}
fn remove(&mut self, key: &H256) {
if key == &SHA3_NULL_RLP {
return;
}
self.0.remove(key)
}
}
|
get
|
identifier_name
|
account_db.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! DB backend wrapper for Account trie
use util::*;
use rlp::NULL_RLP;
static NULL_RLP_STATIC: [u8; 1] = [0x80; 1];
// combines a key with an address hash to ensure uniqueness.
// leaves the first 96 bits untouched in order to support partial key lookup.
#[inline]
fn combine_key<'a>(address_hash: &'a H256, key: &'a H256) -> H256 {
let mut dst = key.clone();
{
let last_src: &[u8] = &*address_hash;
let last_dst: &mut [u8] = &mut *dst;
for (k, a) in last_dst[12..].iter_mut().zip(&last_src[12..]) {
*k ^= *a
}
}
dst
}
/// A factory for different kinds of account dbs.
#[derive(Debug, Clone)]
pub enum Factory {
/// Mangle hashes based on address.
Mangled,
/// Don't mangle hashes.
Plain,
}
|
}
impl Factory {
/// Create a read-only accountdb.
/// This will panic when write operations are called.
pub fn readonly<'db>(&self, db: &'db HashDB, address_hash: H256) -> Box<HashDB + 'db> {
match *self {
Factory::Mangled => Box::new(AccountDB::from_hash(db, address_hash)),
Factory::Plain => Box::new(Wrapping(db)),
}
}
/// Create a new mutable hashdb.
pub fn create<'db>(&self, db: &'db mut HashDB, address_hash: H256) -> Box<HashDB + 'db> {
match *self {
Factory::Mangled => Box::new(AccountDBMut::from_hash(db, address_hash)),
Factory::Plain => Box::new(WrappingMut(db)),
}
}
}
// TODO: introduce HashDBMut?
/// DB backend wrapper for Account trie
/// Transforms trie node keys for the database
pub struct AccountDB<'db> {
db: &'db HashDB,
address_hash: H256,
}
impl<'db> AccountDB<'db> {
/// Create a new AccountDB from an address.
pub fn new(db: &'db HashDB, address: &Address) -> Self {
Self::from_hash(db, address.sha3())
}
/// Create a new AcountDB from an address' hash.
pub fn from_hash(db: &'db HashDB, address_hash: H256) -> Self {
AccountDB {
db: db,
address_hash: address_hash,
}
}
}
impl<'db> HashDB for AccountDB<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.db.get(&combine_key(&self.address_hash, key))
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.db.contains(&combine_key(&self.address_hash, key))
}
fn insert(&mut self, _value: &[u8]) -> H256 {
unimplemented!()
}
fn emplace(&mut self, _key: H256, _value: DBValue) {
unimplemented!()
}
fn remove(&mut self, _key: &H256) {
unimplemented!()
}
}
/// DB backend wrapper for Account trie
pub struct AccountDBMut<'db> {
db: &'db mut HashDB,
address_hash: H256,
}
impl<'db> AccountDBMut<'db> {
/// Create a new AccountDB from an address.
pub fn new(db: &'db mut HashDB, address: &Address) -> Self {
Self::from_hash(db, address.sha3())
}
/// Create a new AcountDB from an address' hash.
pub fn from_hash(db: &'db mut HashDB, address_hash: H256) -> Self {
AccountDBMut {
db: db,
address_hash: address_hash,
}
}
#[allow(dead_code)]
pub fn immutable(&'db self) -> AccountDB<'db> {
AccountDB { db: self.db, address_hash: self.address_hash.clone() }
}
}
impl<'db> HashDB for AccountDBMut<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.db.get(&combine_key(&self.address_hash, key))
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.db.contains(&combine_key(&self.address_hash, key))
}
fn insert(&mut self, value: &[u8]) -> H256 {
if value == &NULL_RLP {
return SHA3_NULL_RLP.clone();
}
let k = value.sha3();
let ak = combine_key(&self.address_hash, &k);
self.db.emplace(ak, DBValue::from_slice(value));
k
}
fn emplace(&mut self, key: H256, value: DBValue) {
if key == SHA3_NULL_RLP {
return;
}
let key = combine_key(&self.address_hash, &key);
self.db.emplace(key, value)
}
fn remove(&mut self, key: &H256) {
if key == &SHA3_NULL_RLP {
return;
}
let key = combine_key(&self.address_hash, key);
self.db.remove(&key)
}
}
struct Wrapping<'db>(&'db HashDB);
impl<'db> HashDB for Wrapping<'db> {
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.0.get(key)
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.0.contains(key)
}
fn insert(&mut self, _value: &[u8]) -> H256 {
unimplemented!()
}
fn emplace(&mut self, _key: H256, _value: DBValue) {
unimplemented!()
}
fn remove(&mut self, _key: &H256) {
unimplemented!()
}
}
struct WrappingMut<'db>(&'db mut HashDB);
impl<'db> HashDB for WrappingMut<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.0.get(key)
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.0.contains(key)
}
fn insert(&mut self, value: &[u8]) -> H256 {
if value == &NULL_RLP {
return SHA3_NULL_RLP.clone();
}
self.0.insert(value)
}
fn emplace(&mut self, key: H256, value: DBValue) {
if key == SHA3_NULL_RLP {
return;
}
self.0.emplace(key, value)
}
fn remove(&mut self, key: &H256) {
if key == &SHA3_NULL_RLP {
return;
}
self.0.remove(key)
}
}
|
impl Default for Factory {
fn default() -> Self { Factory::Mangled }
|
random_line_split
|
account_db.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! DB backend wrapper for Account trie
use util::*;
use rlp::NULL_RLP;
static NULL_RLP_STATIC: [u8; 1] = [0x80; 1];
// combines a key with an address hash to ensure uniqueness.
// leaves the first 96 bits untouched in order to support partial key lookup.
#[inline]
fn combine_key<'a>(address_hash: &'a H256, key: &'a H256) -> H256 {
let mut dst = key.clone();
{
let last_src: &[u8] = &*address_hash;
let last_dst: &mut [u8] = &mut *dst;
for (k, a) in last_dst[12..].iter_mut().zip(&last_src[12..]) {
*k ^= *a
}
}
dst
}
/// A factory for different kinds of account dbs.
#[derive(Debug, Clone)]
pub enum Factory {
/// Mangle hashes based on address.
Mangled,
/// Don't mangle hashes.
Plain,
}
impl Default for Factory {
fn default() -> Self { Factory::Mangled }
}
impl Factory {
/// Create a read-only accountdb.
/// This will panic when write operations are called.
pub fn readonly<'db>(&self, db: &'db HashDB, address_hash: H256) -> Box<HashDB + 'db> {
match *self {
Factory::Mangled => Box::new(AccountDB::from_hash(db, address_hash)),
Factory::Plain => Box::new(Wrapping(db)),
}
}
/// Create a new mutable hashdb.
pub fn create<'db>(&self, db: &'db mut HashDB, address_hash: H256) -> Box<HashDB + 'db> {
match *self {
Factory::Mangled => Box::new(AccountDBMut::from_hash(db, address_hash)),
Factory::Plain => Box::new(WrappingMut(db)),
}
}
}
// TODO: introduce HashDBMut?
/// DB backend wrapper for Account trie
/// Transforms trie node keys for the database
pub struct AccountDB<'db> {
db: &'db HashDB,
address_hash: H256,
}
impl<'db> AccountDB<'db> {
/// Create a new AccountDB from an address.
pub fn new(db: &'db HashDB, address: &Address) -> Self {
Self::from_hash(db, address.sha3())
}
/// Create a new AcountDB from an address' hash.
pub fn from_hash(db: &'db HashDB, address_hash: H256) -> Self {
AccountDB {
db: db,
address_hash: address_hash,
}
}
}
impl<'db> HashDB for AccountDB<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.db.get(&combine_key(&self.address_hash, key))
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.db.contains(&combine_key(&self.address_hash, key))
}
fn insert(&mut self, _value: &[u8]) -> H256 {
unimplemented!()
}
fn emplace(&mut self, _key: H256, _value: DBValue) {
unimplemented!()
}
fn remove(&mut self, _key: &H256) {
unimplemented!()
}
}
/// DB backend wrapper for Account trie
pub struct AccountDBMut<'db> {
db: &'db mut HashDB,
address_hash: H256,
}
impl<'db> AccountDBMut<'db> {
/// Create a new AccountDB from an address.
pub fn new(db: &'db mut HashDB, address: &Address) -> Self {
Self::from_hash(db, address.sha3())
}
/// Create a new AcountDB from an address' hash.
pub fn from_hash(db: &'db mut HashDB, address_hash: H256) -> Self {
AccountDBMut {
db: db,
address_hash: address_hash,
}
}
#[allow(dead_code)]
pub fn immutable(&'db self) -> AccountDB<'db> {
AccountDB { db: self.db, address_hash: self.address_hash.clone() }
}
}
impl<'db> HashDB for AccountDBMut<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.db.get(&combine_key(&self.address_hash, key))
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.db.contains(&combine_key(&self.address_hash, key))
}
fn insert(&mut self, value: &[u8]) -> H256 {
if value == &NULL_RLP {
return SHA3_NULL_RLP.clone();
}
let k = value.sha3();
let ak = combine_key(&self.address_hash, &k);
self.db.emplace(ak, DBValue::from_slice(value));
k
}
fn emplace(&mut self, key: H256, value: DBValue) {
if key == SHA3_NULL_RLP {
return;
}
let key = combine_key(&self.address_hash, &key);
self.db.emplace(key, value)
}
fn remove(&mut self, key: &H256)
|
}
struct Wrapping<'db>(&'db HashDB);
impl<'db> HashDB for Wrapping<'db> {
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.0.get(key)
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.0.contains(key)
}
fn insert(&mut self, _value: &[u8]) -> H256 {
unimplemented!()
}
fn emplace(&mut self, _key: H256, _value: DBValue) {
unimplemented!()
}
fn remove(&mut self, _key: &H256) {
unimplemented!()
}
}
struct WrappingMut<'db>(&'db mut HashDB);
impl<'db> HashDB for WrappingMut<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.0.get(key)
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.0.contains(key)
}
fn insert(&mut self, value: &[u8]) -> H256 {
if value == &NULL_RLP {
return SHA3_NULL_RLP.clone();
}
self.0.insert(value)
}
fn emplace(&mut self, key: H256, value: DBValue) {
if key == SHA3_NULL_RLP {
return;
}
self.0.emplace(key, value)
}
fn remove(&mut self, key: &H256) {
if key == &SHA3_NULL_RLP {
return;
}
self.0.remove(key)
}
}
|
{
if key == &SHA3_NULL_RLP {
return;
}
let key = combine_key(&self.address_hash, key);
self.db.remove(&key)
}
|
identifier_body
|
account_db.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! DB backend wrapper for Account trie
use util::*;
use rlp::NULL_RLP;
static NULL_RLP_STATIC: [u8; 1] = [0x80; 1];
// combines a key with an address hash to ensure uniqueness.
// leaves the first 96 bits untouched in order to support partial key lookup.
#[inline]
fn combine_key<'a>(address_hash: &'a H256, key: &'a H256) -> H256 {
let mut dst = key.clone();
{
let last_src: &[u8] = &*address_hash;
let last_dst: &mut [u8] = &mut *dst;
for (k, a) in last_dst[12..].iter_mut().zip(&last_src[12..]) {
*k ^= *a
}
}
dst
}
/// A factory for different kinds of account dbs.
#[derive(Debug, Clone)]
pub enum Factory {
/// Mangle hashes based on address.
Mangled,
/// Don't mangle hashes.
Plain,
}
impl Default for Factory {
fn default() -> Self { Factory::Mangled }
}
impl Factory {
/// Create a read-only accountdb.
/// This will panic when write operations are called.
pub fn readonly<'db>(&self, db: &'db HashDB, address_hash: H256) -> Box<HashDB + 'db> {
match *self {
Factory::Mangled => Box::new(AccountDB::from_hash(db, address_hash)),
Factory::Plain => Box::new(Wrapping(db)),
}
}
/// Create a new mutable hashdb.
pub fn create<'db>(&self, db: &'db mut HashDB, address_hash: H256) -> Box<HashDB + 'db> {
match *self {
Factory::Mangled => Box::new(AccountDBMut::from_hash(db, address_hash)),
Factory::Plain => Box::new(WrappingMut(db)),
}
}
}
// TODO: introduce HashDBMut?
/// DB backend wrapper for Account trie
/// Transforms trie node keys for the database
pub struct AccountDB<'db> {
db: &'db HashDB,
address_hash: H256,
}
impl<'db> AccountDB<'db> {
/// Create a new AccountDB from an address.
pub fn new(db: &'db HashDB, address: &Address) -> Self {
Self::from_hash(db, address.sha3())
}
/// Create a new AcountDB from an address' hash.
pub fn from_hash(db: &'db HashDB, address_hash: H256) -> Self {
AccountDB {
db: db,
address_hash: address_hash,
}
}
}
impl<'db> HashDB for AccountDB<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.db.get(&combine_key(&self.address_hash, key))
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.db.contains(&combine_key(&self.address_hash, key))
}
fn insert(&mut self, _value: &[u8]) -> H256 {
unimplemented!()
}
fn emplace(&mut self, _key: H256, _value: DBValue) {
unimplemented!()
}
fn remove(&mut self, _key: &H256) {
unimplemented!()
}
}
/// DB backend wrapper for Account trie
pub struct AccountDBMut<'db> {
db: &'db mut HashDB,
address_hash: H256,
}
impl<'db> AccountDBMut<'db> {
/// Create a new AccountDB from an address.
pub fn new(db: &'db mut HashDB, address: &Address) -> Self {
Self::from_hash(db, address.sha3())
}
/// Create a new AcountDB from an address' hash.
pub fn from_hash(db: &'db mut HashDB, address_hash: H256) -> Self {
AccountDBMut {
db: db,
address_hash: address_hash,
}
}
#[allow(dead_code)]
pub fn immutable(&'db self) -> AccountDB<'db> {
AccountDB { db: self.db, address_hash: self.address_hash.clone() }
}
}
impl<'db> HashDB for AccountDBMut<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.db.get(&combine_key(&self.address_hash, key))
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.db.contains(&combine_key(&self.address_hash, key))
}
fn insert(&mut self, value: &[u8]) -> H256 {
if value == &NULL_RLP {
return SHA3_NULL_RLP.clone();
}
let k = value.sha3();
let ak = combine_key(&self.address_hash, &k);
self.db.emplace(ak, DBValue::from_slice(value));
k
}
fn emplace(&mut self, key: H256, value: DBValue) {
if key == SHA3_NULL_RLP
|
let key = combine_key(&self.address_hash, &key);
self.db.emplace(key, value)
}
fn remove(&mut self, key: &H256) {
if key == &SHA3_NULL_RLP {
return;
}
let key = combine_key(&self.address_hash, key);
self.db.remove(&key)
}
}
struct Wrapping<'db>(&'db HashDB);
impl<'db> HashDB for Wrapping<'db> {
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.0.get(key)
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.0.contains(key)
}
fn insert(&mut self, _value: &[u8]) -> H256 {
unimplemented!()
}
fn emplace(&mut self, _key: H256, _value: DBValue) {
unimplemented!()
}
fn remove(&mut self, _key: &H256) {
unimplemented!()
}
}
struct WrappingMut<'db>(&'db mut HashDB);
impl<'db> HashDB for WrappingMut<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.0.get(key)
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.0.contains(key)
}
fn insert(&mut self, value: &[u8]) -> H256 {
if value == &NULL_RLP {
return SHA3_NULL_RLP.clone();
}
self.0.insert(value)
}
fn emplace(&mut self, key: H256, value: DBValue) {
if key == SHA3_NULL_RLP {
return;
}
self.0.emplace(key, value)
}
fn remove(&mut self, key: &H256) {
if key == &SHA3_NULL_RLP {
return;
}
self.0.remove(key)
}
}
|
{
return;
}
|
conditional_block
|
messages.rs
|
//! Update parsing for user messages and conversation updates.
/// Specification on whether a message is incoming or outgoing.
#[derive(serde::Deserialize, Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum MessageDirectionType {
/// Incoming messages: messages sent by someone other than the subscribed user.
#[serde(rename = "in")]
Incoming,
/// Outgoing messages: messages sent by the subscribed user.
#[serde(rename = "out")]
Outgoing,
}
/// Content of a newly sent or received message update.
#[derive(serde::Deserialize, Clone, Debug)]
pub struct Message {
/// The unique identifier for this message.
#[serde(rename = "_id")]
pub message_id: String,
/// Unknown purpose.
#[serde(rename = "outMessage")]
pub out_message_id: String,
/// The message text - should be displayed as formatted markdown.
pub text: String,
/// The direction the message is going: who sent it.
///
/// For [`ChannelUpdate::UserMessage`] messages, this will always be `Incoming`.
/// For [`ChannelUpdate::UserConversation`] updates, this can be both `Incoming` for new messages,
/// or `Outgoing` for messages sent by either this client or another client logged in as the same user.
///
/// [`ChannelUpdate::UserMessage`]:../enum.ChannelUpdate.html
/// [`ChannelUpdate::UserConversation`]:../enum.ChannelUpdate.html
#[serde(rename = "type")]
pub direction: MessageDirectionType,
/// Whether or not the user who received this message has read it... Probably going to be false, as this is a
/// message that was just sent.
pub unread: bool,
/// The user who is subscribed to the channel and either received or sent this message.
#[serde(rename = "user")]
pub user_id: String,
/// The other user involved in this conversation: the one who isn't the user who received this update.
#[serde(rename = "respondent")]
pub respondent_id: String,
/// Phantom data in order to allow adding any additional fields in the future.
#[serde(skip)]
_non_exhaustive: (),
}
/// Update for a newly received message.
#[derive(serde::Deserialize, Clone, Debug)]
pub struct MessageUpdate {
/// The message.
pub message: Message,
/// Phantom data in order to allow adding any additional fields in the future.
#[serde(skip)]
_non_exhaustive: (),
}
/// Update on whether a message is unread or not.
#[derive(serde::Deserialize, Clone, Debug)]
pub struct
|
{
/// The unique identifier for this message.
#[serde(rename = "_id")]
pub message_id: String,
/// Whether or not it is now unread. Most likely `false`: going from read to unread is not supported in screeps
/// as of this writing.
pub unread: bool,
/// Phantom data in order to allow adding any additional fields in the future.
#[serde(skip)]
_non_exhaustive: (),
}
/// Update on a conversation between two specific users. This is either a new message sent by one of the users
/// (either the subscribed one or the other one), or an update indicating that a message previously sent has now
/// been read.
#[derive(serde::Deserialize, Clone, Debug)]
#[serde(untagged)]
pub enum ConversationUpdate {
/// A new message has been sent.
NewMessage {
/// The update with more information.
message: Message,
},
/// A message's `unread` status has changed.
MessageRead {
/// The update with more information.
message: MessageUnreadUpdate,
},
}
|
MessageUnreadUpdate
|
identifier_name
|
messages.rs
|
//! Update parsing for user messages and conversation updates.
/// Specification on whether a message is incoming or outgoing.
#[derive(serde::Deserialize, Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum MessageDirectionType {
/// Incoming messages: messages sent by someone other than the subscribed user.
#[serde(rename = "in")]
Incoming,
/// Outgoing messages: messages sent by the subscribed user.
#[serde(rename = "out")]
Outgoing,
}
/// Content of a newly sent or received message update.
#[derive(serde::Deserialize, Clone, Debug)]
pub struct Message {
/// The unique identifier for this message.
#[serde(rename = "_id")]
pub message_id: String,
/// Unknown purpose.
#[serde(rename = "outMessage")]
pub out_message_id: String,
/// The message text - should be displayed as formatted markdown.
pub text: String,
/// The direction the message is going: who sent it.
///
/// For [`ChannelUpdate::UserMessage`] messages, this will always be `Incoming`.
/// For [`ChannelUpdate::UserConversation`] updates, this can be both `Incoming` for new messages,
/// or `Outgoing` for messages sent by either this client or another client logged in as the same user.
///
|
/// [`ChannelUpdate::UserConversation`]:../enum.ChannelUpdate.html
#[serde(rename = "type")]
pub direction: MessageDirectionType,
/// Whether or not the user who received this message has read it... Probably going to be false, as this is a
/// message that was just sent.
pub unread: bool,
/// The user who is subscribed to the channel and either received or sent this message.
#[serde(rename = "user")]
pub user_id: String,
/// The other user involved in this conversation: the one who isn't the user who received this update.
#[serde(rename = "respondent")]
pub respondent_id: String,
/// Phantom data in order to allow adding any additional fields in the future.
#[serde(skip)]
_non_exhaustive: (),
}
/// Update for a newly received message.
#[derive(serde::Deserialize, Clone, Debug)]
pub struct MessageUpdate {
/// The message.
pub message: Message,
/// Phantom data in order to allow adding any additional fields in the future.
#[serde(skip)]
_non_exhaustive: (),
}
/// Update on whether a message is unread or not.
#[derive(serde::Deserialize, Clone, Debug)]
pub struct MessageUnreadUpdate {
/// The unique identifier for this message.
#[serde(rename = "_id")]
pub message_id: String,
/// Whether or not it is now unread. Most likely `false`: going from read to unread is not supported in screeps
/// as of this writing.
pub unread: bool,
/// Phantom data in order to allow adding any additional fields in the future.
#[serde(skip)]
_non_exhaustive: (),
}
/// Update on a conversation between two specific users. This is either a new message sent by one of the users
/// (either the subscribed one or the other one), or an update indicating that a message previously sent has now
/// been read.
#[derive(serde::Deserialize, Clone, Debug)]
#[serde(untagged)]
pub enum ConversationUpdate {
/// A new message has been sent.
NewMessage {
/// The update with more information.
message: Message,
},
/// A message's `unread` status has changed.
MessageRead {
/// The update with more information.
message: MessageUnreadUpdate,
},
}
|
/// [`ChannelUpdate::UserMessage`]: ../enum.ChannelUpdate.html
|
random_line_split
|
main.rs
|
#![feature(plugin, start, core_intrinsics)]
#![no_std]
#![plugin(macro_platformtree)]
extern crate zinc;
use zinc::drivers::chario::CharIO;
use zinc::drivers::lcd::hd44780u::{Hd44780u, Font};
platformtree!(
tiva_c@mcu {
clock {
source = "MOSC";
xtal = "X16_0MHz";
pll = false;
}
gpio {
a {
d7@5 { direction = "out"; }
}
b {
rs@0 { direction = "out"; }
en@1 { direction = "out"; }
d6@4 { direction = "out"; }
}
e {
d4@4 { direction = "out"; }
d5@5 { direction = "out"; }
}
}
timer {
/* The mcu contain both 16/32bit and "wide" 32/64bit timers. */
timer@w0 {
/* prescale sysclk to 1Mhz since the wait code expects 1us
* granularity */
prescale = 80;
mode = "periodic";
}
}
}
os {
single_task {
loop = "run";
args {
timer = &timer;
rs = &rs;
en = &en;
d4 = &d4;
d5 = &d5;
d6 = &d6;
d7 = &d7;
}
}
}
);
pub fn run(args: &pt::run_args)
|
lcd.custom_char_5x8(1,
[0b00100,
0b01010,
0b00100,
0b11111,
0b00100,
0b01010,
0b10001,
0b00000]);
// Enable blinking
lcd.display_control(true, false, true);
// Display a message surounded by two hearts
lcd.puts("\0 Hello Zinc \0");
// Move to the 2nd line
lcd.set_pos(0, 1);
// Write a line of stick figures
lcd.puts("\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01");
// Move the cursor back to the middle of the first line
lcd.set_pos(8, 0);
loop {}
}
|
{
let lcd = Hd44780u::new(args.timer,
args.rs,
args.en,
[ args.d4, args.d5, args.d6, args.d7 ]);
lcd.init(true, Font::Font5x8);
// Create custom 'heart' character at index 0.
lcd.custom_char_5x8(0,
[0b00000,
0b01010,
0b11111,
0b11111,
0b11111,
0b01110,
0b00100,
0b00000]);
// Create custom 'stick figure' character at index 1
|
identifier_body
|
main.rs
|
#![feature(plugin, start, core_intrinsics)]
#![no_std]
#![plugin(macro_platformtree)]
extern crate zinc;
use zinc::drivers::chario::CharIO;
use zinc::drivers::lcd::hd44780u::{Hd44780u, Font};
platformtree!(
tiva_c@mcu {
clock {
source = "MOSC";
xtal = "X16_0MHz";
pll = false;
}
gpio {
a {
d7@5 { direction = "out"; }
}
b {
rs@0 { direction = "out"; }
en@1 { direction = "out"; }
d6@4 { direction = "out"; }
}
e {
d4@4 { direction = "out"; }
d5@5 { direction = "out"; }
}
}
timer {
/* The mcu contain both 16/32bit and "wide" 32/64bit timers. */
timer@w0 {
/* prescale sysclk to 1Mhz since the wait code expects 1us
* granularity */
prescale = 80;
mode = "periodic";
}
}
}
os {
single_task {
loop = "run";
args {
timer = &timer;
rs = &rs;
en = &en;
d4 = &d4;
d5 = &d5;
d6 = &d6;
d7 = &d7;
}
}
}
);
pub fn
|
(args: &pt::run_args) {
let lcd = Hd44780u::new(args.timer,
args.rs,
args.en,
[ args.d4, args.d5, args.d6, args.d7 ]);
lcd.init(true, Font::Font5x8);
// Create custom 'heart' character at index 0.
lcd.custom_char_5x8(0,
[0b00000,
0b01010,
0b11111,
0b11111,
0b11111,
0b01110,
0b00100,
0b00000]);
// Create custom'stick figure' character at index 1
lcd.custom_char_5x8(1,
[0b00100,
0b01010,
0b00100,
0b11111,
0b00100,
0b01010,
0b10001,
0b00000]);
// Enable blinking
lcd.display_control(true, false, true);
// Display a message surounded by two hearts
lcd.puts("\0 Hello Zinc \0");
// Move to the 2nd line
lcd.set_pos(0, 1);
// Write a line of stick figures
lcd.puts("\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01");
// Move the cursor back to the middle of the first line
lcd.set_pos(8, 0);
loop {}
}
|
run
|
identifier_name
|
main.rs
|
#![feature(plugin, start, core_intrinsics)]
#![no_std]
#![plugin(macro_platformtree)]
extern crate zinc;
|
use zinc::drivers::chario::CharIO;
use zinc::drivers::lcd::hd44780u::{Hd44780u, Font};
platformtree!(
tiva_c@mcu {
clock {
source = "MOSC";
xtal = "X16_0MHz";
pll = false;
}
gpio {
a {
d7@5 { direction = "out"; }
}
b {
rs@0 { direction = "out"; }
en@1 { direction = "out"; }
d6@4 { direction = "out"; }
}
e {
d4@4 { direction = "out"; }
d5@5 { direction = "out"; }
}
}
timer {
/* The mcu contain both 16/32bit and "wide" 32/64bit timers. */
timer@w0 {
/* prescale sysclk to 1Mhz since the wait code expects 1us
* granularity */
prescale = 80;
mode = "periodic";
}
}
}
os {
single_task {
loop = "run";
args {
timer = &timer;
rs = &rs;
en = &en;
d4 = &d4;
d5 = &d5;
d6 = &d6;
d7 = &d7;
}
}
}
);
pub fn run(args: &pt::run_args) {
let lcd = Hd44780u::new(args.timer,
args.rs,
args.en,
[ args.d4, args.d5, args.d6, args.d7 ]);
lcd.init(true, Font::Font5x8);
// Create custom 'heart' character at index 0.
lcd.custom_char_5x8(0,
[0b00000,
0b01010,
0b11111,
0b11111,
0b11111,
0b01110,
0b00100,
0b00000]);
// Create custom'stick figure' character at index 1
lcd.custom_char_5x8(1,
[0b00100,
0b01010,
0b00100,
0b11111,
0b00100,
0b01010,
0b10001,
0b00000]);
// Enable blinking
lcd.display_control(true, false, true);
// Display a message surounded by two hearts
lcd.puts("\0 Hello Zinc \0");
// Move to the 2nd line
lcd.set_pos(0, 1);
// Write a line of stick figures
lcd.puts("\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01");
// Move the cursor back to the middle of the first line
lcd.set_pos(8, 0);
loop {}
}
|
random_line_split
|
|
check_static_recursion.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This compiler pass detects constants that refer to themselves
// recursively.
use ast_map;
use session::Session;
use middle::def::{DefStatic, DefConst, DefAssociatedConst, DefVariant, DefMap};
use util::nodemap::NodeMap;
use syntax::{ast, ast_util};
use syntax::codemap::Span;
use syntax::feature_gate::emit_feature_err;
use syntax::visit::Visitor;
use syntax::visit;
use std::cell::RefCell;
struct CheckCrateVisitor<'a, 'ast: 'a> {
sess: &'a Session,
def_map: &'a DefMap,
ast_map: &'a ast_map::Map<'ast>,
// `discriminant_map` is a cache that associates the `NodeId`s of local
// variant definitions with the discriminant expression that applies to
// each one. If the variant uses the default values (starting from `0`),
// then `None` is stored.
discriminant_map: RefCell<NodeMap<Option<&'ast ast::Expr>>>,
}
impl<'a, 'ast: 'a> Visitor<'ast> for CheckCrateVisitor<'a, 'ast> {
fn visit_item(&mut self, it: &'ast ast::Item) {
match it.node {
ast::ItemStatic(..) |
ast::ItemConst(..) => {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &it.span);
recursion_visitor.visit_item(it);
},
ast::ItemEnum(ref enum_def, ref generics) => {
// We could process the whole enum, but handling the variants
// with discriminant expressions one by one gives more specific,
// less redundant output.
for variant in &enum_def.variants {
if let Some(_) = variant.node.disr_expr {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &variant.span);
recursion_visitor.populate_enum_discriminants(enum_def);
recursion_visitor.visit_variant(variant, generics);
}
}
}
_ => {}
}
visit::walk_item(self, it)
}
fn visit_trait_item(&mut self, ti: &'ast ast::TraitItem) {
match ti.node {
ast::ConstTraitItem(_, ref default) => {
if let Some(_) = *default {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &ti.span);
recursion_visitor.visit_trait_item(ti);
}
}
_ => {}
}
visit::walk_trait_item(self, ti)
}
fn visit_impl_item(&mut self, ii: &'ast ast::ImplItem) {
match ii.node {
ast::ConstImplItem(..) => {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &ii.span);
recursion_visitor.visit_impl_item(ii);
}
_ => {}
}
visit::walk_impl_item(self, ii)
}
}
pub fn check_crate<'ast>(sess: &Session,
krate: &'ast ast::Crate,
def_map: &DefMap,
ast_map: &ast_map::Map<'ast>) {
let mut visitor = CheckCrateVisitor {
sess: sess,
def_map: def_map,
ast_map: ast_map,
discriminant_map: RefCell::new(NodeMap()),
};
visit::walk_crate(&mut visitor, krate);
sess.abort_if_errors();
}
struct CheckItemRecursionVisitor<'a, 'ast: 'a> {
root_span: &'a Span,
sess: &'a Session,
ast_map: &'a ast_map::Map<'ast>,
def_map: &'a DefMap,
discriminant_map: &'a RefCell<NodeMap<Option<&'ast ast::Expr>>>,
idstack: Vec<ast::NodeId>,
}
impl<'a, 'ast: 'a> CheckItemRecursionVisitor<'a, 'ast> {
fn new(v: &'a CheckCrateVisitor<'a, 'ast>, span: &'a Span)
-> CheckItemRecursionVisitor<'a, 'ast> {
CheckItemRecursionVisitor {
root_span: span,
sess: v.sess,
ast_map: v.ast_map,
def_map: v.def_map,
discriminant_map: &v.discriminant_map,
idstack: Vec::new(),
}
}
fn with_item_id_pushed<F>(&mut self, id: ast::NodeId, f: F)
where F: Fn(&mut Self) {
if self.idstack.iter().any(|&x| x == id) {
let any_static = self.idstack.iter().any(|&x| {
if let ast_map::NodeItem(item) = self.ast_map.get(x) {
if let ast::ItemStatic(..) = item.node {
true
} else {
false
}
} else {
false
}
});
if any_static {
if!self.sess.features.borrow().static_recursion {
emit_feature_err(&self.sess.parse_sess.span_diagnostic,
"static_recursion",
*self.root_span, "recursive static");
}
} else {
span_err!(self.sess, *self.root_span, E0265, "recursive constant");
}
return;
}
self.idstack.push(id);
f(self);
self.idstack.pop();
}
// If a variant has an expression specifying its discriminant, then it needs
// to be checked just like a static or constant. However, if there are more
// variants with no explicitly specified discriminant, those variants will
// increment the same expression to get their values.
//
// So for every variant, we need to track whether there is an expression
// somewhere in the enum definition that controls its discriminant. We do
// this by starting from the end and searching backward.
fn populate_enum_discriminants(&self, enum_definition: &'ast ast::EnumDef) {
// Get the map, and return if we already processed this enum or if it
// has no variants.
let mut discriminant_map = self.discriminant_map.borrow_mut();
match enum_definition.variants.first() {
None => { return; }
Some(variant) if discriminant_map.contains_key(&variant.node.id) => {
return;
}
_ => {}
}
// Go through all the variants.
let mut variant_stack: Vec<ast::NodeId> = Vec::new();
for variant in enum_definition.variants.iter().rev() {
variant_stack.push(variant.node.id);
// When we find an expression, every variant currently on the stack
// is affected by that expression.
if let Some(ref expr) = variant.node.disr_expr {
for id in &variant_stack {
discriminant_map.insert(*id, Some(expr));
}
variant_stack.clear()
}
}
// If we are at the top, that always starts at 0, so any variant on the
// stack has a default value and does not need to be checked.
for id in &variant_stack {
discriminant_map.insert(*id, None);
}
}
}
impl<'a, 'ast: 'a> Visitor<'ast> for CheckItemRecursionVisitor<'a, 'ast> {
fn visit_item(&mut self, it: &'ast ast::Item) {
self.with_item_id_pushed(it.id, |v| visit::walk_item(v, it));
}
fn
|
(&mut self, enum_definition: &'ast ast::EnumDef,
generics: &'ast ast::Generics) {
self.populate_enum_discriminants(enum_definition);
visit::walk_enum_def(self, enum_definition, generics);
}
fn visit_variant(&mut self, variant: &'ast ast::Variant,
_: &'ast ast::Generics) {
let variant_id = variant.node.id;
let maybe_expr;
if let Some(get_expr) = self.discriminant_map.borrow().get(&variant_id) {
// This is necessary because we need to let the `discriminant_map`
// borrow fall out of scope, so that we can reborrow farther down.
maybe_expr = (*get_expr).clone();
} else {
self.sess.span_bug(variant.span,
"`check_static_recursion` attempted to visit \
variant with unknown discriminant")
}
// If `maybe_expr` is `None`, that's because no discriminant is
// specified that affects this variant. Thus, no risk of recursion.
if let Some(expr) = maybe_expr {
self.with_item_id_pushed(expr.id, |v| visit::walk_expr(v, expr));
}
}
fn visit_trait_item(&mut self, ti: &'ast ast::TraitItem) {
self.with_item_id_pushed(ti.id, |v| visit::walk_trait_item(v, ti));
}
fn visit_impl_item(&mut self, ii: &'ast ast::ImplItem) {
self.with_item_id_pushed(ii.id, |v| visit::walk_impl_item(v, ii));
}
fn visit_expr(&mut self, e: &'ast ast::Expr) {
match e.node {
ast::ExprPath(..) => {
match self.def_map.borrow().get(&e.id).map(|d| d.base_def) {
Some(DefStatic(def_id, _)) |
Some(DefAssociatedConst(def_id, _)) |
Some(DefConst(def_id))
if ast_util::is_local(def_id) => {
match self.ast_map.get(def_id.node) {
ast_map::NodeItem(item) =>
self.visit_item(item),
ast_map::NodeTraitItem(item) =>
self.visit_trait_item(item),
ast_map::NodeImplItem(item) =>
self.visit_impl_item(item),
ast_map::NodeForeignItem(_) => {},
_ => {
self.sess.span_bug(
e.span,
&format!("expected item, found {}",
self.ast_map.node_to_string(def_id.node)));
}
}
}
// For variants, we only want to check expressions that
// affect the specific variant used, but we need to check
// the whole enum definition to see what expression that
// might be (if any).
Some(DefVariant(enum_id, variant_id, false))
if ast_util::is_local(enum_id) => {
if let ast::ItemEnum(ref enum_def, ref generics) =
self.ast_map.expect_item(enum_id.local_id()).node {
self.populate_enum_discriminants(enum_def);
let variant = self.ast_map.expect_variant(variant_id.local_id());
self.visit_variant(variant, generics);
} else {
self.sess.span_bug(e.span,
"`check_static_recursion` found \
non-enum in DefVariant");
}
}
_ => ()
}
},
_ => ()
}
visit::walk_expr(self, e);
}
}
|
visit_enum_def
|
identifier_name
|
check_static_recursion.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This compiler pass detects constants that refer to themselves
// recursively.
use ast_map;
use session::Session;
use middle::def::{DefStatic, DefConst, DefAssociatedConst, DefVariant, DefMap};
use util::nodemap::NodeMap;
use syntax::{ast, ast_util};
use syntax::codemap::Span;
use syntax::feature_gate::emit_feature_err;
use syntax::visit::Visitor;
use syntax::visit;
use std::cell::RefCell;
struct CheckCrateVisitor<'a, 'ast: 'a> {
sess: &'a Session,
def_map: &'a DefMap,
ast_map: &'a ast_map::Map<'ast>,
// `discriminant_map` is a cache that associates the `NodeId`s of local
// variant definitions with the discriminant expression that applies to
// each one. If the variant uses the default values (starting from `0`),
// then `None` is stored.
discriminant_map: RefCell<NodeMap<Option<&'ast ast::Expr>>>,
}
impl<'a, 'ast: 'a> Visitor<'ast> for CheckCrateVisitor<'a, 'ast> {
fn visit_item(&mut self, it: &'ast ast::Item) {
match it.node {
ast::ItemStatic(..) |
ast::ItemConst(..) => {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &it.span);
recursion_visitor.visit_item(it);
},
ast::ItemEnum(ref enum_def, ref generics) => {
// We could process the whole enum, but handling the variants
// with discriminant expressions one by one gives more specific,
// less redundant output.
for variant in &enum_def.variants {
if let Some(_) = variant.node.disr_expr {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &variant.span);
recursion_visitor.populate_enum_discriminants(enum_def);
recursion_visitor.visit_variant(variant, generics);
}
}
}
_ => {}
}
visit::walk_item(self, it)
}
fn visit_trait_item(&mut self, ti: &'ast ast::TraitItem) {
match ti.node {
ast::ConstTraitItem(_, ref default) => {
if let Some(_) = *default {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &ti.span);
recursion_visitor.visit_trait_item(ti);
}
}
_ => {}
}
visit::walk_trait_item(self, ti)
}
fn visit_impl_item(&mut self, ii: &'ast ast::ImplItem) {
match ii.node {
ast::ConstImplItem(..) => {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &ii.span);
recursion_visitor.visit_impl_item(ii);
}
_ => {}
}
visit::walk_impl_item(self, ii)
}
}
pub fn check_crate<'ast>(sess: &Session,
krate: &'ast ast::Crate,
def_map: &DefMap,
ast_map: &ast_map::Map<'ast>) {
let mut visitor = CheckCrateVisitor {
sess: sess,
def_map: def_map,
ast_map: ast_map,
discriminant_map: RefCell::new(NodeMap()),
};
visit::walk_crate(&mut visitor, krate);
sess.abort_if_errors();
}
struct CheckItemRecursionVisitor<'a, 'ast: 'a> {
root_span: &'a Span,
sess: &'a Session,
ast_map: &'a ast_map::Map<'ast>,
def_map: &'a DefMap,
discriminant_map: &'a RefCell<NodeMap<Option<&'ast ast::Expr>>>,
idstack: Vec<ast::NodeId>,
}
impl<'a, 'ast: 'a> CheckItemRecursionVisitor<'a, 'ast> {
fn new(v: &'a CheckCrateVisitor<'a, 'ast>, span: &'a Span)
-> CheckItemRecursionVisitor<'a, 'ast> {
CheckItemRecursionVisitor {
root_span: span,
sess: v.sess,
ast_map: v.ast_map,
def_map: v.def_map,
discriminant_map: &v.discriminant_map,
idstack: Vec::new(),
}
}
fn with_item_id_pushed<F>(&mut self, id: ast::NodeId, f: F)
where F: Fn(&mut Self) {
if self.idstack.iter().any(|&x| x == id) {
let any_static = self.idstack.iter().any(|&x| {
if let ast_map::NodeItem(item) = self.ast_map.get(x) {
if let ast::ItemStatic(..) = item.node {
true
} else {
false
}
} else {
false
}
});
if any_static {
if!self.sess.features.borrow().static_recursion {
emit_feature_err(&self.sess.parse_sess.span_diagnostic,
"static_recursion",
*self.root_span, "recursive static");
}
} else {
span_err!(self.sess, *self.root_span, E0265, "recursive constant");
}
return;
}
self.idstack.push(id);
f(self);
self.idstack.pop();
}
// If a variant has an expression specifying its discriminant, then it needs
// to be checked just like a static or constant. However, if there are more
// variants with no explicitly specified discriminant, those variants will
// increment the same expression to get their values.
//
// So for every variant, we need to track whether there is an expression
// somewhere in the enum definition that controls its discriminant. We do
// this by starting from the end and searching backward.
fn populate_enum_discriminants(&self, enum_definition: &'ast ast::EnumDef) {
// Get the map, and return if we already processed this enum or if it
// has no variants.
let mut discriminant_map = self.discriminant_map.borrow_mut();
match enum_definition.variants.first() {
None => { return; }
Some(variant) if discriminant_map.contains_key(&variant.node.id) => {
return;
}
_ => {}
|
// Go through all the variants.
let mut variant_stack: Vec<ast::NodeId> = Vec::new();
for variant in enum_definition.variants.iter().rev() {
variant_stack.push(variant.node.id);
// When we find an expression, every variant currently on the stack
// is affected by that expression.
if let Some(ref expr) = variant.node.disr_expr {
for id in &variant_stack {
discriminant_map.insert(*id, Some(expr));
}
variant_stack.clear()
}
}
// If we are at the top, that always starts at 0, so any variant on the
// stack has a default value and does not need to be checked.
for id in &variant_stack {
discriminant_map.insert(*id, None);
}
}
}
impl<'a, 'ast: 'a> Visitor<'ast> for CheckItemRecursionVisitor<'a, 'ast> {
fn visit_item(&mut self, it: &'ast ast::Item) {
self.with_item_id_pushed(it.id, |v| visit::walk_item(v, it));
}
fn visit_enum_def(&mut self, enum_definition: &'ast ast::EnumDef,
generics: &'ast ast::Generics) {
self.populate_enum_discriminants(enum_definition);
visit::walk_enum_def(self, enum_definition, generics);
}
fn visit_variant(&mut self, variant: &'ast ast::Variant,
_: &'ast ast::Generics) {
let variant_id = variant.node.id;
let maybe_expr;
if let Some(get_expr) = self.discriminant_map.borrow().get(&variant_id) {
// This is necessary because we need to let the `discriminant_map`
// borrow fall out of scope, so that we can reborrow farther down.
maybe_expr = (*get_expr).clone();
} else {
self.sess.span_bug(variant.span,
"`check_static_recursion` attempted to visit \
variant with unknown discriminant")
}
// If `maybe_expr` is `None`, that's because no discriminant is
// specified that affects this variant. Thus, no risk of recursion.
if let Some(expr) = maybe_expr {
self.with_item_id_pushed(expr.id, |v| visit::walk_expr(v, expr));
}
}
fn visit_trait_item(&mut self, ti: &'ast ast::TraitItem) {
self.with_item_id_pushed(ti.id, |v| visit::walk_trait_item(v, ti));
}
fn visit_impl_item(&mut self, ii: &'ast ast::ImplItem) {
self.with_item_id_pushed(ii.id, |v| visit::walk_impl_item(v, ii));
}
fn visit_expr(&mut self, e: &'ast ast::Expr) {
match e.node {
ast::ExprPath(..) => {
match self.def_map.borrow().get(&e.id).map(|d| d.base_def) {
Some(DefStatic(def_id, _)) |
Some(DefAssociatedConst(def_id, _)) |
Some(DefConst(def_id))
if ast_util::is_local(def_id) => {
match self.ast_map.get(def_id.node) {
ast_map::NodeItem(item) =>
self.visit_item(item),
ast_map::NodeTraitItem(item) =>
self.visit_trait_item(item),
ast_map::NodeImplItem(item) =>
self.visit_impl_item(item),
ast_map::NodeForeignItem(_) => {},
_ => {
self.sess.span_bug(
e.span,
&format!("expected item, found {}",
self.ast_map.node_to_string(def_id.node)));
}
}
}
// For variants, we only want to check expressions that
// affect the specific variant used, but we need to check
// the whole enum definition to see what expression that
// might be (if any).
Some(DefVariant(enum_id, variant_id, false))
if ast_util::is_local(enum_id) => {
if let ast::ItemEnum(ref enum_def, ref generics) =
self.ast_map.expect_item(enum_id.local_id()).node {
self.populate_enum_discriminants(enum_def);
let variant = self.ast_map.expect_variant(variant_id.local_id());
self.visit_variant(variant, generics);
} else {
self.sess.span_bug(e.span,
"`check_static_recursion` found \
non-enum in DefVariant");
}
}
_ => ()
}
},
_ => ()
}
visit::walk_expr(self, e);
}
}
|
}
|
random_line_split
|
check_static_recursion.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This compiler pass detects constants that refer to themselves
// recursively.
use ast_map;
use session::Session;
use middle::def::{DefStatic, DefConst, DefAssociatedConst, DefVariant, DefMap};
use util::nodemap::NodeMap;
use syntax::{ast, ast_util};
use syntax::codemap::Span;
use syntax::feature_gate::emit_feature_err;
use syntax::visit::Visitor;
use syntax::visit;
use std::cell::RefCell;
struct CheckCrateVisitor<'a, 'ast: 'a> {
sess: &'a Session,
def_map: &'a DefMap,
ast_map: &'a ast_map::Map<'ast>,
// `discriminant_map` is a cache that associates the `NodeId`s of local
// variant definitions with the discriminant expression that applies to
// each one. If the variant uses the default values (starting from `0`),
// then `None` is stored.
discriminant_map: RefCell<NodeMap<Option<&'ast ast::Expr>>>,
}
impl<'a, 'ast: 'a> Visitor<'ast> for CheckCrateVisitor<'a, 'ast> {
fn visit_item(&mut self, it: &'ast ast::Item) {
match it.node {
ast::ItemStatic(..) |
ast::ItemConst(..) => {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &it.span);
recursion_visitor.visit_item(it);
},
ast::ItemEnum(ref enum_def, ref generics) => {
// We could process the whole enum, but handling the variants
// with discriminant expressions one by one gives more specific,
// less redundant output.
for variant in &enum_def.variants {
if let Some(_) = variant.node.disr_expr {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &variant.span);
recursion_visitor.populate_enum_discriminants(enum_def);
recursion_visitor.visit_variant(variant, generics);
}
}
}
_ => {}
}
visit::walk_item(self, it)
}
fn visit_trait_item(&mut self, ti: &'ast ast::TraitItem) {
match ti.node {
ast::ConstTraitItem(_, ref default) => {
if let Some(_) = *default {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &ti.span);
recursion_visitor.visit_trait_item(ti);
}
}
_ => {}
}
visit::walk_trait_item(self, ti)
}
fn visit_impl_item(&mut self, ii: &'ast ast::ImplItem) {
match ii.node {
ast::ConstImplItem(..) => {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &ii.span);
recursion_visitor.visit_impl_item(ii);
}
_ => {}
}
visit::walk_impl_item(self, ii)
}
}
pub fn check_crate<'ast>(sess: &Session,
krate: &'ast ast::Crate,
def_map: &DefMap,
ast_map: &ast_map::Map<'ast>) {
let mut visitor = CheckCrateVisitor {
sess: sess,
def_map: def_map,
ast_map: ast_map,
discriminant_map: RefCell::new(NodeMap()),
};
visit::walk_crate(&mut visitor, krate);
sess.abort_if_errors();
}
struct CheckItemRecursionVisitor<'a, 'ast: 'a> {
root_span: &'a Span,
sess: &'a Session,
ast_map: &'a ast_map::Map<'ast>,
def_map: &'a DefMap,
discriminant_map: &'a RefCell<NodeMap<Option<&'ast ast::Expr>>>,
idstack: Vec<ast::NodeId>,
}
impl<'a, 'ast: 'a> CheckItemRecursionVisitor<'a, 'ast> {
fn new(v: &'a CheckCrateVisitor<'a, 'ast>, span: &'a Span)
-> CheckItemRecursionVisitor<'a, 'ast> {
CheckItemRecursionVisitor {
root_span: span,
sess: v.sess,
ast_map: v.ast_map,
def_map: v.def_map,
discriminant_map: &v.discriminant_map,
idstack: Vec::new(),
}
}
fn with_item_id_pushed<F>(&mut self, id: ast::NodeId, f: F)
where F: Fn(&mut Self) {
if self.idstack.iter().any(|&x| x == id) {
let any_static = self.idstack.iter().any(|&x| {
if let ast_map::NodeItem(item) = self.ast_map.get(x) {
if let ast::ItemStatic(..) = item.node {
true
} else {
false
}
} else {
false
}
});
if any_static {
if!self.sess.features.borrow().static_recursion {
emit_feature_err(&self.sess.parse_sess.span_diagnostic,
"static_recursion",
*self.root_span, "recursive static");
}
} else {
span_err!(self.sess, *self.root_span, E0265, "recursive constant");
}
return;
}
self.idstack.push(id);
f(self);
self.idstack.pop();
}
// If a variant has an expression specifying its discriminant, then it needs
// to be checked just like a static or constant. However, if there are more
// variants with no explicitly specified discriminant, those variants will
// increment the same expression to get their values.
//
// So for every variant, we need to track whether there is an expression
// somewhere in the enum definition that controls its discriminant. We do
// this by starting from the end and searching backward.
fn populate_enum_discriminants(&self, enum_definition: &'ast ast::EnumDef) {
// Get the map, and return if we already processed this enum or if it
// has no variants.
let mut discriminant_map = self.discriminant_map.borrow_mut();
match enum_definition.variants.first() {
None => { return; }
Some(variant) if discriminant_map.contains_key(&variant.node.id) => {
return;
}
_ => {}
}
// Go through all the variants.
let mut variant_stack: Vec<ast::NodeId> = Vec::new();
for variant in enum_definition.variants.iter().rev() {
variant_stack.push(variant.node.id);
// When we find an expression, every variant currently on the stack
// is affected by that expression.
if let Some(ref expr) = variant.node.disr_expr {
for id in &variant_stack {
discriminant_map.insert(*id, Some(expr));
}
variant_stack.clear()
}
}
// If we are at the top, that always starts at 0, so any variant on the
// stack has a default value and does not need to be checked.
for id in &variant_stack {
discriminant_map.insert(*id, None);
}
}
}
impl<'a, 'ast: 'a> Visitor<'ast> for CheckItemRecursionVisitor<'a, 'ast> {
fn visit_item(&mut self, it: &'ast ast::Item) {
self.with_item_id_pushed(it.id, |v| visit::walk_item(v, it));
}
fn visit_enum_def(&mut self, enum_definition: &'ast ast::EnumDef,
generics: &'ast ast::Generics) {
self.populate_enum_discriminants(enum_definition);
visit::walk_enum_def(self, enum_definition, generics);
}
fn visit_variant(&mut self, variant: &'ast ast::Variant,
_: &'ast ast::Generics) {
let variant_id = variant.node.id;
let maybe_expr;
if let Some(get_expr) = self.discriminant_map.borrow().get(&variant_id) {
// This is necessary because we need to let the `discriminant_map`
// borrow fall out of scope, so that we can reborrow farther down.
maybe_expr = (*get_expr).clone();
} else {
self.sess.span_bug(variant.span,
"`check_static_recursion` attempted to visit \
variant with unknown discriminant")
}
// If `maybe_expr` is `None`, that's because no discriminant is
// specified that affects this variant. Thus, no risk of recursion.
if let Some(expr) = maybe_expr {
self.with_item_id_pushed(expr.id, |v| visit::walk_expr(v, expr));
}
}
fn visit_trait_item(&mut self, ti: &'ast ast::TraitItem) {
self.with_item_id_pushed(ti.id, |v| visit::walk_trait_item(v, ti));
}
fn visit_impl_item(&mut self, ii: &'ast ast::ImplItem) {
self.with_item_id_pushed(ii.id, |v| visit::walk_impl_item(v, ii));
}
fn visit_expr(&mut self, e: &'ast ast::Expr) {
match e.node {
ast::ExprPath(..) => {
match self.def_map.borrow().get(&e.id).map(|d| d.base_def) {
Some(DefStatic(def_id, _)) |
Some(DefAssociatedConst(def_id, _)) |
Some(DefConst(def_id))
if ast_util::is_local(def_id) => {
match self.ast_map.get(def_id.node) {
ast_map::NodeItem(item) =>
self.visit_item(item),
ast_map::NodeTraitItem(item) =>
self.visit_trait_item(item),
ast_map::NodeImplItem(item) =>
self.visit_impl_item(item),
ast_map::NodeForeignItem(_) => {},
_ => {
self.sess.span_bug(
e.span,
&format!("expected item, found {}",
self.ast_map.node_to_string(def_id.node)));
}
}
}
// For variants, we only want to check expressions that
// affect the specific variant used, but we need to check
// the whole enum definition to see what expression that
// might be (if any).
Some(DefVariant(enum_id, variant_id, false))
if ast_util::is_local(enum_id) =>
|
_ => ()
}
},
_ => ()
}
visit::walk_expr(self, e);
}
}
|
{
if let ast::ItemEnum(ref enum_def, ref generics) =
self.ast_map.expect_item(enum_id.local_id()).node {
self.populate_enum_discriminants(enum_def);
let variant = self.ast_map.expect_variant(variant_id.local_id());
self.visit_variant(variant, generics);
} else {
self.sess.span_bug(e.span,
"`check_static_recursion` found \
non-enum in DefVariant");
}
}
|
conditional_block
|
check_static_recursion.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This compiler pass detects constants that refer to themselves
// recursively.
use ast_map;
use session::Session;
use middle::def::{DefStatic, DefConst, DefAssociatedConst, DefVariant, DefMap};
use util::nodemap::NodeMap;
use syntax::{ast, ast_util};
use syntax::codemap::Span;
use syntax::feature_gate::emit_feature_err;
use syntax::visit::Visitor;
use syntax::visit;
use std::cell::RefCell;
struct CheckCrateVisitor<'a, 'ast: 'a> {
sess: &'a Session,
def_map: &'a DefMap,
ast_map: &'a ast_map::Map<'ast>,
// `discriminant_map` is a cache that associates the `NodeId`s of local
// variant definitions with the discriminant expression that applies to
// each one. If the variant uses the default values (starting from `0`),
// then `None` is stored.
discriminant_map: RefCell<NodeMap<Option<&'ast ast::Expr>>>,
}
impl<'a, 'ast: 'a> Visitor<'ast> for CheckCrateVisitor<'a, 'ast> {
fn visit_item(&mut self, it: &'ast ast::Item) {
match it.node {
ast::ItemStatic(..) |
ast::ItemConst(..) => {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &it.span);
recursion_visitor.visit_item(it);
},
ast::ItemEnum(ref enum_def, ref generics) => {
// We could process the whole enum, but handling the variants
// with discriminant expressions one by one gives more specific,
// less redundant output.
for variant in &enum_def.variants {
if let Some(_) = variant.node.disr_expr {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &variant.span);
recursion_visitor.populate_enum_discriminants(enum_def);
recursion_visitor.visit_variant(variant, generics);
}
}
}
_ => {}
}
visit::walk_item(self, it)
}
fn visit_trait_item(&mut self, ti: &'ast ast::TraitItem) {
match ti.node {
ast::ConstTraitItem(_, ref default) => {
if let Some(_) = *default {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &ti.span);
recursion_visitor.visit_trait_item(ti);
}
}
_ => {}
}
visit::walk_trait_item(self, ti)
}
fn visit_impl_item(&mut self, ii: &'ast ast::ImplItem) {
match ii.node {
ast::ConstImplItem(..) => {
let mut recursion_visitor =
CheckItemRecursionVisitor::new(self, &ii.span);
recursion_visitor.visit_impl_item(ii);
}
_ => {}
}
visit::walk_impl_item(self, ii)
}
}
pub fn check_crate<'ast>(sess: &Session,
krate: &'ast ast::Crate,
def_map: &DefMap,
ast_map: &ast_map::Map<'ast>) {
let mut visitor = CheckCrateVisitor {
sess: sess,
def_map: def_map,
ast_map: ast_map,
discriminant_map: RefCell::new(NodeMap()),
};
visit::walk_crate(&mut visitor, krate);
sess.abort_if_errors();
}
struct CheckItemRecursionVisitor<'a, 'ast: 'a> {
root_span: &'a Span,
sess: &'a Session,
ast_map: &'a ast_map::Map<'ast>,
def_map: &'a DefMap,
discriminant_map: &'a RefCell<NodeMap<Option<&'ast ast::Expr>>>,
idstack: Vec<ast::NodeId>,
}
impl<'a, 'ast: 'a> CheckItemRecursionVisitor<'a, 'ast> {
fn new(v: &'a CheckCrateVisitor<'a, 'ast>, span: &'a Span)
-> CheckItemRecursionVisitor<'a, 'ast>
|
fn with_item_id_pushed<F>(&mut self, id: ast::NodeId, f: F)
where F: Fn(&mut Self) {
if self.idstack.iter().any(|&x| x == id) {
let any_static = self.idstack.iter().any(|&x| {
if let ast_map::NodeItem(item) = self.ast_map.get(x) {
if let ast::ItemStatic(..) = item.node {
true
} else {
false
}
} else {
false
}
});
if any_static {
if!self.sess.features.borrow().static_recursion {
emit_feature_err(&self.sess.parse_sess.span_diagnostic,
"static_recursion",
*self.root_span, "recursive static");
}
} else {
span_err!(self.sess, *self.root_span, E0265, "recursive constant");
}
return;
}
self.idstack.push(id);
f(self);
self.idstack.pop();
}
// If a variant has an expression specifying its discriminant, then it needs
// to be checked just like a static or constant. However, if there are more
// variants with no explicitly specified discriminant, those variants will
// increment the same expression to get their values.
//
// So for every variant, we need to track whether there is an expression
// somewhere in the enum definition that controls its discriminant. We do
// this by starting from the end and searching backward.
fn populate_enum_discriminants(&self, enum_definition: &'ast ast::EnumDef) {
// Get the map, and return if we already processed this enum or if it
// has no variants.
let mut discriminant_map = self.discriminant_map.borrow_mut();
match enum_definition.variants.first() {
None => { return; }
Some(variant) if discriminant_map.contains_key(&variant.node.id) => {
return;
}
_ => {}
}
// Go through all the variants.
let mut variant_stack: Vec<ast::NodeId> = Vec::new();
for variant in enum_definition.variants.iter().rev() {
variant_stack.push(variant.node.id);
// When we find an expression, every variant currently on the stack
// is affected by that expression.
if let Some(ref expr) = variant.node.disr_expr {
for id in &variant_stack {
discriminant_map.insert(*id, Some(expr));
}
variant_stack.clear()
}
}
// If we are at the top, that always starts at 0, so any variant on the
// stack has a default value and does not need to be checked.
for id in &variant_stack {
discriminant_map.insert(*id, None);
}
}
}
impl<'a, 'ast: 'a> Visitor<'ast> for CheckItemRecursionVisitor<'a, 'ast> {
fn visit_item(&mut self, it: &'ast ast::Item) {
self.with_item_id_pushed(it.id, |v| visit::walk_item(v, it));
}
fn visit_enum_def(&mut self, enum_definition: &'ast ast::EnumDef,
generics: &'ast ast::Generics) {
self.populate_enum_discriminants(enum_definition);
visit::walk_enum_def(self, enum_definition, generics);
}
fn visit_variant(&mut self, variant: &'ast ast::Variant,
_: &'ast ast::Generics) {
let variant_id = variant.node.id;
let maybe_expr;
if let Some(get_expr) = self.discriminant_map.borrow().get(&variant_id) {
// This is necessary because we need to let the `discriminant_map`
// borrow fall out of scope, so that we can reborrow farther down.
maybe_expr = (*get_expr).clone();
} else {
self.sess.span_bug(variant.span,
"`check_static_recursion` attempted to visit \
variant with unknown discriminant")
}
// If `maybe_expr` is `None`, that's because no discriminant is
// specified that affects this variant. Thus, no risk of recursion.
if let Some(expr) = maybe_expr {
self.with_item_id_pushed(expr.id, |v| visit::walk_expr(v, expr));
}
}
fn visit_trait_item(&mut self, ti: &'ast ast::TraitItem) {
self.with_item_id_pushed(ti.id, |v| visit::walk_trait_item(v, ti));
}
fn visit_impl_item(&mut self, ii: &'ast ast::ImplItem) {
self.with_item_id_pushed(ii.id, |v| visit::walk_impl_item(v, ii));
}
fn visit_expr(&mut self, e: &'ast ast::Expr) {
match e.node {
ast::ExprPath(..) => {
match self.def_map.borrow().get(&e.id).map(|d| d.base_def) {
Some(DefStatic(def_id, _)) |
Some(DefAssociatedConst(def_id, _)) |
Some(DefConst(def_id))
if ast_util::is_local(def_id) => {
match self.ast_map.get(def_id.node) {
ast_map::NodeItem(item) =>
self.visit_item(item),
ast_map::NodeTraitItem(item) =>
self.visit_trait_item(item),
ast_map::NodeImplItem(item) =>
self.visit_impl_item(item),
ast_map::NodeForeignItem(_) => {},
_ => {
self.sess.span_bug(
e.span,
&format!("expected item, found {}",
self.ast_map.node_to_string(def_id.node)));
}
}
}
// For variants, we only want to check expressions that
// affect the specific variant used, but we need to check
// the whole enum definition to see what expression that
// might be (if any).
Some(DefVariant(enum_id, variant_id, false))
if ast_util::is_local(enum_id) => {
if let ast::ItemEnum(ref enum_def, ref generics) =
self.ast_map.expect_item(enum_id.local_id()).node {
self.populate_enum_discriminants(enum_def);
let variant = self.ast_map.expect_variant(variant_id.local_id());
self.visit_variant(variant, generics);
} else {
self.sess.span_bug(e.span,
"`check_static_recursion` found \
non-enum in DefVariant");
}
}
_ => ()
}
},
_ => ()
}
visit::walk_expr(self, e);
}
}
|
{
CheckItemRecursionVisitor {
root_span: span,
sess: v.sess,
ast_map: v.ast_map,
def_map: v.def_map,
discriminant_map: &v.discriminant_map,
idstack: Vec::new(),
}
}
|
identifier_body
|
constellation_msg.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The high-level interface from script to constellation. Using this abstract interface helps
//! reduce coupling between these two components.
use euclid::scale_factor::ScaleFactor;
use euclid::size::TypedSize2D;
use hyper::header::Headers;
use hyper::method::Method;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender, IpcSharedMemory};
use layers::geometry::DevicePixel;
use serde::{Deserialize, Serialize};
use std::cell::Cell;
use std::fmt;
use url::Url;
use util::geometry::{PagePx, ViewportPx};
use webdriver_msg::{LoadStatus, WebDriverScriptCommand};
use webrender_traits;
#[derive(Deserialize, Serialize)]
pub struct ConstellationChan<T: Deserialize + Serialize>(pub IpcSender<T>);
impl<T: Deserialize + Serialize> ConstellationChan<T> {
pub fn new() -> (IpcReceiver<T>, ConstellationChan<T>) {
let (chan, port) = ipc::channel().unwrap();
(port, ConstellationChan(chan))
}
}
impl<T: Serialize + Deserialize> Clone for ConstellationChan<T> {
fn clone(&self) -> ConstellationChan<T> {
ConstellationChan(self.0.clone())
}
}
pub type PanicMsg = (Option<PipelineId>, String);
#[derive(Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub struct WindowSizeData {
/// The size of the initial layout viewport, before parsing an
/// http://www.w3.org/TR/css-device-adapt/#initial-viewport
pub initial_viewport: TypedSize2D<ViewportPx, f32>,
/// The "viewing area" in page px. See `PagePx` documentation for details.
pub visible_viewport: TypedSize2D<PagePx, f32>,
/// The resolution of the window in dppx, not including any "pinch zoom" factor.
pub device_pixel_ratio: ScaleFactor<ViewportPx, DevicePixel, f32>,
}
#[derive(Deserialize, Eq, PartialEq, Serialize, Copy, Clone, HeapSizeOf)]
pub enum WindowSizeType {
Initial,
Resize,
}
#[derive(PartialEq, Eq, Copy, Clone, Debug, Deserialize, Serialize)]
pub enum KeyState {
Pressed,
Released,
Repeated,
}
//N.B. Based on the glutin key enum
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub enum Key {
Space,
Apostrophe,
Comma,
Minus,
Period,
Slash,
Num0,
Num1,
Num2,
Num3,
Num4,
Num5,
Num6,
Num7,
Num8,
Num9,
Semicolon,
Equal,
A,
B,
C,
D,
E,
F,
G,
H,
I,
J,
K,
L,
M,
N,
O,
P,
Q,
R,
S,
T,
U,
V,
W,
X,
Y,
Z,
LeftBracket,
Backslash,
RightBracket,
GraveAccent,
World1,
World2,
Escape,
Enter,
Tab,
Backspace,
Insert,
Delete,
Right,
Left,
Down,
Up,
PageUp,
PageDown,
Home,
End,
CapsLock,
ScrollLock,
NumLock,
PrintScreen,
Pause,
F1,
F2,
F3,
F4,
F5,
F6,
F7,
F8,
F9,
F10,
F11,
F12,
F13,
F14,
F15,
F16,
F17,
F18,
F19,
F20,
F21,
F22,
F23,
F24,
F25,
Kp0,
Kp1,
Kp2,
Kp3,
Kp4,
Kp5,
Kp6,
Kp7,
Kp8,
Kp9,
KpDecimal,
KpDivide,
KpMultiply,
KpSubtract,
KpAdd,
KpEnter,
KpEqual,
LeftShift,
LeftControl,
LeftAlt,
LeftSuper,
RightShift,
RightControl,
RightAlt,
RightSuper,
Menu,
NavigateBackward,
NavigateForward,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags KeyModifiers: u8 {
const NONE = 0x00,
const SHIFT = 0x01,
const CONTROL = 0x02,
const ALT = 0x04,
const SUPER = 0x08,
}
}
#[derive(Deserialize, Serialize)]
pub enum WebDriverCommandMsg {
LoadUrl(PipelineId, LoadData, IpcSender<LoadStatus>),
Refresh(PipelineId, IpcSender<LoadStatus>),
ScriptCommand(PipelineId, WebDriverScriptCommand),
SendKeys(PipelineId, Vec<(Key, KeyModifiers, KeyState)>),
TakeScreenshot(PipelineId, IpcSender<Option<Image>>),
}
#[derive(Clone, Copy, Deserialize, Eq, PartialEq, Serialize, HeapSizeOf)]
pub enum PixelFormat {
K8, // Luminance channel only
KA8, // Luminance + alpha
RGB8, // RGB, 8 bits per channel
RGBA8, // RGB + alpha, 8 bits per channel
}
#[derive(Clone, Deserialize, Eq, PartialEq, Serialize, HeapSizeOf)]
pub struct ImageMetadata {
pub width: u32,
pub height: u32,
}
#[derive(Clone, Deserialize, Serialize, HeapSizeOf)]
pub struct Image {
pub width: u32,
pub height: u32,
pub format: PixelFormat,
#[ignore_heap_size_of = "Defined in ipc-channel"]
pub bytes: IpcSharedMemory,
#[ignore_heap_size_of = "Defined in webrender_traits"]
pub id: Option<webrender_traits::ImageKey>,
}
/// Similar to net::resource_thread::LoadData
/// can be passed to LoadUrl to load a page with GET/POST
/// parameters or headers
#[derive(Clone, Deserialize, Serialize)]
pub struct LoadData {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub data: Option<Vec<u8>>,
}
impl LoadData {
pub fn new(url: Url) -> LoadData {
LoadData {
url: url,
method: Method::Get,
headers: Headers::new(),
data: None,
}
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub enum NavigationDirection {
Forward,
Back,
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct FrameId(pub u32);
/// Each pipeline ID needs to be unique. However, it also needs to be possible to
/// generate the pipeline ID from an iframe element (this simplifies a lot of other
/// code that makes use of pipeline IDs).
///
/// To achieve this, each pipeline index belongs to a particular namespace. There is
/// a namespace for the constellation thread, and also one for every script thread.
/// This allows pipeline IDs to be generated by any of those threads without conflicting
/// with pipeline IDs created by other script threads or the constellation. The
/// constellation is the only code that is responsible for creating new *namespaces*.
/// This ensures that namespaces are always unique, even when using multi-process mode.
///
/// It may help conceptually to think of the namespace ID as an identifier for the
/// thread that created this pipeline ID - however this is really an implementation
/// detail so shouldn't be relied upon in code logic. It's best to think of the
/// pipeline ID as a simple unique identifier that doesn't convey any more information.
#[derive(Clone, Copy)]
pub struct PipelineNamespace {
id: PipelineNamespaceId,
next_index: PipelineIndex,
}
impl PipelineNamespace {
pub fn install(namespace_id: PipelineNamespaceId) {
PIPELINE_NAMESPACE.with(|tls| {
assert!(tls.get().is_none());
tls.set(Some(PipelineNamespace {
id: namespace_id,
next_index: PipelineIndex(0),
}));
});
}
fn next(&mut self) -> PipelineId {
let pipeline_id = PipelineId {
namespace_id: self.id,
index: self.next_index,
};
let PipelineIndex(current_index) = self.next_index;
self.next_index = PipelineIndex(current_index + 1);
pipeline_id
}
}
thread_local!(pub static PIPELINE_NAMESPACE: Cell<Option<PipelineNamespace>> = Cell::new(None));
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineNamespaceId(pub u32);
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineIndex(pub u32);
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineId {
pub namespace_id: PipelineNamespaceId,
pub index: PipelineIndex
}
impl PipelineId {
pub fn new() -> PipelineId {
PIPELINE_NAMESPACE.with(|tls| {
let mut namespace = tls.get().expect("No namespace set for this thread!");
let new_pipeline_id = namespace.next();
tls.set(Some(namespace));
new_pipeline_id
})
}
// TODO(gw): This should be removed. It's only required because of the code
// that uses it in the devtools lib.rs file (which itself is a TODO). Once
// that is fixed, this should be removed. It also relies on the first
// call to PipelineId::new() returning (0,0), which is checked with an
// assert in handle_init_load().
pub fn
|
() -> PipelineId {
PipelineId {
namespace_id: PipelineNamespaceId(0),
index: PipelineIndex(0),
}
}
}
impl fmt::Display for PipelineId {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let PipelineIndex(index) = self.index;
write!(fmt, "({},{})", namespace_id, index)
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct SubpageId(pub u32);
pub trait ConvertPipelineIdToWebRender {
fn to_webrender(&self) -> webrender_traits::PipelineId;
}
pub trait ConvertPipelineIdFromWebRender {
fn from_webrender(&self) -> PipelineId;
}
impl ConvertPipelineIdToWebRender for PipelineId {
fn to_webrender(&self) -> webrender_traits::PipelineId {
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let PipelineIndex(index) = self.index;
webrender_traits::PipelineId(namespace_id, index)
}
}
impl ConvertPipelineIdFromWebRender for webrender_traits::PipelineId {
fn from_webrender(&self) -> PipelineId {
PipelineId {
namespace_id: PipelineNamespaceId(self.0),
index: PipelineIndex(self.1),
}
}
}
|
fake_root_pipeline_id
|
identifier_name
|
constellation_msg.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The high-level interface from script to constellation. Using this abstract interface helps
//! reduce coupling between these two components.
use euclid::scale_factor::ScaleFactor;
use euclid::size::TypedSize2D;
use hyper::header::Headers;
use hyper::method::Method;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender, IpcSharedMemory};
use layers::geometry::DevicePixel;
use serde::{Deserialize, Serialize};
use std::cell::Cell;
use std::fmt;
use url::Url;
use util::geometry::{PagePx, ViewportPx};
use webdriver_msg::{LoadStatus, WebDriverScriptCommand};
use webrender_traits;
#[derive(Deserialize, Serialize)]
pub struct ConstellationChan<T: Deserialize + Serialize>(pub IpcSender<T>);
impl<T: Deserialize + Serialize> ConstellationChan<T> {
pub fn new() -> (IpcReceiver<T>, ConstellationChan<T>) {
let (chan, port) = ipc::channel().unwrap();
(port, ConstellationChan(chan))
}
}
impl<T: Serialize + Deserialize> Clone for ConstellationChan<T> {
fn clone(&self) -> ConstellationChan<T> {
ConstellationChan(self.0.clone())
}
}
pub type PanicMsg = (Option<PipelineId>, String);
#[derive(Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub struct WindowSizeData {
/// The size of the initial layout viewport, before parsing an
/// http://www.w3.org/TR/css-device-adapt/#initial-viewport
pub initial_viewport: TypedSize2D<ViewportPx, f32>,
/// The "viewing area" in page px. See `PagePx` documentation for details.
pub visible_viewport: TypedSize2D<PagePx, f32>,
/// The resolution of the window in dppx, not including any "pinch zoom" factor.
pub device_pixel_ratio: ScaleFactor<ViewportPx, DevicePixel, f32>,
}
#[derive(Deserialize, Eq, PartialEq, Serialize, Copy, Clone, HeapSizeOf)]
pub enum WindowSizeType {
Initial,
Resize,
}
#[derive(PartialEq, Eq, Copy, Clone, Debug, Deserialize, Serialize)]
pub enum KeyState {
Pressed,
Released,
Repeated,
}
//N.B. Based on the glutin key enum
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub enum Key {
Space,
Apostrophe,
Comma,
Minus,
Period,
Slash,
Num0,
Num1,
Num2,
Num3,
Num4,
Num5,
Num6,
Num7,
Num8,
Num9,
Semicolon,
Equal,
A,
B,
C,
D,
E,
F,
G,
H,
I,
J,
K,
L,
M,
N,
O,
P,
Q,
R,
S,
T,
U,
V,
W,
X,
Y,
Z,
LeftBracket,
Backslash,
RightBracket,
GraveAccent,
World1,
World2,
Escape,
Enter,
Tab,
Backspace,
Insert,
Delete,
Right,
Left,
Down,
Up,
PageUp,
PageDown,
Home,
End,
CapsLock,
ScrollLock,
NumLock,
PrintScreen,
Pause,
F1,
F2,
F3,
F4,
F5,
F6,
F7,
F8,
F9,
F10,
F11,
F12,
F13,
F14,
F15,
F16,
F17,
F18,
F19,
F20,
F21,
F22,
F23,
F24,
F25,
Kp0,
Kp1,
Kp2,
Kp3,
Kp4,
Kp5,
Kp6,
Kp7,
Kp8,
Kp9,
KpDecimal,
KpDivide,
KpMultiply,
KpSubtract,
KpAdd,
KpEnter,
KpEqual,
LeftShift,
|
LeftControl,
LeftAlt,
LeftSuper,
RightShift,
RightControl,
RightAlt,
RightSuper,
Menu,
NavigateBackward,
NavigateForward,
}
bitflags! {
#[derive(Deserialize, Serialize)]
flags KeyModifiers: u8 {
const NONE = 0x00,
const SHIFT = 0x01,
const CONTROL = 0x02,
const ALT = 0x04,
const SUPER = 0x08,
}
}
#[derive(Deserialize, Serialize)]
pub enum WebDriverCommandMsg {
LoadUrl(PipelineId, LoadData, IpcSender<LoadStatus>),
Refresh(PipelineId, IpcSender<LoadStatus>),
ScriptCommand(PipelineId, WebDriverScriptCommand),
SendKeys(PipelineId, Vec<(Key, KeyModifiers, KeyState)>),
TakeScreenshot(PipelineId, IpcSender<Option<Image>>),
}
#[derive(Clone, Copy, Deserialize, Eq, PartialEq, Serialize, HeapSizeOf)]
pub enum PixelFormat {
K8, // Luminance channel only
KA8, // Luminance + alpha
RGB8, // RGB, 8 bits per channel
RGBA8, // RGB + alpha, 8 bits per channel
}
#[derive(Clone, Deserialize, Eq, PartialEq, Serialize, HeapSizeOf)]
pub struct ImageMetadata {
pub width: u32,
pub height: u32,
}
#[derive(Clone, Deserialize, Serialize, HeapSizeOf)]
pub struct Image {
pub width: u32,
pub height: u32,
pub format: PixelFormat,
#[ignore_heap_size_of = "Defined in ipc-channel"]
pub bytes: IpcSharedMemory,
#[ignore_heap_size_of = "Defined in webrender_traits"]
pub id: Option<webrender_traits::ImageKey>,
}
/// Similar to net::resource_thread::LoadData
/// can be passed to LoadUrl to load a page with GET/POST
/// parameters or headers
#[derive(Clone, Deserialize, Serialize)]
pub struct LoadData {
pub url: Url,
pub method: Method,
pub headers: Headers,
pub data: Option<Vec<u8>>,
}
impl LoadData {
pub fn new(url: Url) -> LoadData {
LoadData {
url: url,
method: Method::Get,
headers: Headers::new(),
data: None,
}
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub enum NavigationDirection {
Forward,
Back,
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub struct FrameId(pub u32);
/// Each pipeline ID needs to be unique. However, it also needs to be possible to
/// generate the pipeline ID from an iframe element (this simplifies a lot of other
/// code that makes use of pipeline IDs).
///
/// To achieve this, each pipeline index belongs to a particular namespace. There is
/// a namespace for the constellation thread, and also one for every script thread.
/// This allows pipeline IDs to be generated by any of those threads without conflicting
/// with pipeline IDs created by other script threads or the constellation. The
/// constellation is the only code that is responsible for creating new *namespaces*.
/// This ensures that namespaces are always unique, even when using multi-process mode.
///
/// It may help conceptually to think of the namespace ID as an identifier for the
/// thread that created this pipeline ID - however this is really an implementation
/// detail so shouldn't be relied upon in code logic. It's best to think of the
/// pipeline ID as a simple unique identifier that doesn't convey any more information.
#[derive(Clone, Copy)]
pub struct PipelineNamespace {
id: PipelineNamespaceId,
next_index: PipelineIndex,
}
impl PipelineNamespace {
pub fn install(namespace_id: PipelineNamespaceId) {
PIPELINE_NAMESPACE.with(|tls| {
assert!(tls.get().is_none());
tls.set(Some(PipelineNamespace {
id: namespace_id,
next_index: PipelineIndex(0),
}));
});
}
fn next(&mut self) -> PipelineId {
let pipeline_id = PipelineId {
namespace_id: self.id,
index: self.next_index,
};
let PipelineIndex(current_index) = self.next_index;
self.next_index = PipelineIndex(current_index + 1);
pipeline_id
}
}
thread_local!(pub static PIPELINE_NAMESPACE: Cell<Option<PipelineNamespace>> = Cell::new(None));
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineNamespaceId(pub u32);
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineIndex(pub u32);
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineId {
pub namespace_id: PipelineNamespaceId,
pub index: PipelineIndex
}
impl PipelineId {
pub fn new() -> PipelineId {
PIPELINE_NAMESPACE.with(|tls| {
let mut namespace = tls.get().expect("No namespace set for this thread!");
let new_pipeline_id = namespace.next();
tls.set(Some(namespace));
new_pipeline_id
})
}
// TODO(gw): This should be removed. It's only required because of the code
// that uses it in the devtools lib.rs file (which itself is a TODO). Once
// that is fixed, this should be removed. It also relies on the first
// call to PipelineId::new() returning (0,0), which is checked with an
// assert in handle_init_load().
pub fn fake_root_pipeline_id() -> PipelineId {
PipelineId {
namespace_id: PipelineNamespaceId(0),
index: PipelineIndex(0),
}
}
}
impl fmt::Display for PipelineId {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let PipelineIndex(index) = self.index;
write!(fmt, "({},{})", namespace_id, index)
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct SubpageId(pub u32);
pub trait ConvertPipelineIdToWebRender {
fn to_webrender(&self) -> webrender_traits::PipelineId;
}
pub trait ConvertPipelineIdFromWebRender {
fn from_webrender(&self) -> PipelineId;
}
impl ConvertPipelineIdToWebRender for PipelineId {
fn to_webrender(&self) -> webrender_traits::PipelineId {
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let PipelineIndex(index) = self.index;
webrender_traits::PipelineId(namespace_id, index)
}
}
impl ConvertPipelineIdFromWebRender for webrender_traits::PipelineId {
fn from_webrender(&self) -> PipelineId {
PipelineId {
namespace_id: PipelineNamespaceId(self.0),
index: PipelineIndex(self.1),
}
}
}
|
random_line_split
|
|
websocket_session.rs
|
use async_trait::async_trait;
use futures::{stream::SplitSink, FutureExt, SinkExt};
use serde::Serialize;
use std::net::SocketAddr;
use tokio::io::Result;
use warp::ws::{Message, WebSocket};
use super::session::Session;
use super::{SessionOutput, SessionPromptInfo, SessionState, SignInState};
#[derive(Serialize)]
struct Prompt {
player: SessionPromptInfo,
}
#[derive(Debug)]
pub struct WebSocketSession {
pub id: u64,
addr: SocketAddr,
tx: SplitSink<Box<WebSocket>, Message>,
state: SessionState,
}
impl WebSocketSession {
pub fn new(id: u64, tx: SplitSink<Box<WebSocket>, Message>, addr: SocketAddr) -> Self {
Self {
id,
addr,
tx,
state: SessionState::SigningIn(Box::new(SignInState::new())),
}
}
}
#[async_trait]
impl Session for WebSocketSession {
async fn close(&mut self) -> Result<()> {
self.tx.close().map(|_| Ok(())).await
}
async fn send(&mut self, output: SessionOutput) {
match output {
SessionOutput::Json(data) => {
send_message(&mut self.tx, Message::text(data.to_string())).await
}
SessionOutput::Str(output) => {
send_message(&mut self.tx, Message::text(output.to_owned())).await
}
SessionOutput::String(output) => {
send_message(&mut self.tx, Message::text(output.to_owned())).await
}
SessionOutput::Prompt(info) => {
let prompt = Prompt { player: info };
send_message(
&mut self.tx,
Message::text(serde_json::to_string(&prompt).unwrap()),
)
.await
}
SessionOutput::Aggregate(outputs) => {
for output in outputs {
|
}
}
SessionOutput::None => {}
}
}
fn set_state(&mut self, state: SessionState) {
self.state = state;
}
fn source(&self) -> String {
format!("{}", self.addr.ip())
}
fn state(&self) -> &SessionState {
&self.state
}
}
async fn send_message(tx: &mut SplitSink<Box<WebSocket>, Message>, message: Message) {
if let Err(error) = tx.send(message).await {
println!("Could not send data over socket: {:?}", error);
}
}
|
self.send(output).await
|
random_line_split
|
websocket_session.rs
|
use async_trait::async_trait;
use futures::{stream::SplitSink, FutureExt, SinkExt};
use serde::Serialize;
use std::net::SocketAddr;
use tokio::io::Result;
use warp::ws::{Message, WebSocket};
use super::session::Session;
use super::{SessionOutput, SessionPromptInfo, SessionState, SignInState};
#[derive(Serialize)]
struct Prompt {
player: SessionPromptInfo,
}
#[derive(Debug)]
pub struct WebSocketSession {
pub id: u64,
addr: SocketAddr,
tx: SplitSink<Box<WebSocket>, Message>,
state: SessionState,
}
impl WebSocketSession {
pub fn new(id: u64, tx: SplitSink<Box<WebSocket>, Message>, addr: SocketAddr) -> Self {
Self {
id,
addr,
tx,
state: SessionState::SigningIn(Box::new(SignInState::new())),
}
}
}
#[async_trait]
impl Session for WebSocketSession {
async fn
|
(&mut self) -> Result<()> {
self.tx.close().map(|_| Ok(())).await
}
async fn send(&mut self, output: SessionOutput) {
match output {
SessionOutput::Json(data) => {
send_message(&mut self.tx, Message::text(data.to_string())).await
}
SessionOutput::Str(output) => {
send_message(&mut self.tx, Message::text(output.to_owned())).await
}
SessionOutput::String(output) => {
send_message(&mut self.tx, Message::text(output.to_owned())).await
}
SessionOutput::Prompt(info) => {
let prompt = Prompt { player: info };
send_message(
&mut self.tx,
Message::text(serde_json::to_string(&prompt).unwrap()),
)
.await
}
SessionOutput::Aggregate(outputs) => {
for output in outputs {
self.send(output).await
}
}
SessionOutput::None => {}
}
}
fn set_state(&mut self, state: SessionState) {
self.state = state;
}
fn source(&self) -> String {
format!("{}", self.addr.ip())
}
fn state(&self) -> &SessionState {
&self.state
}
}
async fn send_message(tx: &mut SplitSink<Box<WebSocket>, Message>, message: Message) {
if let Err(error) = tx.send(message).await {
println!("Could not send data over socket: {:?}", error);
}
}
|
close
|
identifier_name
|
websocket_session.rs
|
use async_trait::async_trait;
use futures::{stream::SplitSink, FutureExt, SinkExt};
use serde::Serialize;
use std::net::SocketAddr;
use tokio::io::Result;
use warp::ws::{Message, WebSocket};
use super::session::Session;
use super::{SessionOutput, SessionPromptInfo, SessionState, SignInState};
#[derive(Serialize)]
struct Prompt {
player: SessionPromptInfo,
}
#[derive(Debug)]
pub struct WebSocketSession {
pub id: u64,
addr: SocketAddr,
tx: SplitSink<Box<WebSocket>, Message>,
state: SessionState,
}
impl WebSocketSession {
pub fn new(id: u64, tx: SplitSink<Box<WebSocket>, Message>, addr: SocketAddr) -> Self
|
}
#[async_trait]
impl Session for WebSocketSession {
async fn close(&mut self) -> Result<()> {
self.tx.close().map(|_| Ok(())).await
}
async fn send(&mut self, output: SessionOutput) {
match output {
SessionOutput::Json(data) => {
send_message(&mut self.tx, Message::text(data.to_string())).await
}
SessionOutput::Str(output) => {
send_message(&mut self.tx, Message::text(output.to_owned())).await
}
SessionOutput::String(output) => {
send_message(&mut self.tx, Message::text(output.to_owned())).await
}
SessionOutput::Prompt(info) => {
let prompt = Prompt { player: info };
send_message(
&mut self.tx,
Message::text(serde_json::to_string(&prompt).unwrap()),
)
.await
}
SessionOutput::Aggregate(outputs) => {
for output in outputs {
self.send(output).await
}
}
SessionOutput::None => {}
}
}
fn set_state(&mut self, state: SessionState) {
self.state = state;
}
fn source(&self) -> String {
format!("{}", self.addr.ip())
}
fn state(&self) -> &SessionState {
&self.state
}
}
async fn send_message(tx: &mut SplitSink<Box<WebSocket>, Message>, message: Message) {
if let Err(error) = tx.send(message).await {
println!("Could not send data over socket: {:?}", error);
}
}
|
{
Self {
id,
addr,
tx,
state: SessionState::SigningIn(Box::new(SignInState::new())),
}
}
|
identifier_body
|
websocket_session.rs
|
use async_trait::async_trait;
use futures::{stream::SplitSink, FutureExt, SinkExt};
use serde::Serialize;
use std::net::SocketAddr;
use tokio::io::Result;
use warp::ws::{Message, WebSocket};
use super::session::Session;
use super::{SessionOutput, SessionPromptInfo, SessionState, SignInState};
#[derive(Serialize)]
struct Prompt {
player: SessionPromptInfo,
}
#[derive(Debug)]
pub struct WebSocketSession {
pub id: u64,
addr: SocketAddr,
tx: SplitSink<Box<WebSocket>, Message>,
state: SessionState,
}
impl WebSocketSession {
pub fn new(id: u64, tx: SplitSink<Box<WebSocket>, Message>, addr: SocketAddr) -> Self {
Self {
id,
addr,
tx,
state: SessionState::SigningIn(Box::new(SignInState::new())),
}
}
}
#[async_trait]
impl Session for WebSocketSession {
async fn close(&mut self) -> Result<()> {
self.tx.close().map(|_| Ok(())).await
}
async fn send(&mut self, output: SessionOutput) {
match output {
SessionOutput::Json(data) => {
send_message(&mut self.tx, Message::text(data.to_string())).await
}
SessionOutput::Str(output) => {
send_message(&mut self.tx, Message::text(output.to_owned())).await
}
SessionOutput::String(output) => {
send_message(&mut self.tx, Message::text(output.to_owned())).await
}
SessionOutput::Prompt(info) =>
|
SessionOutput::Aggregate(outputs) => {
for output in outputs {
self.send(output).await
}
}
SessionOutput::None => {}
}
}
fn set_state(&mut self, state: SessionState) {
self.state = state;
}
fn source(&self) -> String {
format!("{}", self.addr.ip())
}
fn state(&self) -> &SessionState {
&self.state
}
}
async fn send_message(tx: &mut SplitSink<Box<WebSocket>, Message>, message: Message) {
if let Err(error) = tx.send(message).await {
println!("Could not send data over socket: {:?}", error);
}
}
|
{
let prompt = Prompt { player: info };
send_message(
&mut self.tx,
Message::text(serde_json::to_string(&prompt).unwrap()),
)
.await
}
|
conditional_block
|
freq_handler.rs
|
use compression::BlockDecoder;
use common::VInt;
use common::BinarySerializable;
use compression::{CompositeDecoder, VIntDecoder};
use postings::SegmentPostingsOption;
use compression::NUM_DOCS_PER_BLOCK;
/// `FreqHandler` is in charge of decompressing
/// frequencies and/or positions.
pub struct FreqHandler {
freq_decoder: BlockDecoder,
positions: Vec<u32>,
option: SegmentPostingsOption,
positions_offsets: [usize; NUM_DOCS_PER_BLOCK + 1],
}
fn read_positions(data: &[u8]) -> Vec<u32> {
let mut composite_reader = CompositeDecoder::new();
let mut readable: &[u8] = data;
let uncompressed_len = VInt::deserialize(&mut readable).unwrap().0 as usize;
composite_reader.uncompress_unsorted(readable, uncompressed_len);
composite_reader.into()
}
impl FreqHandler {
/// Returns a `FreqHandler` that just decodes `DocId`s.
pub fn new_without_freq() -> FreqHandler {
FreqHandler {
freq_decoder: BlockDecoder::with_val(1u32),
positions: Vec::new(),
option: SegmentPostingsOption::NoFreq,
positions_offsets: [0; NUM_DOCS_PER_BLOCK + 1],
}
}
/// Returns a `FreqHandler` that decodes `DocId`s and term frequencies.
pub fn new_with_freq() -> FreqHandler {
FreqHandler {
freq_decoder: BlockDecoder::new(),
positions: Vec::new(),
option: SegmentPostingsOption::Freq,
positions_offsets: [0; NUM_DOCS_PER_BLOCK + 1],
}
}
/// Returns a `FreqHandler` that decodes `DocId`s, term frequencies, and term positions.
pub fn new_with_freq_and_position(position_data: &[u8]) -> FreqHandler {
let positions = read_positions(position_data);
FreqHandler {
freq_decoder: BlockDecoder::new(),
positions: positions,
option: SegmentPostingsOption::FreqAndPositions,
positions_offsets: [0; NUM_DOCS_PER_BLOCK + 1],
}
}
fn fill_positions_offset(&mut self) {
let mut cur_position: usize = self.positions_offsets[NUM_DOCS_PER_BLOCK];
let mut i: usize = 0;
self.positions_offsets[i] = cur_position;
let mut last_cur_position = cur_position;
for &doc_freq in self.freq_decoder.output_array() {
i += 1;
let mut cumulated_pos = 0u32;
// this next loop decodes delta positions into normal positions.
for j in last_cur_position..(last_cur_position + (doc_freq as usize)) {
cumulated_pos += self.positions[j];
self.positions[j] = cumulated_pos;
}
cur_position += doc_freq as usize;
self.positions_offsets[i] = cur_position;
last_cur_position = cur_position;
}
}
/// Accessor to term frequency
///
/// idx is the offset of the current doc in the block.
/// It takes value between 0 and 128.
pub fn freq(&self, idx: usize) -> u32 {
self.freq_decoder.output(idx)
}
/// Accessor to the positions
///
/// idx is the offset of the current doc in the block.
/// It takes value between 0 and 128.
pub fn positions(&self, idx: usize) -> &[u32] {
let start = self.positions_offsets[idx];
let stop = self.positions_offsets[idx + 1];
&self.positions[start..stop]
}
/// Decompresses a complete frequency block
pub fn read_freq_block<'a>(&mut self, data: &'a [u8]) -> &'a [u8] {
match self.option {
SegmentPostingsOption::NoFreq => data,
SegmentPostingsOption::Freq => self.freq_decoder.uncompress_block_unsorted(data),
SegmentPostingsOption::FreqAndPositions => {
let remaining: &'a [u8] = self.freq_decoder.uncompress_block_unsorted(data);
self.fill_positions_offset();
|
}
/// Decompresses an incomplete frequency block
pub fn read_freq_vint(&mut self, data: &[u8], num_els: usize) {
match self.option {
SegmentPostingsOption::NoFreq => {}
SegmentPostingsOption::Freq => {
self.freq_decoder.uncompress_vint_unsorted(data, num_els);
}
SegmentPostingsOption::FreqAndPositions => {
self.freq_decoder.uncompress_vint_unsorted(data, num_els);
self.fill_positions_offset();
}
}
}
}
|
remaining
}
}
|
random_line_split
|
freq_handler.rs
|
use compression::BlockDecoder;
use common::VInt;
use common::BinarySerializable;
use compression::{CompositeDecoder, VIntDecoder};
use postings::SegmentPostingsOption;
use compression::NUM_DOCS_PER_BLOCK;
/// `FreqHandler` is in charge of decompressing
/// frequencies and/or positions.
pub struct FreqHandler {
freq_decoder: BlockDecoder,
positions: Vec<u32>,
option: SegmentPostingsOption,
positions_offsets: [usize; NUM_DOCS_PER_BLOCK + 1],
}
fn read_positions(data: &[u8]) -> Vec<u32> {
let mut composite_reader = CompositeDecoder::new();
let mut readable: &[u8] = data;
let uncompressed_len = VInt::deserialize(&mut readable).unwrap().0 as usize;
composite_reader.uncompress_unsorted(readable, uncompressed_len);
composite_reader.into()
}
impl FreqHandler {
/// Returns a `FreqHandler` that just decodes `DocId`s.
pub fn new_without_freq() -> FreqHandler {
FreqHandler {
freq_decoder: BlockDecoder::with_val(1u32),
positions: Vec::new(),
option: SegmentPostingsOption::NoFreq,
positions_offsets: [0; NUM_DOCS_PER_BLOCK + 1],
}
}
/// Returns a `FreqHandler` that decodes `DocId`s and term frequencies.
pub fn new_with_freq() -> FreqHandler {
FreqHandler {
freq_decoder: BlockDecoder::new(),
positions: Vec::new(),
option: SegmentPostingsOption::Freq,
positions_offsets: [0; NUM_DOCS_PER_BLOCK + 1],
}
}
/// Returns a `FreqHandler` that decodes `DocId`s, term frequencies, and term positions.
pub fn
|
(position_data: &[u8]) -> FreqHandler {
let positions = read_positions(position_data);
FreqHandler {
freq_decoder: BlockDecoder::new(),
positions: positions,
option: SegmentPostingsOption::FreqAndPositions,
positions_offsets: [0; NUM_DOCS_PER_BLOCK + 1],
}
}
fn fill_positions_offset(&mut self) {
let mut cur_position: usize = self.positions_offsets[NUM_DOCS_PER_BLOCK];
let mut i: usize = 0;
self.positions_offsets[i] = cur_position;
let mut last_cur_position = cur_position;
for &doc_freq in self.freq_decoder.output_array() {
i += 1;
let mut cumulated_pos = 0u32;
// this next loop decodes delta positions into normal positions.
for j in last_cur_position..(last_cur_position + (doc_freq as usize)) {
cumulated_pos += self.positions[j];
self.positions[j] = cumulated_pos;
}
cur_position += doc_freq as usize;
self.positions_offsets[i] = cur_position;
last_cur_position = cur_position;
}
}
/// Accessor to term frequency
///
/// idx is the offset of the current doc in the block.
/// It takes value between 0 and 128.
pub fn freq(&self, idx: usize) -> u32 {
self.freq_decoder.output(idx)
}
/// Accessor to the positions
///
/// idx is the offset of the current doc in the block.
/// It takes value between 0 and 128.
pub fn positions(&self, idx: usize) -> &[u32] {
let start = self.positions_offsets[idx];
let stop = self.positions_offsets[idx + 1];
&self.positions[start..stop]
}
/// Decompresses a complete frequency block
pub fn read_freq_block<'a>(&mut self, data: &'a [u8]) -> &'a [u8] {
match self.option {
SegmentPostingsOption::NoFreq => data,
SegmentPostingsOption::Freq => self.freq_decoder.uncompress_block_unsorted(data),
SegmentPostingsOption::FreqAndPositions => {
let remaining: &'a [u8] = self.freq_decoder.uncompress_block_unsorted(data);
self.fill_positions_offset();
remaining
}
}
}
/// Decompresses an incomplete frequency block
pub fn read_freq_vint(&mut self, data: &[u8], num_els: usize) {
match self.option {
SegmentPostingsOption::NoFreq => {}
SegmentPostingsOption::Freq => {
self.freq_decoder.uncompress_vint_unsorted(data, num_els);
}
SegmentPostingsOption::FreqAndPositions => {
self.freq_decoder.uncompress_vint_unsorted(data, num_els);
self.fill_positions_offset();
}
}
}
}
|
new_with_freq_and_position
|
identifier_name
|
freq_handler.rs
|
use compression::BlockDecoder;
use common::VInt;
use common::BinarySerializable;
use compression::{CompositeDecoder, VIntDecoder};
use postings::SegmentPostingsOption;
use compression::NUM_DOCS_PER_BLOCK;
/// `FreqHandler` is in charge of decompressing
/// frequencies and/or positions.
pub struct FreqHandler {
freq_decoder: BlockDecoder,
positions: Vec<u32>,
option: SegmentPostingsOption,
positions_offsets: [usize; NUM_DOCS_PER_BLOCK + 1],
}
fn read_positions(data: &[u8]) -> Vec<u32> {
let mut composite_reader = CompositeDecoder::new();
let mut readable: &[u8] = data;
let uncompressed_len = VInt::deserialize(&mut readable).unwrap().0 as usize;
composite_reader.uncompress_unsorted(readable, uncompressed_len);
composite_reader.into()
}
impl FreqHandler {
/// Returns a `FreqHandler` that just decodes `DocId`s.
pub fn new_without_freq() -> FreqHandler {
FreqHandler {
freq_decoder: BlockDecoder::with_val(1u32),
positions: Vec::new(),
option: SegmentPostingsOption::NoFreq,
positions_offsets: [0; NUM_DOCS_PER_BLOCK + 1],
}
}
/// Returns a `FreqHandler` that decodes `DocId`s and term frequencies.
pub fn new_with_freq() -> FreqHandler {
FreqHandler {
freq_decoder: BlockDecoder::new(),
positions: Vec::new(),
option: SegmentPostingsOption::Freq,
positions_offsets: [0; NUM_DOCS_PER_BLOCK + 1],
}
}
/// Returns a `FreqHandler` that decodes `DocId`s, term frequencies, and term positions.
pub fn new_with_freq_and_position(position_data: &[u8]) -> FreqHandler {
let positions = read_positions(position_data);
FreqHandler {
freq_decoder: BlockDecoder::new(),
positions: positions,
option: SegmentPostingsOption::FreqAndPositions,
positions_offsets: [0; NUM_DOCS_PER_BLOCK + 1],
}
}
fn fill_positions_offset(&mut self) {
let mut cur_position: usize = self.positions_offsets[NUM_DOCS_PER_BLOCK];
let mut i: usize = 0;
self.positions_offsets[i] = cur_position;
let mut last_cur_position = cur_position;
for &doc_freq in self.freq_decoder.output_array() {
i += 1;
let mut cumulated_pos = 0u32;
// this next loop decodes delta positions into normal positions.
for j in last_cur_position..(last_cur_position + (doc_freq as usize)) {
cumulated_pos += self.positions[j];
self.positions[j] = cumulated_pos;
}
cur_position += doc_freq as usize;
self.positions_offsets[i] = cur_position;
last_cur_position = cur_position;
}
}
/// Accessor to term frequency
///
/// idx is the offset of the current doc in the block.
/// It takes value between 0 and 128.
pub fn freq(&self, idx: usize) -> u32 {
self.freq_decoder.output(idx)
}
/// Accessor to the positions
///
/// idx is the offset of the current doc in the block.
/// It takes value between 0 and 128.
pub fn positions(&self, idx: usize) -> &[u32] {
let start = self.positions_offsets[idx];
let stop = self.positions_offsets[idx + 1];
&self.positions[start..stop]
}
/// Decompresses a complete frequency block
pub fn read_freq_block<'a>(&mut self, data: &'a [u8]) -> &'a [u8] {
match self.option {
SegmentPostingsOption::NoFreq => data,
SegmentPostingsOption::Freq => self.freq_decoder.uncompress_block_unsorted(data),
SegmentPostingsOption::FreqAndPositions => {
let remaining: &'a [u8] = self.freq_decoder.uncompress_block_unsorted(data);
self.fill_positions_offset();
remaining
}
}
}
/// Decompresses an incomplete frequency block
pub fn read_freq_vint(&mut self, data: &[u8], num_els: usize) {
match self.option {
SegmentPostingsOption::NoFreq => {}
SegmentPostingsOption::Freq => {
self.freq_decoder.uncompress_vint_unsorted(data, num_els);
}
SegmentPostingsOption::FreqAndPositions =>
|
}
}
}
|
{
self.freq_decoder.uncompress_vint_unsorted(data, num_els);
self.fill_positions_offset();
}
|
conditional_block
|
freq_handler.rs
|
use compression::BlockDecoder;
use common::VInt;
use common::BinarySerializable;
use compression::{CompositeDecoder, VIntDecoder};
use postings::SegmentPostingsOption;
use compression::NUM_DOCS_PER_BLOCK;
/// `FreqHandler` is in charge of decompressing
/// frequencies and/or positions.
pub struct FreqHandler {
freq_decoder: BlockDecoder,
positions: Vec<u32>,
option: SegmentPostingsOption,
positions_offsets: [usize; NUM_DOCS_PER_BLOCK + 1],
}
fn read_positions(data: &[u8]) -> Vec<u32> {
let mut composite_reader = CompositeDecoder::new();
let mut readable: &[u8] = data;
let uncompressed_len = VInt::deserialize(&mut readable).unwrap().0 as usize;
composite_reader.uncompress_unsorted(readable, uncompressed_len);
composite_reader.into()
}
impl FreqHandler {
/// Returns a `FreqHandler` that just decodes `DocId`s.
pub fn new_without_freq() -> FreqHandler {
FreqHandler {
freq_decoder: BlockDecoder::with_val(1u32),
positions: Vec::new(),
option: SegmentPostingsOption::NoFreq,
positions_offsets: [0; NUM_DOCS_PER_BLOCK + 1],
}
}
/// Returns a `FreqHandler` that decodes `DocId`s and term frequencies.
pub fn new_with_freq() -> FreqHandler {
FreqHandler {
freq_decoder: BlockDecoder::new(),
positions: Vec::new(),
option: SegmentPostingsOption::Freq,
positions_offsets: [0; NUM_DOCS_PER_BLOCK + 1],
}
}
/// Returns a `FreqHandler` that decodes `DocId`s, term frequencies, and term positions.
pub fn new_with_freq_and_position(position_data: &[u8]) -> FreqHandler
|
fn fill_positions_offset(&mut self) {
let mut cur_position: usize = self.positions_offsets[NUM_DOCS_PER_BLOCK];
let mut i: usize = 0;
self.positions_offsets[i] = cur_position;
let mut last_cur_position = cur_position;
for &doc_freq in self.freq_decoder.output_array() {
i += 1;
let mut cumulated_pos = 0u32;
// this next loop decodes delta positions into normal positions.
for j in last_cur_position..(last_cur_position + (doc_freq as usize)) {
cumulated_pos += self.positions[j];
self.positions[j] = cumulated_pos;
}
cur_position += doc_freq as usize;
self.positions_offsets[i] = cur_position;
last_cur_position = cur_position;
}
}
/// Accessor to term frequency
///
/// idx is the offset of the current doc in the block.
/// It takes value between 0 and 128.
pub fn freq(&self, idx: usize) -> u32 {
self.freq_decoder.output(idx)
}
/// Accessor to the positions
///
/// idx is the offset of the current doc in the block.
/// It takes value between 0 and 128.
pub fn positions(&self, idx: usize) -> &[u32] {
let start = self.positions_offsets[idx];
let stop = self.positions_offsets[idx + 1];
&self.positions[start..stop]
}
/// Decompresses a complete frequency block
pub fn read_freq_block<'a>(&mut self, data: &'a [u8]) -> &'a [u8] {
match self.option {
SegmentPostingsOption::NoFreq => data,
SegmentPostingsOption::Freq => self.freq_decoder.uncompress_block_unsorted(data),
SegmentPostingsOption::FreqAndPositions => {
let remaining: &'a [u8] = self.freq_decoder.uncompress_block_unsorted(data);
self.fill_positions_offset();
remaining
}
}
}
/// Decompresses an incomplete frequency block
pub fn read_freq_vint(&mut self, data: &[u8], num_els: usize) {
match self.option {
SegmentPostingsOption::NoFreq => {}
SegmentPostingsOption::Freq => {
self.freq_decoder.uncompress_vint_unsorted(data, num_els);
}
SegmentPostingsOption::FreqAndPositions => {
self.freq_decoder.uncompress_vint_unsorted(data, num_els);
self.fill_positions_offset();
}
}
}
}
|
{
let positions = read_positions(position_data);
FreqHandler {
freq_decoder: BlockDecoder::new(),
positions: positions,
option: SegmentPostingsOption::FreqAndPositions,
positions_offsets: [0; NUM_DOCS_PER_BLOCK + 1],
}
}
|
identifier_body
|
partial.rs
|
// Copyright 2014 Pierre Talbot (IRCAM)
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Partial is similar to Option where `Value` replaces `Some` and `Nothing` replaces `None`.
//!
//! `Fake` means that we got a value to pass for continuation (e.g. in `map` or `and_then`) but without real meaning, so it's an error to unwrap it.
//!
//! Value transformation are only from Value to Fake to Nothing which means that a Fake value will never be a Value again.
//! Use case: When compiling, an error in one function must be reported but should
//! not prevent the compilation of a second function to detect more errors in one run.
//! This intermediate state is represented by `Fake`.
use monad::partial::Partial::*;
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Debug)]
pub enum Partial<T>
{
Value(T),
Fake(T),
Nothing
}
impl<T> Partial<T>
{
pub fn unwrap(self) -> T
|
pub fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Partial<U> {
match self {
Value(x) => Value(f(x)),
Fake(x) => Fake(f(x)),
Nothing => Nothing
}
}
pub fn and_then<U, F: FnOnce(T) -> Partial<U>>(self, f: F) -> Partial<U> {
match self {
Value(x) => f(x),
Fake(x) => match f(x) {
Value(x) => Fake(x),
x => x
},
Nothing => Nothing
}
}
}
#[test]
fn partial() {
assert_eq!(Value(9i32).unwrap(), 9i32);
assert_eq!(Value(9i32).map(|i|i*2), Value(18i32));
assert_eq!(Fake(9i32).map(|i|i*2), Fake(18i32));
assert_eq!(Nothing.map(|i:i32|i), Nothing);
assert_eq!(Value(9i32).and_then(|i| Value(i*2)), Value(18i32));
assert_eq!(Value(9i32).and_then(|i| Fake(i*2)), Fake(18i32));
assert_eq!(Fake(9i32).and_then(|i| Fake(i*2)), Fake(18i32));
// Even if you return a Value, it automatically coerces to Fake.
assert_eq!(Fake(9i32).and_then(|i| Value(i*2)), Fake(18i32));
assert_eq!(Fake(9i32).and_then(|_| -> Partial<i32> { Nothing }), Nothing);
}
|
{
match self {
Value(x) => x,
Fake(_) => panic!("called `Partial::unwrap()` on a `Fake` value"),
Nothing => panic!("called `Partial::unwrap()` on a `Nothing` value")
}
}
|
identifier_body
|
partial.rs
|
// Copyright 2014 Pierre Talbot (IRCAM)
|
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Partial is similar to Option where `Value` replaces `Some` and `Nothing` replaces `None`.
//!
//! `Fake` means that we got a value to pass for continuation (e.g. in `map` or `and_then`) but without real meaning, so it's an error to unwrap it.
//!
//! Value transformation are only from Value to Fake to Nothing which means that a Fake value will never be a Value again.
//! Use case: When compiling, an error in one function must be reported but should
//! not prevent the compilation of a second function to detect more errors in one run.
//! This intermediate state is represented by `Fake`.
use monad::partial::Partial::*;
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Debug)]
pub enum Partial<T>
{
Value(T),
Fake(T),
Nothing
}
impl<T> Partial<T>
{
pub fn unwrap(self) -> T {
match self {
Value(x) => x,
Fake(_) => panic!("called `Partial::unwrap()` on a `Fake` value"),
Nothing => panic!("called `Partial::unwrap()` on a `Nothing` value")
}
}
pub fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Partial<U> {
match self {
Value(x) => Value(f(x)),
Fake(x) => Fake(f(x)),
Nothing => Nothing
}
}
pub fn and_then<U, F: FnOnce(T) -> Partial<U>>(self, f: F) -> Partial<U> {
match self {
Value(x) => f(x),
Fake(x) => match f(x) {
Value(x) => Fake(x),
x => x
},
Nothing => Nothing
}
}
}
#[test]
fn partial() {
assert_eq!(Value(9i32).unwrap(), 9i32);
assert_eq!(Value(9i32).map(|i|i*2), Value(18i32));
assert_eq!(Fake(9i32).map(|i|i*2), Fake(18i32));
assert_eq!(Nothing.map(|i:i32|i), Nothing);
assert_eq!(Value(9i32).and_then(|i| Value(i*2)), Value(18i32));
assert_eq!(Value(9i32).and_then(|i| Fake(i*2)), Fake(18i32));
assert_eq!(Fake(9i32).and_then(|i| Fake(i*2)), Fake(18i32));
// Even if you return a Value, it automatically coerces to Fake.
assert_eq!(Fake(9i32).and_then(|i| Value(i*2)), Fake(18i32));
assert_eq!(Fake(9i32).and_then(|_| -> Partial<i32> { Nothing }), Nothing);
}
|
random_line_split
|
|
partial.rs
|
// Copyright 2014 Pierre Talbot (IRCAM)
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Partial is similar to Option where `Value` replaces `Some` and `Nothing` replaces `None`.
//!
//! `Fake` means that we got a value to pass for continuation (e.g. in `map` or `and_then`) but without real meaning, so it's an error to unwrap it.
//!
//! Value transformation are only from Value to Fake to Nothing which means that a Fake value will never be a Value again.
//! Use case: When compiling, an error in one function must be reported but should
//! not prevent the compilation of a second function to detect more errors in one run.
//! This intermediate state is represented by `Fake`.
use monad::partial::Partial::*;
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Debug)]
pub enum Partial<T>
{
Value(T),
Fake(T),
Nothing
}
impl<T> Partial<T>
{
pub fn unwrap(self) -> T {
match self {
Value(x) => x,
Fake(_) => panic!("called `Partial::unwrap()` on a `Fake` value"),
Nothing => panic!("called `Partial::unwrap()` on a `Nothing` value")
}
}
pub fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Partial<U> {
match self {
Value(x) => Value(f(x)),
Fake(x) => Fake(f(x)),
Nothing => Nothing
}
}
pub fn
|
<U, F: FnOnce(T) -> Partial<U>>(self, f: F) -> Partial<U> {
match self {
Value(x) => f(x),
Fake(x) => match f(x) {
Value(x) => Fake(x),
x => x
},
Nothing => Nothing
}
}
}
#[test]
fn partial() {
assert_eq!(Value(9i32).unwrap(), 9i32);
assert_eq!(Value(9i32).map(|i|i*2), Value(18i32));
assert_eq!(Fake(9i32).map(|i|i*2), Fake(18i32));
assert_eq!(Nothing.map(|i:i32|i), Nothing);
assert_eq!(Value(9i32).and_then(|i| Value(i*2)), Value(18i32));
assert_eq!(Value(9i32).and_then(|i| Fake(i*2)), Fake(18i32));
assert_eq!(Fake(9i32).and_then(|i| Fake(i*2)), Fake(18i32));
// Even if you return a Value, it automatically coerces to Fake.
assert_eq!(Fake(9i32).and_then(|i| Value(i*2)), Fake(18i32));
assert_eq!(Fake(9i32).and_then(|_| -> Partial<i32> { Nothing }), Nothing);
}
|
and_then
|
identifier_name
|
mod.rs
|
use co_slog;
pub fn
|
() {
let log = co_slog::logger();
let server = log.new(o!("host" => "localhost", "port" => "8080"));
let _log = co_slog::set_logger(server);
info!("starting");
info!("listening");
let log = co_slog::logger();
join!(
{
let peer1 = log.clone().new(
o!("peer_addr" => "8.8.8.8", "port" => "18230"),
);
let _log = co_slog::set_logger(peer1);
debug!("connected");
debug!("message received"; "length" => 2);
debug!("response sent"; "length" => 8);
debug!("disconnected");
},
{
let peer2 = log.clone().new(
o!("peer_addr" => "82.9.9.9", "port" => "42381"),
);
let _log = co_slog::set_logger(peer2);
debug!("connected");
debug!("message received"; "length" => 2);
warn!("weak encryption requested"; "algo" => "xor");
debug!("response sent"; "length" => 8);
debug!("disconnected");
}
);
crit!("internal error");
info!("exit");
}
|
simulate_server
|
identifier_name
|
mod.rs
|
use co_slog;
pub fn simulate_server()
|
debug!("disconnected");
},
{
let peer2 = log.clone().new(
o!("peer_addr" => "82.9.9.9", "port" => "42381"),
);
let _log = co_slog::set_logger(peer2);
debug!("connected");
debug!("message received"; "length" => 2);
warn!("weak encryption requested"; "algo" => "xor");
debug!("response sent"; "length" => 8);
debug!("disconnected");
}
);
crit!("internal error");
info!("exit");
}
|
{
let log = co_slog::logger();
let server = log.new(o!("host" => "localhost", "port" => "8080"));
let _log = co_slog::set_logger(server);
info!("starting");
info!("listening");
let log = co_slog::logger();
join!(
{
let peer1 = log.clone().new(
o!("peer_addr" => "8.8.8.8", "port" => "18230"),
);
let _log = co_slog::set_logger(peer1);
debug!("connected");
debug!("message received"; "length" => 2);
debug!("response sent"; "length" => 8);
|
identifier_body
|
mod.rs
|
use co_slog;
pub fn simulate_server() {
let log = co_slog::logger();
let server = log.new(o!("host" => "localhost", "port" => "8080"));
let _log = co_slog::set_logger(server);
info!("starting");
info!("listening");
let log = co_slog::logger();
join!(
{
let peer1 = log.clone().new(
o!("peer_addr" => "8.8.8.8", "port" => "18230"),
);
let _log = co_slog::set_logger(peer1);
debug!("connected");
debug!("message received"; "length" => 2);
debug!("response sent"; "length" => 8);
debug!("disconnected");
},
{
let peer2 = log.clone().new(
o!("peer_addr" => "82.9.9.9", "port" => "42381"),
);
let _log = co_slog::set_logger(peer2);
debug!("connected");
debug!("message received"; "length" => 2);
warn!("weak encryption requested"; "algo" => "xor");
debug!("response sent"; "length" => 8);
debug!("disconnected");
}
|
);
crit!("internal error");
info!("exit");
}
|
random_line_split
|
|
mutability-inherits-through-fixed-length-vec.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
fn test1() {
let mut ints = [0; 32];
ints[0] += 1;
assert_eq!(ints[0], 1);
}
fn
|
() {
let mut ints = [0; 32];
for i in &mut ints { *i += 22; }
for i in &ints { assert_eq!(*i, 22); }
}
pub fn main() {
test1();
test2();
}
|
test2
|
identifier_name
|
mutability-inherits-through-fixed-length-vec.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
fn test1()
|
fn test2() {
let mut ints = [0; 32];
for i in &mut ints { *i += 22; }
for i in &ints { assert_eq!(*i, 22); }
}
pub fn main() {
test1();
test2();
}
|
{
let mut ints = [0; 32];
ints[0] += 1;
assert_eq!(ints[0], 1);
}
|
identifier_body
|
mutability-inherits-through-fixed-length-vec.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
fn test1() {
let mut ints = [0; 32];
ints[0] += 1;
assert_eq!(ints[0], 1);
}
fn test2() {
let mut ints = [0; 32];
for i in &mut ints { *i += 22; }
for i in &ints { assert_eq!(*i, 22); }
}
|
test1();
test2();
}
|
pub fn main() {
|
random_line_split
|
unique-in-vec-copy.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn
|
() {
let mut a = vec!(box 10i);
let b = a.clone();
assert_eq!(**a.get(0), 10);
assert_eq!(**b.get(0), 10);
// This should only modify the value in a, not b
**a.get_mut(0) = 20;
assert_eq!(**a.get(0), 20);
assert_eq!(**b.get(0), 10);
}
|
main
|
identifier_name
|
unique-in-vec-copy.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main()
|
{
let mut a = vec!(box 10i);
let b = a.clone();
assert_eq!(**a.get(0), 10);
assert_eq!(**b.get(0), 10);
// This should only modify the value in a, not b
**a.get_mut(0) = 20;
assert_eq!(**a.get(0), 20);
assert_eq!(**b.get(0), 10);
}
|
identifier_body
|
|
unique-in-vec-copy.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
|
let mut a = vec!(box 10i);
let b = a.clone();
assert_eq!(**a.get(0), 10);
assert_eq!(**b.get(0), 10);
// This should only modify the value in a, not b
**a.get_mut(0) = 20;
assert_eq!(**a.get(0), 20);
assert_eq!(**b.get(0), 10);
}
|
random_line_split
|
|
hyperbolictangent.rs
|
use std::f64;
use activation::Activation;
#[derive(Copy, Clone)]
pub struct HyperbolicTangent;
impl HyperbolicTangent {
pub fn new() -> HyperbolicTangent {
return HyperbolicTangent;
}
}
impl Activation for HyperbolicTangent {
/// Calculates the tanh of input `x`
fn calc(&self, x: Vec<f64>) -> Vec<f64> {
x.iter().map(|n| n.tanh()).collect::<Vec<_>>()
}
/// Calculates the Derivative tanh of input `x`
fn
|
(&self, x: Vec<f64>) -> Vec<f64> {
x.iter()
.map(|n| {
let tanh_factor = n.tanh();
1f64 - (tanh_factor * tanh_factor)
})
.collect::<Vec<_>>()
}
}
#[cfg(test)]
mod tests {
use super::Activation;
use super::HyperbolicTangent;
#[test]
fn tanh_test() {
let activation = HyperbolicTangent::new();
assert_approx_eq!(activation.calc(vec![3f64])[0], 0.995054754f64);
}
#[test]
fn tanh_derivative_test() {
let activation = HyperbolicTangent::new();
assert_approx_eq!(activation.derivative(vec![3f64])[0], 0.0098660372f64);
}
}
|
derivative
|
identifier_name
|
hyperbolictangent.rs
|
use std::f64;
use activation::Activation;
#[derive(Copy, Clone)]
pub struct HyperbolicTangent;
impl HyperbolicTangent {
pub fn new() -> HyperbolicTangent
|
}
impl Activation for HyperbolicTangent {
/// Calculates the tanh of input `x`
fn calc(&self, x: Vec<f64>) -> Vec<f64> {
x.iter().map(|n| n.tanh()).collect::<Vec<_>>()
}
/// Calculates the Derivative tanh of input `x`
fn derivative(&self, x: Vec<f64>) -> Vec<f64> {
x.iter()
.map(|n| {
let tanh_factor = n.tanh();
1f64 - (tanh_factor * tanh_factor)
})
.collect::<Vec<_>>()
}
}
#[cfg(test)]
mod tests {
use super::Activation;
use super::HyperbolicTangent;
#[test]
fn tanh_test() {
let activation = HyperbolicTangent::new();
assert_approx_eq!(activation.calc(vec![3f64])[0], 0.995054754f64);
}
#[test]
fn tanh_derivative_test() {
let activation = HyperbolicTangent::new();
assert_approx_eq!(activation.derivative(vec![3f64])[0], 0.0098660372f64);
}
}
|
{
return HyperbolicTangent;
}
|
identifier_body
|
hyperbolictangent.rs
|
use std::f64;
use activation::Activation;
#[derive(Copy, Clone)]
pub struct HyperbolicTangent;
impl HyperbolicTangent {
pub fn new() -> HyperbolicTangent {
return HyperbolicTangent;
}
}
impl Activation for HyperbolicTangent {
/// Calculates the tanh of input `x`
fn calc(&self, x: Vec<f64>) -> Vec<f64> {
x.iter().map(|n| n.tanh()).collect::<Vec<_>>()
}
/// Calculates the Derivative tanh of input `x`
fn derivative(&self, x: Vec<f64>) -> Vec<f64> {
x.iter()
.map(|n| {
let tanh_factor = n.tanh();
1f64 - (tanh_factor * tanh_factor)
})
.collect::<Vec<_>>()
}
}
#[cfg(test)]
mod tests {
use super::Activation;
use super::HyperbolicTangent;
#[test]
fn tanh_test() {
|
#[test]
fn tanh_derivative_test() {
let activation = HyperbolicTangent::new();
assert_approx_eq!(activation.derivative(vec![3f64])[0], 0.0098660372f64);
}
}
|
let activation = HyperbolicTangent::new();
assert_approx_eq!(activation.calc(vec![3f64])[0], 0.995054754f64);
}
|
random_line_split
|
stemmer.rs
|
use super::{Token, TokenFilter, TokenStream};
use crate::tokenizer::BoxTokenStream;
use rust_stemmers::{self, Algorithm};
use serde::{Deserialize, Serialize};
/// Available stemmer languages.
#[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Copy, Clone)]
#[allow(missing_docs)]
pub enum Language {
Arabic,
Danish,
Dutch,
English,
Finnish,
French,
German,
Greek,
Hungarian,
Italian,
Norwegian,
Portuguese,
Romanian,
Russian,
Spanish,
Swedish,
Tamil,
Turkish,
}
impl Language {
fn algorithm(self) -> Algorithm {
use self::Language::*;
match self {
Arabic => Algorithm::Arabic,
Danish => Algorithm::Danish,
Dutch => Algorithm::Dutch,
English => Algorithm::English,
Finnish => Algorithm::Finnish,
French => Algorithm::French,
German => Algorithm::German,
Greek => Algorithm::Greek,
Hungarian => Algorithm::Hungarian,
Italian => Algorithm::Italian,
Norwegian => Algorithm::Norwegian,
Portuguese => Algorithm::Portuguese,
Romanian => Algorithm::Romanian,
Russian => Algorithm::Russian,
Spanish => Algorithm::Spanish,
Swedish => Algorithm::Swedish,
Tamil => Algorithm::Tamil,
Turkish => Algorithm::Turkish,
}
}
}
/// `Stemmer` token filter. Several languages are supported, see `Language` for the available
/// languages.
/// Tokens are expected to be lowercased beforehand.
#[derive(Clone)]
pub struct Stemmer {
stemmer_algorithm: Algorithm,
}
impl Stemmer {
/// Creates a new Stemmer `TokenFilter` for a given language algorithm.
pub fn new(language: Language) -> Stemmer {
Stemmer {
stemmer_algorithm: language.algorithm(),
}
}
}
impl Default for Stemmer {
/// Creates a new Stemmer `TokenFilter` for English.
fn default() -> Self {
Stemmer::new(Language::English)
}
}
impl TokenFilter for Stemmer {
fn transform<'a>(&self, token_stream: BoxTokenStream<'a>) -> BoxTokenStream<'a> {
let inner_stemmer = rust_stemmers::Stemmer::create(self.stemmer_algorithm);
BoxTokenStream::from(StemmerTokenStream {
tail: token_stream,
stemmer: inner_stemmer,
})
}
}
pub struct StemmerTokenStream<'a> {
tail: BoxTokenStream<'a>,
stemmer: rust_stemmers::Stemmer,
}
impl<'a> TokenStream for StemmerTokenStream<'a> {
fn advance(&mut self) -> bool {
if!self.tail.advance() {
return false;
}
// TODO remove allocation
let stemmed_str: String = self.stemmer.stem(&self.token().text).into_owned();
self.token_mut().text.clear();
self.token_mut().text.push_str(&stemmed_str);
true
}
fn token(&self) -> &Token {
self.tail.token()
}
|
fn token_mut(&mut self) -> &mut Token {
self.tail.token_mut()
}
}
|
random_line_split
|
|
stemmer.rs
|
use super::{Token, TokenFilter, TokenStream};
use crate::tokenizer::BoxTokenStream;
use rust_stemmers::{self, Algorithm};
use serde::{Deserialize, Serialize};
/// Available stemmer languages.
#[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Copy, Clone)]
#[allow(missing_docs)]
pub enum Language {
Arabic,
Danish,
Dutch,
English,
Finnish,
French,
German,
Greek,
Hungarian,
Italian,
Norwegian,
Portuguese,
Romanian,
Russian,
Spanish,
Swedish,
Tamil,
Turkish,
}
impl Language {
fn algorithm(self) -> Algorithm {
use self::Language::*;
match self {
Arabic => Algorithm::Arabic,
Danish => Algorithm::Danish,
Dutch => Algorithm::Dutch,
English => Algorithm::English,
Finnish => Algorithm::Finnish,
French => Algorithm::French,
German => Algorithm::German,
Greek => Algorithm::Greek,
Hungarian => Algorithm::Hungarian,
Italian => Algorithm::Italian,
Norwegian => Algorithm::Norwegian,
Portuguese => Algorithm::Portuguese,
Romanian => Algorithm::Romanian,
Russian => Algorithm::Russian,
Spanish => Algorithm::Spanish,
Swedish => Algorithm::Swedish,
Tamil => Algorithm::Tamil,
Turkish => Algorithm::Turkish,
}
}
}
/// `Stemmer` token filter. Several languages are supported, see `Language` for the available
/// languages.
/// Tokens are expected to be lowercased beforehand.
#[derive(Clone)]
pub struct Stemmer {
stemmer_algorithm: Algorithm,
}
impl Stemmer {
/// Creates a new Stemmer `TokenFilter` for a given language algorithm.
pub fn new(language: Language) -> Stemmer {
Stemmer {
stemmer_algorithm: language.algorithm(),
}
}
}
impl Default for Stemmer {
/// Creates a new Stemmer `TokenFilter` for English.
fn default() -> Self {
Stemmer::new(Language::English)
}
}
impl TokenFilter for Stemmer {
fn transform<'a>(&self, token_stream: BoxTokenStream<'a>) -> BoxTokenStream<'a> {
let inner_stemmer = rust_stemmers::Stemmer::create(self.stemmer_algorithm);
BoxTokenStream::from(StemmerTokenStream {
tail: token_stream,
stemmer: inner_stemmer,
})
}
}
pub struct StemmerTokenStream<'a> {
tail: BoxTokenStream<'a>,
stemmer: rust_stemmers::Stemmer,
}
impl<'a> TokenStream for StemmerTokenStream<'a> {
fn advance(&mut self) -> bool {
if!self.tail.advance() {
return false;
}
// TODO remove allocation
let stemmed_str: String = self.stemmer.stem(&self.token().text).into_owned();
self.token_mut().text.clear();
self.token_mut().text.push_str(&stemmed_str);
true
}
fn token(&self) -> &Token {
self.tail.token()
}
fn
|
(&mut self) -> &mut Token {
self.tail.token_mut()
}
}
|
token_mut
|
identifier_name
|
stemmer.rs
|
use super::{Token, TokenFilter, TokenStream};
use crate::tokenizer::BoxTokenStream;
use rust_stemmers::{self, Algorithm};
use serde::{Deserialize, Serialize};
/// Available stemmer languages.
#[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Copy, Clone)]
#[allow(missing_docs)]
pub enum Language {
Arabic,
Danish,
Dutch,
English,
Finnish,
French,
German,
Greek,
Hungarian,
Italian,
Norwegian,
Portuguese,
Romanian,
Russian,
Spanish,
Swedish,
Tamil,
Turkish,
}
impl Language {
fn algorithm(self) -> Algorithm {
use self::Language::*;
match self {
Arabic => Algorithm::Arabic,
Danish => Algorithm::Danish,
Dutch => Algorithm::Dutch,
English => Algorithm::English,
Finnish => Algorithm::Finnish,
French => Algorithm::French,
German => Algorithm::German,
Greek => Algorithm::Greek,
Hungarian => Algorithm::Hungarian,
Italian => Algorithm::Italian,
Norwegian => Algorithm::Norwegian,
Portuguese => Algorithm::Portuguese,
Romanian => Algorithm::Romanian,
Russian => Algorithm::Russian,
Spanish => Algorithm::Spanish,
Swedish => Algorithm::Swedish,
Tamil => Algorithm::Tamil,
Turkish => Algorithm::Turkish,
}
}
}
/// `Stemmer` token filter. Several languages are supported, see `Language` for the available
/// languages.
/// Tokens are expected to be lowercased beforehand.
#[derive(Clone)]
pub struct Stemmer {
stemmer_algorithm: Algorithm,
}
impl Stemmer {
/// Creates a new Stemmer `TokenFilter` for a given language algorithm.
pub fn new(language: Language) -> Stemmer {
Stemmer {
stemmer_algorithm: language.algorithm(),
}
}
}
impl Default for Stemmer {
/// Creates a new Stemmer `TokenFilter` for English.
fn default() -> Self
|
}
impl TokenFilter for Stemmer {
fn transform<'a>(&self, token_stream: BoxTokenStream<'a>) -> BoxTokenStream<'a> {
let inner_stemmer = rust_stemmers::Stemmer::create(self.stemmer_algorithm);
BoxTokenStream::from(StemmerTokenStream {
tail: token_stream,
stemmer: inner_stemmer,
})
}
}
pub struct StemmerTokenStream<'a> {
tail: BoxTokenStream<'a>,
stemmer: rust_stemmers::Stemmer,
}
impl<'a> TokenStream for StemmerTokenStream<'a> {
fn advance(&mut self) -> bool {
if!self.tail.advance() {
return false;
}
// TODO remove allocation
let stemmed_str: String = self.stemmer.stem(&self.token().text).into_owned();
self.token_mut().text.clear();
self.token_mut().text.push_str(&stemmed_str);
true
}
fn token(&self) -> &Token {
self.tail.token()
}
fn token_mut(&mut self) -> &mut Token {
self.tail.token_mut()
}
}
|
{
Stemmer::new(Language::English)
}
|
identifier_body
|
windows.rs
|
//! A collection of window functions.
//!
//! All functions take a size `n` and return vectors with `n` elements.
//!
//! Window calculation can be costly depending on the size and type of window,
//! so it is recommended to precompute windows whenever possible.
use std::f32::consts::PI;
use types::Sample;
/// Returns a bartlett (triangular) window of size `n`.
pub fn bartlett(n: usize) -> Vec<Sample> {
let mut window = Vec::with_capacity(n);
for i in 0..n {
let z = 2.0*i as f32/(n as f32-1.0);
if z <= 1.0 {
window.push(z);
} else
|
}
window
}
/// Returns a generalized cosine window of size `n`, with the provided
/// coefficients.
///
/// Hanning, hamming, and blackmann windows are all generalized cosine windows.
pub fn generalized_cosine_window(alphas: &[Sample], n: usize) -> Vec<Sample> {
let mut window = Vec::with_capacity(n);
let f0 = 2.0*PI / ((n-1) as f32);
for i in 0..n {
let mut wi = 0.0;
for (k, ak) in alphas.iter().enumerate() {
wi += ak * (f0*k as f32*i as f32).cos();
}
window.push(wi);
}
window
}
/// Returns a hanning window of size `n`.
pub fn hanning(n: usize) -> Vec<Sample> {
generalized_cosine_window(&[0.5, -0.5], n)
}
/// Returns a hamming window of size `n`.
pub fn hamming(n: usize) -> Vec<Sample> {
generalized_cosine_window(&[0.54, -0.46], n)
}
/// Returns a blackman window of size `n`.
pub fn blackman(n: usize) -> Vec<Sample> {
generalized_cosine_window(&[0.42, -0.5, 0.08], n)
}
/// Numerically estimates a zeroth order modified bessel function.
///
/// The mathematical function is an infinite summation, but the terms quickly go
/// to zero, so we instead use just the first k terms.
fn i0(x: Sample, k: usize) -> Sample {
let mut ifact = 1.0;
let mut y = 0.0;
for i in 0..k {
// Compute i factorial iteratively
if i > 1 { ifact *= i as f32; }
y += (x/2.0).powf(2.0*i as f32) / (ifact*ifact);
}
y
}
/// Returns a kaiser window of size `n`, with the provided beta.
pub fn kaiser(beta: Sample, n: usize) -> Vec<Sample> {
let mut window = Vec::with_capacity(n);
let i0_beta = i0(beta, 20);
for i in 0..n {
let tmp = 2.0*i as f32/(n-1) as f32 - 1.0;
let x = beta * (1.0 - tmp*tmp).sqrt();
window.push(i0(x, 20)/i0_beta);
}
window
}
#[cfg(test)]
mod tests {
use std::f32::consts::PI;
use testing::flt_eq;
use types::Sample;
fn check_window(actual: &[Sample], expected: &[Sample]) {
for (a, e) in actual.iter().zip(expected) {
assert!(flt_eq(*a, *e));
}
}
#[test]
fn test_bartlett() {
use super::bartlett;
check_window(&bartlett(8),
&[0.0, 0.28571429, 0.57142857, 0.85714286, 0.85714286,
0.57142857, 0.28571429, 0.0])
}
#[test]
fn test_hanning() {
use super::hanning;
check_window(&hanning(8),
&[0.0, 0.1882551, 0.61126047, 0.95048443, 0.95048443,
0.61126047, 0.1882551, 0.0]);
}
#[test]
fn test_hamming() {
use super::hamming;
check_window(&hamming(8),
&[0.08, 0.25319469, 0.64235963, 0.95444568, 0.95444568,
0.64235963, 0.25319469, 0.08]);
}
#[test]
fn test_blackman() {
use super::blackman;
check_window(&blackman(8),
&[-1.38777878e-17, 9.04534244e-02, 4.59182958e-01,
9.20363618e-01, 9.20363618e-01, 4.59182958e-01,
9.04534244e-02, -1.38777878e-17]);
}
#[test]
fn test_kaiser() {
use super::kaiser;
check_window(&kaiser(2.0*PI, 8),
&[0.01147993, 0.18336612, 0.57527808, 0.94267182,
0.94267182, 0.57527808, 0.18336612, 0.01147993]);
}
}
|
{
window.push(2.0-z);
}
|
conditional_block
|
windows.rs
|
//! A collection of window functions.
//!
//! All functions take a size `n` and return vectors with `n` elements.
//!
//! Window calculation can be costly depending on the size and type of window,
//! so it is recommended to precompute windows whenever possible.
use std::f32::consts::PI;
use types::Sample;
/// Returns a bartlett (triangular) window of size `n`.
pub fn bartlett(n: usize) -> Vec<Sample> {
let mut window = Vec::with_capacity(n);
for i in 0..n {
let z = 2.0*i as f32/(n as f32-1.0);
if z <= 1.0 {
window.push(z);
} else {
window.push(2.0-z);
}
}
window
}
/// Returns a generalized cosine window of size `n`, with the provided
/// coefficients.
///
/// Hanning, hamming, and blackmann windows are all generalized cosine windows.
pub fn generalized_cosine_window(alphas: &[Sample], n: usize) -> Vec<Sample> {
let mut window = Vec::with_capacity(n);
let f0 = 2.0*PI / ((n-1) as f32);
for i in 0..n {
let mut wi = 0.0;
for (k, ak) in alphas.iter().enumerate() {
wi += ak * (f0*k as f32*i as f32).cos();
}
window.push(wi);
}
window
}
/// Returns a hanning window of size `n`.
pub fn hanning(n: usize) -> Vec<Sample> {
generalized_cosine_window(&[0.5, -0.5], n)
}
/// Returns a hamming window of size `n`.
pub fn hamming(n: usize) -> Vec<Sample> {
generalized_cosine_window(&[0.54, -0.46], n)
}
/// Returns a blackman window of size `n`.
pub fn blackman(n: usize) -> Vec<Sample> {
generalized_cosine_window(&[0.42, -0.5, 0.08], n)
}
/// Numerically estimates a zeroth order modified bessel function.
///
/// The mathematical function is an infinite summation, but the terms quickly go
/// to zero, so we instead use just the first k terms.
fn i0(x: Sample, k: usize) -> Sample {
let mut ifact = 1.0;
let mut y = 0.0;
for i in 0..k {
// Compute i factorial iteratively
if i > 1 { ifact *= i as f32; }
y += (x/2.0).powf(2.0*i as f32) / (ifact*ifact);
}
y
}
/// Returns a kaiser window of size `n`, with the provided beta.
pub fn kaiser(beta: Sample, n: usize) -> Vec<Sample> {
let mut window = Vec::with_capacity(n);
let i0_beta = i0(beta, 20);
for i in 0..n {
let tmp = 2.0*i as f32/(n-1) as f32 - 1.0;
let x = beta * (1.0 - tmp*tmp).sqrt();
window.push(i0(x, 20)/i0_beta);
}
window
}
#[cfg(test)]
mod tests {
use std::f32::consts::PI;
use testing::flt_eq;
use types::Sample;
fn check_window(actual: &[Sample], expected: &[Sample]) {
for (a, e) in actual.iter().zip(expected) {
assert!(flt_eq(*a, *e));
}
}
#[test]
fn test_bartlett()
|
#[test]
fn test_hanning() {
use super::hanning;
check_window(&hanning(8),
&[0.0, 0.1882551, 0.61126047, 0.95048443, 0.95048443,
0.61126047, 0.1882551, 0.0]);
}
#[test]
fn test_hamming() {
use super::hamming;
check_window(&hamming(8),
&[0.08, 0.25319469, 0.64235963, 0.95444568, 0.95444568,
0.64235963, 0.25319469, 0.08]);
}
#[test]
fn test_blackman() {
use super::blackman;
check_window(&blackman(8),
&[-1.38777878e-17, 9.04534244e-02, 4.59182958e-01,
9.20363618e-01, 9.20363618e-01, 4.59182958e-01,
9.04534244e-02, -1.38777878e-17]);
}
#[test]
fn test_kaiser() {
use super::kaiser;
check_window(&kaiser(2.0*PI, 8),
&[0.01147993, 0.18336612, 0.57527808, 0.94267182,
0.94267182, 0.57527808, 0.18336612, 0.01147993]);
}
}
|
{
use super::bartlett;
check_window(&bartlett(8),
&[0.0, 0.28571429, 0.57142857, 0.85714286, 0.85714286,
0.57142857, 0.28571429, 0.0])
}
|
identifier_body
|
windows.rs
|
//! A collection of window functions.
//!
//! All functions take a size `n` and return vectors with `n` elements.
//!
//! Window calculation can be costly depending on the size and type of window,
//! so it is recommended to precompute windows whenever possible.
use std::f32::consts::PI;
use types::Sample;
/// Returns a bartlett (triangular) window of size `n`.
pub fn bartlett(n: usize) -> Vec<Sample> {
let mut window = Vec::with_capacity(n);
for i in 0..n {
let z = 2.0*i as f32/(n as f32-1.0);
if z <= 1.0 {
window.push(z);
} else {
window.push(2.0-z);
}
}
window
}
/// Returns a generalized cosine window of size `n`, with the provided
/// coefficients.
///
/// Hanning, hamming, and blackmann windows are all generalized cosine windows.
pub fn generalized_cosine_window(alphas: &[Sample], n: usize) -> Vec<Sample> {
let mut window = Vec::with_capacity(n);
let f0 = 2.0*PI / ((n-1) as f32);
for i in 0..n {
let mut wi = 0.0;
for (k, ak) in alphas.iter().enumerate() {
wi += ak * (f0*k as f32*i as f32).cos();
}
window.push(wi);
}
window
}
/// Returns a hanning window of size `n`.
pub fn hanning(n: usize) -> Vec<Sample> {
generalized_cosine_window(&[0.5, -0.5], n)
}
/// Returns a hamming window of size `n`.
pub fn hamming(n: usize) -> Vec<Sample> {
generalized_cosine_window(&[0.54, -0.46], n)
}
/// Returns a blackman window of size `n`.
pub fn blackman(n: usize) -> Vec<Sample> {
generalized_cosine_window(&[0.42, -0.5, 0.08], n)
}
/// Numerically estimates a zeroth order modified bessel function.
///
/// The mathematical function is an infinite summation, but the terms quickly go
/// to zero, so we instead use just the first k terms.
fn i0(x: Sample, k: usize) -> Sample {
let mut ifact = 1.0;
let mut y = 0.0;
for i in 0..k {
// Compute i factorial iteratively
if i > 1 { ifact *= i as f32; }
y += (x/2.0).powf(2.0*i as f32) / (ifact*ifact);
}
y
}
/// Returns a kaiser window of size `n`, with the provided beta.
pub fn kaiser(beta: Sample, n: usize) -> Vec<Sample> {
let mut window = Vec::with_capacity(n);
let i0_beta = i0(beta, 20);
for i in 0..n {
let tmp = 2.0*i as f32/(n-1) as f32 - 1.0;
let x = beta * (1.0 - tmp*tmp).sqrt();
window.push(i0(x, 20)/i0_beta);
}
window
}
#[cfg(test)]
mod tests {
use std::f32::consts::PI;
use testing::flt_eq;
use types::Sample;
fn
|
(actual: &[Sample], expected: &[Sample]) {
for (a, e) in actual.iter().zip(expected) {
assert!(flt_eq(*a, *e));
}
}
#[test]
fn test_bartlett() {
use super::bartlett;
check_window(&bartlett(8),
&[0.0, 0.28571429, 0.57142857, 0.85714286, 0.85714286,
0.57142857, 0.28571429, 0.0])
}
#[test]
fn test_hanning() {
use super::hanning;
check_window(&hanning(8),
&[0.0, 0.1882551, 0.61126047, 0.95048443, 0.95048443,
0.61126047, 0.1882551, 0.0]);
}
#[test]
fn test_hamming() {
use super::hamming;
check_window(&hamming(8),
&[0.08, 0.25319469, 0.64235963, 0.95444568, 0.95444568,
0.64235963, 0.25319469, 0.08]);
}
#[test]
fn test_blackman() {
use super::blackman;
check_window(&blackman(8),
&[-1.38777878e-17, 9.04534244e-02, 4.59182958e-01,
9.20363618e-01, 9.20363618e-01, 4.59182958e-01,
9.04534244e-02, -1.38777878e-17]);
}
#[test]
fn test_kaiser() {
use super::kaiser;
check_window(&kaiser(2.0*PI, 8),
&[0.01147993, 0.18336612, 0.57527808, 0.94267182,
0.94267182, 0.57527808, 0.18336612, 0.01147993]);
}
}
|
check_window
|
identifier_name
|
windows.rs
|
//! A collection of window functions.
//!
//! All functions take a size `n` and return vectors with `n` elements.
//!
//! Window calculation can be costly depending on the size and type of window,
//! so it is recommended to precompute windows whenever possible.
use std::f32::consts::PI;
use types::Sample;
/// Returns a bartlett (triangular) window of size `n`.
pub fn bartlett(n: usize) -> Vec<Sample> {
let mut window = Vec::with_capacity(n);
for i in 0..n {
let z = 2.0*i as f32/(n as f32-1.0);
if z <= 1.0 {
window.push(z);
} else {
window.push(2.0-z);
}
}
window
}
/// Returns a generalized cosine window of size `n`, with the provided
|
let mut window = Vec::with_capacity(n);
let f0 = 2.0*PI / ((n-1) as f32);
for i in 0..n {
let mut wi = 0.0;
for (k, ak) in alphas.iter().enumerate() {
wi += ak * (f0*k as f32*i as f32).cos();
}
window.push(wi);
}
window
}
/// Returns a hanning window of size `n`.
pub fn hanning(n: usize) -> Vec<Sample> {
generalized_cosine_window(&[0.5, -0.5], n)
}
/// Returns a hamming window of size `n`.
pub fn hamming(n: usize) -> Vec<Sample> {
generalized_cosine_window(&[0.54, -0.46], n)
}
/// Returns a blackman window of size `n`.
pub fn blackman(n: usize) -> Vec<Sample> {
generalized_cosine_window(&[0.42, -0.5, 0.08], n)
}
/// Numerically estimates a zeroth order modified bessel function.
///
/// The mathematical function is an infinite summation, but the terms quickly go
/// to zero, so we instead use just the first k terms.
fn i0(x: Sample, k: usize) -> Sample {
let mut ifact = 1.0;
let mut y = 0.0;
for i in 0..k {
// Compute i factorial iteratively
if i > 1 { ifact *= i as f32; }
y += (x/2.0).powf(2.0*i as f32) / (ifact*ifact);
}
y
}
/// Returns a kaiser window of size `n`, with the provided beta.
pub fn kaiser(beta: Sample, n: usize) -> Vec<Sample> {
let mut window = Vec::with_capacity(n);
let i0_beta = i0(beta, 20);
for i in 0..n {
let tmp = 2.0*i as f32/(n-1) as f32 - 1.0;
let x = beta * (1.0 - tmp*tmp).sqrt();
window.push(i0(x, 20)/i0_beta);
}
window
}
#[cfg(test)]
mod tests {
use std::f32::consts::PI;
use testing::flt_eq;
use types::Sample;
fn check_window(actual: &[Sample], expected: &[Sample]) {
for (a, e) in actual.iter().zip(expected) {
assert!(flt_eq(*a, *e));
}
}
#[test]
fn test_bartlett() {
use super::bartlett;
check_window(&bartlett(8),
&[0.0, 0.28571429, 0.57142857, 0.85714286, 0.85714286,
0.57142857, 0.28571429, 0.0])
}
#[test]
fn test_hanning() {
use super::hanning;
check_window(&hanning(8),
&[0.0, 0.1882551, 0.61126047, 0.95048443, 0.95048443,
0.61126047, 0.1882551, 0.0]);
}
#[test]
fn test_hamming() {
use super::hamming;
check_window(&hamming(8),
&[0.08, 0.25319469, 0.64235963, 0.95444568, 0.95444568,
0.64235963, 0.25319469, 0.08]);
}
#[test]
fn test_blackman() {
use super::blackman;
check_window(&blackman(8),
&[-1.38777878e-17, 9.04534244e-02, 4.59182958e-01,
9.20363618e-01, 9.20363618e-01, 4.59182958e-01,
9.04534244e-02, -1.38777878e-17]);
}
#[test]
fn test_kaiser() {
use super::kaiser;
check_window(&kaiser(2.0*PI, 8),
&[0.01147993, 0.18336612, 0.57527808, 0.94267182,
0.94267182, 0.57527808, 0.18336612, 0.01147993]);
}
}
|
/// coefficients.
///
/// Hanning, hamming, and blackmann windows are all generalized cosine windows.
pub fn generalized_cosine_window(alphas: &[Sample], n: usize) -> Vec<Sample> {
|
random_line_split
|
coverage.rs
|
// Copyright 2020, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <https://opensource.org/licenses/MIT>
use glib::translate::*;
use pango_sys;
use std::fmt;
use std::mem;
use std::ptr;
use CoverageLevel;
#[cfg(any(feature = "v1_44", feature = "dox"))]
glib_wrapper! {
pub struct Coverage(Object<pango_sys::PangoCoverage, CoverageClass>);
match fn {
get_type => || pango_sys::pango_coverage_get_type(),
}
}
// There was no get_type() function before 1.44
#[cfg(not(any(feature = "v1_44", feature = "dox")))]
glib_wrapper! {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Coverage(Shared<pango_sys::PangoCoverage>);
match fn {
ref => |ptr| pango_sys::pango_coverage_ref(ptr),
unref => |ptr| pango_sys::pango_coverage_unref(ptr),
}
}
impl Coverage {
pub fn new() -> Coverage {
unsafe { from_glib_full(pango_sys::pango_coverage_new()) }
}
pub fn copy(&self) -> Option<Coverage> {
unsafe { from_glib_full(pango_sys::pango_coverage_copy(self.to_glib_none().0)) }
}
pub fn get(&self, index_: i32) -> CoverageLevel {
unsafe { from_glib(pango_sys::pango_coverage_get(self.to_glib_none().0, index_)) }
}
#[cfg_attr(feature = "v1_44", deprecated)]
pub fn max(&self, other: &Coverage)
|
pub fn set(&self, index_: i32, level: CoverageLevel) {
unsafe {
pango_sys::pango_coverage_set(self.to_glib_none().0, index_, level.to_glib());
}
}
#[cfg_attr(feature = "v1_44", deprecated)]
pub fn to_bytes(&self) -> Vec<u8> {
unsafe {
let mut bytes = ptr::null_mut();
let mut n_bytes = mem::MaybeUninit::uninit();
pango_sys::pango_coverage_to_bytes(
self.to_glib_none().0,
&mut bytes,
n_bytes.as_mut_ptr(),
);
FromGlibContainer::from_glib_full_num(bytes, n_bytes.assume_init() as usize)
}
}
#[cfg_attr(feature = "v1_44", deprecated)]
pub fn from_bytes(bytes: &[u8]) -> Option<Coverage> {
let n_bytes = bytes.len() as i32;
unsafe {
from_glib_full(pango_sys::pango_coverage_from_bytes(
bytes.to_glib_none().0,
n_bytes,
))
}
}
}
impl Default for Coverage {
fn default() -> Self {
Self::new()
}
}
impl fmt::Display for Coverage {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Coverage")
}
}
|
{
unsafe {
pango_sys::pango_coverage_max(self.to_glib_none().0, other.to_glib_none().0);
}
}
|
identifier_body
|
coverage.rs
|
// Copyright 2020, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <https://opensource.org/licenses/MIT>
use glib::translate::*;
use pango_sys;
use std::fmt;
use std::mem;
use std::ptr;
use CoverageLevel;
#[cfg(any(feature = "v1_44", feature = "dox"))]
glib_wrapper! {
pub struct Coverage(Object<pango_sys::PangoCoverage, CoverageClass>);
match fn {
get_type => || pango_sys::pango_coverage_get_type(),
}
}
// There was no get_type() function before 1.44
#[cfg(not(any(feature = "v1_44", feature = "dox")))]
glib_wrapper! {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Coverage(Shared<pango_sys::PangoCoverage>);
match fn {
ref => |ptr| pango_sys::pango_coverage_ref(ptr),
unref => |ptr| pango_sys::pango_coverage_unref(ptr),
}
}
impl Coverage {
pub fn new() -> Coverage {
unsafe { from_glib_full(pango_sys::pango_coverage_new()) }
}
pub fn copy(&self) -> Option<Coverage> {
unsafe { from_glib_full(pango_sys::pango_coverage_copy(self.to_glib_none().0)) }
}
pub fn get(&self, index_: i32) -> CoverageLevel {
unsafe { from_glib(pango_sys::pango_coverage_get(self.to_glib_none().0, index_)) }
}
#[cfg_attr(feature = "v1_44", deprecated)]
pub fn
|
(&self, other: &Coverage) {
unsafe {
pango_sys::pango_coverage_max(self.to_glib_none().0, other.to_glib_none().0);
}
}
pub fn set(&self, index_: i32, level: CoverageLevel) {
unsafe {
pango_sys::pango_coverage_set(self.to_glib_none().0, index_, level.to_glib());
}
}
#[cfg_attr(feature = "v1_44", deprecated)]
pub fn to_bytes(&self) -> Vec<u8> {
unsafe {
let mut bytes = ptr::null_mut();
let mut n_bytes = mem::MaybeUninit::uninit();
pango_sys::pango_coverage_to_bytes(
self.to_glib_none().0,
&mut bytes,
n_bytes.as_mut_ptr(),
);
FromGlibContainer::from_glib_full_num(bytes, n_bytes.assume_init() as usize)
}
}
#[cfg_attr(feature = "v1_44", deprecated)]
pub fn from_bytes(bytes: &[u8]) -> Option<Coverage> {
let n_bytes = bytes.len() as i32;
unsafe {
from_glib_full(pango_sys::pango_coverage_from_bytes(
bytes.to_glib_none().0,
n_bytes,
))
}
}
}
impl Default for Coverage {
fn default() -> Self {
Self::new()
}
}
impl fmt::Display for Coverage {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Coverage")
}
}
|
max
|
identifier_name
|
coverage.rs
|
// Copyright 2020, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <https://opensource.org/licenses/MIT>
use glib::translate::*;
use pango_sys;
use std::fmt;
use std::mem;
use std::ptr;
use CoverageLevel;
#[cfg(any(feature = "v1_44", feature = "dox"))]
glib_wrapper! {
pub struct Coverage(Object<pango_sys::PangoCoverage, CoverageClass>);
match fn {
get_type => || pango_sys::pango_coverage_get_type(),
}
}
// There was no get_type() function before 1.44
#[cfg(not(any(feature = "v1_44", feature = "dox")))]
glib_wrapper! {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Coverage(Shared<pango_sys::PangoCoverage>);
match fn {
ref => |ptr| pango_sys::pango_coverage_ref(ptr),
unref => |ptr| pango_sys::pango_coverage_unref(ptr),
}
}
impl Coverage {
pub fn new() -> Coverage {
unsafe { from_glib_full(pango_sys::pango_coverage_new()) }
}
pub fn copy(&self) -> Option<Coverage> {
unsafe { from_glib_full(pango_sys::pango_coverage_copy(self.to_glib_none().0)) }
}
pub fn get(&self, index_: i32) -> CoverageLevel {
unsafe { from_glib(pango_sys::pango_coverage_get(self.to_glib_none().0, index_)) }
}
#[cfg_attr(feature = "v1_44", deprecated)]
pub fn max(&self, other: &Coverage) {
unsafe {
pango_sys::pango_coverage_max(self.to_glib_none().0, other.to_glib_none().0);
}
}
pub fn set(&self, index_: i32, level: CoverageLevel) {
unsafe {
pango_sys::pango_coverage_set(self.to_glib_none().0, index_, level.to_glib());
}
}
#[cfg_attr(feature = "v1_44", deprecated)]
pub fn to_bytes(&self) -> Vec<u8> {
unsafe {
|
let mut n_bytes = mem::MaybeUninit::uninit();
pango_sys::pango_coverage_to_bytes(
self.to_glib_none().0,
&mut bytes,
n_bytes.as_mut_ptr(),
);
FromGlibContainer::from_glib_full_num(bytes, n_bytes.assume_init() as usize)
}
}
#[cfg_attr(feature = "v1_44", deprecated)]
pub fn from_bytes(bytes: &[u8]) -> Option<Coverage> {
let n_bytes = bytes.len() as i32;
unsafe {
from_glib_full(pango_sys::pango_coverage_from_bytes(
bytes.to_glib_none().0,
n_bytes,
))
}
}
}
impl Default for Coverage {
fn default() -> Self {
Self::new()
}
}
impl fmt::Display for Coverage {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Coverage")
}
}
|
let mut bytes = ptr::null_mut();
|
random_line_split
|
integration.rs
|
// Copyright 2017 The xi-editor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[macro_use]
extern crate serde_json;
extern crate xi_rpc;
use std::io;
use std::time::Duration;
use serde_json::Value;
use xi_rpc::test_utils::{make_reader, test_channel};
use xi_rpc::{Handler, ReadError, RemoteError, RpcCall, RpcCtx, RpcLoop};
/// Handler that responds to requests with whatever params they sent.
pub struct EchoHandler;
#[allow(unused)]
impl Handler for EchoHandler {
type Notification = RpcCall;
type Request = RpcCall;
fn handle_notification(&mut self, ctx: &RpcCtx, rpc: Self::Notification) {}
fn handle_request(&mut self, ctx: &RpcCtx, rpc: Self::Request) -> Result<Value, RemoteError> {
Ok(rpc.params)
}
}
#[test]
fn test_recv_notif() {
// we should not reply to a well formed notification
let mut handler = EchoHandler;
let (tx, mut rx) = test_channel();
let mut rpc_looper = RpcLoop::new(tx);
let r = make_reader(r#"{"method": "hullo", "params": {"words": "plz"}}"#);
assert!(rpc_looper.mainloop(|| r, &mut handler).is_ok());
let resp = rx.next_timeout(Duration::from_millis(100));
assert!(resp.is_none());
}
#[test]
fn test_recv_resp() {
// we should reply to a well formed request
let mut handler = EchoHandler;
let (tx, mut rx) = test_channel();
let mut rpc_looper = RpcLoop::new(tx);
let r = make_reader(r#"{"id": 1, "method": "hullo", "params": {"words": "plz"}}"#);
assert!(rpc_looper.mainloop(|| r, &mut handler).is_ok());
let resp = rx.expect_response().unwrap();
assert_eq!(resp["words"], json!("plz"));
// do it again
let r = make_reader(r#"{"id": 0, "method": "hullo", "params": {"words": "yay"}}"#);
assert!(rpc_looper.mainloop(|| r, &mut handler).is_ok());
let resp = rx.expect_response().unwrap();
assert_eq!(resp["words"], json!("yay"));
}
#[test]
fn test_recv_error() {
// a malformed request containing an ID should receive an error
let mut handler = EchoHandler;
let (tx, mut rx) = test_channel();
let mut rpc_looper = RpcLoop::new(tx);
let r =
make_reader(r#"{"id": 0, "method": "hullo","args": {"args": "should", "be": "params"}}"#);
assert!(rpc_looper.mainloop(|| r, &mut handler).is_ok());
let resp = rx.expect_response();
assert!(resp.is_err(), "{:?}", resp);
}
#[test]
fn test_bad_json_err() {
// malformed json should cause the runloop to return an error.
let mut handler = EchoHandler;
let mut rpc_looper = RpcLoop::new(io::sink());
|
let exit = rpc_looper.mainloop(|| r, &mut handler);
match exit {
Err(ReadError::Json(_)) => (),
Err(err) => panic!("Incorrect error: {:?}", err),
Ok(()) => panic!("Expected an error"),
}
}
|
let r = make_reader(r#"this is not valid json"#);
|
random_line_split
|
integration.rs
|
// Copyright 2017 The xi-editor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[macro_use]
extern crate serde_json;
extern crate xi_rpc;
use std::io;
use std::time::Duration;
use serde_json::Value;
use xi_rpc::test_utils::{make_reader, test_channel};
use xi_rpc::{Handler, ReadError, RemoteError, RpcCall, RpcCtx, RpcLoop};
/// Handler that responds to requests with whatever params they sent.
pub struct EchoHandler;
#[allow(unused)]
impl Handler for EchoHandler {
type Notification = RpcCall;
type Request = RpcCall;
fn handle_notification(&mut self, ctx: &RpcCtx, rpc: Self::Notification) {}
fn handle_request(&mut self, ctx: &RpcCtx, rpc: Self::Request) -> Result<Value, RemoteError> {
Ok(rpc.params)
}
}
#[test]
fn test_recv_notif()
|
#[test]
fn test_recv_resp() {
// we should reply to a well formed request
let mut handler = EchoHandler;
let (tx, mut rx) = test_channel();
let mut rpc_looper = RpcLoop::new(tx);
let r = make_reader(r#"{"id": 1, "method": "hullo", "params": {"words": "plz"}}"#);
assert!(rpc_looper.mainloop(|| r, &mut handler).is_ok());
let resp = rx.expect_response().unwrap();
assert_eq!(resp["words"], json!("plz"));
// do it again
let r = make_reader(r#"{"id": 0, "method": "hullo", "params": {"words": "yay"}}"#);
assert!(rpc_looper.mainloop(|| r, &mut handler).is_ok());
let resp = rx.expect_response().unwrap();
assert_eq!(resp["words"], json!("yay"));
}
#[test]
fn test_recv_error() {
// a malformed request containing an ID should receive an error
let mut handler = EchoHandler;
let (tx, mut rx) = test_channel();
let mut rpc_looper = RpcLoop::new(tx);
let r =
make_reader(r#"{"id": 0, "method": "hullo","args": {"args": "should", "be": "params"}}"#);
assert!(rpc_looper.mainloop(|| r, &mut handler).is_ok());
let resp = rx.expect_response();
assert!(resp.is_err(), "{:?}", resp);
}
#[test]
fn test_bad_json_err() {
// malformed json should cause the runloop to return an error.
let mut handler = EchoHandler;
let mut rpc_looper = RpcLoop::new(io::sink());
let r = make_reader(r#"this is not valid json"#);
let exit = rpc_looper.mainloop(|| r, &mut handler);
match exit {
Err(ReadError::Json(_)) => (),
Err(err) => panic!("Incorrect error: {:?}", err),
Ok(()) => panic!("Expected an error"),
}
}
|
{
// we should not reply to a well formed notification
let mut handler = EchoHandler;
let (tx, mut rx) = test_channel();
let mut rpc_looper = RpcLoop::new(tx);
let r = make_reader(r#"{"method": "hullo", "params": {"words": "plz"}}"#);
assert!(rpc_looper.mainloop(|| r, &mut handler).is_ok());
let resp = rx.next_timeout(Duration::from_millis(100));
assert!(resp.is_none());
}
|
identifier_body
|
integration.rs
|
// Copyright 2017 The xi-editor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[macro_use]
extern crate serde_json;
extern crate xi_rpc;
use std::io;
use std::time::Duration;
use serde_json::Value;
use xi_rpc::test_utils::{make_reader, test_channel};
use xi_rpc::{Handler, ReadError, RemoteError, RpcCall, RpcCtx, RpcLoop};
/// Handler that responds to requests with whatever params they sent.
pub struct
|
;
#[allow(unused)]
impl Handler for EchoHandler {
type Notification = RpcCall;
type Request = RpcCall;
fn handle_notification(&mut self, ctx: &RpcCtx, rpc: Self::Notification) {}
fn handle_request(&mut self, ctx: &RpcCtx, rpc: Self::Request) -> Result<Value, RemoteError> {
Ok(rpc.params)
}
}
#[test]
fn test_recv_notif() {
// we should not reply to a well formed notification
let mut handler = EchoHandler;
let (tx, mut rx) = test_channel();
let mut rpc_looper = RpcLoop::new(tx);
let r = make_reader(r#"{"method": "hullo", "params": {"words": "plz"}}"#);
assert!(rpc_looper.mainloop(|| r, &mut handler).is_ok());
let resp = rx.next_timeout(Duration::from_millis(100));
assert!(resp.is_none());
}
#[test]
fn test_recv_resp() {
// we should reply to a well formed request
let mut handler = EchoHandler;
let (tx, mut rx) = test_channel();
let mut rpc_looper = RpcLoop::new(tx);
let r = make_reader(r#"{"id": 1, "method": "hullo", "params": {"words": "plz"}}"#);
assert!(rpc_looper.mainloop(|| r, &mut handler).is_ok());
let resp = rx.expect_response().unwrap();
assert_eq!(resp["words"], json!("plz"));
// do it again
let r = make_reader(r#"{"id": 0, "method": "hullo", "params": {"words": "yay"}}"#);
assert!(rpc_looper.mainloop(|| r, &mut handler).is_ok());
let resp = rx.expect_response().unwrap();
assert_eq!(resp["words"], json!("yay"));
}
#[test]
fn test_recv_error() {
// a malformed request containing an ID should receive an error
let mut handler = EchoHandler;
let (tx, mut rx) = test_channel();
let mut rpc_looper = RpcLoop::new(tx);
let r =
make_reader(r#"{"id": 0, "method": "hullo","args": {"args": "should", "be": "params"}}"#);
assert!(rpc_looper.mainloop(|| r, &mut handler).is_ok());
let resp = rx.expect_response();
assert!(resp.is_err(), "{:?}", resp);
}
#[test]
fn test_bad_json_err() {
// malformed json should cause the runloop to return an error.
let mut handler = EchoHandler;
let mut rpc_looper = RpcLoop::new(io::sink());
let r = make_reader(r#"this is not valid json"#);
let exit = rpc_looper.mainloop(|| r, &mut handler);
match exit {
Err(ReadError::Json(_)) => (),
Err(err) => panic!("Incorrect error: {:?}", err),
Ok(()) => panic!("Expected an error"),
}
}
|
EchoHandler
|
identifier_name
|
date.rs
|
use std::fmt::{self, Show};
use std::str::FromStr;
use time::Tm;
use header::{Header, HeaderFormat};
use header::shared::util::from_one_raw_str;
use header::shared::time::tm_from_str;
// Egh, replace as soon as something better than time::Tm exists.
/// The `Date` header field.
#[derive(Copy, PartialEq, Clone)]
pub struct Date(pub Tm);
deref!(Date => Tm);
impl Header for Date {
fn header_name(_: Option<Date>) -> &'static str {
"Date"
}
fn parse_header(raw: &[Vec<u8>]) -> Option<Date> {
from_one_raw_str(raw)
}
}
impl HeaderFormat for Date {
fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let tm = **self;
match tm.tm_utcoff {
0 => tm.rfc822().fmt(fmt),
_ => tm.to_utc().rfc822().fmt(fmt)
}
}
}
|
impl FromStr for Date {
fn from_str(s: &str) -> Option<Date> {
tm_from_str(s).map(Date)
}
}
bench_header!(imf_fixdate, Date, { vec![b"Sun, 07 Nov 1994 08:48:37 GMT".to_vec()] });
bench_header!(rfc_850, Date, { vec![b"Sunday, 06-Nov-94 08:49:37 GMT".to_vec()] });
bench_header!(asctime, Date, { vec![b"Sun Nov 6 08:49:37 1994".to_vec()] });
|
random_line_split
|
|
date.rs
|
use std::fmt::{self, Show};
use std::str::FromStr;
use time::Tm;
use header::{Header, HeaderFormat};
use header::shared::util::from_one_raw_str;
use header::shared::time::tm_from_str;
// Egh, replace as soon as something better than time::Tm exists.
/// The `Date` header field.
#[derive(Copy, PartialEq, Clone)]
pub struct Date(pub Tm);
deref!(Date => Tm);
impl Header for Date {
fn
|
(_: Option<Date>) -> &'static str {
"Date"
}
fn parse_header(raw: &[Vec<u8>]) -> Option<Date> {
from_one_raw_str(raw)
}
}
impl HeaderFormat for Date {
fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let tm = **self;
match tm.tm_utcoff {
0 => tm.rfc822().fmt(fmt),
_ => tm.to_utc().rfc822().fmt(fmt)
}
}
}
impl FromStr for Date {
fn from_str(s: &str) -> Option<Date> {
tm_from_str(s).map(Date)
}
}
bench_header!(imf_fixdate, Date, { vec![b"Sun, 07 Nov 1994 08:48:37 GMT".to_vec()] });
bench_header!(rfc_850, Date, { vec![b"Sunday, 06-Nov-94 08:49:37 GMT".to_vec()] });
bench_header!(asctime, Date, { vec![b"Sun Nov 6 08:49:37 1994".to_vec()] });
|
header_name
|
identifier_name
|
date.rs
|
use std::fmt::{self, Show};
use std::str::FromStr;
use time::Tm;
use header::{Header, HeaderFormat};
use header::shared::util::from_one_raw_str;
use header::shared::time::tm_from_str;
// Egh, replace as soon as something better than time::Tm exists.
/// The `Date` header field.
#[derive(Copy, PartialEq, Clone)]
pub struct Date(pub Tm);
deref!(Date => Tm);
impl Header for Date {
fn header_name(_: Option<Date>) -> &'static str {
"Date"
}
fn parse_header(raw: &[Vec<u8>]) -> Option<Date> {
from_one_raw_str(raw)
}
}
impl HeaderFormat for Date {
fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result
|
}
impl FromStr for Date {
fn from_str(s: &str) -> Option<Date> {
tm_from_str(s).map(Date)
}
}
bench_header!(imf_fixdate, Date, { vec![b"Sun, 07 Nov 1994 08:48:37 GMT".to_vec()] });
bench_header!(rfc_850, Date, { vec![b"Sunday, 06-Nov-94 08:49:37 GMT".to_vec()] });
bench_header!(asctime, Date, { vec![b"Sun Nov 6 08:49:37 1994".to_vec()] });
|
{
let tm = **self;
match tm.tm_utcoff {
0 => tm.rfc822().fmt(fmt),
_ => tm.to_utc().rfc822().fmt(fmt)
}
}
|
identifier_body
|
lifetime-elision-return-type-requires-explicit-lifetime.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Lifetime annotation needed because we have no arguments.
fn f() -> &int { //~ ERROR missing lifetime specifier
//~^ HELP there is no value for it to be borrowed from
panic!()
}
// Lifetime annotation needed because we have two by-reference parameters.
fn g(_x: &int, _y: &int) -> &int { //~ ERROR missing lifetime specifier
//~^ HELP the signature does not say whether it is borrowed from `_x` or `_y`
panic!()
}
struct
|
<'a> {
x: &'a int,
}
// Lifetime annotation needed because we have two lifetimes: one as a parameter
// and one on the reference.
fn h(_x: &Foo) -> &int { //~ ERROR missing lifetime specifier
//~^ HELP the signature does not say which one of `_x`'s 2 elided lifetimes it is borrowed from
panic!()
}
fn i(_x: int) -> &int { //~ ERROR missing lifetime specifier
//~^ HELP this function's return type contains a borrowed value
panic!()
}
fn main() {}
|
Foo
|
identifier_name
|
lifetime-elision-return-type-requires-explicit-lifetime.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Lifetime annotation needed because we have no arguments.
fn f() -> &int { //~ ERROR missing lifetime specifier
//~^ HELP there is no value for it to be borrowed from
panic!()
}
// Lifetime annotation needed because we have two by-reference parameters.
fn g(_x: &int, _y: &int) -> &int { //~ ERROR missing lifetime specifier
//~^ HELP the signature does not say whether it is borrowed from `_x` or `_y`
panic!()
}
struct Foo<'a> {
x: &'a int,
}
// Lifetime annotation needed because we have two lifetimes: one as a parameter
// and one on the reference.
fn h(_x: &Foo) -> &int { //~ ERROR missing lifetime specifier
//~^ HELP the signature does not say which one of `_x`'s 2 elided lifetimes it is borrowed from
panic!()
}
fn i(_x: int) -> &int { //~ ERROR missing lifetime specifier
//~^ HELP this function's return type contains a borrowed value
panic!()
}
fn main() {}
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
random_line_split
|
lifetime-elision-return-type-requires-explicit-lifetime.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Lifetime annotation needed because we have no arguments.
fn f() -> &int
|
// Lifetime annotation needed because we have two by-reference parameters.
fn g(_x: &int, _y: &int) -> &int { //~ ERROR missing lifetime specifier
//~^ HELP the signature does not say whether it is borrowed from `_x` or `_y`
panic!()
}
struct Foo<'a> {
x: &'a int,
}
// Lifetime annotation needed because we have two lifetimes: one as a parameter
// and one on the reference.
fn h(_x: &Foo) -> &int { //~ ERROR missing lifetime specifier
//~^ HELP the signature does not say which one of `_x`'s 2 elided lifetimes it is borrowed from
panic!()
}
fn i(_x: int) -> &int { //~ ERROR missing lifetime specifier
//~^ HELP this function's return type contains a borrowed value
panic!()
}
fn main() {}
|
{ //~ ERROR missing lifetime specifier
//~^ HELP there is no value for it to be borrowed from
panic!()
}
|
identifier_body
|
builder.rs
|
use std::collections::{HashMap, VecDeque};
use crate::level::{Background, Level};
use crate::position::*;
use crate::util::*;
/// Dynamic part of a cell.
#[derive(Debug, Clone, Copy, PartialEq, Hash)]
pub enum Foreground {
None,
Worker,
Crate,
}
fn char_to_cell(chr: char) -> Option<(Background, Foreground)> {
match chr {
'#' => Some((Background::Wall, Foreground::None)),
'' => Some((Background::Empty, Foreground::None)),
'$' => Some((Background::Floor, Foreground::Crate)),
'@' => Some((Background::Floor, Foreground::Worker)),
'.' => Some((Background::Goal, Foreground::None)),
'*' => Some((Background::Goal, Foreground::Crate)),
'+' => Some((Background::Goal, Foreground::Worker)),
_ => None,
}
}
pub(crate) struct LevelBuilder {
columns: usize,
rows: usize,
background: Vec<Background>,
crates: HashMap<Position, usize>,
worker_position: Position,
}
fn is_empty_or_comment(s: &str) -> bool {
s.is_empty() || s.trim().starts_with(';')
}
impl LevelBuilder {
pub fn new(rank: usize, level_string: &str) -> Result<Self, SokobanError> {
let lines: Vec<_> = level_string
.lines()
.filter(|x|!is_empty_or_comment(x))
.collect();
let rows = lines.len();
if rows == 0 {
return Err(SokobanError::NoLevel(rank));
}
let columns = lines.iter().map(|x| x.len()).max().unwrap();
if columns == 0 {
return Err(SokobanError::NoLevel(rank));
}
let mut found_worker = false;
let mut worker_position = Position { x: 0, y: 0 };
let mut background = vec![Background::Empty; columns * rows];
let mut crates = Vec::with_capacity(20);
let mut goals_minus_crates = 0_i32;
let mut found_level_description = false;
for (y, line) in lines.iter().enumerate() {
let mut inside = false;
for (x, chr) in line.chars().enumerate() {
let (bg, fg) = char_to_cell(chr).unwrap_or_else(|| {
panic!("Invalid character '{}' in line {}, column {}.", chr, y, x)
});
let index = y * columns + x;
background[index] = bg;
found_level_description = true;
// Count goals still to be filled and make sure that there are exactly as many
// goals as there are crates.
if bg == Background::Goal && fg!= Foreground::Crate {
goals_minus_crates += 1;
} else if bg!= Background::Goal && fg == Foreground::Crate {
goals_minus_crates -= 1;
}
|
}
// Try to figure out whether a given cell is inside the walls.
if!inside && bg.is_wall() {
inside = true;
}
if inside
&& bg == Background::Empty
&& index >= columns
&& background[index - columns]!= Background::Empty
{
background[index] = Background::Floor;
}
// Find the initial worker position.
if fg == Foreground::Worker {
if found_worker {
return Err(SokobanError::TwoWorkers(rank));
}
worker_position = Position::new(x, y);
found_worker = true;
}
}
}
if!found_level_description {
return Err(SokobanError::NoLevel(rank));
} else if!found_worker {
return Err(SokobanError::NoWorker(rank));
} else if goals_minus_crates!= 0 {
return Err(SokobanError::CratesGoalsMismatch(rank, goals_minus_crates));
}
let swap = |(a, b)| (b, a);
let crates = crates.into_iter().enumerate().map(swap).collect();
Ok(Self {
columns,
rows,
background,
crates,
worker_position,
})
}
pub fn build(mut self) -> Level {
self.correct_outside_cells();
Level {
columns: self.columns,
rows: self.rows,
background: self.background,
crates: self.crates,
worker_position: self.worker_position,
}
}
/// Fix the mistakes of the heuristic used in `new()` for detecting which cells are on the
/// inside.
fn correct_outside_cells(&mut self) {
let columns = self.columns;
let mut queue = VecDeque::new();
let mut visited = vec![false; self.background.len()];
visited[self.worker_position.to_index(columns)] = true;
let mut inside = visited.clone();
queue.push_back(self.worker_position);
for crate_pos in self.crates.keys() {
visited[crate_pos.to_index(columns)] = true;
queue.push_back(*crate_pos);
}
for (i, &bg) in self.background.iter().enumerate() {
match bg {
Background::Wall => visited[i] = true,
Background::Goal if!visited[i] => {
inside[i] = true;
visited[i] = true;
queue.push_back(Position::from_index(i, columns));
}
_ => (),
}
}
// Flood fill from all positions added above
while let Some(pos) = queue.pop_front() {
use crate::Direction::*;
let i = pos.to_index(columns);
if let Background::Wall = self.background[i] {
continue;
} else {
inside[i] = true;
}
for n in [Up, Down, Left, Right].iter().map(|&x| pos.neighbour(x)) {
// The outermost rows and columns may only contain empty space and walls, so
// n has to bee within bounds.
let j = n.to_index(columns);
if!visited[j] {
visited[j] = true;
queue.push_back(n);
}
}
}
for (i, bg) in self.background.iter_mut().enumerate() {
if!inside[i] && *bg == Background::Floor {
*bg = Background::Empty;
}
}
}
}
// }}}
|
if fg == Foreground::Crate {
crates.push(Position::new(x, y));
|
random_line_split
|
builder.rs
|
use std::collections::{HashMap, VecDeque};
use crate::level::{Background, Level};
use crate::position::*;
use crate::util::*;
/// Dynamic part of a cell.
#[derive(Debug, Clone, Copy, PartialEq, Hash)]
pub enum Foreground {
None,
Worker,
Crate,
}
fn char_to_cell(chr: char) -> Option<(Background, Foreground)> {
match chr {
'#' => Some((Background::Wall, Foreground::None)),
'' => Some((Background::Empty, Foreground::None)),
'$' => Some((Background::Floor, Foreground::Crate)),
'@' => Some((Background::Floor, Foreground::Worker)),
'.' => Some((Background::Goal, Foreground::None)),
'*' => Some((Background::Goal, Foreground::Crate)),
'+' => Some((Background::Goal, Foreground::Worker)),
_ => None,
}
}
pub(crate) struct
|
{
columns: usize,
rows: usize,
background: Vec<Background>,
crates: HashMap<Position, usize>,
worker_position: Position,
}
fn is_empty_or_comment(s: &str) -> bool {
s.is_empty() || s.trim().starts_with(';')
}
impl LevelBuilder {
pub fn new(rank: usize, level_string: &str) -> Result<Self, SokobanError> {
let lines: Vec<_> = level_string
.lines()
.filter(|x|!is_empty_or_comment(x))
.collect();
let rows = lines.len();
if rows == 0 {
return Err(SokobanError::NoLevel(rank));
}
let columns = lines.iter().map(|x| x.len()).max().unwrap();
if columns == 0 {
return Err(SokobanError::NoLevel(rank));
}
let mut found_worker = false;
let mut worker_position = Position { x: 0, y: 0 };
let mut background = vec![Background::Empty; columns * rows];
let mut crates = Vec::with_capacity(20);
let mut goals_minus_crates = 0_i32;
let mut found_level_description = false;
for (y, line) in lines.iter().enumerate() {
let mut inside = false;
for (x, chr) in line.chars().enumerate() {
let (bg, fg) = char_to_cell(chr).unwrap_or_else(|| {
panic!("Invalid character '{}' in line {}, column {}.", chr, y, x)
});
let index = y * columns + x;
background[index] = bg;
found_level_description = true;
// Count goals still to be filled and make sure that there are exactly as many
// goals as there are crates.
if bg == Background::Goal && fg!= Foreground::Crate {
goals_minus_crates += 1;
} else if bg!= Background::Goal && fg == Foreground::Crate {
goals_minus_crates -= 1;
}
if fg == Foreground::Crate {
crates.push(Position::new(x, y));
}
// Try to figure out whether a given cell is inside the walls.
if!inside && bg.is_wall() {
inside = true;
}
if inside
&& bg == Background::Empty
&& index >= columns
&& background[index - columns]!= Background::Empty
{
background[index] = Background::Floor;
}
// Find the initial worker position.
if fg == Foreground::Worker {
if found_worker {
return Err(SokobanError::TwoWorkers(rank));
}
worker_position = Position::new(x, y);
found_worker = true;
}
}
}
if!found_level_description {
return Err(SokobanError::NoLevel(rank));
} else if!found_worker {
return Err(SokobanError::NoWorker(rank));
} else if goals_minus_crates!= 0 {
return Err(SokobanError::CratesGoalsMismatch(rank, goals_minus_crates));
}
let swap = |(a, b)| (b, a);
let crates = crates.into_iter().enumerate().map(swap).collect();
Ok(Self {
columns,
rows,
background,
crates,
worker_position,
})
}
pub fn build(mut self) -> Level {
self.correct_outside_cells();
Level {
columns: self.columns,
rows: self.rows,
background: self.background,
crates: self.crates,
worker_position: self.worker_position,
}
}
/// Fix the mistakes of the heuristic used in `new()` for detecting which cells are on the
/// inside.
fn correct_outside_cells(&mut self) {
let columns = self.columns;
let mut queue = VecDeque::new();
let mut visited = vec![false; self.background.len()];
visited[self.worker_position.to_index(columns)] = true;
let mut inside = visited.clone();
queue.push_back(self.worker_position);
for crate_pos in self.crates.keys() {
visited[crate_pos.to_index(columns)] = true;
queue.push_back(*crate_pos);
}
for (i, &bg) in self.background.iter().enumerate() {
match bg {
Background::Wall => visited[i] = true,
Background::Goal if!visited[i] => {
inside[i] = true;
visited[i] = true;
queue.push_back(Position::from_index(i, columns));
}
_ => (),
}
}
// Flood fill from all positions added above
while let Some(pos) = queue.pop_front() {
use crate::Direction::*;
let i = pos.to_index(columns);
if let Background::Wall = self.background[i] {
continue;
} else {
inside[i] = true;
}
for n in [Up, Down, Left, Right].iter().map(|&x| pos.neighbour(x)) {
// The outermost rows and columns may only contain empty space and walls, so
// n has to bee within bounds.
let j = n.to_index(columns);
if!visited[j] {
visited[j] = true;
queue.push_back(n);
}
}
}
for (i, bg) in self.background.iter_mut().enumerate() {
if!inside[i] && *bg == Background::Floor {
*bg = Background::Empty;
}
}
}
}
// }}}
|
LevelBuilder
|
identifier_name
|
builder.rs
|
use std::collections::{HashMap, VecDeque};
use crate::level::{Background, Level};
use crate::position::*;
use crate::util::*;
/// Dynamic part of a cell.
#[derive(Debug, Clone, Copy, PartialEq, Hash)]
pub enum Foreground {
None,
Worker,
Crate,
}
fn char_to_cell(chr: char) -> Option<(Background, Foreground)>
|
pub(crate) struct LevelBuilder {
columns: usize,
rows: usize,
background: Vec<Background>,
crates: HashMap<Position, usize>,
worker_position: Position,
}
fn is_empty_or_comment(s: &str) -> bool {
s.is_empty() || s.trim().starts_with(';')
}
impl LevelBuilder {
pub fn new(rank: usize, level_string: &str) -> Result<Self, SokobanError> {
let lines: Vec<_> = level_string
.lines()
.filter(|x|!is_empty_or_comment(x))
.collect();
let rows = lines.len();
if rows == 0 {
return Err(SokobanError::NoLevel(rank));
}
let columns = lines.iter().map(|x| x.len()).max().unwrap();
if columns == 0 {
return Err(SokobanError::NoLevel(rank));
}
let mut found_worker = false;
let mut worker_position = Position { x: 0, y: 0 };
let mut background = vec![Background::Empty; columns * rows];
let mut crates = Vec::with_capacity(20);
let mut goals_minus_crates = 0_i32;
let mut found_level_description = false;
for (y, line) in lines.iter().enumerate() {
let mut inside = false;
for (x, chr) in line.chars().enumerate() {
let (bg, fg) = char_to_cell(chr).unwrap_or_else(|| {
panic!("Invalid character '{}' in line {}, column {}.", chr, y, x)
});
let index = y * columns + x;
background[index] = bg;
found_level_description = true;
// Count goals still to be filled and make sure that there are exactly as many
// goals as there are crates.
if bg == Background::Goal && fg!= Foreground::Crate {
goals_minus_crates += 1;
} else if bg!= Background::Goal && fg == Foreground::Crate {
goals_minus_crates -= 1;
}
if fg == Foreground::Crate {
crates.push(Position::new(x, y));
}
// Try to figure out whether a given cell is inside the walls.
if!inside && bg.is_wall() {
inside = true;
}
if inside
&& bg == Background::Empty
&& index >= columns
&& background[index - columns]!= Background::Empty
{
background[index] = Background::Floor;
}
// Find the initial worker position.
if fg == Foreground::Worker {
if found_worker {
return Err(SokobanError::TwoWorkers(rank));
}
worker_position = Position::new(x, y);
found_worker = true;
}
}
}
if!found_level_description {
return Err(SokobanError::NoLevel(rank));
} else if!found_worker {
return Err(SokobanError::NoWorker(rank));
} else if goals_minus_crates!= 0 {
return Err(SokobanError::CratesGoalsMismatch(rank, goals_minus_crates));
}
let swap = |(a, b)| (b, a);
let crates = crates.into_iter().enumerate().map(swap).collect();
Ok(Self {
columns,
rows,
background,
crates,
worker_position,
})
}
pub fn build(mut self) -> Level {
self.correct_outside_cells();
Level {
columns: self.columns,
rows: self.rows,
background: self.background,
crates: self.crates,
worker_position: self.worker_position,
}
}
    /// Fix the mistakes of the heuristic used in `new()` for detecting which cells are on the
    /// inside.
    ///
    /// Flood-fills from the worker position, every crate and every unvisited
    /// goal; any `Floor` cell never reached this way lies outside the walls
    /// and is reset to `Empty`.
    fn correct_outside_cells(&mut self) {
        let columns = self.columns;
        let mut queue = VecDeque::new();
        // `visited` marks cells already enqueued (or walls, which are never
        // expanded); `inside` marks cells proven reachable from a seed.
        let mut visited = vec![false; self.background.len()];
        visited[self.worker_position.to_index(columns)] = true;
        let mut inside = visited.clone();
        queue.push_back(self.worker_position);
        // Every crate position is an additional seed for the fill.
        for crate_pos in self.crates.keys() {
            visited[crate_pos.to_index(columns)] = true;
            queue.push_back(*crate_pos);
        }
        // Walls are marked visited so the fill never expands through them;
        // goals not already seeded above become extra starting points.
        for (i, &bg) in self.background.iter().enumerate() {
            match bg {
                Background::Wall => visited[i] = true,
                Background::Goal if!visited[i] => {
                    inside[i] = true;
                    visited[i] = true;
                    queue.push_back(Position::from_index(i, columns));
                }
                _ => (),
            }
        }
        // Flood fill from all positions added above
        while let Some(pos) = queue.pop_front() {
            use crate::Direction::*;
            let i = pos.to_index(columns);
            if let Background::Wall = self.background[i] {
                continue;
            } else {
                inside[i] = true;
            }
            for n in [Up, Down, Left, Right].iter().map(|&x| pos.neighbour(x)) {
                // The outermost rows and columns may only contain empty space and walls, so
                // n has to be within bounds.
                let j = n.to_index(columns);
                if!visited[j] {
                    visited[j] = true;
                    queue.push_back(n);
                }
            }
        }
        // Anything still marked Floor but never reached is actually outside.
        for (i, bg) in self.background.iter_mut().enumerate() {
            if!inside[i] && *bg == Background::Floor {
                *bg = Background::Empty;
            }
        }
    }
}
// }}}
|
{
match chr {
'#' => Some((Background::Wall, Foreground::None)),
' ' => Some((Background::Empty, Foreground::None)),
'$' => Some((Background::Floor, Foreground::Crate)),
'@' => Some((Background::Floor, Foreground::Worker)),
'.' => Some((Background::Goal, Foreground::None)),
'*' => Some((Background::Goal, Foreground::Crate)),
'+' => Some((Background::Goal, Foreground::Worker)),
_ => None,
}
}
|
identifier_body
|
hybridmulti.rs
|
// Copyright 2016 The Noise-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! An example of using the `HybridMulti` noise function
extern crate noise;
use noise::HybridMulti;
mod debug;
fn
|
() {
debug::render_noise_module3("hybridmulti.png", &HybridMulti::new(), 1024, 1024, 200);
}
|
main
|
identifier_name
|
hybridmulti.rs
|
// Copyright 2016 The Noise-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
|
extern crate noise;
use noise::HybridMulti;
mod debug;
fn main() {
    // Render the default HybridMulti noise function to hybridmulti.png
    // (1024x1024) via the shared debug helper.
    debug::render_noise_module3("hybridmulti.png", &HybridMulti::new(), 1024, 1024, 200);
}
|
//! An example of using the `HybridMulti` noise function
|
random_line_split
|
hybridmulti.rs
|
// Copyright 2016 The Noise-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! An example of using the `HybridMulti` noise function
extern crate noise;
use noise::HybridMulti;
mod debug;
fn main()
|
{
debug::render_noise_module3("hybridmulti.png", &HybridMulti::new(), 1024, 1024, 200);
}
|
identifier_body
|
|
24_game_solve.rs
|
// http://rosettacode.org/wiki/24_game/Solve
// modeled after the scala solution
// http://rosettacode.org/wiki/24_game/Solve#Scala
#![feature(collections)]
#![feature(slice_patterns)]
extern crate num;
use num::rational::{Ratio, Rational};
use num::traits::Zero;
// convenience macro to create a fixed-sized vector
// of rationals by writing:
// rational![1, 2,...] instead of
// [Ratio::<isize>::from_integer(1), Ratio::<isize>::from_integer(2),...]
macro_rules! rationals(
($($e:expr),+) => ([$(Ratio::<isize>::from_integer($e)),+])
);
// Entry point: solve the 24 game for the digits 1, 3, 7, 9 and print the
// resulting expression (or a fallback message when none exists).
#[cfg(not(test))]
fn main() {
    let mut r = rationals![1, 3, 7, 9];
    let sol = solve(&mut r[..], 24).unwrap_or("no solution found".to_string());
    println!("{}", sol);
}
// For a slice of rationals `r`, search every permutation of the numbers for
// an arithmetic combination (+, -, *, /) that evaluates to `target_val`.
// Returns the expression in string form, or `None` if no combination exists.
fn solve(r: &mut[Rational], target_val: isize) -> Option<String> {
    // Sort first: next_permutation() yields permutations in lexicographic
    // order, so starting from the smallest ordering visits all of them.
    r.sort();
    loop {
        let all_ops = compute_all_operations(r);
        for &(res, ref ops) in &all_ops {
            if res==Ratio::from_integer(target_val) {return Some(ops.to_string());}
        }
        // All permutations exhausted without hitting the target value.
        if! r.next_permutation() {return None;}
    }
}
// Applies all valid combinations of + - * and / to the numbers in `l`
// (recursively on the tail) and, for each combination, builds a tuple of the
// resulting value and the expression in String form.
// Returns every (result, expression-string) pair in a vector.
fn compute_all_operations(l: &[Rational]) -> Vec<(Rational, String)> {
    match l {
        // No numbers: no expressions can be formed.
        [] => vec![],
        // A single number is itself the only possible expression.
        [x] => vec![(x, (format!("{}", x)))],
        // Combine the head with every expression built from the tail.
        [x,rest..] => {
            let mut rt=Vec::new();
            for &(y, ref exp) in &compute_all_operations(rest) {
                let mut sub=vec![(x * y, "*"),(x + y, "+"), (x - y, "-")];
                // Division only applies when the right operand is non-zero.
                if y!= Zero::zero() {sub.push( (x/y, "/")); }
                for &(z, ref op) in &sub {
                    let aux = (z, (format!("({} {} {})", x, op, exp )));
                    rt.push(aux);
                }
            }
            rt
        }
    }
}
#[test]
fn
|
() {
assert_eq!(
// without the rationals! macro
[Ratio::from_integer(1),
Ratio::from_integer(2),
Ratio::from_integer(3),
Ratio::from_integer(4)],
// with the rationals! macro
(rationals![1, 2, 3, 4]));
}
// Known solution for digits 1, 3, 7, 9; ignored because the Display format
// of rationals changed and the expected string no longer matches.
#[test]
#[ignore] // printing of rationals changed but seems wrong...
fn test_solve() {
    let mut r = rationals![1, 3, 7, 9];
    assert_eq!(
        solve(&mut r[..], 24),
        Some("(9 / (3 / (1 + 7)))".to_string()));
}
|
test_rationals_macro
|
identifier_name
|
24_game_solve.rs
|
// http://rosettacode.org/wiki/24_game/Solve
// modeled after the scala solution
// http://rosettacode.org/wiki/24_game/Solve#Scala
#![feature(collections)]
#![feature(slice_patterns)]
extern crate num;
use num::rational::{Ratio, Rational};
use num::traits::Zero;
// convenience macro to create a fixed-sized vector
// of rationals by writing:
// rational![1, 2,...] instead of
// [Ratio::<isize>::from_integer(1), Ratio::<isize>::from_integer(2),...]
macro_rules! rationals(
($($e:expr),+) => ([$(Ratio::<isize>::from_integer($e)),+])
);
// Entry point: solve the 24 game for the digits 1, 3, 7, 9 and print the
// resulting expression (or a fallback message when none exists).
#[cfg(not(test))]
fn main() {
    let mut r = rationals![1, 3, 7, 9];
    let sol = solve(&mut r[..], 24).unwrap_or("no solution found".to_string());
    println!("{}", sol);
}
// For a slice of rationals `r`, search every permutation of the numbers for
// an arithmetic combination (+, -, *, /) that evaluates to `target_val`.
// Returns the expression in string form, or `None` if no combination exists.
fn solve(r: &mut[Rational], target_val: isize) -> Option<String> {
    // Sort first: next_permutation() yields permutations in lexicographic
    // order, so starting from the smallest ordering visits all of them.
    r.sort();
    loop {
        let all_ops = compute_all_operations(r);
        for &(res, ref ops) in &all_ops {
            if res==Ratio::from_integer(target_val) {return Some(ops.to_string());}
        }
        // All permutations exhausted without hitting the target value.
        if! r.next_permutation() {return None;}
    }
}
// applies all the valid combinations of + - * and / to the
// numbers in l and for each combination creates a tuple
// with the result and the expression in String form
// returns all (result, expression in string form)
// results in a vector
fn compute_all_operations(l: &[Rational]) -> Vec<(Rational, String)> {
match l {
[] => vec![],
[x] => vec![(x, (format!("{}", x)))],
[x,rest..] => {
let mut rt=Vec::new();
for &(y, ref exp) in &compute_all_operations(rest) {
let mut sub=vec![(x * y, "*"),(x + y, "+"), (x - y, "-")];
if y!= Zero::zero() {sub.push( (x/y, "/")); }
for &(z, ref op) in &sub {
let aux = (z, (format!("({} {} {})", x, op, exp )));
rt.push(aux);
}
|
}
rt
}
}
}
// Check that the rationals! macro expands to the same fixed-size array as
// writing out Ratio::from_integer for each element by hand.
#[test]
fn test_rationals_macro() {
    assert_eq!(
        // without the rationals! macro
        [Ratio::from_integer(1),
         Ratio::from_integer(2),
         Ratio::from_integer(3),
         Ratio::from_integer(4)],
        // with the rationals! macro
        (rationals![1, 2, 3, 4]));
}
// Known solution for digits 1, 3, 7, 9; ignored because the Display format
// of rationals changed and the expected string no longer matches.
#[test]
#[ignore] // printing of rationals changed but seems wrong...
fn test_solve() {
    let mut r = rationals![1, 3, 7, 9];
    assert_eq!(
        solve(&mut r[..], 24),
        Some("(9 / (3 / (1 + 7)))".to_string()));
}
|
random_line_split
|
|
24_game_solve.rs
|
// http://rosettacode.org/wiki/24_game/Solve
// modeled after the scala solution
// http://rosettacode.org/wiki/24_game/Solve#Scala
#![feature(collections)]
#![feature(slice_patterns)]
extern crate num;
use num::rational::{Ratio, Rational};
use num::traits::Zero;
// convenience macro to create a fixed-sized vector
// of rationals by writing:
// rational![1, 2,...] instead of
// [Ratio::<isize>::from_integer(1), Ratio::<isize>::from_integer(2),...]
macro_rules! rationals(
($($e:expr),+) => ([$(Ratio::<isize>::from_integer($e)),+])
);
// Entry point: solve the 24 game for the digits 1, 3, 7, 9 and print the
// resulting expression (or a fallback message when none exists).
#[cfg(not(test))]
fn main() {
    let mut r = rationals![1, 3, 7, 9];
    let sol = solve(&mut r[..], 24).unwrap_or("no solution found".to_string());
    println!("{}", sol);
}
// For a slice of rationals `r`, search every permutation of the numbers for
// an arithmetic combination (+, -, *, /) that evaluates to `target_val`.
// Returns the expression in string form, or `None` if no combination exists.
fn solve(r: &mut[Rational], target_val: isize) -> Option<String> {
    // Sort first: next_permutation() yields permutations in lexicographic
    // order, so starting from the smallest ordering visits all of them.
    r.sort();
    loop {
        let all_ops = compute_all_operations(r);
        for &(res, ref ops) in &all_ops {
            if res==Ratio::from_integer(target_val) {return Some(ops.to_string());}
        }
        // All permutations exhausted without hitting the target value.
        if! r.next_permutation() {return None;}
    }
}
// Applies all valid combinations of + - * and / to the numbers in `l`
// (recursively on the tail) and, for each combination, builds a tuple of the
// resulting value and the expression in String form.
// Returns every (result, expression-string) pair in a vector.
fn compute_all_operations(l: &[Rational]) -> Vec<(Rational, String)> {
    match l {
        // No numbers: no expressions can be formed.
        [] => vec![],
        // A single number is itself the only possible expression.
        [x] => vec![(x, (format!("{}", x)))],
        // Combine the head with every expression built from the tail.
        [x,rest..] => {
            let mut rt=Vec::new();
            for &(y, ref exp) in &compute_all_operations(rest) {
                let mut sub=vec![(x * y, "*"),(x + y, "+"), (x - y, "-")];
                // Division only applies when the right operand is non-zero.
                if y!= Zero::zero() {sub.push( (x/y, "/")); }
                for &(z, ref op) in &sub {
                    let aux = (z, (format!("({} {} {})", x, op, exp )));
                    rt.push(aux);
                }
            }
            rt
        }
    }
}
// Check that the rationals! macro expands to the same fixed-size array as
// writing out Ratio::from_integer for each element by hand.
#[test]
fn test_rationals_macro() {
    assert_eq!(
        // without the rationals! macro
        [Ratio::from_integer(1),
         Ratio::from_integer(2),
         Ratio::from_integer(3),
         Ratio::from_integer(4)],
        // with the rationals! macro
        (rationals![1, 2, 3, 4]));
}
#[test]
#[ignore] // printing of rationals changed but seems wrong...
fn test_solve()
|
{
let mut r = rationals![1, 3, 7, 9];
assert_eq!(
solve(&mut r[..], 24),
Some("(9 / (3 / (1 + 7)))".to_string()));
}
|
identifier_body
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.