file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
life.rs | #![cfg(test)]
use traits::Cell;
use traits::Coord;
use traits::Engine;
use traits::Consumer;
use traits::Grid;
use engine::Sequential;
use grid::twodim::TwodimGrid;
use grid::nhood::MooreNhood;
use grid::EmptyState;
use utils::find_cell;
/// Implementation of Conway's Game of Life.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
enum LifeState {
Dead,
Alive,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
struct Life {
state: LifeState,
coord: (i32, i32),
}
impl Life {
fn alive_count<'a, I>(&self, neighbors: I) -> u32
where I: Iterator<Item = Option<&'a Self>>,
{
neighbors.filter(|n| match *n {
Some(n) => n.state == LifeState::Alive,
None => false,
})
.count() as u32
}
#[inline]
fn | (&self, alive: u32) -> LifeState {
match alive {
3 => LifeState::Alive,
_ => LifeState::Dead,
}
}
#[inline]
fn alive_state(&self, alive: u32) -> LifeState {
match alive {
2 | 3 => LifeState::Alive,
_ => LifeState::Dead,
}
}
}
impl Cell for Life {
type Coord = (i32, i32);
type State = EmptyState;
fn update<'a, I>(&'a mut self, old: &'a Self, neighbors: I, _: &Self::State)
where I: Iterator<Item = Option<&'a Self>>,
{
let alive_count = self.alive_count(neighbors);
let new_state = match old.state {
LifeState::Alive => self.alive_state(alive_count),
LifeState::Dead => self.dead_state(alive_count),
};
self.state = new_state;
}
fn with_coord<C: Coord>(coord: C) -> Self {
Life {
state: LifeState::Dead,
coord: (coord.x(), coord.y()),
}
}
fn coord(&self) -> &Self::Coord { &self.coord }
fn set_coord<C: Coord>(&mut self, coord: &C) { self.coord = (coord.x(), coord.y()); }
}
fn pretty_print<G: Grid<Cell = Life>>(grid: &G) {
let dims = grid.size();
println!("");
for y in 0..dims.y() {
for x in 0..dims.x() {
let cell = find_cell(grid.cells(), x, y);
match cell.state {
LifeState::Dead => print!("D |"),
LifeState::Alive => print!("A |"),
};
}
println!("");
}
println!("");
}
struct SpinnerTestConsumer {
vertical: bool,
}
impl SpinnerTestConsumer {
pub fn new() -> Self { SpinnerTestConsumer { vertical: true } }
}
impl Consumer for SpinnerTestConsumer {
type Cell = Life;
fn consume<G: Grid<Cell = Self::Cell>>(&mut self, grid: &mut G) {
assert_eq!(grid.cells().len(), 9);
pretty_print(grid);
let dead_cells_count = grid.cells()
.iter()
.filter(|c| c.state == LifeState::Dead)
.count();
assert_eq!(dead_cells_count, 6);
let alive_cells = || {
grid.cells()
.iter()
.filter(|c| c.state == LifeState::Alive)
};
assert_eq!(alive_cells().count(), 3);
self.vertical = !self.vertical;
// if spinner is in vertical state
if alive_cells().all(|c| c.coord.x() == 1) {
assert!(self.vertical);
}
// if spinner is in horizontal state
if alive_cells().all(|c| c.coord.y() == 1) {
assert!(!self.vertical);
}
}
}
#[test]
fn test_game_of_life() {
let nhood = MooreNhood::new();
let mut grid: TwodimGrid<Life, _, _> = TwodimGrid::new(3, 3, nhood, EmptyState, 1);
// Should be in default state
let default_state = LifeState::Dead;
assert!(grid.cells()
.iter()
.all(|c| c.state == default_state));
// Vertical spinner
// D | A | D
// D | A | D
// D | A | D
let cells = vec![Life {
state: LifeState::Alive,
coord: (1, 0),
},
Life {
state: LifeState::Alive,
coord: (1, 1),
},
Life {
state: LifeState::Alive,
coord: (1, 2),
}];
grid.set_cells(cells);
pretty_print(&grid);
let consumer = SpinnerTestConsumer::new();
let mut engine = Sequential::new(grid, consumer);
engine.run_times(2);
}
| dead_state | identifier_name |
life.rs | #![cfg(test)]
use traits::Cell;
use traits::Coord;
use traits::Engine;
use traits::Consumer;
use traits::Grid;
use engine::Sequential;
use grid::twodim::TwodimGrid;
use grid::nhood::MooreNhood;
use grid::EmptyState;
use utils::find_cell;
/// Implementation of Conway's Game of Life.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
enum LifeState {
Dead,
Alive,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
struct Life {
state: LifeState,
coord: (i32, i32),
}
impl Life {
fn alive_count<'a, I>(&self, neighbors: I) -> u32
where I: Iterator<Item = Option<&'a Self>>,
{
neighbors.filter(|n| match *n {
Some(n) => n.state == LifeState::Alive,
None => false,
})
.count() as u32
}
#[inline]
fn dead_state(&self, alive: u32) -> LifeState {
match alive {
3 => LifeState::Alive,
_ => LifeState::Dead,
}
}
#[inline]
fn alive_state(&self, alive: u32) -> LifeState {
match alive {
2 | 3 => LifeState::Alive,
_ => LifeState::Dead,
}
}
}
impl Cell for Life {
type Coord = (i32, i32);
type State = EmptyState;
fn update<'a, I>(&'a mut self, old: &'a Self, neighbors: I, _: &Self::State)
where I: Iterator<Item = Option<&'a Self>>,
{
let alive_count = self.alive_count(neighbors);
let new_state = match old.state {
LifeState::Alive => self.alive_state(alive_count),
LifeState::Dead => self.dead_state(alive_count),
};
self.state = new_state;
}
fn with_coord<C: Coord>(coord: C) -> Self {
Life {
state: LifeState::Dead,
coord: (coord.x(), coord.y()),
}
}
fn coord(&self) -> &Self::Coord { &self.coord }
fn set_coord<C: Coord>(&mut self, coord: &C) { self.coord = (coord.x(), coord.y()); }
}
fn pretty_print<G: Grid<Cell = Life>>(grid: &G) {
let dims = grid.size();
println!("");
for y in 0..dims.y() {
for x in 0..dims.x() {
let cell = find_cell(grid.cells(), x, y);
match cell.state {
LifeState::Dead => print!("D |"),
LifeState::Alive => print!("A |"),
};
}
println!("");
}
println!("");
}
struct SpinnerTestConsumer {
vertical: bool,
}
impl SpinnerTestConsumer {
pub fn new() -> Self { SpinnerTestConsumer { vertical: true } }
}
impl Consumer for SpinnerTestConsumer {
type Cell = Life;
fn consume<G: Grid<Cell = Self::Cell>>(&mut self, grid: &mut G) {
assert_eq!(grid.cells().len(), 9);
pretty_print(grid);
let dead_cells_count = grid.cells()
.iter()
.filter(|c| c.state == LifeState::Dead)
.count();
assert_eq!(dead_cells_count, 6);
let alive_cells = || {
grid.cells()
.iter()
.filter(|c| c.state == LifeState::Alive)
};
assert_eq!(alive_cells().count(), 3);
self.vertical = !self.vertical;
// if spinner is in vertical state
if alive_cells().all(|c| c.coord.x() == 1) {
assert!(self.vertical);
}
// if spinner is in horizontal state
if alive_cells().all(|c| c.coord.y() == 1) {
assert!(!self.vertical);
}
}
}
#[test]
fn test_game_of_life() {
let nhood = MooreNhood::new(); |
// Should be in default state
let default_state = LifeState::Dead;
assert!(grid.cells()
.iter()
.all(|c| c.state == default_state));
// Vertical spinner
// D | A | D
// D | A | D
// D | A | D
let cells = vec![Life {
state: LifeState::Alive,
coord: (1, 0),
},
Life {
state: LifeState::Alive,
coord: (1, 1),
},
Life {
state: LifeState::Alive,
coord: (1, 2),
}];
grid.set_cells(cells);
pretty_print(&grid);
let consumer = SpinnerTestConsumer::new();
let mut engine = Sequential::new(grid, consumer);
engine.run_times(2);
} | let mut grid: TwodimGrid<Life, _, _> = TwodimGrid::new(3, 3, nhood, EmptyState, 1); | random_line_split |
app.component.ts | import { Component, ViewChild, ElementRef } from "@angular/core";
import { Http, Headers, RequestOptions, Response } from "@angular/http";
import { SemanticPopupComponent } from "ng-semantic";
@Component({
selector: "app",
template: `
<nav class="ui menu inverted huge" *ngIf="!isPlaying()">
<a routerLink="home" routerLinkActive="active" class="item">Home</a>
<a routerLink="leaderboard" routerLinkActive="active" class="item">Leaderboard</a>
<!--<a routerLink="contact" class="item">Contact Me</a>-->
<!--<nav class="menu right">-->
<!--<a (click)="myPopup.show($event, {position: 'right center'})" *ngIf="!isLogged" class="item">Login</a>-->
<!--<a (click)="logout()" *ngIf="isLogged" class="item inverted red">Logout</a>-->
<!--</nav>-->
</nav>
<router-outlet></router-outlet>
`
})
export class | {
response:Response;
isLogged:boolean;
@ViewChild("myPopup") myPopup:SemanticPopupComponent;
constructor(private http:Http) {
this.isLogged = !!localStorage.getItem("id_token");
}
ngOnInit(){
localStorage.setItem("playing", "false");
}
isPlaying() {
return localStorage.getItem("playing") == "true";
}
//signup() {
// this.http.post("/login/signup", JSON.stringify({
// password: this.user.password,
// username: this.user.username
// }), new RequestOptions({
// headers: new Headers({"Content-Type": "application/json"})
// }))
// .map((res:any) => res.json())
// .subscribe(
// (res:Response) => {
// this.response = res;
// },
// (error:Error) => {
// console.log(error);
// }
// );
//}
//
//login() {
// this.http.post("/login", JSON.stringify({password: this.user.password}), new RequestOptions({
// headers: new Headers({"Content-Type": "application/json"})
// }))
// .map((res:Response) => res.json())
// .subscribe(
// (res:Response & { jwt: string }) => {
// localStorage.setItem("id_token", res.jwt);
// this.myPopup.hide();
// location.reload();
// },
// (error:Error) => {
// console.log(error);
// }
// );
//}
//
//logout():void {
// localStorage.removeItem("id_token");
// location.reload();
//}
} | AppComponent | identifier_name |
app.component.ts | import { Component, ViewChild, ElementRef } from "@angular/core";
import { Http, Headers, RequestOptions, Response } from "@angular/http";
import { SemanticPopupComponent } from "ng-semantic";
@Component({
selector: "app",
template: `
<nav class="ui menu inverted huge" *ngIf="!isPlaying()">
<a routerLink="home" routerLinkActive="active" class="item">Home</a>
<a routerLink="leaderboard" routerLinkActive="active" class="item">Leaderboard</a>
<!--<a routerLink="contact" class="item">Contact Me</a>-->
<!--<nav class="menu right">-->
<!--<a (click)="myPopup.show($event, {position: 'right center'})" *ngIf="!isLogged" class="item">Login</a>-->
<!--<a (click)="logout()" *ngIf="isLogged" class="item inverted red">Logout</a>-->
<!--</nav>-->
</nav>
<router-outlet></router-outlet>
`
})
export class AppComponent {
response:Response;
isLogged:boolean;
@ViewChild("myPopup") myPopup:SemanticPopupComponent;
constructor(private http:Http) {
this.isLogged = !!localStorage.getItem("id_token");
}
ngOnInit(){
localStorage.setItem("playing", "false");
}
isPlaying() {
return localStorage.getItem("playing") == "true";
}
//signup() {
// this.http.post("/login/signup", JSON.stringify({
// password: this.user.password,
// username: this.user.username
// }), new RequestOptions({ | // this.response = res;
// },
// (error:Error) => {
// console.log(error);
// }
// );
//}
//
//login() {
// this.http.post("/login", JSON.stringify({password: this.user.password}), new RequestOptions({
// headers: new Headers({"Content-Type": "application/json"})
// }))
// .map((res:Response) => res.json())
// .subscribe(
// (res:Response & { jwt: string }) => {
// localStorage.setItem("id_token", res.jwt);
// this.myPopup.hide();
// location.reload();
// },
// (error:Error) => {
// console.log(error);
// }
// );
//}
//
//logout():void {
// localStorage.removeItem("id_token");
// location.reload();
//}
} | // headers: new Headers({"Content-Type": "application/json"})
// }))
// .map((res:any) => res.json())
// .subscribe(
// (res:Response) => { | random_line_split |
app.component.ts | import { Component, ViewChild, ElementRef } from "@angular/core";
import { Http, Headers, RequestOptions, Response } from "@angular/http";
import { SemanticPopupComponent } from "ng-semantic";
@Component({
selector: "app",
template: `
<nav class="ui menu inverted huge" *ngIf="!isPlaying()">
<a routerLink="home" routerLinkActive="active" class="item">Home</a>
<a routerLink="leaderboard" routerLinkActive="active" class="item">Leaderboard</a>
<!--<a routerLink="contact" class="item">Contact Me</a>-->
<!--<nav class="menu right">-->
<!--<a (click)="myPopup.show($event, {position: 'right center'})" *ngIf="!isLogged" class="item">Login</a>-->
<!--<a (click)="logout()" *ngIf="isLogged" class="item inverted red">Logout</a>-->
<!--</nav>-->
</nav>
<router-outlet></router-outlet>
`
})
export class AppComponent {
response:Response;
isLogged:boolean;
@ViewChild("myPopup") myPopup:SemanticPopupComponent;
constructor(private http:Http) {
this.isLogged = !!localStorage.getItem("id_token");
}
ngOnInit() |
isPlaying() {
return localStorage.getItem("playing") == "true";
}
//signup() {
// this.http.post("/login/signup", JSON.stringify({
// password: this.user.password,
// username: this.user.username
// }), new RequestOptions({
// headers: new Headers({"Content-Type": "application/json"})
// }))
// .map((res:any) => res.json())
// .subscribe(
// (res:Response) => {
// this.response = res;
// },
// (error:Error) => {
// console.log(error);
// }
// );
//}
//
//login() {
// this.http.post("/login", JSON.stringify({password: this.user.password}), new RequestOptions({
// headers: new Headers({"Content-Type": "application/json"})
// }))
// .map((res:Response) => res.json())
// .subscribe(
// (res:Response & { jwt: string }) => {
// localStorage.setItem("id_token", res.jwt);
// this.myPopup.hide();
// location.reload();
// },
// (error:Error) => {
// console.log(error);
// }
// );
//}
//
//logout():void {
// localStorage.removeItem("id_token");
// location.reload();
//}
} | {
localStorage.setItem("playing", "false");
} | identifier_body |
lib.rs | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#![recursion_limit = "128"]
mod ext;
mod repr;
use proc_macro2::Span;
use syn::visit::{self, Visit};
use syn::{
parse_quote, punctuated::Punctuated, token::Comma, Data, DataEnum, DataStruct, DeriveInput,
Error, GenericParam, Ident, Lifetime, Type, TypePath,
};
use synstructure::{decl_derive, quote, Structure};
use ext::*;
use repr::*;
// TODO(joshlf): Some errors could be made better if we could add multiple lines
// of error output like this:
//
// error: unsupported representation
// --> enum.rs:28:8
// |
// 28 | #[repr(transparent)]
// |
// help: required by the derive of FromBytes
//
// Instead, we have more verbose error messages like "unsupported representation
// for deriving FromBytes, AsBytes, or Unaligned on an enum"
//
// This will probably require Span::error
// (https://doc.rust-lang.org/nightly/proc_macro/struct.Span.html#method.error),
// which is currently unstable. Revisit this once it's stable.
decl_derive!([FromBytes] => derive_from_bytes);
decl_derive!([AsBytes] => derive_as_bytes);
decl_derive!([Unaligned] => derive_unaligned);
fn derive_from_bytes(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_from_bytes_struct(&s, strct),
Data::Enum(enm) => derive_from_bytes_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
fn derive_as_bytes(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_as_bytes_struct(&s, strct),
Data::Enum(enm) => derive_as_bytes_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
fn derive_unaligned(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_unaligned_struct(&s, strct),
Data::Enum(enm) => derive_unaligned_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
// Unwrap a Result<_, Vec<Error>>, converting any Err value into a TokenStream
// and returning it.
macro_rules! try_or_print {
($e:expr) => {
match $e {
Ok(x) => x,
Err(errors) => return print_all_errors(errors),
}
};
}
// A struct is FromBytes if:
// - all fields are FromBytes
fn derive_from_bytes_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
impl_block(s.ast(), strct, "FromBytes", true, false)
}
// An enum is FromBytes if:
// - Every possible bit pattern must be valid, which means that every bit
// pattern must correspond to a different enum variant. Thus, for an enum
// whose layout takes up N bytes, there must be 2^N variants.
// - Since we must know N, only representations which guarantee the layout's
// size are allowed. These are repr(uN) and repr(iN) (repr(C) implies an
// implementation-defined size). size and isize technically guarantee the
// layout's size, but would require us to know how large those are on the
// target platform. This isn't terribly difficult - we could emit a const
// expression that could call core::mem::size_of in order to determine the
// size and check against the number of enum variants, but a) this would be
// platform-specific and, b) even on Rust's smallest bit width platform (32),
// this would require ~4 billion enum variants, which obviously isn't a thing.
fn derive_from_bytes_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement FromBytes")
.to_compile_error();
}
let reprs = try_or_print!(ENUM_FROM_BYTES_CFG.validate_reprs(s.ast()));
let variants_required = match reprs.as_slice() {
[EnumRepr::U8] | [EnumRepr::I8] => 1usize << 8,
[EnumRepr::U16] | [EnumRepr::I16] => 1usize << 16,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
if enm.variants.len() != variants_required {
return Error::new_spanned(
s.ast(),
format!(
"FromBytes only supported on {} enum with {} variants",
reprs[0], variants_required
),
)
.to_compile_error();
}
impl_block(s.ast(), enm, "FromBytes", true, false)
}
#[rustfmt::skip]
const ENUM_FROM_BYTES_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
allowed_combinations_message: r#"FromBytes requires repr of "u8", "u16", "i8", or "i16""#,
derive_unaligned: false,
allowed_combinations: &[
&[U8],
&[U16],
&[I8],
&[I16],
],
disallowed_but_legal_combinations: &[
&[C],
&[U32],
&[I32],
&[U64],
&[I64],
&[Usize],
&[Isize],
],
}
};
// A struct is AsBytes if:
// - all fields are AsBytes
// - repr(C) or repr(transparent) and
// - no padding (size of struct equals sum of size of field types)
// - repr(packed)
fn derive_as_bytes_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
// TODO(joshlf): Support type parameters.
if !s.ast().generics.params.is_empty() {
return Error::new(Span::call_site(), "unsupported on types with type parameters")
.to_compile_error();
}
let reprs = try_or_print!(STRUCT_AS_BYTES_CFG.validate_reprs(s.ast()));
let require_size_check = match reprs.as_slice() {
[StructRepr::C] | [StructRepr::Transparent] => true,
[StructRepr::Packed] | [StructRepr::C, StructRepr::Packed] => false,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
impl_block(s.ast(), strct, "AsBytes", true, require_size_check)
}
#[rustfmt::skip]
const STRUCT_AS_BYTES_CFG: Config<StructRepr> = {
use StructRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message: r#"AsBytes requires repr of "C", "transparent", or "packed""#,
derive_unaligned: false,
allowed_combinations: &[
&[C],
&[Transparent],
&[C, Packed],
&[Packed],
],
disallowed_but_legal_combinations: &[],
}
};
// An enum is AsBytes if it is C-like and has a defined repr
fn derive_as_bytes_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement AsBytes")
.to_compile_error();
}
// We don't care what the repr is; we only care that it is one of the
// allowed ones.
try_or_print!(ENUM_AS_BYTES_CFG.validate_reprs(s.ast()));
impl_block(s.ast(), enm, "AsBytes", false, false)
}
#[rustfmt::skip]
const ENUM_AS_BYTES_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message: r#"AsBytes requires repr of "C", "u8", "u16", "u32", "u64", "usize", "i8", "i16", "i32", "i64", or "isize""#,
derive_unaligned: false,
allowed_combinations: &[
&[C],
&[U8],
&[U16],
&[I8],
&[I16],
&[U32],
&[I32],
&[U64],
&[I64],
&[Usize],
&[Isize],
],
disallowed_but_legal_combinations: &[],
}
};
// A struct is Unaligned if:
// - repr(align) is no more than 1 and either
// - repr(C) or repr(transparent) and
// - all fields Unaligned
// - repr(packed)
fn derive_unaligned_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
let reprs = try_or_print!(STRUCT_UNALIGNED_CFG.validate_reprs(s.ast()));
let require_trait_bound = match reprs.as_slice() {
[StructRepr::C] | [StructRepr::Transparent] => true,
[StructRepr::Packed] | [StructRepr::C, StructRepr::Packed] => false,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
impl_block(s.ast(), strct, "Unaligned", require_trait_bound, false)
}
#[rustfmt::skip]
const STRUCT_UNALIGNED_CFG: Config<StructRepr> = {
use StructRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message:
r#"Unaligned requires either a) repr "C" or "transparent" with all fields implementing Unaligned or, b) repr "packed""#,
derive_unaligned: true,
allowed_combinations: &[
&[C],
&[Transparent],
&[Packed],
&[C, Packed],
],
disallowed_but_legal_combinations: &[],
}
};
// An enum is Unaligned if:
// - No repr(align(N > 1))
// - repr(u8) or repr(i8)
fn derive_unaligned_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement Unaligned")
.to_compile_error();
}
// The only valid reprs are u8 and i8, and optionally align(1). We don't
// actually care what the reprs are so long as they satisfy that
// requirement.
try_or_print!(ENUM_UNALIGNED_CFG.validate_reprs(s.ast()));
// NOTE: C-like enums cannot currently have type parameters, so this value
// of true for require_trait_bounds doesn't really do anything. But it's
// marginally more future-proof in case that restriction is lifted in the
// future.
impl_block(s.ast(), enm, "Unaligned", true, false)
}
#[rustfmt::skip]
const ENUM_UNALIGNED_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
allowed_combinations_message:
r#"Unaligned requires repr of "u8" or "i8", and no alignment (i.e., repr(align(N > 1)))"#,
derive_unaligned: true,
allowed_combinations: &[
&[U8],
&[I8],
],
disallowed_but_legal_combinations: &[
&[C],
&[U16],
&[U32],
&[U64],
&[Usize],
&[I16],
&[I32],
&[I64],
&[Isize],
],
}
};
fn impl_block<D: DataExt>(
input: &DeriveInput,
data: &D,
trait_name: &str,
require_trait_bound: bool,
require_size_check: bool,
) -> proc_macro2::TokenStream {
// In this documentation, we will refer to this hypothetical struct:
//
// #[derive(FromBytes)]
// struct Foo<T, I: Iterator>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// {
// a: u8,
// b: T,
// c: I::Item,
// }
//
// First, we extract the field types, which in this case are u8, T, and
// I::Item. We use the names of the type parameters to split the field types
// into two sets - a set of types which are based on the type parameters,
// and a set of types which are not. First, we re-use the existing
// parameters and where clauses, generating an impl block like:
//
// impl<T, I: Iterator> FromBytes for Foo<T, I>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// {
// }
//
// Then, we use the list of types which are based on the type parameters to
// generate new entries in the where clause:
//
// impl<T, I: Iterator> FromBytes for Foo<T, I>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// T: FromBytes,
// I::Item: FromBytes,
// {
// }
//
// Finally, we use a different technique to generate the bounds for the types
// which are not based on type parameters:
//
//
// fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {
// struct ImplementsFromBytes<F: ?Sized + FromBytes>(PhantomData<F>);
// let _: ImplementsFromBytes<u8>;
// }
//
// It would be easier to put all types in the where clause, but that won't
// work until the trivial_bounds feature is stabilized (#48214).
//
// NOTE: It is standard practice to only emit bounds for the type parameters
// themselves, not for field types based on those parameters (e.g., `T` vs
// `T::Foo`). For a discussion of why this is standard practice, see
// https://github.com/rust-lang/rust/issues/26925.
//
// The reason we diverge from this standard is that doing it that way for us
// would be unsound. E.g., consider a type, `T` where `T: FromBytes` but
// `T::Foo: !FromBytes`. It would not be sound for us to accept a type with
// a `T::Foo` field as `FromBytes` simply because `T: FromBytes`.
//
// While there's no getting around this requirement for us, it does have
// some pretty serious downsides that are worth calling out:
//
// 1. You lose the ability to have fields of generic type with reduced visibility.
//
// #[derive(Unaligned)]
// #[repr(C)]
// pub struct Public<T>(Private<T>);
//
// #[derive(Unaligned)]
// #[repr(C)]
// struct Private<T>(T);
//
//
// warning: private type `Private<T>` in public interface (error E0446)
// --> src/main.rs:6:10
// |
// 6 | #[derive(Unaligned)]
// | ^^^^^^^^^
// |
// = note: #[warn(private_in_public)] on by default
// = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
// = note: for more information, see issue #34537 <https://github.com/rust-lang/rust/issues/34537>
//
// 2. When lifetimes are involved, the trait solver ties itself in knots.
//
// #[derive(Unaligned)]
// #[repr(C)]
// struct Dup<'a, 'b> {
// a: PhantomData<&'a u8>,
// b: PhantomData<&'b u8>,
// }
//
//
// error[E0283]: type annotations required: cannot resolve `core::marker::PhantomData<&'a u8>: zerocopy::Unaligned`
// --> src/main.rs:6:10
// |
// 6 | #[derive(Unaligned)]
// | ^^^^^^^^^
// |
// = note: required by `zerocopy::Unaligned`
// A visitor which is used to walk a field's type and determine whether any
// of its definition is based on the type or lifetime parameters on a type.
struct FromTypeParamVisit<'a, 'b>(&'a Punctuated<GenericParam, Comma>, &'b mut bool);
impl<'a, 'b> Visit<'a> for FromTypeParamVisit<'a, 'b> {
fn visit_type_path(&mut self, i: &'a TypePath) {
visit::visit_type_path(self, i);
if self.0.iter().any(|param| {
if let GenericParam::Type(param) = param {
i.path.segments.first().unwrap().ident == param.ident
} else {
false
}
}) {
*self.1 = true;
}
}
fn visit_lifetime(&mut self, i: &'a Lifetime) {
visit::visit_lifetime(self, i);
if self.0.iter().any(|param| {
if let GenericParam::Lifetime(param) = param {
param.lifetime.ident == i.ident
} else {
false
}
}) {
*self.1 = true;
}
}
}
// Whether this type is based on one of the type parameters. E.g., given the
// type parameters `<T>`, `T`, `T::Foo`, and `(T::Foo, String)` are all
// based on the type parameters, while `String` and `(String, Box<()>)` are
// not.
let is_from_type_param = |ty: &Type| {
let mut ret = false;
FromTypeParamVisit(&input.generics.params, &mut ret).visit_type(ty);
ret
};
let trait_ident = Ident::new(trait_name, Span::call_site());
let field_types = data.nested_types();
let type_param_field_types = field_types.iter().filter(|ty| is_from_type_param(ty));
let non_type_param_field_types = field_types.iter().filter(|ty| !is_from_type_param(ty));
// Add a new set of where clause predicates of the form `T: Trait` for each
// of the types of the struct's fields (but only the ones whose types are
// based on one of the type parameters).
let mut generics = input.generics.clone();
let where_clause = generics.make_where_clause();
if require_trait_bound {
for ty in type_param_field_types {
let bound = parse_quote!(#ty: zerocopy::#trait_ident);
where_clause.predicates.push(bound);
}
}
let type_ident = &input.ident;
// The parameters with trait bounds, but without type defaults.
let params = input.generics.params.clone().into_iter().map(|mut param| {
match &mut param {
GenericParam::Type(ty) => ty.default = None,
GenericParam::Const(cnst) => cnst.default = None,
GenericParam::Lifetime(_) => {}
}
quote!(#param)
});
// The identifiers of the parameters without trait bounds or type defaults.
let param_idents = input.generics.params.iter().map(|param| match param {
GenericParam::Type(ty) => {
let ident = &ty.ident;
quote!(#ident)
}
GenericParam::Lifetime(l) => quote!(#l),
GenericParam::Const(cnst) => quote!(#cnst),
});
let trait_bound_body = if require_trait_bound {
let implements_type_ident =
Ident::new(format!("Implements{}", trait_ident).as_str(), Span::call_site());
let implements_type_tokens = quote!(#implements_type_ident);
let types = non_type_param_field_types.map(|ty| quote!(#implements_type_tokens<#ty>));
quote!(
// A type with a type parameter that must implement #trait_ident
struct #implements_type_ident<F: ?Sized + zerocopy::#trait_ident>(::core::marker::PhantomData<F>);
// For each field type, an instantiation that won't type check if
// that type doesn't implement #trait_ident
#(let _: #types;)*
)
} else {
quote!()
};
let size_check_body = if require_size_check && !field_types.is_empty() {
quote!(
const HAS_PADDING: bool = core::mem::size_of::<#type_ident>() != #(core::mem::size_of::<#field_types>())+*;
let _: [(); 1/(1 - HAS_PADDING as usize)];
)
} else {
quote!()
};
quote! {
unsafe impl < #(#params),* > zerocopy::#trait_ident for #type_ident < #(#param_idents),* > #where_clause {
fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {
#trait_bound_body
#size_check_body
}
}
}
}
fn print_all_errors(errors: Vec<Error>) -> proc_macro2::TokenStream {
errors.iter().map(Error::to_compile_error).collect()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_config_repr_orderings() {
// Validate that the repr lists in the various configs are in the
// canonical order. If they aren't, then our algorithm to look up in
// those lists won't work.
// TODO(joshlf): Remove once the is_sorted method is stabilized
// (issue #53485).
fn is_sorted_and_deduped<T: Clone + Ord>(ts: &[T]) -> bool {
let mut sorted = ts.to_vec();
sorted.sort();
sorted.dedup();
ts == sorted.as_slice()
}
fn elements_are_sorted_and_deduped<T: Clone + Ord>(lists: &[&[T]]) -> bool {
lists.iter().all(|list| is_sorted_and_deduped(*list))
}
fn config_is_sorted<T: KindRepr + Clone>(config: &Config<T>) -> bool {
elements_are_sorted_and_deduped(&config.allowed_combinations)
&& elements_are_sorted_and_deduped(&config.disallowed_but_legal_combinations)
}
assert!(config_is_sorted(&STRUCT_UNALIGNED_CFG));
assert!(config_is_sorted(&ENUM_FROM_BYTES_CFG));
assert!(config_is_sorted(&ENUM_UNALIGNED_CFG));
}
#[test]
fn test_config_repr_no_overlap() {
// Validate that no set of reprs appears in both th allowed_combinations
// and disallowed_but_legal_combinations lists.
fn overlap<T: Eq>(a: &[T], b: &[T]) -> bool {
a.iter().any(|elem| b.contains(elem))
}
fn | <T: KindRepr + Eq>(config: &Config<T>) -> bool {
overlap(config.allowed_combinations, config.disallowed_but_legal_combinations)
}
assert!(!config_overlaps(&STRUCT_UNALIGNED_CFG));
assert!(!config_overlaps(&ENUM_FROM_BYTES_CFG));
assert!(!config_overlaps(&ENUM_UNALIGNED_CFG));
}
}
| config_overlaps | identifier_name |
lib.rs | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#![recursion_limit = "128"]
mod ext;
mod repr;
use proc_macro2::Span;
use syn::visit::{self, Visit};
use syn::{
parse_quote, punctuated::Punctuated, token::Comma, Data, DataEnum, DataStruct, DeriveInput,
Error, GenericParam, Ident, Lifetime, Type, TypePath,
};
use synstructure::{decl_derive, quote, Structure};
use ext::*;
use repr::*;
// TODO(joshlf): Some errors could be made better if we could add multiple lines
// of error output like this:
//
// error: unsupported representation
// --> enum.rs:28:8
// |
// 28 | #[repr(transparent)]
// |
// help: required by the derive of FromBytes
//
// Instead, we have more verbose error messages like "unsupported representation
// for deriving FromBytes, AsBytes, or Unaligned on an enum"
//
// This will probably require Span::error
// (https://doc.rust-lang.org/nightly/proc_macro/struct.Span.html#method.error),
// which is currently unstable. Revisit this once it's stable.
decl_derive!([FromBytes] => derive_from_bytes);
decl_derive!([AsBytes] => derive_as_bytes);
decl_derive!([Unaligned] => derive_unaligned);
fn derive_from_bytes(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_from_bytes_struct(&s, strct),
Data::Enum(enm) => derive_from_bytes_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
fn derive_as_bytes(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_as_bytes_struct(&s, strct),
Data::Enum(enm) => derive_as_bytes_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
fn derive_unaligned(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_unaligned_struct(&s, strct),
Data::Enum(enm) => derive_unaligned_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
// Unwrap a Result<_, Vec<Error>>, converting any Err value into a TokenStream
// and returning it.
macro_rules! try_or_print {
($e:expr) => {
match $e {
Ok(x) => x,
Err(errors) => return print_all_errors(errors),
}
};
}
// A struct is FromBytes if:
// - all fields are FromBytes
fn derive_from_bytes_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
impl_block(s.ast(), strct, "FromBytes", true, false)
}
// An enum is FromBytes if:
// - Every possible bit pattern must be valid, which means that every bit
// pattern must correspond to a different enum variant. Thus, for an enum
// whose layout takes up N bytes, there must be 2^N variants.
// - Since we must know N, only representations which guarantee the layout's
// size are allowed. These are repr(uN) and repr(iN) (repr(C) implies an
// implementation-defined size). size and isize technically guarantee the
// layout's size, but would require us to know how large those are on the
// target platform. This isn't terribly difficult - we could emit a const
// expression that could call core::mem::size_of in order to determine the
// size and check against the number of enum variants, but a) this would be
// platform-specific and, b) even on Rust's smallest bit width platform (32),
// this would require ~4 billion enum variants, which obviously isn't a thing.
fn derive_from_bytes_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement FromBytes")
.to_compile_error();
}
let reprs = try_or_print!(ENUM_FROM_BYTES_CFG.validate_reprs(s.ast()));
let variants_required = match reprs.as_slice() {
[EnumRepr::U8] | [EnumRepr::I8] => 1usize << 8,
[EnumRepr::U16] | [EnumRepr::I16] => 1usize << 16,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
if enm.variants.len() != variants_required {
return Error::new_spanned(
s.ast(),
format!(
"FromBytes only supported on {} enum with {} variants",
reprs[0], variants_required
),
)
.to_compile_error();
}
impl_block(s.ast(), enm, "FromBytes", true, false)
}
#[rustfmt::skip]
const ENUM_FROM_BYTES_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
allowed_combinations_message: r#"FromBytes requires repr of "u8", "u16", "i8", or "i16""#,
derive_unaligned: false,
allowed_combinations: &[
&[U8],
&[U16],
&[I8],
&[I16],
],
disallowed_but_legal_combinations: &[
&[C],
&[U32],
&[I32],
&[U64],
&[I64],
&[Usize],
&[Isize],
],
}
};
// A struct is AsBytes if:
// - all fields are AsBytes
// - repr(C) or repr(transparent) and
// - no padding (size of struct equals sum of size of field types)
// - repr(packed)
fn derive_as_bytes_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream |
#[rustfmt::skip]
const STRUCT_AS_BYTES_CFG: Config<StructRepr> = {
use StructRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message: r#"AsBytes requires repr of "C", "transparent", or "packed""#,
derive_unaligned: false,
allowed_combinations: &[
&[C],
&[Transparent],
&[C, Packed],
&[Packed],
],
disallowed_but_legal_combinations: &[],
}
};
// An enum is AsBytes if it is C-like and has a defined repr
fn derive_as_bytes_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement AsBytes")
.to_compile_error();
}
// We don't care what the repr is; we only care that it is one of the
// allowed ones.
try_or_print!(ENUM_AS_BYTES_CFG.validate_reprs(s.ast()));
impl_block(s.ast(), enm, "AsBytes", false, false)
}
#[rustfmt::skip]
const ENUM_AS_BYTES_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message: r#"AsBytes requires repr of "C", "u8", "u16", "u32", "u64", "usize", "i8", "i16", "i32", "i64", or "isize""#,
derive_unaligned: false,
allowed_combinations: &[
&[C],
&[U8],
&[U16],
&[I8],
&[I16],
&[U32],
&[I32],
&[U64],
&[I64],
&[Usize],
&[Isize],
],
disallowed_but_legal_combinations: &[],
}
};
// A struct is Unaligned if:
// - repr(align) is no more than 1 and either
// - repr(C) or repr(transparent) and
// - all fields Unaligned
// - repr(packed)
fn derive_unaligned_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
let reprs = try_or_print!(STRUCT_UNALIGNED_CFG.validate_reprs(s.ast()));
let require_trait_bound = match reprs.as_slice() {
[StructRepr::C] | [StructRepr::Transparent] => true,
[StructRepr::Packed] | [StructRepr::C, StructRepr::Packed] => false,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
impl_block(s.ast(), strct, "Unaligned", require_trait_bound, false)
}
#[rustfmt::skip]
const STRUCT_UNALIGNED_CFG: Config<StructRepr> = {
use StructRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message:
r#"Unaligned requires either a) repr "C" or "transparent" with all fields implementing Unaligned or, b) repr "packed""#,
derive_unaligned: true,
allowed_combinations: &[
&[C],
&[Transparent],
&[Packed],
&[C, Packed],
],
disallowed_but_legal_combinations: &[],
}
};
// An enum is Unaligned if:
// - No repr(align(N > 1))
// - repr(u8) or repr(i8)
fn derive_unaligned_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement Unaligned")
.to_compile_error();
}
// The only valid reprs are u8 and i8, and optionally align(1). We don't
// actually care what the reprs are so long as they satisfy that
// requirement.
try_or_print!(ENUM_UNALIGNED_CFG.validate_reprs(s.ast()));
// NOTE: C-like enums cannot currently have type parameters, so this value
// of true for require_trait_bounds doesn't really do anything. But it's
// marginally more future-proof in case that restriction is lifted in the
// future.
impl_block(s.ast(), enm, "Unaligned", true, false)
}
#[rustfmt::skip]
const ENUM_UNALIGNED_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
allowed_combinations_message:
r#"Unaligned requires repr of "u8" or "i8", and no alignment (i.e., repr(align(N > 1)))"#,
derive_unaligned: true,
allowed_combinations: &[
&[U8],
&[I8],
],
disallowed_but_legal_combinations: &[
&[C],
&[U16],
&[U32],
&[U64],
&[Usize],
&[I16],
&[I32],
&[I64],
&[Isize],
],
}
};
fn impl_block<D: DataExt>(
input: &DeriveInput,
data: &D,
trait_name: &str,
require_trait_bound: bool,
require_size_check: bool,
) -> proc_macro2::TokenStream {
// In this documentation, we will refer to this hypothetical struct:
//
// #[derive(FromBytes)]
// struct Foo<T, I: Iterator>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// {
// a: u8,
// b: T,
// c: I::Item,
// }
//
// First, we extract the field types, which in this case are u8, T, and
// I::Item. We use the names of the type parameters to split the field types
// into two sets - a set of types which are based on the type parameters,
// and a set of types which are not. First, we re-use the existing
// parameters and where clauses, generating an impl block like:
//
// impl<T, I: Iterator> FromBytes for Foo<T, I>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// {
// }
//
// Then, we use the list of types which are based on the type parameters to
// generate new entries in the where clause:
//
// impl<T, I: Iterator> FromBytes for Foo<T, I>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// T: FromBytes,
// I::Item: FromBytes,
// {
// }
//
// Finally, we use a different technique to generate the bounds for the types
// which are not based on type parameters:
//
//
// fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {
// struct ImplementsFromBytes<F: ?Sized + FromBytes>(PhantomData<F>);
// let _: ImplementsFromBytes<u8>;
// }
//
// It would be easier to put all types in the where clause, but that won't
// work until the trivial_bounds feature is stabilized (#48214).
//
// NOTE: It is standard practice to only emit bounds for the type parameters
// themselves, not for field types based on those parameters (e.g., `T` vs
// `T::Foo`). For a discussion of why this is standard practice, see
// https://github.com/rust-lang/rust/issues/26925.
//
// The reason we diverge from this standard is that doing it that way for us
// would be unsound. E.g., consider a type, `T` where `T: FromBytes` but
// `T::Foo: !FromBytes`. It would not be sound for us to accept a type with
// a `T::Foo` field as `FromBytes` simply because `T: FromBytes`.
//
// While there's no getting around this requirement for us, it does have
// some pretty serious downsides that are worth calling out:
//
// 1. You lose the ability to have fields of generic type with reduced visibility.
//
// #[derive(Unaligned)]
// #[repr(C)]
// pub struct Public<T>(Private<T>);
//
// #[derive(Unaligned)]
// #[repr(C)]
// struct Private<T>(T);
//
//
// warning: private type `Private<T>` in public interface (error E0446)
// --> src/main.rs:6:10
// |
// 6 | #[derive(Unaligned)]
// | ^^^^^^^^^
// |
// = note: #[warn(private_in_public)] on by default
// = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
// = note: for more information, see issue #34537 <https://github.com/rust-lang/rust/issues/34537>
//
// 2. When lifetimes are involved, the trait solver ties itself in knots.
//
// #[derive(Unaligned)]
// #[repr(C)]
// struct Dup<'a, 'b> {
// a: PhantomData<&'a u8>,
// b: PhantomData<&'b u8>,
// }
//
//
// error[E0283]: type annotations required: cannot resolve `core::marker::PhantomData<&'a u8>: zerocopy::Unaligned`
// --> src/main.rs:6:10
// |
// 6 | #[derive(Unaligned)]
// | ^^^^^^^^^
// |
// = note: required by `zerocopy::Unaligned`
// A visitor which is used to walk a field's type and determine whether any
// of its definition is based on the type or lifetime parameters on a type.
struct FromTypeParamVisit<'a, 'b>(&'a Punctuated<GenericParam, Comma>, &'b mut bool);
impl<'a, 'b> Visit<'a> for FromTypeParamVisit<'a, 'b> {
fn visit_type_path(&mut self, i: &'a TypePath) {
visit::visit_type_path(self, i);
if self.0.iter().any(|param| {
if let GenericParam::Type(param) = param {
i.path.segments.first().unwrap().ident == param.ident
} else {
false
}
}) {
*self.1 = true;
}
}
fn visit_lifetime(&mut self, i: &'a Lifetime) {
visit::visit_lifetime(self, i);
if self.0.iter().any(|param| {
if let GenericParam::Lifetime(param) = param {
param.lifetime.ident == i.ident
} else {
false
}
}) {
*self.1 = true;
}
}
}
// Whether this type is based on one of the type parameters. E.g., given the
// type parameters `<T>`, `T`, `T::Foo`, and `(T::Foo, String)` are all
// based on the type parameters, while `String` and `(String, Box<()>)` are
// not.
let is_from_type_param = |ty: &Type| {
let mut ret = false;
FromTypeParamVisit(&input.generics.params, &mut ret).visit_type(ty);
ret
};
let trait_ident = Ident::new(trait_name, Span::call_site());
let field_types = data.nested_types();
let type_param_field_types = field_types.iter().filter(|ty| is_from_type_param(ty));
let non_type_param_field_types = field_types.iter().filter(|ty| !is_from_type_param(ty));
// Add a new set of where clause predicates of the form `T: Trait` for each
// of the types of the struct's fields (but only the ones whose types are
// based on one of the type parameters).
let mut generics = input.generics.clone();
let where_clause = generics.make_where_clause();
if require_trait_bound {
for ty in type_param_field_types {
let bound = parse_quote!(#ty: zerocopy::#trait_ident);
where_clause.predicates.push(bound);
}
}
let type_ident = &input.ident;
// The parameters with trait bounds, but without type defaults.
let params = input.generics.params.clone().into_iter().map(|mut param| {
match &mut param {
GenericParam::Type(ty) => ty.default = None,
GenericParam::Const(cnst) => cnst.default = None,
GenericParam::Lifetime(_) => {}
}
quote!(#param)
});
// The identifiers of the parameters without trait bounds or type defaults.
let param_idents = input.generics.params.iter().map(|param| match param {
GenericParam::Type(ty) => {
let ident = &ty.ident;
quote!(#ident)
}
GenericParam::Lifetime(l) => quote!(#l),
GenericParam::Const(cnst) => quote!(#cnst),
});
let trait_bound_body = if require_trait_bound {
let implements_type_ident =
Ident::new(format!("Implements{}", trait_ident).as_str(), Span::call_site());
let implements_type_tokens = quote!(#implements_type_ident);
let types = non_type_param_field_types.map(|ty| quote!(#implements_type_tokens<#ty>));
quote!(
// A type with a type parameter that must implement #trait_ident
struct #implements_type_ident<F: ?Sized + zerocopy::#trait_ident>(::core::marker::PhantomData<F>);
// For each field type, an instantiation that won't type check if
// that type doesn't implement #trait_ident
#(let _: #types;)*
)
} else {
quote!()
};
let size_check_body = if require_size_check && !field_types.is_empty() {
quote!(
const HAS_PADDING: bool = core::mem::size_of::<#type_ident>() != #(core::mem::size_of::<#field_types>())+*;
let _: [(); 1/(1 - HAS_PADDING as usize)];
)
} else {
quote!()
};
quote! {
unsafe impl < #(#params),* > zerocopy::#trait_ident for #type_ident < #(#param_idents),* > #where_clause {
fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {
#trait_bound_body
#size_check_body
}
}
}
}
fn print_all_errors(errors: Vec<Error>) -> proc_macro2::TokenStream {
errors.iter().map(Error::to_compile_error).collect()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_config_repr_orderings() {
// Validate that the repr lists in the various configs are in the
// canonical order. If they aren't, then our algorithm to look up in
// those lists won't work.
// TODO(joshlf): Remove once the is_sorted method is stabilized
// (issue #53485).
fn is_sorted_and_deduped<T: Clone + Ord>(ts: &[T]) -> bool {
let mut sorted = ts.to_vec();
sorted.sort();
sorted.dedup();
ts == sorted.as_slice()
}
fn elements_are_sorted_and_deduped<T: Clone + Ord>(lists: &[&[T]]) -> bool {
lists.iter().all(|list| is_sorted_and_deduped(*list))
}
fn config_is_sorted<T: KindRepr + Clone>(config: &Config<T>) -> bool {
elements_are_sorted_and_deduped(&config.allowed_combinations)
&& elements_are_sorted_and_deduped(&config.disallowed_but_legal_combinations)
}
assert!(config_is_sorted(&STRUCT_UNALIGNED_CFG));
assert!(config_is_sorted(&ENUM_FROM_BYTES_CFG));
assert!(config_is_sorted(&ENUM_UNALIGNED_CFG));
}
#[test]
fn test_config_repr_no_overlap() {
// Validate that no set of reprs appears in both th allowed_combinations
// and disallowed_but_legal_combinations lists.
fn overlap<T: Eq>(a: &[T], b: &[T]) -> bool {
a.iter().any(|elem| b.contains(elem))
}
fn config_overlaps<T: KindRepr + Eq>(config: &Config<T>) -> bool {
overlap(config.allowed_combinations, config.disallowed_but_legal_combinations)
}
assert!(!config_overlaps(&STRUCT_UNALIGNED_CFG));
assert!(!config_overlaps(&ENUM_FROM_BYTES_CFG));
assert!(!config_overlaps(&ENUM_UNALIGNED_CFG));
}
}
| {
// TODO(joshlf): Support type parameters.
if !s.ast().generics.params.is_empty() {
return Error::new(Span::call_site(), "unsupported on types with type parameters")
.to_compile_error();
}
let reprs = try_or_print!(STRUCT_AS_BYTES_CFG.validate_reprs(s.ast()));
let require_size_check = match reprs.as_slice() {
[StructRepr::C] | [StructRepr::Transparent] => true,
[StructRepr::Packed] | [StructRepr::C, StructRepr::Packed] => false,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
impl_block(s.ast(), strct, "AsBytes", true, require_size_check)
} | identifier_body |
lib.rs | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#![recursion_limit = "128"]
mod ext;
mod repr;
use proc_macro2::Span;
use syn::visit::{self, Visit};
use syn::{
parse_quote, punctuated::Punctuated, token::Comma, Data, DataEnum, DataStruct, DeriveInput,
Error, GenericParam, Ident, Lifetime, Type, TypePath,
};
use synstructure::{decl_derive, quote, Structure};
use ext::*;
use repr::*;
// TODO(joshlf): Some errors could be made better if we could add multiple lines
// of error output like this:
//
// error: unsupported representation
// --> enum.rs:28:8
// |
// 28 | #[repr(transparent)]
// |
// help: required by the derive of FromBytes
//
// Instead, we have more verbose error messages like "unsupported representation
// for deriving FromBytes, AsBytes, or Unaligned on an enum"
//
// This will probably require Span::error
// (https://doc.rust-lang.org/nightly/proc_macro/struct.Span.html#method.error),
// which is currently unstable. Revisit this once it's stable.
decl_derive!([FromBytes] => derive_from_bytes);
decl_derive!([AsBytes] => derive_as_bytes);
decl_derive!([Unaligned] => derive_unaligned);
fn derive_from_bytes(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_from_bytes_struct(&s, strct),
Data::Enum(enm) => derive_from_bytes_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
fn derive_as_bytes(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_as_bytes_struct(&s, strct),
Data::Enum(enm) => derive_as_bytes_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
fn derive_unaligned(s: Structure<'_>) -> proc_macro2::TokenStream {
match &s.ast().data {
Data::Struct(strct) => derive_unaligned_struct(&s, strct),
Data::Enum(enm) => derive_unaligned_enum(&s, enm),
Data::Union(_) => Error::new(Span::call_site(), "unsupported on unions").to_compile_error(),
}
}
// Unwrap a Result<_, Vec<Error>>, converting any Err value into a TokenStream
// and returning it.
macro_rules! try_or_print {
($e:expr) => {
match $e {
Ok(x) => x,
Err(errors) => return print_all_errors(errors),
}
};
}
// A struct is FromBytes if:
// - all fields are FromBytes
fn derive_from_bytes_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
impl_block(s.ast(), strct, "FromBytes", true, false)
}
// An enum is FromBytes if:
// - Every possible bit pattern must be valid, which means that every bit
// pattern must correspond to a different enum variant. Thus, for an enum
// whose layout takes up N bytes, there must be 2^N variants.
// - Since we must know N, only representations which guarantee the layout's
// size are allowed. These are repr(uN) and repr(iN) (repr(C) implies an
// implementation-defined size). size and isize technically guarantee the
// layout's size, but would require us to know how large those are on the
// target platform. This isn't terribly difficult - we could emit a const
// expression that could call core::mem::size_of in order to determine the
// size and check against the number of enum variants, but a) this would be
// platform-specific and, b) even on Rust's smallest bit width platform (32),
// this would require ~4 billion enum variants, which obviously isn't a thing.
fn derive_from_bytes_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement FromBytes")
.to_compile_error();
}
let reprs = try_or_print!(ENUM_FROM_BYTES_CFG.validate_reprs(s.ast()));
let variants_required = match reprs.as_slice() {
[EnumRepr::U8] | [EnumRepr::I8] => 1usize << 8,
[EnumRepr::U16] | [EnumRepr::I16] => 1usize << 16,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
if enm.variants.len() != variants_required {
return Error::new_spanned(
s.ast(),
format!(
"FromBytes only supported on {} enum with {} variants",
reprs[0], variants_required
),
)
.to_compile_error();
}
impl_block(s.ast(), enm, "FromBytes", true, false)
}
#[rustfmt::skip]
const ENUM_FROM_BYTES_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
allowed_combinations_message: r#"FromBytes requires repr of "u8", "u16", "i8", or "i16""#,
derive_unaligned: false,
allowed_combinations: &[
&[U8],
&[U16],
&[I8],
&[I16],
],
disallowed_but_legal_combinations: &[
&[C],
&[U32],
&[I32],
&[U64],
&[I64],
&[Usize],
&[Isize],
],
}
};
// A struct is AsBytes if:
// - all fields are AsBytes
// - repr(C) or repr(transparent) and
// - no padding (size of struct equals sum of size of field types)
// - repr(packed)
fn derive_as_bytes_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
// TODO(joshlf): Support type parameters.
if !s.ast().generics.params.is_empty() {
return Error::new(Span::call_site(), "unsupported on types with type parameters")
.to_compile_error();
}
let reprs = try_or_print!(STRUCT_AS_BYTES_CFG.validate_reprs(s.ast()));
let require_size_check = match reprs.as_slice() {
[StructRepr::C] | [StructRepr::Transparent] => true,
[StructRepr::Packed] | [StructRepr::C, StructRepr::Packed] => false,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
impl_block(s.ast(), strct, "AsBytes", true, require_size_check)
}
#[rustfmt::skip]
const STRUCT_AS_BYTES_CFG: Config<StructRepr> = {
use StructRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message: r#"AsBytes requires repr of "C", "transparent", or "packed""#,
derive_unaligned: false,
allowed_combinations: &[
&[C],
&[Transparent],
&[C, Packed],
&[Packed],
],
disallowed_but_legal_combinations: &[],
}
};
// An enum is AsBytes if it is C-like and has a defined repr
fn derive_as_bytes_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement AsBytes")
.to_compile_error();
}
// We don't care what the repr is; we only care that it is one of the
// allowed ones.
try_or_print!(ENUM_AS_BYTES_CFG.validate_reprs(s.ast()));
impl_block(s.ast(), enm, "AsBytes", false, false)
}
#[rustfmt::skip]
const ENUM_AS_BYTES_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message: r#"AsBytes requires repr of "C", "u8", "u16", "u32", "u64", "usize", "i8", "i16", "i32", "i64", or "isize""#,
derive_unaligned: false,
allowed_combinations: &[
&[C],
&[U8],
&[U16],
&[I8],
&[I16],
&[U32],
&[I32],
&[U64],
&[I64],
&[Usize],
&[Isize],
],
disallowed_but_legal_combinations: &[],
}
};
// A struct is Unaligned if:
// - repr(align) is no more than 1 and either
// - repr(C) or repr(transparent) and
// - all fields Unaligned
// - repr(packed)
fn derive_unaligned_struct(s: &Structure<'_>, strct: &DataStruct) -> proc_macro2::TokenStream {
let reprs = try_or_print!(STRUCT_UNALIGNED_CFG.validate_reprs(s.ast()));
let require_trait_bound = match reprs.as_slice() {
[StructRepr::C] | [StructRepr::Transparent] => true,
[StructRepr::Packed] | [StructRepr::C, StructRepr::Packed] => false,
// validate_reprs has already validated that it's one of the preceding
// patterns
_ => unreachable!(),
};
impl_block(s.ast(), strct, "Unaligned", require_trait_bound, false)
}
#[rustfmt::skip]
const STRUCT_UNALIGNED_CFG: Config<StructRepr> = {
use StructRepr::*;
Config {
// NOTE: Since disallowed_but_legal_combinations is empty, this message
// will never actually be emitted.
allowed_combinations_message:
r#"Unaligned requires either a) repr "C" or "transparent" with all fields implementing Unaligned or, b) repr "packed""#,
derive_unaligned: true,
allowed_combinations: &[
&[C],
&[Transparent],
&[Packed],
&[C, Packed],
],
disallowed_but_legal_combinations: &[],
}
};
// An enum is Unaligned if:
// - No repr(align(N > 1))
// - repr(u8) or repr(i8)
fn derive_unaligned_enum(s: &Structure<'_>, enm: &DataEnum) -> proc_macro2::TokenStream {
if !enm.is_c_like() {
return Error::new_spanned(s.ast(), "only C-like enums can implement Unaligned")
.to_compile_error();
}
// The only valid reprs are u8 and i8, and optionally align(1). We don't
// actually care what the reprs are so long as they satisfy that
// requirement.
try_or_print!(ENUM_UNALIGNED_CFG.validate_reprs(s.ast()));
// NOTE: C-like enums cannot currently have type parameters, so this value
// of true for require_trait_bounds doesn't really do anything. But it's
// marginally more future-proof in case that restriction is lifted in the
// future.
impl_block(s.ast(), enm, "Unaligned", true, false)
}
#[rustfmt::skip]
const ENUM_UNALIGNED_CFG: Config<EnumRepr> = {
use EnumRepr::*;
Config {
allowed_combinations_message:
r#"Unaligned requires repr of "u8" or "i8", and no alignment (i.e., repr(align(N > 1)))"#,
derive_unaligned: true,
allowed_combinations: &[
&[U8],
&[I8],
],
disallowed_but_legal_combinations: &[
&[C],
&[U16],
&[U32],
&[U64],
&[Usize],
&[I16],
&[I32],
&[I64],
&[Isize],
],
}
};
fn impl_block<D: DataExt>(
input: &DeriveInput,
data: &D,
trait_name: &str,
require_trait_bound: bool,
require_size_check: bool,
) -> proc_macro2::TokenStream {
// In this documentation, we will refer to this hypothetical struct:
//
// #[derive(FromBytes)]
// struct Foo<T, I: Iterator>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// {
// a: u8,
// b: T,
// c: I::Item,
// }
//
// First, we extract the field types, which in this case are u8, T, and
// I::Item. We use the names of the type parameters to split the field types
// into two sets - a set of types which are based on the type parameters,
// and a set of types which are not. First, we re-use the existing
// parameters and where clauses, generating an impl block like:
//
// impl<T, I: Iterator> FromBytes for Foo<T, I>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// {
// }
//
// Then, we use the list of types which are based on the type parameters to
// generate new entries in the where clause:
//
// impl<T, I: Iterator> FromBytes for Foo<T, I>
// where
// T: Copy,
// I: Clone,
// I::Item: Clone,
// T: FromBytes,
// I::Item: FromBytes,
// {
// }
//
// Finally, we use a different technique to generate the bounds for the types
// which are not based on type parameters:
//
//
// fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {
// struct ImplementsFromBytes<F: ?Sized + FromBytes>(PhantomData<F>);
// let _: ImplementsFromBytes<u8>;
// }
//
// It would be easier to put all types in the where clause, but that won't
// work until the trivial_bounds feature is stabilized (#48214).
//
// NOTE: It is standard practice to only emit bounds for the type parameters
// themselves, not for field types based on those parameters (e.g., `T` vs
// `T::Foo`). For a discussion of why this is standard practice, see
// https://github.com/rust-lang/rust/issues/26925.
//
// The reason we diverge from this standard is that doing it that way for us
// would be unsound. E.g., consider a type, `T` where `T: FromBytes` but
// `T::Foo: !FromBytes`. It would not be sound for us to accept a type with
// a `T::Foo` field as `FromBytes` simply because `T: FromBytes`.
//
// While there's no getting around this requirement for us, it does have
// some pretty serious downsides that are worth calling out:
//
// 1. You lose the ability to have fields of generic type with reduced visibility.
//
// #[derive(Unaligned)]
// #[repr(C)]
// pub struct Public<T>(Private<T>);
//
// #[derive(Unaligned)]
// #[repr(C)]
// struct Private<T>(T);
//
//
// warning: private type `Private<T>` in public interface (error E0446)
// --> src/main.rs:6:10
// |
// 6 | #[derive(Unaligned)]
// | ^^^^^^^^^
// |
// = note: #[warn(private_in_public)] on by default
// = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
// = note: for more information, see issue #34537 <https://github.com/rust-lang/rust/issues/34537>
//
// 2. When lifetimes are involved, the trait solver ties itself in knots.
//
// #[derive(Unaligned)]
// #[repr(C)]
// struct Dup<'a, 'b> {
// a: PhantomData<&'a u8>,
// b: PhantomData<&'b u8>,
// }
//
//
// error[E0283]: type annotations required: cannot resolve `core::marker::PhantomData<&'a u8>: zerocopy::Unaligned`
// --> src/main.rs:6:10
// |
// 6 | #[derive(Unaligned)]
// | ^^^^^^^^^
// |
// = note: required by `zerocopy::Unaligned`
// A visitor which is used to walk a field's type and determine whether any
// of its definition is based on the type or lifetime parameters on a type.
struct FromTypeParamVisit<'a, 'b>(&'a Punctuated<GenericParam, Comma>, &'b mut bool);
impl<'a, 'b> Visit<'a> for FromTypeParamVisit<'a, 'b> {
fn visit_type_path(&mut self, i: &'a TypePath) {
visit::visit_type_path(self, i);
if self.0.iter().any(|param| {
if let GenericParam::Type(param) = param {
i.path.segments.first().unwrap().ident == param.ident
} else {
false
}
}) {
*self.1 = true;
}
}
fn visit_lifetime(&mut self, i: &'a Lifetime) {
visit::visit_lifetime(self, i);
if self.0.iter().any(|param| {
if let GenericParam::Lifetime(param) = param {
param.lifetime.ident == i.ident
} else {
false
}
}) {
*self.1 = true;
}
}
}
// Whether this type is based on one of the type parameters. E.g., given the
// type parameters `<T>`, `T`, `T::Foo`, and `(T::Foo, String)` are all
// based on the type parameters, while `String` and `(String, Box<()>)` are
// not.
let is_from_type_param = |ty: &Type| {
let mut ret = false;
FromTypeParamVisit(&input.generics.params, &mut ret).visit_type(ty);
ret
};
let trait_ident = Ident::new(trait_name, Span::call_site());
let field_types = data.nested_types();
let type_param_field_types = field_types.iter().filter(|ty| is_from_type_param(ty));
let non_type_param_field_types = field_types.iter().filter(|ty| !is_from_type_param(ty));
// Add a new set of where clause predicates of the form `T: Trait` for each
// of the types of the struct's fields (but only the ones whose types are
// based on one of the type parameters).
let mut generics = input.generics.clone();
let where_clause = generics.make_where_clause();
if require_trait_bound {
for ty in type_param_field_types {
let bound = parse_quote!(#ty: zerocopy::#trait_ident);
where_clause.predicates.push(bound);
}
}
let type_ident = &input.ident;
// The parameters with trait bounds, but without type defaults.
let params = input.generics.params.clone().into_iter().map(|mut param| {
match &mut param {
GenericParam::Type(ty) => ty.default = None,
GenericParam::Const(cnst) => cnst.default = None,
GenericParam::Lifetime(_) => {}
}
quote!(#param)
});
// The identifiers of the parameters without trait bounds or type defaults.
let param_idents = input.generics.params.iter().map(|param| match param {
GenericParam::Type(ty) => {
let ident = &ty.ident;
quote!(#ident)
}
GenericParam::Lifetime(l) => quote!(#l),
GenericParam::Const(cnst) => quote!(#cnst),
});
let trait_bound_body = if require_trait_bound {
let implements_type_ident =
Ident::new(format!("Implements{}", trait_ident).as_str(), Span::call_site());
let implements_type_tokens = quote!(#implements_type_ident);
let types = non_type_param_field_types.map(|ty| quote!(#implements_type_tokens<#ty>));
quote!(
// A type with a type parameter that must implement #trait_ident
struct #implements_type_ident<F: ?Sized + zerocopy::#trait_ident>(::core::marker::PhantomData<F>);
// For each field type, an instantiation that won't type check if
// that type doesn't implement #trait_ident
#(let _: #types;)*
)
} else {
quote!()
};
let size_check_body = if require_size_check && !field_types.is_empty() {
quote!(
const HAS_PADDING: bool = core::mem::size_of::<#type_ident>() != #(core::mem::size_of::<#field_types>())+*;
let _: [(); 1/(1 - HAS_PADDING as usize)];
)
} else {
quote!()
};
quote! {
unsafe impl < #(#params),* > zerocopy::#trait_ident for #type_ident < #(#param_idents),* > #where_clause {
fn only_derive_is_allowed_to_implement_this_trait() where Self: Sized {
#trait_bound_body
#size_check_body
}
}
}
}
fn print_all_errors(errors: Vec<Error>) -> proc_macro2::TokenStream {
errors.iter().map(Error::to_compile_error).collect() | }
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_config_repr_orderings() {
// Validate that the repr lists in the various configs are in the
// canonical order. If they aren't, then our algorithm to look up in
// those lists won't work.
// TODO(joshlf): Remove once the is_sorted method is stabilized
// (issue #53485).
fn is_sorted_and_deduped<T: Clone + Ord>(ts: &[T]) -> bool {
let mut sorted = ts.to_vec();
sorted.sort();
sorted.dedup();
ts == sorted.as_slice()
}
fn elements_are_sorted_and_deduped<T: Clone + Ord>(lists: &[&[T]]) -> bool {
lists.iter().all(|list| is_sorted_and_deduped(*list))
}
fn config_is_sorted<T: KindRepr + Clone>(config: &Config<T>) -> bool {
elements_are_sorted_and_deduped(&config.allowed_combinations)
&& elements_are_sorted_and_deduped(&config.disallowed_but_legal_combinations)
}
assert!(config_is_sorted(&STRUCT_UNALIGNED_CFG));
assert!(config_is_sorted(&ENUM_FROM_BYTES_CFG));
assert!(config_is_sorted(&ENUM_UNALIGNED_CFG));
}
#[test]
fn test_config_repr_no_overlap() {
// Validate that no set of reprs appears in both th allowed_combinations
// and disallowed_but_legal_combinations lists.
fn overlap<T: Eq>(a: &[T], b: &[T]) -> bool {
a.iter().any(|elem| b.contains(elem))
}
fn config_overlaps<T: KindRepr + Eq>(config: &Config<T>) -> bool {
overlap(config.allowed_combinations, config.disallowed_but_legal_combinations)
}
assert!(!config_overlaps(&STRUCT_UNALIGNED_CFG));
assert!(!config_overlaps(&ENUM_FROM_BYTES_CFG));
assert!(!config_overlaps(&ENUM_UNALIGNED_CFG));
}
} | random_line_split |
|
couchpotato.js | var CouchPotato = new Class({
Implements: [Events, Options], | action: 'index',
params: {}
},
pages: [],
block: [],
initialize: function(){
var self = this;
self.global_events = {};
},
setup: function(options) {
var self = this;
self.setOptions(options);
self.c = $(document.body);
self.route = new Route(self.defaults);
self.createLayout();
self.createPages();
if(window.location.hash)
History.handleInitialState();
else
self.openPage(window.location.pathname);
History.addEvent('change', self.openPage.bind(self));
self.c.addEvent('click:relay(a[href^=/]:not([target]))', self.pushState.bind(self));
self.c.addEvent('click:relay(a[href^=http])', self.openDerefered.bind(self));
// Check if device is touchenabled
self.touch_device = 'ontouchstart' in window || navigator.msMaxTouchPoints;
if(self.touch_device)
self.c.addClass('touch_enabled');
},
getOption: function(name){
try {
return this.options[name];
}
catch(e){
return null
}
},
pushState: function(e){
if((!e.meta && Browser.Platform.mac) || (!e.control && !Browser.Platform.mac)){
(e).preventDefault();
var url = e.target.get('href');
if(History.getPath() != url)
History.push(url);
}
},
isMac: function(){
return Browser.Platform.mac
},
createLayout: function(){
var self = this;
self.block.header = new Block();
self.c.adopt(
$(self.block.header).addClass('header').adopt(
new Element('div').adopt(
self.block.navigation = new Block.Navigation(self, {}),
self.block.search = new Block.Search(self, {}),
self.block.more = new Block.Menu(self, {'button_class': 'icon2.cog'})
)
),
self.content = new Element('div.content'),
self.block.footer = new Block.Footer(self, {})
);
var setting_links = [
new Element('a', {
'text': 'About CouchPotato',
'href': App.createUrl('settings/about')
}),
new Element('a', {
'text': 'Check for Updates',
'events': {
'click': self.checkForUpdate.bind(self, null)
}
}),
new Element('span.separator'),
new Element('a', {
'text': 'Settings',
'href': App.createUrl('settings/general')
}),
new Element('a', {
'text': 'Logs',
'href': App.createUrl('log')
}),
new Element('span.separator'),
new Element('a', {
'text': 'Restart',
'events': {
'click': self.restartQA.bind(self)
}
}),
new Element('a', {
'text': 'Shutdown',
'events': {
'click': self.shutdownQA.bind(self)
}
})
];
setting_links.each(function(a){
self.block.more.addLink(a)
});
new ScrollSpy({
min: 10,
onLeave: function(){
$(self.block.header).removeClass('with_shadow')
},
onEnter: function(){
$(self.block.header).addClass('with_shadow')
}
})
},
createPages: function(){
var self = this;
Object.each(Page, function(page_class, class_name){
var pg = new Page[class_name](self, {});
self.pages[class_name] = pg;
$(pg).inject(self.content);
});
self.fireEvent('load');
},
openPage: function(url) {
var self = this;
self.route.parse();
var page_name = self.route.getPage().capitalize();
var action = self.route.getAction();
var params = self.route.getParams();
var current_url = self.route.getCurrentUrl();
if(current_url == self.current_url)
return;
if(self.current_page)
self.current_page.hide();
try {
var page = self.pages[page_name] || self.pages.Home;
page.open(action, params, current_url);
page.show();
}
catch(e){
console.error("Can't open page:" + url, e)
}
self.current_page = page;
self.current_url = current_url;
},
getBlock: function(block_name){
return this.block[block_name]
},
getPage: function(name){
return this.pages[name]
},
shutdown: function(){
var self = this;
self.blockPage('You have shutdown. This is what is supposed to happen ;)');
Api.request('app.shutdown', {
'onComplete': self.blockPage.bind(self)
});
self.checkAvailable(1000);
},
shutdownQA: function(){
var self = this;
var q = new Question('Are you sure you want to shutdown CouchPotato?', '', [{
'text': 'Shutdown',
'class': 'shutdown red',
'events': {
'click': function(e){
(e).preventDefault();
self.shutdown();
q.close.delay(100, q);
}
}
}, {
'text': 'No, nevah!',
'cancel': true
}]);
},
restart: function(message, title){
var self = this;
self.blockPage(message || 'Restarting... please wait. If this takes too long, something must have gone wrong.', title);
Api.request('app.restart');
self.checkAvailable(1000);
},
restartQA: function(e, message, title){
var self = this;
var q = new Question('Are you sure you want to restart CouchPotato?', '', [{
'text': 'Restart',
'class': 'restart orange',
'events': {
'click': function(e){
(e).preventDefault();
self.restart(message, title);
q.close.delay(100, q);
}
}
}, {
'text': 'No, nevah!',
'cancel': true
}]);
},
checkForUpdate: function(onComplete){
var self = this;
Updater.check(onComplete);
self.blockPage('Please wait. If this takes too long, something must have gone wrong.', 'Checking for updates');
self.checkAvailable(3000);
},
checkAvailable: function(delay, onAvailable){
var self = this;
(function(){
Api.request('app.available', {
'onFailure': function(){
self.checkAvailable.delay(1000, self, [delay, onAvailable]);
self.fireEvent('unload');
},
'onSuccess': function(){
if(onAvailable)
onAvailable();
self.unBlockPage();
self.fireEvent('reload');
}
});
}).delay(delay || 0)
},
blockPage: function(message, title){
var self = this;
self.unBlockPage();
self.mask = new Element('div.mask').adopt(
new Element('div').adopt(
new Element('h1', {'text': title || 'Unavailable'}),
new Element('div', {'text': message || 'Something must have crashed.. check the logs ;)'})
)
).fade('hide').inject(document.body).fade('in');
createSpinner(self.mask, {
'top': -50
});
},
unBlockPage: function(){
var self = this;
if(self.mask)
self.mask.get('tween').start('opacity', 0).chain(function(){
this.element.destroy()
});
},
createUrl: function(action, params){
return this.options.base_url + (action ? action+'/' : '') + (params ? '?'+Object.toQueryString(params) : '')
},
openDerefered: function(e, el){
(e).stop();
var url = 'http://www.dereferer.org/?' + el.get('href');
if(el.get('target') == '_blank' || (e.meta && Browser.Platform.mac) || (e.control && !Browser.Platform.mac))
window.open(url);
else
window.location = url;
},
createUserscriptButtons: function(){
var host_url = window.location.protocol + '//' + window.location.host;
return new Element('div.group_userscript').adopt(
new Element('a.userscript.button', {
'text': 'Install userscript',
'href': Api.createUrl('userscript.get')+randomString()+'/couchpotato.user.js',
'target': '_blank'
}),
new Element('span.or[text=or]'),
new Element('span.bookmarklet').adopt(
new Element('a.button.orange', {
'text': '+CouchPotato',
'href': "javascript:void((function(){var e=document.createElement('script');e.setAttribute('type','text/javascript');e.setAttribute('charset','UTF-8');e.setAttribute('src','" +
host_url + Api.createUrl('userscript.bookmark') +
"?host="+ encodeURI(host_url + Api.createUrl('userscript.get')+randomString()+'/') +
"&r='+Math.random()*99999999);document.body.appendChild(e)})());",
'target': '',
'events': {
'click': function(e){
(e).stop();
alert('Drag it to your bookmark ;)')
}
}
}),
new Element('span', {
'text': '⇽ Drag this to your bookmarks'
})
)
);
},
/*
* Global events
*/
on: function(name, handle){
var self = this;
if(!self.global_events[name])
self.global_events[name] = [];
self.global_events[name].push(handle);
},
trigger: function(name, args, on_complete){
var self = this;
if(!self.global_events[name]){ return; }
if(!on_complete && typeOf(args) == 'function'){
on_complete = args;
args = {};
}
// Create parallel callback
var callbacks = [];
self.global_events[name].each(function(handle, nr){
callbacks.push(function(callback){
var results = handle(args || {});
callback(null, results || null);
});
});
// Fire events
async.parallel(callbacks, function(err, results){
if(err) p(err);
if(on_complete)
on_complete(results);
});
},
off: function(name, handle){
var self = this;
if(!self.global_events[name]) return;
// Remove single
if(handle){
self.global_events[name] = self.global_events[name].erase(handle);
}
// Reset full event
else {
self.global_events[name] = [];
}
}
});
window.App = new CouchPotato();
var Route = new Class({
defaults: {},
page: '',
action: 'index',
params: {},
initialize: function(defaults){
var self = this;
self.defaults = defaults
},
parse: function(){
var self = this;
var rep = function (pa) {
return pa.replace(Api.getOption('url'), '/').replace(App.getOption('base_url'), '/')
};
var path = rep(History.getPath());
if(path == '/' && location.hash){
path = rep(location.hash.replace('#', '/'))
}
self.current = path.replace(/^\/+|\/+$/g, '');
var url = self.current.split('/');
self.page = (url.length > 0) ? url.shift() : self.defaults.page;
self.action = (url.length > 0) ? url.shift() : self.defaults.action;
self.params = Object.merge({}, self.defaults.params);
if(url.length > 1){
var key;
url.each(function(el, nr){
if(nr%2 == 0)
key = el;
else if(key) {
self.params[key] = el;
key = null
}
})
}
else if(url.length == 1){
self.params[url] = true;
}
return self
},
getPage: function(){
return this.page
},
getAction: function(){
return this.action
},
getParams: function(){
return this.params
},
getCurrentUrl: function(){
return this.current
},
get: function(param){
return this.params[param]
}
});
var p = function(){
if(typeof(console) !== 'undefined' && console != null)
console.log(arguments)
};
(function(){
var events;
var check = function(e) {
var target = $(e.target);
var parents = target.getParents();
events.each(function(item) {
var element = item.element;
if (element != target && !parents.contains(element))
item.fn.call(element, e);
});
};
Element.Events.outerClick = {
onAdd : function(fn) {
if (!events) {
document.addEvent('click', check);
events = [];
}
events.push( {
element : this,
fn : fn
});
},
onRemove : function(fn) {
events = events.filter(function(item) {
return item.element != this || item.fn != fn;
}, this);
if (!events.length) {
document.removeEvent('click', check);
events = null;
}
}
};
})();
function randomString(length, extra) {
var chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXTZabcdefghiklmnopqrstuvwxyz" + (extra ? '-._!@#$%^&*()+=' : '');
var stringLength = length || 8;
var randomString = '';
for (var i = 0; i < stringLength; i++) {
var rnum = Math.floor(Math.random() * chars.length);
randomString += chars.charAt(rnum);
}
return randomString;
}
(function(){
var keyPaths = [];
var saveKeyPath = function(path) {
keyPaths.push({
sign: (path[0] === '+' || path[0] === '-')? parseInt(path.shift()+1) : 1,
path: path
});
};
var valueOf = function(object, path) {
var ptr = object;
path.each(function(key) { ptr = ptr[key] });
return ptr;
};
var comparer = function(a, b) {
for (var i = 0, l = keyPaths.length; i < l; i++) {
var aVal = valueOf(a, keyPaths[i].path),
bVal = valueOf(b, keyPaths[i].path);
if (aVal > bVal) return keyPaths[i].sign;
if (aVal < bVal) return -keyPaths[i].sign;
}
return 0;
};
Array.implement({
sortBy: function(){
keyPaths.empty();
Array.each(arguments, function(argument) {
switch (typeOf(argument)) {
case "array": saveKeyPath(argument); break;
case "string": saveKeyPath(argument.match(/[+-]|[^.]+/g)); break;
}
});
return this.stableSort(comparer);
}
});
})();
var createSpinner = function(target, options){
var opts = Object.merge({
lines: 12,
length: 5,
width: 4,
radius: 9,
color: '#fff',
speed: 1.9,
trail: 53,
shadow: false,
hwaccel: true,
className: 'spinner',
zIndex: 2e9,
top: 'auto',
left: 'auto'
}, options);
return new Spinner(opts).spin(target);
}; |
defaults: {
page: 'home', | random_line_split |
couchpotato.js | var CouchPotato = new Class({
Implements: [Events, Options],
defaults: {
page: 'home',
action: 'index',
params: {}
},
pages: [],
block: [],
initialize: function(){
var self = this;
self.global_events = {};
},
setup: function(options) {
var self = this;
self.setOptions(options);
self.c = $(document.body);
self.route = new Route(self.defaults);
self.createLayout();
self.createPages();
if(window.location.hash)
History.handleInitialState();
else
self.openPage(window.location.pathname);
History.addEvent('change', self.openPage.bind(self));
self.c.addEvent('click:relay(a[href^=/]:not([target]))', self.pushState.bind(self));
self.c.addEvent('click:relay(a[href^=http])', self.openDerefered.bind(self));
// Check if device is touchenabled
self.touch_device = 'ontouchstart' in window || navigator.msMaxTouchPoints;
if(self.touch_device)
self.c.addClass('touch_enabled');
},
getOption: function(name){
try {
return this.options[name];
}
catch(e){
return null
}
},
pushState: function(e){
if((!e.meta && Browser.Platform.mac) || (!e.control && !Browser.Platform.mac)){
(e).preventDefault();
var url = e.target.get('href');
if(History.getPath() != url)
History.push(url);
}
},
isMac: function(){
return Browser.Platform.mac
},
createLayout: function(){
var self = this;
self.block.header = new Block();
self.c.adopt(
$(self.block.header).addClass('header').adopt(
new Element('div').adopt(
self.block.navigation = new Block.Navigation(self, {}),
self.block.search = new Block.Search(self, {}),
self.block.more = new Block.Menu(self, {'button_class': 'icon2.cog'})
)
),
self.content = new Element('div.content'),
self.block.footer = new Block.Footer(self, {})
);
var setting_links = [
new Element('a', {
'text': 'About CouchPotato',
'href': App.createUrl('settings/about')
}),
new Element('a', {
'text': 'Check for Updates',
'events': {
'click': self.checkForUpdate.bind(self, null)
}
}),
new Element('span.separator'),
new Element('a', {
'text': 'Settings',
'href': App.createUrl('settings/general')
}),
new Element('a', {
'text': 'Logs',
'href': App.createUrl('log')
}),
new Element('span.separator'),
new Element('a', {
'text': 'Restart',
'events': {
'click': self.restartQA.bind(self)
}
}),
new Element('a', {
'text': 'Shutdown',
'events': {
'click': self.shutdownQA.bind(self)
}
})
];
setting_links.each(function(a){
self.block.more.addLink(a)
});
new ScrollSpy({
min: 10,
onLeave: function(){
$(self.block.header).removeClass('with_shadow')
},
onEnter: function(){
$(self.block.header).addClass('with_shadow')
}
})
},
createPages: function(){
var self = this;
Object.each(Page, function(page_class, class_name){
var pg = new Page[class_name](self, {});
self.pages[class_name] = pg;
$(pg).inject(self.content);
});
self.fireEvent('load');
},
openPage: function(url) {
var self = this;
self.route.parse();
var page_name = self.route.getPage().capitalize();
var action = self.route.getAction();
var params = self.route.getParams();
var current_url = self.route.getCurrentUrl();
if(current_url == self.current_url)
return;
if(self.current_page)
self.current_page.hide();
try {
var page = self.pages[page_name] || self.pages.Home;
page.open(action, params, current_url);
page.show();
}
catch(e){
console.error("Can't open page:" + url, e)
}
self.current_page = page;
self.current_url = current_url;
},
getBlock: function(block_name){
return this.block[block_name]
},
getPage: function(name){
return this.pages[name]
},
shutdown: function(){
var self = this;
self.blockPage('You have shutdown. This is what is supposed to happen ;)');
Api.request('app.shutdown', {
'onComplete': self.blockPage.bind(self)
});
self.checkAvailable(1000);
},
shutdownQA: function(){
var self = this;
var q = new Question('Are you sure you want to shutdown CouchPotato?', '', [{
'text': 'Shutdown',
'class': 'shutdown red',
'events': {
'click': function(e){
(e).preventDefault();
self.shutdown();
q.close.delay(100, q);
}
}
}, {
'text': 'No, nevah!',
'cancel': true
}]);
},
restart: function(message, title){
var self = this;
self.blockPage(message || 'Restarting... please wait. If this takes too long, something must have gone wrong.', title);
Api.request('app.restart');
self.checkAvailable(1000);
},
restartQA: function(e, message, title){
var self = this;
var q = new Question('Are you sure you want to restart CouchPotato?', '', [{
'text': 'Restart',
'class': 'restart orange',
'events': {
'click': function(e){
(e).preventDefault();
self.restart(message, title);
q.close.delay(100, q);
}
}
}, {
'text': 'No, nevah!',
'cancel': true
}]);
},
checkForUpdate: function(onComplete){
var self = this;
Updater.check(onComplete);
self.blockPage('Please wait. If this takes too long, something must have gone wrong.', 'Checking for updates');
self.checkAvailable(3000);
},
checkAvailable: function(delay, onAvailable){
var self = this;
(function(){
Api.request('app.available', {
'onFailure': function(){
self.checkAvailable.delay(1000, self, [delay, onAvailable]);
self.fireEvent('unload');
},
'onSuccess': function(){
if(onAvailable)
onAvailable();
self.unBlockPage();
self.fireEvent('reload');
}
});
}).delay(delay || 0)
},
blockPage: function(message, title){
var self = this;
self.unBlockPage();
self.mask = new Element('div.mask').adopt(
new Element('div').adopt(
new Element('h1', {'text': title || 'Unavailable'}),
new Element('div', {'text': message || 'Something must have crashed.. check the logs ;)'})
)
).fade('hide').inject(document.body).fade('in');
createSpinner(self.mask, {
'top': -50
});
},
unBlockPage: function(){
var self = this;
if(self.mask)
self.mask.get('tween').start('opacity', 0).chain(function(){
this.element.destroy()
});
},
createUrl: function(action, params){
return this.options.base_url + (action ? action+'/' : '') + (params ? '?'+Object.toQueryString(params) : '')
},
openDerefered: function(e, el){
(e).stop();
var url = 'http://www.dereferer.org/?' + el.get('href');
if(el.get('target') == '_blank' || (e.meta && Browser.Platform.mac) || (e.control && !Browser.Platform.mac))
window.open(url);
else
window.location = url;
},
createUserscriptButtons: function(){
var host_url = window.location.protocol + '//' + window.location.host;
return new Element('div.group_userscript').adopt(
new Element('a.userscript.button', {
'text': 'Install userscript',
'href': Api.createUrl('userscript.get')+randomString()+'/couchpotato.user.js',
'target': '_blank'
}),
new Element('span.or[text=or]'),
new Element('span.bookmarklet').adopt(
new Element('a.button.orange', {
'text': '+CouchPotato',
'href': "javascript:void((function(){var e=document.createElement('script');e.setAttribute('type','text/javascript');e.setAttribute('charset','UTF-8');e.setAttribute('src','" +
host_url + Api.createUrl('userscript.bookmark') +
"?host="+ encodeURI(host_url + Api.createUrl('userscript.get')+randomString()+'/') +
"&r='+Math.random()*99999999);document.body.appendChild(e)})());",
'target': '',
'events': {
'click': function(e){
(e).stop();
alert('Drag it to your bookmark ;)')
}
}
}),
new Element('span', {
'text': '⇽ Drag this to your bookmarks'
})
)
);
},
/*
* Global events
*/
on: function(name, handle){
var self = this;
if(!self.global_events[name])
self.global_events[name] = [];
self.global_events[name].push(handle);
},
trigger: function(name, args, on_complete){
var self = this;
if(!self.global_events[name]){ return; }
if(!on_complete && typeOf(args) == 'function'){
on_complete = args;
args = {};
}
// Create parallel callback
var callbacks = [];
self.global_events[name].each(function(handle, nr){
callbacks.push(function(callback){
var results = handle(args || {});
callback(null, results || null);
});
});
// Fire events
async.parallel(callbacks, function(err, results){
if(err) p(err);
if(on_complete)
on_complete(results);
});
},
off: function(name, handle){
var self = this;
if(!self.global_events[name]) return;
// Remove single
if(handle){
self.global_events[name] = self.global_events[name].erase(handle);
}
// Reset full event
else {
self.global_events[name] = [];
}
}
});
window.App = new CouchPotato();
var Route = new Class({
defaults: {},
page: '',
action: 'index',
params: {},
initialize: function(defaults){
var self = this;
self.defaults = defaults
},
parse: function(){
var self = this;
var rep = function (pa) {
return pa.replace(Api.getOption('url'), '/').replace(App.getOption('base_url'), '/')
};
var path = rep(History.getPath());
if(path == '/' && location.hash){
path = rep(location.hash.replace('#', '/'))
}
self.current = path.replace(/^\/+|\/+$/g, '');
var url = self.current.split('/');
self.page = (url.length > 0) ? url.shift() : self.defaults.page;
self.action = (url.length > 0) ? url.shift() : self.defaults.action;
self.params = Object.merge({}, self.defaults.params);
if(url.length > 1){
var key;
url.each(function(el, nr){
if(nr%2 == 0)
key = el;
else if(key) {
self.params[key] = el;
key = null
}
})
}
else if(url.length == 1){
self.params[url] = true;
}
return self
},
getPage: function(){
return this.page
},
getAction: function(){
return this.action
},
getParams: function(){
return this.params
},
getCurrentUrl: function(){
return this.current
},
get: function(param){
return this.params[param]
}
});
var p = function(){
if(typeof(console) !== 'undefined' && console != null)
console.log(arguments)
};
(function(){
var events;
var check = function(e) {
var target = $(e.target);
var parents = target.getParents();
events.each(function(item) {
var element = item.element;
if (element != target && !parents.contains(element))
item.fn.call(element, e);
});
};
Element.Events.outerClick = {
onAdd : function(fn) {
if (!events) {
document.addEvent('click', check);
events = [];
}
events.push( {
element : this,
fn : fn
});
},
onRemove : function(fn) {
events = events.filter(function(item) {
return item.element != this || item.fn != fn;
}, this);
if (!events.length) {
document.removeEvent('click', check);
events = null;
}
}
};
})();
function rand | gth, extra) {
var chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXTZabcdefghiklmnopqrstuvwxyz" + (extra ? '-._!@#$%^&*()+=' : '');
var stringLength = length || 8;
var randomString = '';
for (var i = 0; i < stringLength; i++) {
var rnum = Math.floor(Math.random() * chars.length);
randomString += chars.charAt(rnum);
}
return randomString;
}
(function(){
var keyPaths = [];
var saveKeyPath = function(path) {
keyPaths.push({
sign: (path[0] === '+' || path[0] === '-')? parseInt(path.shift()+1) : 1,
path: path
});
};
var valueOf = function(object, path) {
var ptr = object;
path.each(function(key) { ptr = ptr[key] });
return ptr;
};
var comparer = function(a, b) {
for (var i = 0, l = keyPaths.length; i < l; i++) {
var aVal = valueOf(a, keyPaths[i].path),
bVal = valueOf(b, keyPaths[i].path);
if (aVal > bVal) return keyPaths[i].sign;
if (aVal < bVal) return -keyPaths[i].sign;
}
return 0;
};
Array.implement({
sortBy: function(){
keyPaths.empty();
Array.each(arguments, function(argument) {
switch (typeOf(argument)) {
case "array": saveKeyPath(argument); break;
case "string": saveKeyPath(argument.match(/[+-]|[^.]+/g)); break;
}
});
return this.stableSort(comparer);
}
});
})();
var createSpinner = function(target, options){
var opts = Object.merge({
lines: 12,
length: 5,
width: 4,
radius: 9,
color: '#fff',
speed: 1.9,
trail: 53,
shadow: false,
hwaccel: true,
className: 'spinner',
zIndex: 2e9,
top: 'auto',
left: 'auto'
}, options);
return new Spinner(opts).spin(target);
}; | omString(len | identifier_name |
couchpotato.js | var CouchPotato = new Class({
Implements: [Events, Options],
defaults: {
page: 'home',
action: 'index',
params: {}
},
pages: [],
block: [],
initialize: function(){
var self = this;
self.global_events = {};
},
setup: function(options) {
var self = this;
self.setOptions(options);
self.c = $(document.body);
self.route = new Route(self.defaults);
self.createLayout();
self.createPages();
if(window.location.hash)
History.handleInitialState();
else
self.openPage(window.location.pathname);
History.addEvent('change', self.openPage.bind(self));
self.c.addEvent('click:relay(a[href^=/]:not([target]))', self.pushState.bind(self));
self.c.addEvent('click:relay(a[href^=http])', self.openDerefered.bind(self));
// Check if device is touchenabled
self.touch_device = 'ontouchstart' in window || navigator.msMaxTouchPoints;
if(self.touch_device)
self.c.addClass('touch_enabled');
},
getOption: function(name){
try {
return this.options[name];
}
catch(e){
return null
}
},
pushState: function(e){
if((!e.meta && Browser.Platform.mac) || (!e.control && !Browser.Platform.mac)){
(e).preventDefault();
var url = e.target.get('href');
if(History.getPath() != url)
History.push(url);
}
},
isMac: function(){
return Browser.Platform.mac
},
createLayout: function(){
var self = this;
self.block.header = new Block();
self.c.adopt(
$(self.block.header).addClass('header').adopt(
new Element('div').adopt(
self.block.navigation = new Block.Navigation(self, {}),
self.block.search = new Block.Search(self, {}),
self.block.more = new Block.Menu(self, {'button_class': 'icon2.cog'})
)
),
self.content = new Element('div.content'),
self.block.footer = new Block.Footer(self, {})
);
var setting_links = [
new Element('a', {
'text': 'About CouchPotato',
'href': App.createUrl('settings/about')
}),
new Element('a', {
'text': 'Check for Updates',
'events': {
'click': self.checkForUpdate.bind(self, null)
}
}),
new Element('span.separator'),
new Element('a', {
'text': 'Settings',
'href': App.createUrl('settings/general')
}),
new Element('a', {
'text': 'Logs',
'href': App.createUrl('log')
}),
new Element('span.separator'),
new Element('a', {
'text': 'Restart',
'events': {
'click': self.restartQA.bind(self)
}
}),
new Element('a', {
'text': 'Shutdown',
'events': {
'click': self.shutdownQA.bind(self)
}
})
];
setting_links.each(function(a){
self.block.more.addLink(a)
});
new ScrollSpy({
min: 10,
onLeave: function(){
$(self.block.header).removeClass('with_shadow')
},
onEnter: function(){
$(self.block.header).addClass('with_shadow')
}
})
},
createPages: function(){
var self = this;
Object.each(Page, function(page_class, class_name){
var pg = new Page[class_name](self, {});
self.pages[class_name] = pg;
$(pg).inject(self.content);
});
self.fireEvent('load');
},
openPage: function(url) {
var self = this;
self.route.parse();
var page_name = self.route.getPage().capitalize();
var action = self.route.getAction();
var params = self.route.getParams();
var current_url = self.route.getCurrentUrl();
if(current_url == self.current_url)
return;
if(self.current_page)
self.current_page.hide();
try {
var page = self.pages[page_name] || self.pages.Home;
page.open(action, params, current_url);
page.show();
}
catch(e){
console.error("Can't open page:" + url, e)
}
self.current_page = page;
self.current_url = current_url;
},
getBlock: function(block_name){
return this.block[block_name]
},
getPage: function(name){
return this.pages[name]
},
shutdown: function(){
var self = this;
self.blockPage('You have shutdown. This is what is supposed to happen ;)');
Api.request('app.shutdown', {
'onComplete': self.blockPage.bind(self)
});
self.checkAvailable(1000);
},
shutdownQA: function(){
var self = this;
var q = new Question('Are you sure you want to shutdown CouchPotato?', '', [{
'text': 'Shutdown',
'class': 'shutdown red',
'events': {
'click': function(e){
(e).preventDefault();
self.shutdown();
q.close.delay(100, q);
}
}
}, {
'text': 'No, nevah!',
'cancel': true
}]);
},
restart: function(message, title){
var self = this;
self.blockPage(message || 'Restarting... please wait. If this takes too long, something must have gone wrong.', title);
Api.request('app.restart');
self.checkAvailable(1000);
},
restartQA: function(e, message, title){
var self = this;
var q = new Question('Are you sure you want to restart CouchPotato?', '', [{
'text': 'Restart',
'class': 'restart orange',
'events': {
'click': function(e){
(e).preventDefault();
self.restart(message, title);
q.close.delay(100, q);
}
}
}, {
'text': 'No, nevah!',
'cancel': true
}]);
},
checkForUpdate: function(onComplete){
var self = this;
Updater.check(onComplete);
self.blockPage('Please wait. If this takes too long, something must have gone wrong.', 'Checking for updates');
self.checkAvailable(3000);
},
checkAvailable: function(delay, onAvailable){
var self = this;
(function(){
Api.request('app.available', {
'onFailure': function(){
self.checkAvailable.delay(1000, self, [delay, onAvailable]);
self.fireEvent('unload');
},
'onSuccess': function(){
if(onAvailable)
onAvailable();
self.unBlockPage();
self.fireEvent('reload');
}
});
}).delay(delay || 0)
},
blockPage: function(message, title){
var self = this;
self.unBlockPage();
self.mask = new Element('div.mask').adopt(
new Element('div').adopt(
new Element('h1', {'text': title || 'Unavailable'}),
new Element('div', {'text': message || 'Something must have crashed.. check the logs ;)'})
)
).fade('hide').inject(document.body).fade('in');
createSpinner(self.mask, {
'top': -50
});
},
unBlockPage: function(){
var self = this;
if(self.mask)
self.mask.get('tween').start('opacity', 0).chain(function(){
this.element.destroy()
});
},
createUrl: function(action, params){
return this.options.base_url + (action ? action+'/' : '') + (params ? '?'+Object.toQueryString(params) : '')
},
openDerefered: function(e, el){
(e).stop();
var url = 'http://www.dereferer.org/?' + el.get('href');
if(el.get('target') == '_blank' || (e.meta && Browser.Platform.mac) || (e.control && !Browser.Platform.mac))
window.open(url);
else
window.location = url;
},
createUserscriptButtons: function(){
var host_url = window.location.protocol + '//' + window.location.host;
return new Element('div.group_userscript').adopt(
new Element('a.userscript.button', {
'text': 'Install userscript',
'href': Api.createUrl('userscript.get')+randomString()+'/couchpotato.user.js',
'target': '_blank'
}),
new Element('span.or[text=or]'),
new Element('span.bookmarklet').adopt(
new Element('a.button.orange', {
'text': '+CouchPotato',
'href': "javascript:void((function(){var e=document.createElement('script');e.setAttribute('type','text/javascript');e.setAttribute('charset','UTF-8');e.setAttribute('src','" +
host_url + Api.createUrl('userscript.bookmark') +
"?host="+ encodeURI(host_url + Api.createUrl('userscript.get')+randomString()+'/') +
"&r='+Math.random()*99999999);document.body.appendChild(e)})());",
'target': '',
'events': {
'click': function(e){
(e).stop();
alert('Drag it to your bookmark ;)')
}
}
}),
new Element('span', {
'text': '⇽ Drag this to your bookmarks'
})
)
);
},
/*
* Global events
*/
on: function(name, handle){
var self = this;
if(!self.global_events[name])
self.global_events[name] = [];
self.global_events[name].push(handle);
},
trigger: function(name, args, on_complete){
var self = this;
if(!self.global_events[name]){ return; }
if(!on_complete && typeOf(args) == 'function'){
on_complete = args;
args = {};
}
// Create parallel callback
var callbacks = [];
self.global_events[name].each(function(handle, nr){
callbacks.push(function(callback){
var results = handle(args || {});
callback(null, results || null);
});
});
// Fire events
async.parallel(callbacks, function(err, results){
if(err) p(err);
if(on_complete)
on_complete(results);
});
},
off: function(name, handle){
var self = this;
if(!self.global_events[name]) return;
// Remove single
if(handle){
self.global_events[name] = self.global_events[name].erase(handle);
}
// Reset full event
else {
self.global_events[name] = [];
}
}
});
window.App = new CouchPotato();
var Route = new Class({
defaults: {},
page: '',
action: 'index',
params: {},
initialize: function(defaults){
var self = this;
self.defaults = defaults
},
parse: function(){
var self = this;
var rep = function (pa) {
return pa.replace(Api.getOption('url'), '/').replace(App.getOption('base_url'), '/')
};
var path = rep(History.getPath());
if(path == '/' && location.hash){
path = rep(location.hash.replace('#', '/'))
}
self.current = path.replace(/^\/+|\/+$/g, '');
var url = self.current.split('/');
self.page = (url.length > 0) ? url.shift() : self.defaults.page;
self.action = (url.length > 0) ? url.shift() : self.defaults.action;
self.params = Object.merge({}, self.defaults.params);
if(url.length > 1){
var key;
url.each(function(el, nr){
if(nr%2 == 0)
key = el;
else if(key) {
self.params[key] = el;
key = null
}
})
}
else if(url.length == 1){
self.params[url] = true;
}
return self
},
getPage: function(){
return this.page
},
getAction: function(){
return this.action
},
getParams: function(){
return this.params
},
getCurrentUrl: function(){
return this.current
},
get: function(param){
return this.params[param]
}
});
var p = function(){
if(typeof(console) !== 'undefined' && console != null)
console.log(arguments)
};
(function(){
var events;
var check = function(e) {
var target = $(e.target);
var parents = target.getParents();
events.each(function(item) {
var element = item.element;
if (element != target && !parents.contains(element))
item.fn.call(element, e);
});
};
Element.Events.outerClick = {
onAdd : function(fn) {
if (!events) {
document.addEvent('click', check);
events = [];
}
events.push( {
element : this,
fn : fn
});
},
onRemove : function(fn) {
events = events.filter(function(item) {
return item.element != this || item.fn != fn;
}, this);
if (!events.length) {
document.removeEvent('click', check);
events = null;
}
}
};
})();
function randomString(length, extra) {
v | unction(){
var keyPaths = [];
var saveKeyPath = function(path) {
keyPaths.push({
sign: (path[0] === '+' || path[0] === '-')? parseInt(path.shift()+1) : 1,
path: path
});
};
var valueOf = function(object, path) {
var ptr = object;
path.each(function(key) { ptr = ptr[key] });
return ptr;
};
var comparer = function(a, b) {
for (var i = 0, l = keyPaths.length; i < l; i++) {
var aVal = valueOf(a, keyPaths[i].path),
bVal = valueOf(b, keyPaths[i].path);
if (aVal > bVal) return keyPaths[i].sign;
if (aVal < bVal) return -keyPaths[i].sign;
}
return 0;
};
Array.implement({
sortBy: function(){
keyPaths.empty();
Array.each(arguments, function(argument) {
switch (typeOf(argument)) {
case "array": saveKeyPath(argument); break;
case "string": saveKeyPath(argument.match(/[+-]|[^.]+/g)); break;
}
});
return this.stableSort(comparer);
}
});
})();
var createSpinner = function(target, options){
var opts = Object.merge({
lines: 12,
length: 5,
width: 4,
radius: 9,
color: '#fff',
speed: 1.9,
trail: 53,
shadow: false,
hwaccel: true,
className: 'spinner',
zIndex: 2e9,
top: 'auto',
left: 'auto'
}, options);
return new Spinner(opts).spin(target);
}; | ar chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXTZabcdefghiklmnopqrstuvwxyz" + (extra ? '-._!@#$%^&*()+=' : '');
var stringLength = length || 8;
var randomString = '';
for (var i = 0; i < stringLength; i++) {
var rnum = Math.floor(Math.random() * chars.length);
randomString += chars.charAt(rnum);
}
return randomString;
}
(f | identifier_body |
restyle_hints.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Restyle hints: an optimization to avoid unnecessarily matching selectors.
#[cfg(feature = "gecko")]
use crate::gecko_bindings::structs::nsRestyleHint;
use crate::traversal_flags::TraversalFlags;
bitflags! {
/// The kind of restyle we need to do for a given element.
pub struct RestyleHint: u8 {
/// Do a selector match of the element.
const RESTYLE_SELF = 1 << 0;
/// Do a selector match of the element's descendants.
const RESTYLE_DESCENDANTS = 1 << 1;
/// Recascade the current element.
const RECASCADE_SELF = 1 << 2;
/// Recascade all descendant elements.
const RECASCADE_DESCENDANTS = 1 << 3;
/// Replace the style data coming from CSS transitions without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_CSS_TRANSITIONS = 1 << 4;
/// Replace the style data coming from CSS animations without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_CSS_ANIMATIONS = 1 << 5;
/// Don't re-run selector-matching on the element, only the style
/// attribute has changed, and this change didn't have any other
/// dependencies.
const RESTYLE_STYLE_ATTRIBUTE = 1 << 6;
/// Replace the style data coming from SMIL animations without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_SMIL = 1 << 7;
}
}
impl RestyleHint {
/// Creates a new `RestyleHint` indicating that the current element and all
/// its descendants must be fully restyled.
pub fn restyle_subtree() -> Self {
RestyleHint::RESTYLE_SELF | RestyleHint::RESTYLE_DESCENDANTS
}
/// Creates a new `RestyleHint` indicating that the current element and all
/// its descendants must be recascaded.
pub fn recascade_subtree() -> Self {
RestyleHint::RECASCADE_SELF | RestyleHint::RECASCADE_DESCENDANTS
}
/// Returns whether this hint invalidates the element and all its
/// descendants.
pub fn contains_subtree(&self) -> bool {
self.contains(RestyleHint::RESTYLE_SELF | RestyleHint::RESTYLE_DESCENDANTS)
}
/// Returns whether we need to restyle this element.
pub fn has_non_animation_invalidations(&self) -> bool {
self.intersects(
RestyleHint::RESTYLE_SELF |
RestyleHint::RECASCADE_SELF |
(Self::replacements() & !Self::for_animations()),
)
}
/// Propagates this restyle hint to a child element.
pub fn propagate(&mut self, traversal_flags: &TraversalFlags) -> Self {
use std::mem;
// In the middle of an animation only restyle, we don't need to
// propagate any restyle hints, and we need to remove ourselves.
if traversal_flags.for_animation_only() {
self.remove_animation_hints();
return Self::empty();
}
debug_assert!(
!self.has_animation_hint(),
"There should not be any animation restyle hints \
during normal traversal"
);
// Else we should clear ourselves, and return the propagated hint.
mem::replace(self, Self::empty()).propagate_for_non_animation_restyle()
}
/// Returns a new `CascadeHint` appropriate for children of the current
/// element.
fn propagate_for_non_animation_restyle(&self) -> Self {
if self.contains(RestyleHint::RESTYLE_DESCENDANTS) {
return Self::restyle_subtree();
}
if self.contains(RestyleHint::RECASCADE_DESCENDANTS) {
return Self::recascade_subtree();
}
Self::empty()
}
/// Creates a new `RestyleHint` that indicates the element must be
/// recascaded.
pub fn recascade_self() -> Self {
RestyleHint::RECASCADE_SELF
}
/// Returns a hint that contains all the replacement hints.
pub fn replacements() -> Self {
RestyleHint::RESTYLE_STYLE_ATTRIBUTE | Self::for_animations()
}
/// The replacements for the animation cascade levels.
#[inline]
pub fn for_animations() -> Self {
RestyleHint::RESTYLE_SMIL |
RestyleHint::RESTYLE_CSS_ANIMATIONS |
RestyleHint::RESTYLE_CSS_TRANSITIONS
}
/// Returns whether the hint specifies that the currently element must be
/// recascaded.
pub fn has_recascade_self(&self) -> bool {
self.contains(RestyleHint::RECASCADE_SELF)
}
/// Returns whether the hint specifies that an animation cascade level must
/// be replaced.
#[inline]
pub fn has_animation_hint(&self) -> bool {
self.intersects(Self::for_animations())
}
/// Returns whether the hint specifies that an animation cascade level must
/// be replaced.
#[inline]
pub fn has_animation_hint_or_recascade(&self) -> bool {
self.intersects(Self::for_animations() | RestyleHint::RECASCADE_SELF)
}
/// Returns whether the hint specifies some restyle work other than an
/// animation cascade level replacement.
#[inline]
pub fn has_non_animation_hint(&self) -> bool |
/// Returns whether the hint specifies that selector matching must be re-run
/// for the element.
#[inline]
pub fn match_self(&self) -> bool {
self.intersects(RestyleHint::RESTYLE_SELF)
}
/// Returns whether the hint specifies that some cascade levels must be
/// replaced.
#[inline]
pub fn has_replacements(&self) -> bool {
self.intersects(Self::replacements())
}
/// Removes all of the animation-related hints.
#[inline]
pub fn remove_animation_hints(&mut self) {
self.remove(Self::for_animations());
// While RECASCADE_SELF is not animation-specific, we only ever add and
// process it during traversal. If we are here, removing animation
// hints, then we are in an animation-only traversal, and we know that
// any RECASCADE_SELF flag must have been set due to changes in
// inherited values after restyling for animations, and thus we want to
// remove it so that we don't later try to restyle the element during a
// normal restyle. (We could have separate RECASCADE_SELF_NORMAL and
// RECASCADE_SELF_ANIMATIONS flags to make it clear, but this isn't
// currently necessary.)
self.remove(RestyleHint::RECASCADE_SELF);
}
}
impl Default for RestyleHint {
fn default() -> Self {
Self::empty()
}
}
#[cfg(feature = "gecko")]
impl From<nsRestyleHint> for RestyleHint {
fn from(mut raw: nsRestyleHint) -> Self {
let mut hint = RestyleHint::empty();
debug_assert!(
raw.0 & nsRestyleHint::eRestyle_LaterSiblings.0 == 0,
"Handle later siblings manually if necessary plz."
);
if (raw.0 & (nsRestyleHint::eRestyle_Self.0 | nsRestyleHint::eRestyle_Subtree.0)) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_Self.0;
hint.insert(RestyleHint::RESTYLE_SELF);
}
if (raw.0 & (nsRestyleHint::eRestyle_Subtree.0 | nsRestyleHint::eRestyle_SomeDescendants.0)) !=
0
{
raw.0 &= !nsRestyleHint::eRestyle_Subtree.0;
raw.0 &= !nsRestyleHint::eRestyle_SomeDescendants.0;
hint.insert(RestyleHint::RESTYLE_DESCENDANTS);
}
if (raw.0 & (nsRestyleHint::eRestyle_ForceDescendants.0 | nsRestyleHint::eRestyle_Force.0)) !=
0
{
raw.0 &= !nsRestyleHint::eRestyle_Force.0;
hint.insert(RestyleHint::RECASCADE_SELF);
}
if (raw.0 & nsRestyleHint::eRestyle_ForceDescendants.0) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_ForceDescendants.0;
hint.insert(RestyleHint::RECASCADE_DESCENDANTS);
}
hint.insert(RestyleHint::from_bits_truncate(raw.0 as u8));
hint
}
}
#[cfg(feature = "servo")]
malloc_size_of_is_0!(RestyleHint);
/// Asserts that all replacement hints have a matching nsRestyleHint value.
#[cfg(feature = "gecko")]
#[inline]
pub fn assert_restyle_hints_match() {
use crate::gecko_bindings::structs;
macro_rules! check_restyle_hints {
( $( $a:ident => $b:path),*, ) => {
if cfg!(debug_assertions) {
let mut replacements = RestyleHint::replacements();
$(
assert_eq!(structs::nsRestyleHint::$a.0 as usize, $b.bits() as usize, stringify!($b));
replacements.remove($b);
)*
assert_eq!(replacements, RestyleHint::empty(),
"all RestyleHint replacement bits should have an \
assertion");
}
}
}
check_restyle_hints! {
eRestyle_CSSTransitions => RestyleHint::RESTYLE_CSS_TRANSITIONS,
eRestyle_CSSAnimations => RestyleHint::RESTYLE_CSS_ANIMATIONS,
eRestyle_StyleAttribute => RestyleHint::RESTYLE_STYLE_ATTRIBUTE,
eRestyle_StyleAttribute_Animations => RestyleHint::RESTYLE_SMIL,
}
}
| {
!(*self & !Self::for_animations()).is_empty()
} | identifier_body |
restyle_hints.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Restyle hints: an optimization to avoid unnecessarily matching selectors.
#[cfg(feature = "gecko")]
use crate::gecko_bindings::structs::nsRestyleHint;
use crate::traversal_flags::TraversalFlags;
bitflags! {
/// The kind of restyle we need to do for a given element.
pub struct RestyleHint: u8 {
/// Do a selector match of the element.
const RESTYLE_SELF = 1 << 0;
/// Do a selector match of the element's descendants.
const RESTYLE_DESCENDANTS = 1 << 1;
/// Recascade the current element.
const RECASCADE_SELF = 1 << 2;
/// Recascade all descendant elements.
const RECASCADE_DESCENDANTS = 1 << 3;
/// Replace the style data coming from CSS transitions without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_CSS_TRANSITIONS = 1 << 4;
/// Replace the style data coming from CSS animations without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_CSS_ANIMATIONS = 1 << 5;
/// Don't re-run selector-matching on the element, only the style
/// attribute has changed, and this change didn't have any other
/// dependencies.
const RESTYLE_STYLE_ATTRIBUTE = 1 << 6;
/// Replace the style data coming from SMIL animations without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_SMIL = 1 << 7;
}
}
impl RestyleHint {
/// Creates a new `RestyleHint` indicating that the current element and all
/// its descendants must be fully restyled.
pub fn restyle_subtree() -> Self {
RestyleHint::RESTYLE_SELF | RestyleHint::RESTYLE_DESCENDANTS
}
/// Creates a new `RestyleHint` indicating that the current element and all
/// its descendants must be recascaded.
pub fn recascade_subtree() -> Self {
RestyleHint::RECASCADE_SELF | RestyleHint::RECASCADE_DESCENDANTS
}
| /// descendants.
pub fn contains_subtree(&self) -> bool {
self.contains(RestyleHint::RESTYLE_SELF | RestyleHint::RESTYLE_DESCENDANTS)
}
/// Returns whether we need to restyle this element.
pub fn has_non_animation_invalidations(&self) -> bool {
self.intersects(
RestyleHint::RESTYLE_SELF |
RestyleHint::RECASCADE_SELF |
(Self::replacements() & !Self::for_animations()),
)
}
/// Propagates this restyle hint to a child element.
pub fn propagate(&mut self, traversal_flags: &TraversalFlags) -> Self {
use std::mem;
// In the middle of an animation only restyle, we don't need to
// propagate any restyle hints, and we need to remove ourselves.
if traversal_flags.for_animation_only() {
self.remove_animation_hints();
return Self::empty();
}
debug_assert!(
!self.has_animation_hint(),
"There should not be any animation restyle hints \
during normal traversal"
);
// Else we should clear ourselves, and return the propagated hint.
mem::replace(self, Self::empty()).propagate_for_non_animation_restyle()
}
/// Returns a new `CascadeHint` appropriate for children of the current
/// element.
fn propagate_for_non_animation_restyle(&self) -> Self {
if self.contains(RestyleHint::RESTYLE_DESCENDANTS) {
return Self::restyle_subtree();
}
if self.contains(RestyleHint::RECASCADE_DESCENDANTS) {
return Self::recascade_subtree();
}
Self::empty()
}
/// Creates a new `RestyleHint` that indicates the element must be
/// recascaded.
pub fn recascade_self() -> Self {
RestyleHint::RECASCADE_SELF
}
/// Returns a hint that contains all the replacement hints.
pub fn replacements() -> Self {
RestyleHint::RESTYLE_STYLE_ATTRIBUTE | Self::for_animations()
}
/// The replacements for the animation cascade levels.
#[inline]
pub fn for_animations() -> Self {
RestyleHint::RESTYLE_SMIL |
RestyleHint::RESTYLE_CSS_ANIMATIONS |
RestyleHint::RESTYLE_CSS_TRANSITIONS
}
/// Returns whether the hint specifies that the currently element must be
/// recascaded.
pub fn has_recascade_self(&self) -> bool {
self.contains(RestyleHint::RECASCADE_SELF)
}
/// Returns whether the hint specifies that an animation cascade level must
/// be replaced.
#[inline]
pub fn has_animation_hint(&self) -> bool {
self.intersects(Self::for_animations())
}
/// Returns whether the hint specifies that an animation cascade level must
/// be replaced.
#[inline]
pub fn has_animation_hint_or_recascade(&self) -> bool {
self.intersects(Self::for_animations() | RestyleHint::RECASCADE_SELF)
}
/// Returns whether the hint specifies some restyle work other than an
/// animation cascade level replacement.
#[inline]
pub fn has_non_animation_hint(&self) -> bool {
!(*self & !Self::for_animations()).is_empty()
}
/// Returns whether the hint specifies that selector matching must be re-run
/// for the element.
#[inline]
pub fn match_self(&self) -> bool {
self.intersects(RestyleHint::RESTYLE_SELF)
}
/// Returns whether the hint specifies that some cascade levels must be
/// replaced.
#[inline]
pub fn has_replacements(&self) -> bool {
self.intersects(Self::replacements())
}
/// Removes all of the animation-related hints.
#[inline]
pub fn remove_animation_hints(&mut self) {
self.remove(Self::for_animations());
// While RECASCADE_SELF is not animation-specific, we only ever add and
// process it during traversal. If we are here, removing animation
// hints, then we are in an animation-only traversal, and we know that
// any RECASCADE_SELF flag must have been set due to changes in
// inherited values after restyling for animations, and thus we want to
// remove it so that we don't later try to restyle the element during a
// normal restyle. (We could have separate RECASCADE_SELF_NORMAL and
// RECASCADE_SELF_ANIMATIONS flags to make it clear, but this isn't
// currently necessary.)
self.remove(RestyleHint::RECASCADE_SELF);
}
}
impl Default for RestyleHint {
fn default() -> Self {
Self::empty()
}
}
#[cfg(feature = "gecko")]
impl From<nsRestyleHint> for RestyleHint {
fn from(mut raw: nsRestyleHint) -> Self {
let mut hint = RestyleHint::empty();
debug_assert!(
raw.0 & nsRestyleHint::eRestyle_LaterSiblings.0 == 0,
"Handle later siblings manually if necessary plz."
);
if (raw.0 & (nsRestyleHint::eRestyle_Self.0 | nsRestyleHint::eRestyle_Subtree.0)) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_Self.0;
hint.insert(RestyleHint::RESTYLE_SELF);
}
if (raw.0 & (nsRestyleHint::eRestyle_Subtree.0 | nsRestyleHint::eRestyle_SomeDescendants.0)) !=
0
{
raw.0 &= !nsRestyleHint::eRestyle_Subtree.0;
raw.0 &= !nsRestyleHint::eRestyle_SomeDescendants.0;
hint.insert(RestyleHint::RESTYLE_DESCENDANTS);
}
if (raw.0 & (nsRestyleHint::eRestyle_ForceDescendants.0 | nsRestyleHint::eRestyle_Force.0)) !=
0
{
raw.0 &= !nsRestyleHint::eRestyle_Force.0;
hint.insert(RestyleHint::RECASCADE_SELF);
}
if (raw.0 & nsRestyleHint::eRestyle_ForceDescendants.0) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_ForceDescendants.0;
hint.insert(RestyleHint::RECASCADE_DESCENDANTS);
}
hint.insert(RestyleHint::from_bits_truncate(raw.0 as u8));
hint
}
}
#[cfg(feature = "servo")]
malloc_size_of_is_0!(RestyleHint);
/// Asserts that all replacement hints have a matching nsRestyleHint value.
#[cfg(feature = "gecko")]
#[inline]
pub fn assert_restyle_hints_match() {
use crate::gecko_bindings::structs;
macro_rules! check_restyle_hints {
( $( $a:ident => $b:path),*, ) => {
if cfg!(debug_assertions) {
let mut replacements = RestyleHint::replacements();
$(
assert_eq!(structs::nsRestyleHint::$a.0 as usize, $b.bits() as usize, stringify!($b));
replacements.remove($b);
)*
assert_eq!(replacements, RestyleHint::empty(),
"all RestyleHint replacement bits should have an \
assertion");
}
}
}
check_restyle_hints! {
eRestyle_CSSTransitions => RestyleHint::RESTYLE_CSS_TRANSITIONS,
eRestyle_CSSAnimations => RestyleHint::RESTYLE_CSS_ANIMATIONS,
eRestyle_StyleAttribute => RestyleHint::RESTYLE_STYLE_ATTRIBUTE,
eRestyle_StyleAttribute_Animations => RestyleHint::RESTYLE_SMIL,
}
} | /// Returns whether this hint invalidates the element and all its | random_line_split |
restyle_hints.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Restyle hints: an optimization to avoid unnecessarily matching selectors.
#[cfg(feature = "gecko")]
use crate::gecko_bindings::structs::nsRestyleHint;
use crate::traversal_flags::TraversalFlags;
bitflags! {
/// The kind of restyle we need to do for a given element.
pub struct RestyleHint: u8 {
/// Do a selector match of the element.
const RESTYLE_SELF = 1 << 0;
/// Do a selector match of the element's descendants.
const RESTYLE_DESCENDANTS = 1 << 1;
/// Recascade the current element.
const RECASCADE_SELF = 1 << 2;
/// Recascade all descendant elements.
const RECASCADE_DESCENDANTS = 1 << 3;
/// Replace the style data coming from CSS transitions without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_CSS_TRANSITIONS = 1 << 4;
/// Replace the style data coming from CSS animations without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_CSS_ANIMATIONS = 1 << 5;
/// Don't re-run selector-matching on the element, only the style
/// attribute has changed, and this change didn't have any other
/// dependencies.
const RESTYLE_STYLE_ATTRIBUTE = 1 << 6;
/// Replace the style data coming from SMIL animations without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_SMIL = 1 << 7;
}
}
impl RestyleHint {
/// Creates a new `RestyleHint` indicating that the current element and all
/// its descendants must be fully restyled.
pub fn restyle_subtree() -> Self {
RestyleHint::RESTYLE_SELF | RestyleHint::RESTYLE_DESCENDANTS
}
/// Creates a new `RestyleHint` indicating that the current element and all
/// its descendants must be recascaded.
pub fn recascade_subtree() -> Self {
RestyleHint::RECASCADE_SELF | RestyleHint::RECASCADE_DESCENDANTS
}
/// Returns whether this hint invalidates the element and all its
/// descendants.
pub fn contains_subtree(&self) -> bool {
self.contains(RestyleHint::RESTYLE_SELF | RestyleHint::RESTYLE_DESCENDANTS)
}
/// Returns whether we need to restyle this element.
pub fn has_non_animation_invalidations(&self) -> bool {
self.intersects(
RestyleHint::RESTYLE_SELF |
RestyleHint::RECASCADE_SELF |
(Self::replacements() & !Self::for_animations()),
)
}
/// Propagates this restyle hint to a child element.
pub fn propagate(&mut self, traversal_flags: &TraversalFlags) -> Self {
use std::mem;
// In the middle of an animation only restyle, we don't need to
// propagate any restyle hints, and we need to remove ourselves.
if traversal_flags.for_animation_only() {
self.remove_animation_hints();
return Self::empty();
}
debug_assert!(
!self.has_animation_hint(),
"There should not be any animation restyle hints \
during normal traversal"
);
// Else we should clear ourselves, and return the propagated hint.
mem::replace(self, Self::empty()).propagate_for_non_animation_restyle()
}
/// Returns a new `CascadeHint` appropriate for children of the current
/// element.
fn propagate_for_non_animation_restyle(&self) -> Self {
if self.contains(RestyleHint::RESTYLE_DESCENDANTS) {
return Self::restyle_subtree();
}
if self.contains(RestyleHint::RECASCADE_DESCENDANTS) {
return Self::recascade_subtree();
}
Self::empty()
}
/// Creates a new `RestyleHint` that indicates the element must be
/// recascaded.
pub fn recascade_self() -> Self {
RestyleHint::RECASCADE_SELF
}
/// Returns a hint that contains all the replacement hints.
pub fn replacements() -> Self {
RestyleHint::RESTYLE_STYLE_ATTRIBUTE | Self::for_animations()
}
/// The replacements for the animation cascade levels.
#[inline]
pub fn for_animations() -> Self {
RestyleHint::RESTYLE_SMIL |
RestyleHint::RESTYLE_CSS_ANIMATIONS |
RestyleHint::RESTYLE_CSS_TRANSITIONS
}
/// Returns whether the hint specifies that the currently element must be
/// recascaded.
pub fn has_recascade_self(&self) -> bool {
self.contains(RestyleHint::RECASCADE_SELF)
}
/// Returns whether the hint specifies that an animation cascade level must
/// be replaced.
#[inline]
pub fn has_animation_hint(&self) -> bool {
self.intersects(Self::for_animations())
}
/// Returns whether the hint specifies that an animation cascade level must
/// be replaced.
#[inline]
pub fn has_animation_hint_or_recascade(&self) -> bool {
self.intersects(Self::for_animations() | RestyleHint::RECASCADE_SELF)
}
/// Returns whether the hint specifies some restyle work other than an
/// animation cascade level replacement.
#[inline]
pub fn has_non_animation_hint(&self) -> bool {
!(*self & !Self::for_animations()).is_empty()
}
/// Returns whether the hint specifies that selector matching must be re-run
/// for the element.
#[inline]
pub fn match_self(&self) -> bool {
self.intersects(RestyleHint::RESTYLE_SELF)
}
/// Returns whether the hint specifies that some cascade levels must be
/// replaced.
#[inline]
pub fn has_replacements(&self) -> bool {
self.intersects(Self::replacements())
}
/// Removes all of the animation-related hints.
#[inline]
pub fn remove_animation_hints(&mut self) {
self.remove(Self::for_animations());
// While RECASCADE_SELF is not animation-specific, we only ever add and
// process it during traversal. If we are here, removing animation
// hints, then we are in an animation-only traversal, and we know that
// any RECASCADE_SELF flag must have been set due to changes in
// inherited values after restyling for animations, and thus we want to
// remove it so that we don't later try to restyle the element during a
// normal restyle. (We could have separate RECASCADE_SELF_NORMAL and
// RECASCADE_SELF_ANIMATIONS flags to make it clear, but this isn't
// currently necessary.)
self.remove(RestyleHint::RECASCADE_SELF);
}
}
impl Default for RestyleHint {
fn default() -> Self {
Self::empty()
}
}
#[cfg(feature = "gecko")]
impl From<nsRestyleHint> for RestyleHint {
fn from(mut raw: nsRestyleHint) -> Self {
let mut hint = RestyleHint::empty();
debug_assert!(
raw.0 & nsRestyleHint::eRestyle_LaterSiblings.0 == 0,
"Handle later siblings manually if necessary plz."
);
if (raw.0 & (nsRestyleHint::eRestyle_Self.0 | nsRestyleHint::eRestyle_Subtree.0)) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_Self.0;
hint.insert(RestyleHint::RESTYLE_SELF);
}
if (raw.0 & (nsRestyleHint::eRestyle_Subtree.0 | nsRestyleHint::eRestyle_SomeDescendants.0)) !=
0
|
if (raw.0 & (nsRestyleHint::eRestyle_ForceDescendants.0 | nsRestyleHint::eRestyle_Force.0)) !=
0
{
raw.0 &= !nsRestyleHint::eRestyle_Force.0;
hint.insert(RestyleHint::RECASCADE_SELF);
}
if (raw.0 & nsRestyleHint::eRestyle_ForceDescendants.0) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_ForceDescendants.0;
hint.insert(RestyleHint::RECASCADE_DESCENDANTS);
}
hint.insert(RestyleHint::from_bits_truncate(raw.0 as u8));
hint
}
}
#[cfg(feature = "servo")]
malloc_size_of_is_0!(RestyleHint);
/// Asserts that all replacement hints have a matching nsRestyleHint value.
#[cfg(feature = "gecko")]
#[inline]
pub fn assert_restyle_hints_match() {
use crate::gecko_bindings::structs;
macro_rules! check_restyle_hints {
( $( $a:ident => $b:path),*, ) => {
if cfg!(debug_assertions) {
let mut replacements = RestyleHint::replacements();
$(
assert_eq!(structs::nsRestyleHint::$a.0 as usize, $b.bits() as usize, stringify!($b));
replacements.remove($b);
)*
assert_eq!(replacements, RestyleHint::empty(),
"all RestyleHint replacement bits should have an \
assertion");
}
}
}
check_restyle_hints! {
eRestyle_CSSTransitions => RestyleHint::RESTYLE_CSS_TRANSITIONS,
eRestyle_CSSAnimations => RestyleHint::RESTYLE_CSS_ANIMATIONS,
eRestyle_StyleAttribute => RestyleHint::RESTYLE_STYLE_ATTRIBUTE,
eRestyle_StyleAttribute_Animations => RestyleHint::RESTYLE_SMIL,
}
}
| {
raw.0 &= !nsRestyleHint::eRestyle_Subtree.0;
raw.0 &= !nsRestyleHint::eRestyle_SomeDescendants.0;
hint.insert(RestyleHint::RESTYLE_DESCENDANTS);
} | conditional_block |
restyle_hints.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Restyle hints: an optimization to avoid unnecessarily matching selectors.
#[cfg(feature = "gecko")]
use crate::gecko_bindings::structs::nsRestyleHint;
use crate::traversal_flags::TraversalFlags;
bitflags! {
/// The kind of restyle we need to do for a given element.
pub struct RestyleHint: u8 {
/// Do a selector match of the element.
const RESTYLE_SELF = 1 << 0;
/// Do a selector match of the element's descendants.
const RESTYLE_DESCENDANTS = 1 << 1;
/// Recascade the current element.
const RECASCADE_SELF = 1 << 2;
/// Recascade all descendant elements.
const RECASCADE_DESCENDANTS = 1 << 3;
/// Replace the style data coming from CSS transitions without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_CSS_TRANSITIONS = 1 << 4;
/// Replace the style data coming from CSS animations without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_CSS_ANIMATIONS = 1 << 5;
/// Don't re-run selector-matching on the element, only the style
/// attribute has changed, and this change didn't have any other
/// dependencies.
const RESTYLE_STYLE_ATTRIBUTE = 1 << 6;
/// Replace the style data coming from SMIL animations without updating
/// any other style data. This hint is only processed in animation-only
/// traversal which is prior to normal traversal.
const RESTYLE_SMIL = 1 << 7;
}
}
impl RestyleHint {
/// Creates a new `RestyleHint` indicating that the current element and all
/// its descendants must be fully restyled.
pub fn restyle_subtree() -> Self {
RestyleHint::RESTYLE_SELF | RestyleHint::RESTYLE_DESCENDANTS
}
/// Creates a new `RestyleHint` indicating that the current element and all
/// its descendants must be recascaded.
pub fn recascade_subtree() -> Self {
RestyleHint::RECASCADE_SELF | RestyleHint::RECASCADE_DESCENDANTS
}
/// Returns whether this hint invalidates the element and all its
/// descendants.
pub fn contains_subtree(&self) -> bool {
self.contains(RestyleHint::RESTYLE_SELF | RestyleHint::RESTYLE_DESCENDANTS)
}
/// Returns whether we need to restyle this element.
pub fn | (&self) -> bool {
self.intersects(
RestyleHint::RESTYLE_SELF |
RestyleHint::RECASCADE_SELF |
(Self::replacements() & !Self::for_animations()),
)
}
/// Propagates this restyle hint to a child element.
pub fn propagate(&mut self, traversal_flags: &TraversalFlags) -> Self {
use std::mem;
// In the middle of an animation only restyle, we don't need to
// propagate any restyle hints, and we need to remove ourselves.
if traversal_flags.for_animation_only() {
self.remove_animation_hints();
return Self::empty();
}
debug_assert!(
!self.has_animation_hint(),
"There should not be any animation restyle hints \
during normal traversal"
);
// Else we should clear ourselves, and return the propagated hint.
mem::replace(self, Self::empty()).propagate_for_non_animation_restyle()
}
/// Returns a new `CascadeHint` appropriate for children of the current
/// element.
fn propagate_for_non_animation_restyle(&self) -> Self {
if self.contains(RestyleHint::RESTYLE_DESCENDANTS) {
return Self::restyle_subtree();
}
if self.contains(RestyleHint::RECASCADE_DESCENDANTS) {
return Self::recascade_subtree();
}
Self::empty()
}
/// Creates a new `RestyleHint` that indicates the element must be
/// recascaded.
pub fn recascade_self() -> Self {
RestyleHint::RECASCADE_SELF
}
/// Returns a hint that contains all the replacement hints.
pub fn replacements() -> Self {
RestyleHint::RESTYLE_STYLE_ATTRIBUTE | Self::for_animations()
}
/// The replacements for the animation cascade levels.
#[inline]
pub fn for_animations() -> Self {
RestyleHint::RESTYLE_SMIL |
RestyleHint::RESTYLE_CSS_ANIMATIONS |
RestyleHint::RESTYLE_CSS_TRANSITIONS
}
/// Returns whether the hint specifies that the currently element must be
/// recascaded.
pub fn has_recascade_self(&self) -> bool {
self.contains(RestyleHint::RECASCADE_SELF)
}
/// Returns whether the hint specifies that an animation cascade level must
/// be replaced.
#[inline]
pub fn has_animation_hint(&self) -> bool {
self.intersects(Self::for_animations())
}
/// Returns whether the hint specifies that an animation cascade level must
/// be replaced.
#[inline]
pub fn has_animation_hint_or_recascade(&self) -> bool {
self.intersects(Self::for_animations() | RestyleHint::RECASCADE_SELF)
}
/// Returns whether the hint specifies some restyle work other than an
/// animation cascade level replacement.
#[inline]
pub fn has_non_animation_hint(&self) -> bool {
!(*self & !Self::for_animations()).is_empty()
}
/// Returns whether the hint specifies that selector matching must be re-run
/// for the element.
#[inline]
pub fn match_self(&self) -> bool {
self.intersects(RestyleHint::RESTYLE_SELF)
}
/// Returns whether the hint specifies that some cascade levels must be
/// replaced.
#[inline]
pub fn has_replacements(&self) -> bool {
self.intersects(Self::replacements())
}
/// Removes all of the animation-related hints.
#[inline]
pub fn remove_animation_hints(&mut self) {
self.remove(Self::for_animations());
// While RECASCADE_SELF is not animation-specific, we only ever add and
// process it during traversal. If we are here, removing animation
// hints, then we are in an animation-only traversal, and we know that
// any RECASCADE_SELF flag must have been set due to changes in
// inherited values after restyling for animations, and thus we want to
// remove it so that we don't later try to restyle the element during a
// normal restyle. (We could have separate RECASCADE_SELF_NORMAL and
// RECASCADE_SELF_ANIMATIONS flags to make it clear, but this isn't
// currently necessary.)
self.remove(RestyleHint::RECASCADE_SELF);
}
}
impl Default for RestyleHint {
fn default() -> Self {
Self::empty()
}
}
#[cfg(feature = "gecko")]
impl From<nsRestyleHint> for RestyleHint {
fn from(mut raw: nsRestyleHint) -> Self {
let mut hint = RestyleHint::empty();
debug_assert!(
raw.0 & nsRestyleHint::eRestyle_LaterSiblings.0 == 0,
"Handle later siblings manually if necessary plz."
);
if (raw.0 & (nsRestyleHint::eRestyle_Self.0 | nsRestyleHint::eRestyle_Subtree.0)) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_Self.0;
hint.insert(RestyleHint::RESTYLE_SELF);
}
if (raw.0 & (nsRestyleHint::eRestyle_Subtree.0 | nsRestyleHint::eRestyle_SomeDescendants.0)) !=
0
{
raw.0 &= !nsRestyleHint::eRestyle_Subtree.0;
raw.0 &= !nsRestyleHint::eRestyle_SomeDescendants.0;
hint.insert(RestyleHint::RESTYLE_DESCENDANTS);
}
if (raw.0 & (nsRestyleHint::eRestyle_ForceDescendants.0 | nsRestyleHint::eRestyle_Force.0)) !=
0
{
raw.0 &= !nsRestyleHint::eRestyle_Force.0;
hint.insert(RestyleHint::RECASCADE_SELF);
}
if (raw.0 & nsRestyleHint::eRestyle_ForceDescendants.0) != 0 {
raw.0 &= !nsRestyleHint::eRestyle_ForceDescendants.0;
hint.insert(RestyleHint::RECASCADE_DESCENDANTS);
}
hint.insert(RestyleHint::from_bits_truncate(raw.0 as u8));
hint
}
}
#[cfg(feature = "servo")]
malloc_size_of_is_0!(RestyleHint);
/// Asserts that all replacement hints have a matching nsRestyleHint value.
#[cfg(feature = "gecko")]
#[inline]
pub fn assert_restyle_hints_match() {
use crate::gecko_bindings::structs;
macro_rules! check_restyle_hints {
( $( $a:ident => $b:path),*, ) => {
if cfg!(debug_assertions) {
let mut replacements = RestyleHint::replacements();
$(
assert_eq!(structs::nsRestyleHint::$a.0 as usize, $b.bits() as usize, stringify!($b));
replacements.remove($b);
)*
assert_eq!(replacements, RestyleHint::empty(),
"all RestyleHint replacement bits should have an \
assertion");
}
}
}
check_restyle_hints! {
eRestyle_CSSTransitions => RestyleHint::RESTYLE_CSS_TRANSITIONS,
eRestyle_CSSAnimations => RestyleHint::RESTYLE_CSS_ANIMATIONS,
eRestyle_StyleAttribute => RestyleHint::RESTYLE_STYLE_ATTRIBUTE,
eRestyle_StyleAttribute_Animations => RestyleHint::RESTYLE_SMIL,
}
}
| has_non_animation_invalidations | identifier_name |
LocalFileField.js | var _ = require('underscore'),
$ = require('jquery'),
React = require('react'),
Field = require('../Field'),
Note = require('../../components/Note'),
Select = require('react-select');
module.exports = Field.create({
shouldCollapse: function() {
return this.props.collapse && !this.hasExisting();
},
fileFieldNode: function() {
return this.refs.fileField.getDOMNode();
},
changeFile: function() {
this.refs.fileField.getDOMNode().click();
},
getFileSource: function() {
if (this.hasLocal()) {
return this.state.localSource;
} else if (this.hasExisting()) {
return this.props.value.url;
} else {
return null;
}
},
getFileURL: function() {
if (!this.hasLocal() && this.hasExisting()) {
return this.props.value.url;
}
},
undoRemove: function() {
this.fileFieldNode().value = '';
this.setState({
removeExisting: false,
localSource: null,
origin: false,
action: null
});
},
fileChanged: function (event) {
this.setState({
origin: 'local'
});
},
removeFile: function (e) {
var state = {
localSource: null,
origin: false
};
if (this.hasLocal()) {
this.fileFieldNode().value = '';
} else if (this.hasExisting()) {
state.removeExisting = true;
if (this.props.autoCleanup) {
if (e.altKey) {
state.action = 'reset';
} else {
state.action = 'delete';
}
} else {
if (e.altKey) {
state.action = 'delete';
} else {
state.action = 'reset';
} | this.setState(state);
},
hasLocal: function() {
return this.state.origin === 'local';
},
hasFile: function() {
return this.hasExisting() || this.hasLocal();
},
hasExisting: function() {
return !!this.props.value.filename;
},
getFilename: function() {
if (this.hasLocal()) {
return this.fileFieldNode().value.split('\\').pop();
} else {
return this.props.value.filename;
}
},
renderFileDetails: function (add) {
var values = null;
if (this.hasFile() && !this.state.removeExisting) {
values = <div className='file-values'>
<div className='field-value'>{this.getFilename()}</div>
</div>;
}
return <div key={this.props.path + '_details'} className='file-details'>
{values}
{add}
</div>;
},
renderAlert: function() {
if (this.hasLocal()) {
return <div className='upload-queued pull-left'>
<div className='alert alert-success'>File selected - save to upload</div>
</div>;
} else if (this.state.origin === 'cloudinary') {
return <div className='select-queued pull-left'>
<div className='alert alert-success'>File selected from Cloudinary</div>
</div>;
} else if (this.state.removeExisting) {
return <div className='delete-queued pull-left'>
<div className='alert alert-danger'>File {this.props.autoCleanup ? 'deleted' : 'removed'} - save to confirm</div>
</div>;
} else {
return null;
}
},
renderClearButton: function() {
if (this.state.removeExisting) {
return <button type='button' className='btn btn-link btn-cancel btn-undo-file' onClick={this.undoRemove}>
Undo Remove
</button>;
} else {
var clearText;
if (this.hasLocal()) {
clearText = 'Cancel Upload';
} else {
clearText = (this.props.autoCleanup ? 'Delete File' : 'Remove File');
}
return <button type='button' className='btn btn-link btn-cancel btn-delete-file' onClick={this.removeFile}>
{clearText}
</button>;
}
},
renderFileField: function() {
return <input ref='fileField' type='file' name={this.props.paths.upload} className='field-upload' onChange={this.fileChanged} />;
},
renderFileAction: function() {
return <input type='hidden' name={this.props.paths.action} className='field-action' value={this.state.action} />;
},
renderFileToolbar: function() {
return <div key={this.props.path + '_toolbar'} className='file-toolbar'>
<div className='pull-left'>
<button type='button' onClick={this.changeFile} className='btn btn-default btn-upload-file'>
{this.hasFile() ? 'Change' : 'Upload'} File
</button>
{this.hasFile() && this.renderClearButton()}
</div>
</div>;
},
renderUI: function() {
var container = [],
body = [],
hasFile = this.hasFile(),
fieldClassName = 'field-ui';
if (hasFile) {
fieldClassName += ' has-file';
}
if (this.shouldRenderField()) {
if (hasFile) {
container.push(this.renderFileDetails(this.renderAlert()));
}
body.push(this.renderFileToolbar());
} else {
if (hasFile) {
container.push(this.renderFileDetails());
} else {
container.push(<div className='help-block'>no file</div>);
}
}
return <div className='field field-type-localfile'>
<label className='field-label'>{this.props.label}</label>
{this.renderFileField()}
{this.renderFileAction()}
<div className={fieldClassName}>
<div className='file-container'>{container}</div>
{body}
<Note note={this.props.note} />
</div>
</div>;
}
}); | }
}
| random_line_split |
LocalFileField.js | var _ = require('underscore'),
$ = require('jquery'),
React = require('react'),
Field = require('../Field'),
Note = require('../../components/Note'),
Select = require('react-select');
module.exports = Field.create({
shouldCollapse: function() {
return this.props.collapse && !this.hasExisting();
},
fileFieldNode: function() {
return this.refs.fileField.getDOMNode();
},
changeFile: function() {
this.refs.fileField.getDOMNode().click();
},
getFileSource: function() {
if (this.hasLocal()) {
return this.state.localSource;
} else if (this.hasExisting()) {
return this.props.value.url;
} else {
return null;
}
},
getFileURL: function() {
if (!this.hasLocal() && this.hasExisting()) {
return this.props.value.url;
}
},
undoRemove: function() {
this.fileFieldNode().value = '';
this.setState({
removeExisting: false,
localSource: null,
origin: false,
action: null
});
},
fileChanged: function (event) {
this.setState({
origin: 'local'
});
},
removeFile: function (e) {
var state = {
localSource: null,
origin: false
};
if (this.hasLocal()) {
this.fileFieldNode().value = '';
} else if (this.hasExisting()) {
state.removeExisting = true;
if (this.props.autoCleanup) {
if (e.altKey) {
state.action = 'reset';
} else {
state.action = 'delete';
}
} else {
if (e.altKey) {
state.action = 'delete';
} else {
state.action = 'reset';
}
}
}
this.setState(state);
},
hasLocal: function() {
return this.state.origin === 'local';
},
hasFile: function() {
return this.hasExisting() || this.hasLocal();
},
hasExisting: function() {
return !!this.props.value.filename;
},
getFilename: function() {
if (this.hasLocal()) {
return this.fileFieldNode().value.split('\\').pop();
} else {
return this.props.value.filename;
}
},
renderFileDetails: function (add) {
var values = null;
if (this.hasFile() && !this.state.removeExisting) {
values = <div className='file-values'>
<div className='field-value'>{this.getFilename()}</div>
</div>;
}
return <div key={this.props.path + '_details'} className='file-details'>
{values}
{add}
</div>;
},
renderAlert: function() {
if (this.hasLocal()) {
return <div className='upload-queued pull-left'>
<div className='alert alert-success'>File selected - save to upload</div>
</div>;
} else if (this.state.origin === 'cloudinary') {
return <div className='select-queued pull-left'>
<div className='alert alert-success'>File selected from Cloudinary</div>
</div>;
} else if (this.state.removeExisting) {
return <div className='delete-queued pull-left'>
<div className='alert alert-danger'>File {this.props.autoCleanup ? 'deleted' : 'removed'} - save to confirm</div>
</div>;
} else {
return null;
}
},
renderClearButton: function() {
if (this.state.removeExisting) {
return <button type='button' className='btn btn-link btn-cancel btn-undo-file' onClick={this.undoRemove}>
Undo Remove
</button>;
} else {
var clearText;
if (this.hasLocal()) {
clearText = 'Cancel Upload';
} else |
return <button type='button' className='btn btn-link btn-cancel btn-delete-file' onClick={this.removeFile}>
{clearText}
</button>;
}
},
renderFileField: function() {
return <input ref='fileField' type='file' name={this.props.paths.upload} className='field-upload' onChange={this.fileChanged} />;
},
renderFileAction: function() {
return <input type='hidden' name={this.props.paths.action} className='field-action' value={this.state.action} />;
},
renderFileToolbar: function() {
return <div key={this.props.path + '_toolbar'} className='file-toolbar'>
<div className='pull-left'>
<button type='button' onClick={this.changeFile} className='btn btn-default btn-upload-file'>
{this.hasFile() ? 'Change' : 'Upload'} File
</button>
{this.hasFile() && this.renderClearButton()}
</div>
</div>;
},
renderUI: function() {
var container = [],
body = [],
hasFile = this.hasFile(),
fieldClassName = 'field-ui';
if (hasFile) {
fieldClassName += ' has-file';
}
if (this.shouldRenderField()) {
if (hasFile) {
container.push(this.renderFileDetails(this.renderAlert()));
}
body.push(this.renderFileToolbar());
} else {
if (hasFile) {
container.push(this.renderFileDetails());
} else {
container.push(<div className='help-block'>no file</div>);
}
}
return <div className='field field-type-localfile'>
<label className='field-label'>{this.props.label}</label>
{this.renderFileField()}
{this.renderFileAction()}
<div className={fieldClassName}>
<div className='file-container'>{container}</div>
{body}
<Note note={this.props.note} />
</div>
</div>;
}
});
| {
clearText = (this.props.autoCleanup ? 'Delete File' : 'Remove File');
} | conditional_block |
text.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Computed types for text properties.
#[cfg(feature = "servo")]
use properties::StyleBuilder;
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};
use values::{CSSInteger, CSSFloat};
use values::computed::{NonNegativeLength, NonNegativeNumber};
use values::computed::length::{Length, LengthOrPercentage};
use values::generics::text::InitialLetter as GenericInitialLetter;
use values::generics::text::LineHeight as GenericLineHeight;
use values::generics::text::Spacing;
use values::specified::text::{TextOverflowSide, TextDecorationLine};
pub use values::specified::TextAlignKeyword as TextAlign;
/// A computed value for the `initial-letter` property.
pub type InitialLetter = GenericInitialLetter<CSSFloat, CSSInteger>;
/// A computed value for the `letter-spacing` property.
pub type LetterSpacing = Spacing<Length>;
/// A computed value for the `word-spacing` property.
pub type WordSpacing = Spacing<LengthOrPercentage>;
/// A computed value for the `line-height` property.
pub type LineHeight = GenericLineHeight<NonNegativeNumber, NonNegativeLength>;
#[derive(Clone, Debug, MallocSizeOf, PartialEq)]
/// text-overflow.
/// When the specified value only has one side, that's the "second"
/// side, and the sides are logical, so "second" means "end". The
/// start side is Clip in that case.
///
/// When the specified value has two sides, those are our "first"
/// and "second" sides, and they are physical sides ("left" and
/// "right").
pub struct TextOverflow {
/// First side
pub first: TextOverflowSide,
/// Second side
pub second: TextOverflowSide,
/// True if the specified value only has one side.
pub sides_are_logical: bool,
}
impl TextOverflow {
/// Returns the initial `text-overflow` value
pub fn get_initial_value() -> TextOverflow {
TextOverflow {
first: TextOverflowSide::Clip,
second: TextOverflowSide::Clip,
sides_are_logical: true,
}
}
}
impl ToCss for TextOverflow {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.sides_are_logical {
debug_assert_eq!(self.first, TextOverflowSide::Clip);
self.second.to_css(dest)?;
} else {
self.first.to_css(dest)?;
dest.write_str(" ")?;
self.second.to_css(dest)?;
}
Ok(())
}
}
impl ToCss for TextDecorationLine {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{ | let mut has_any = false;
macro_rules! write_value {
($line:path => $css:expr) => {
if self.contains($line) {
if has_any {
dest.write_str(" ")?;
}
dest.write_str($css)?;
has_any = true;
}
}
}
write_value!(TextDecorationLine::UNDERLINE => "underline");
write_value!(TextDecorationLine::OVERLINE => "overline");
write_value!(TextDecorationLine::LINE_THROUGH => "line-through");
write_value!(TextDecorationLine::BLINK => "blink");
if !has_any {
dest.write_str("none")?;
}
Ok(())
}
}
/// A struct that represents the _used_ value of the text-decoration property.
///
/// FIXME(emilio): This is done at style resolution time, though probably should
/// be done at layout time, otherwise we need to account for display: contents
/// and similar stuff when we implement it.
///
/// FIXME(emilio): Also, should be just a bitfield instead of three bytes.
#[derive(Clone, Copy, Debug, Default, MallocSizeOf, PartialEq)]
pub struct TextDecorationsInEffect {
/// Whether an underline is in effect.
pub underline: bool,
/// Whether an overline decoration is in effect.
pub overline: bool,
/// Whether a line-through style is in effect.
pub line_through: bool,
}
impl TextDecorationsInEffect {
/// Computes the text-decorations in effect for a given style.
#[cfg(feature = "servo")]
pub fn from_style(style: &StyleBuilder) -> Self {
use values::computed::Display;
// Start with no declarations if this is an atomic inline-level box;
// otherwise, start with the declarations in effect and add in the text
// decorations that this block specifies.
let mut result = match style.get_box().clone_display() {
Display::InlineBlock |
Display::InlineTable => Self::default(),
_ => style.get_parent_inheritedtext().text_decorations_in_effect.clone(),
};
let text_style = style.get_text();
result.underline |= text_style.has_underline();
result.overline |= text_style.has_overline();
result.line_through |= text_style.has_line_through();
result
}
} | random_line_split |
|
text.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Computed types for text properties.
#[cfg(feature = "servo")]
use properties::StyleBuilder;
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};
use values::{CSSInteger, CSSFloat};
use values::computed::{NonNegativeLength, NonNegativeNumber};
use values::computed::length::{Length, LengthOrPercentage};
use values::generics::text::InitialLetter as GenericInitialLetter;
use values::generics::text::LineHeight as GenericLineHeight;
use values::generics::text::Spacing;
use values::specified::text::{TextOverflowSide, TextDecorationLine};
pub use values::specified::TextAlignKeyword as TextAlign;
/// A computed value for the `initial-letter` property.
pub type InitialLetter = GenericInitialLetter<CSSFloat, CSSInteger>;
/// A computed value for the `letter-spacing` property.
pub type LetterSpacing = Spacing<Length>;
/// A computed value for the `word-spacing` property.
pub type WordSpacing = Spacing<LengthOrPercentage>;
/// A computed value for the `line-height` property.
pub type LineHeight = GenericLineHeight<NonNegativeNumber, NonNegativeLength>;
#[derive(Clone, Debug, MallocSizeOf, PartialEq)]
/// text-overflow.
/// When the specified value only has one side, that's the "second"
/// side, and the sides are logical, so "second" means "end". The
/// start side is Clip in that case.
///
/// When the specified value has two sides, those are our "first"
/// and "second" sides, and they are physical sides ("left" and
/// "right").
pub struct TextOverflow {
/// First side
pub first: TextOverflowSide,
/// Second side
pub second: TextOverflowSide,
/// True if the specified value only has one side.
pub sides_are_logical: bool,
}
impl TextOverflow {
/// Returns the initial `text-overflow` value
pub fn get_initial_value() -> TextOverflow {
TextOverflow {
first: TextOverflowSide::Clip,
second: TextOverflowSide::Clip,
sides_are_logical: true,
}
}
}
impl ToCss for TextOverflow {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.sides_are_logical {
debug_assert_eq!(self.first, TextOverflowSide::Clip);
self.second.to_css(dest)?;
} else {
self.first.to_css(dest)?;
dest.write_str(" ")?;
self.second.to_css(dest)?;
}
Ok(())
}
}
impl ToCss for TextDecorationLine {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
let mut has_any = false;
macro_rules! write_value {
($line:path => $css:expr) => {
if self.contains($line) {
if has_any {
dest.write_str(" ")?;
}
dest.write_str($css)?;
has_any = true;
}
}
}
write_value!(TextDecorationLine::UNDERLINE => "underline");
write_value!(TextDecorationLine::OVERLINE => "overline");
write_value!(TextDecorationLine::LINE_THROUGH => "line-through");
write_value!(TextDecorationLine::BLINK => "blink");
if !has_any {
dest.write_str("none")?;
}
Ok(())
}
}
/// A struct that represents the _used_ value of the text-decoration property.
///
/// FIXME(emilio): This is done at style resolution time, though probably should
/// be done at layout time, otherwise we need to account for display: contents
/// and similar stuff when we implement it.
///
/// FIXME(emilio): Also, should be just a bitfield instead of three bytes.
#[derive(Clone, Copy, Debug, Default, MallocSizeOf, PartialEq)]
pub struct TextDecorationsInEffect {
/// Whether an underline is in effect.
pub underline: bool,
/// Whether an overline decoration is in effect.
pub overline: bool,
/// Whether a line-through style is in effect.
pub line_through: bool,
}
impl TextDecorationsInEffect {
/// Computes the text-decorations in effect for a given style.
#[cfg(feature = "servo")]
pub fn | (style: &StyleBuilder) -> Self {
use values::computed::Display;
// Start with no declarations if this is an atomic inline-level box;
// otherwise, start with the declarations in effect and add in the text
// decorations that this block specifies.
let mut result = match style.get_box().clone_display() {
Display::InlineBlock |
Display::InlineTable => Self::default(),
_ => style.get_parent_inheritedtext().text_decorations_in_effect.clone(),
};
let text_style = style.get_text();
result.underline |= text_style.has_underline();
result.overline |= text_style.has_overline();
result.line_through |= text_style.has_line_through();
result
}
}
| from_style | identifier_name |
text.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Computed types for text properties.
#[cfg(feature = "servo")]
use properties::StyleBuilder;
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};
use values::{CSSInteger, CSSFloat};
use values::computed::{NonNegativeLength, NonNegativeNumber};
use values::computed::length::{Length, LengthOrPercentage};
use values::generics::text::InitialLetter as GenericInitialLetter;
use values::generics::text::LineHeight as GenericLineHeight;
use values::generics::text::Spacing;
use values::specified::text::{TextOverflowSide, TextDecorationLine};
pub use values::specified::TextAlignKeyword as TextAlign;
/// A computed value for the `initial-letter` property.
pub type InitialLetter = GenericInitialLetter<CSSFloat, CSSInteger>;
/// A computed value for the `letter-spacing` property.
pub type LetterSpacing = Spacing<Length>;
/// A computed value for the `word-spacing` property.
pub type WordSpacing = Spacing<LengthOrPercentage>;
/// A computed value for the `line-height` property.
pub type LineHeight = GenericLineHeight<NonNegativeNumber, NonNegativeLength>;
#[derive(Clone, Debug, MallocSizeOf, PartialEq)]
/// text-overflow.
/// When the specified value only has one side, that's the "second"
/// side, and the sides are logical, so "second" means "end". The
/// start side is Clip in that case.
///
/// When the specified value has two sides, those are our "first"
/// and "second" sides, and they are physical sides ("left" and
/// "right").
pub struct TextOverflow {
/// First side
pub first: TextOverflowSide,
/// Second side
pub second: TextOverflowSide,
/// True if the specified value only has one side.
pub sides_are_logical: bool,
}
impl TextOverflow {
/// Returns the initial `text-overflow` value
pub fn get_initial_value() -> TextOverflow {
TextOverflow {
first: TextOverflowSide::Clip,
second: TextOverflowSide::Clip,
sides_are_logical: true,
}
}
}
impl ToCss for TextOverflow {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
|
}
impl ToCss for TextDecorationLine {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
let mut has_any = false;
macro_rules! write_value {
($line:path => $css:expr) => {
if self.contains($line) {
if has_any {
dest.write_str(" ")?;
}
dest.write_str($css)?;
has_any = true;
}
}
}
write_value!(TextDecorationLine::UNDERLINE => "underline");
write_value!(TextDecorationLine::OVERLINE => "overline");
write_value!(TextDecorationLine::LINE_THROUGH => "line-through");
write_value!(TextDecorationLine::BLINK => "blink");
if !has_any {
dest.write_str("none")?;
}
Ok(())
}
}
/// A struct that represents the _used_ value of the text-decoration property.
///
/// FIXME(emilio): This is done at style resolution time, though probably should
/// be done at layout time, otherwise we need to account for display: contents
/// and similar stuff when we implement it.
///
/// FIXME(emilio): Also, should be just a bitfield instead of three bytes.
#[derive(Clone, Copy, Debug, Default, MallocSizeOf, PartialEq)]
pub struct TextDecorationsInEffect {
/// Whether an underline is in effect.
pub underline: bool,
/// Whether an overline decoration is in effect.
pub overline: bool,
/// Whether a line-through style is in effect.
pub line_through: bool,
}
impl TextDecorationsInEffect {
/// Computes the text-decorations in effect for a given style.
#[cfg(feature = "servo")]
pub fn from_style(style: &StyleBuilder) -> Self {
use values::computed::Display;
// Start with no declarations if this is an atomic inline-level box;
// otherwise, start with the declarations in effect and add in the text
// decorations that this block specifies.
let mut result = match style.get_box().clone_display() {
Display::InlineBlock |
Display::InlineTable => Self::default(),
_ => style.get_parent_inheritedtext().text_decorations_in_effect.clone(),
};
let text_style = style.get_text();
result.underline |= text_style.has_underline();
result.overline |= text_style.has_overline();
result.line_through |= text_style.has_line_through();
result
}
}
| {
if self.sides_are_logical {
debug_assert_eq!(self.first, TextOverflowSide::Clip);
self.second.to_css(dest)?;
} else {
self.first.to_css(dest)?;
dest.write_str(" ")?;
self.second.to_css(dest)?;
}
Ok(())
} | identifier_body |
text.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Computed types for text properties.
#[cfg(feature = "servo")]
use properties::StyleBuilder;
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};
use values::{CSSInteger, CSSFloat};
use values::computed::{NonNegativeLength, NonNegativeNumber};
use values::computed::length::{Length, LengthOrPercentage};
use values::generics::text::InitialLetter as GenericInitialLetter;
use values::generics::text::LineHeight as GenericLineHeight;
use values::generics::text::Spacing;
use values::specified::text::{TextOverflowSide, TextDecorationLine};
pub use values::specified::TextAlignKeyword as TextAlign;
/// A computed value for the `initial-letter` property.
pub type InitialLetter = GenericInitialLetter<CSSFloat, CSSInteger>;
/// A computed value for the `letter-spacing` property.
pub type LetterSpacing = Spacing<Length>;
/// A computed value for the `word-spacing` property.
pub type WordSpacing = Spacing<LengthOrPercentage>;
/// A computed value for the `line-height` property.
pub type LineHeight = GenericLineHeight<NonNegativeNumber, NonNegativeLength>;
#[derive(Clone, Debug, MallocSizeOf, PartialEq)]
/// text-overflow.
/// When the specified value only has one side, that's the "second"
/// side, and the sides are logical, so "second" means "end". The
/// start side is Clip in that case.
///
/// When the specified value has two sides, those are our "first"
/// and "second" sides, and they are physical sides ("left" and
/// "right").
pub struct TextOverflow {
/// First side
pub first: TextOverflowSide,
/// Second side
pub second: TextOverflowSide,
/// True if the specified value only has one side.
pub sides_are_logical: bool,
}
impl TextOverflow {
/// Returns the initial `text-overflow` value
pub fn get_initial_value() -> TextOverflow {
TextOverflow {
first: TextOverflowSide::Clip,
second: TextOverflowSide::Clip,
sides_are_logical: true,
}
}
}
impl ToCss for TextOverflow {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.sides_are_logical {
debug_assert_eq!(self.first, TextOverflowSide::Clip);
self.second.to_css(dest)?;
} else {
self.first.to_css(dest)?;
dest.write_str(" ")?;
self.second.to_css(dest)?;
}
Ok(())
}
}
impl ToCss for TextDecorationLine {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
let mut has_any = false;
macro_rules! write_value {
($line:path => $css:expr) => {
if self.contains($line) {
if has_any {
dest.write_str(" ")?;
}
dest.write_str($css)?;
has_any = true;
}
}
}
write_value!(TextDecorationLine::UNDERLINE => "underline");
write_value!(TextDecorationLine::OVERLINE => "overline");
write_value!(TextDecorationLine::LINE_THROUGH => "line-through");
write_value!(TextDecorationLine::BLINK => "blink");
if !has_any |
Ok(())
}
}
/// A struct that represents the _used_ value of the text-decoration property.
///
/// FIXME(emilio): This is done at style resolution time, though probably should
/// be done at layout time, otherwise we need to account for display: contents
/// and similar stuff when we implement it.
///
/// FIXME(emilio): Also, should be just a bitfield instead of three bytes.
#[derive(Clone, Copy, Debug, Default, MallocSizeOf, PartialEq)]
pub struct TextDecorationsInEffect {
/// Whether an underline is in effect.
pub underline: bool,
/// Whether an overline decoration is in effect.
pub overline: bool,
/// Whether a line-through style is in effect.
pub line_through: bool,
}
impl TextDecorationsInEffect {
/// Computes the text-decorations in effect for a given style.
#[cfg(feature = "servo")]
pub fn from_style(style: &StyleBuilder) -> Self {
use values::computed::Display;
// Start with no declarations if this is an atomic inline-level box;
// otherwise, start with the declarations in effect and add in the text
// decorations that this block specifies.
let mut result = match style.get_box().clone_display() {
Display::InlineBlock |
Display::InlineTable => Self::default(),
_ => style.get_parent_inheritedtext().text_decorations_in_effect.clone(),
};
let text_style = style.get_text();
result.underline |= text_style.has_underline();
result.overline |= text_style.has_overline();
result.line_through |= text_style.has_line_through();
result
}
}
| {
dest.write_str("none")?;
} | conditional_block |
customers_not_buying_since_long_time.py | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import cint
def execute(filters=None):
if not filters: filters ={}
days_since_last_order = filters.get("days_since_last_order")
if cint(days_since_last_order) <= 0:
webnotes.msgprint("Please mention positive value in 'Days Since Last Order' field",raise_exception=1)
columns = get_columns()
customers = get_so_details()
data = []
for cust in customers:
if cint(cust[8]) >= cint(days_since_last_order):
cust.insert(7,get_last_so_amt(cust[0]))
data.append(cust)
return columns, data
def get_so_details():
return webnotes.conn.sql("""select
cust.name,
cust.customer_name,
cust.territory,
cust.customer_group, | sum(if(so.status = "Stopped",
so.net_total * so.per_delivered/100,
so.net_total)) as 'total_order_considered',
max(so.transaction_date) as 'last_sales_order_date',
DATEDIFF(CURDATE(), max(so.transaction_date)) as 'days_since_last_order'
from `tabCustomer` cust, `tabSales Order` so
where cust.name = so.customer and so.docstatus = 1
group by cust.name
order by 'days_since_last_order' desc """,as_list=1)
def get_last_so_amt(customer):
res = webnotes.conn.sql("""select net_total from `tabSales Order`
where customer ='%(customer)s' and docstatus = 1 order by transaction_date desc
limit 1""" % {'customer':customer})
return res and res[0][0] or 0
def get_columns():
return [
"Customer:Link/Customer:120",
"Customer Name:Data:120",
"Territory::120",
"Customer Group::120",
"Number of Order::120",
"Total Order Value:Currency:120",
"Total Order Considered:Currency:160",
"Last Order Amount:Currency:160",
"Last Sales Order Date:Date:160",
"Days Since Last Order::160"
] | count(distinct(so.name)) as 'num_of_order',
sum(net_total) as 'total_order_value', | random_line_split |
customers_not_buying_since_long_time.py | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import cint
def execute(filters=None):
if not filters: filters ={}
days_since_last_order = filters.get("days_since_last_order")
if cint(days_since_last_order) <= 0:
webnotes.msgprint("Please mention positive value in 'Days Since Last Order' field",raise_exception=1)
columns = get_columns()
customers = get_so_details()
data = []
for cust in customers:
if cint(cust[8]) >= cint(days_since_last_order):
cust.insert(7,get_last_so_amt(cust[0]))
data.append(cust)
return columns, data
def get_so_details():
return webnotes.conn.sql("""select
cust.name,
cust.customer_name,
cust.territory,
cust.customer_group,
count(distinct(so.name)) as 'num_of_order',
sum(net_total) as 'total_order_value',
sum(if(so.status = "Stopped",
so.net_total * so.per_delivered/100,
so.net_total)) as 'total_order_considered',
max(so.transaction_date) as 'last_sales_order_date',
DATEDIFF(CURDATE(), max(so.transaction_date)) as 'days_since_last_order'
from `tabCustomer` cust, `tabSales Order` so
where cust.name = so.customer and so.docstatus = 1
group by cust.name
order by 'days_since_last_order' desc """,as_list=1)
def get_last_so_amt(customer):
res = webnotes.conn.sql("""select net_total from `tabSales Order`
where customer ='%(customer)s' and docstatus = 1 order by transaction_date desc
limit 1""" % {'customer':customer})
return res and res[0][0] or 0
def | ():
return [
"Customer:Link/Customer:120",
"Customer Name:Data:120",
"Territory::120",
"Customer Group::120",
"Number of Order::120",
"Total Order Value:Currency:120",
"Total Order Considered:Currency:160",
"Last Order Amount:Currency:160",
"Last Sales Order Date:Date:160",
"Days Since Last Order::160"
] | get_columns | identifier_name |
customers_not_buying_since_long_time.py | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import cint
def execute(filters=None):
if not filters: filters ={}
days_since_last_order = filters.get("days_since_last_order")
if cint(days_since_last_order) <= 0:
webnotes.msgprint("Please mention positive value in 'Days Since Last Order' field",raise_exception=1)
columns = get_columns()
customers = get_so_details()
data = []
for cust in customers:
if cint(cust[8]) >= cint(days_since_last_order):
cust.insert(7,get_last_so_amt(cust[0]))
data.append(cust)
return columns, data
def get_so_details():
return webnotes.conn.sql("""select
cust.name,
cust.customer_name,
cust.territory,
cust.customer_group,
count(distinct(so.name)) as 'num_of_order',
sum(net_total) as 'total_order_value',
sum(if(so.status = "Stopped",
so.net_total * so.per_delivered/100,
so.net_total)) as 'total_order_considered',
max(so.transaction_date) as 'last_sales_order_date',
DATEDIFF(CURDATE(), max(so.transaction_date)) as 'days_since_last_order'
from `tabCustomer` cust, `tabSales Order` so
where cust.name = so.customer and so.docstatus = 1
group by cust.name
order by 'days_since_last_order' desc """,as_list=1)
def get_last_so_amt(customer):
res = webnotes.conn.sql("""select net_total from `tabSales Order`
where customer ='%(customer)s' and docstatus = 1 order by transaction_date desc
limit 1""" % {'customer':customer})
return res and res[0][0] or 0
def get_columns():
| return [
"Customer:Link/Customer:120",
"Customer Name:Data:120",
"Territory::120",
"Customer Group::120",
"Number of Order::120",
"Total Order Value:Currency:120",
"Total Order Considered:Currency:160",
"Last Order Amount:Currency:160",
"Last Sales Order Date:Date:160",
"Days Since Last Order::160"
] | identifier_body |
|
customers_not_buying_since_long_time.py | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import cint
def execute(filters=None):
if not filters: filters ={}
days_since_last_order = filters.get("days_since_last_order")
if cint(days_since_last_order) <= 0:
webnotes.msgprint("Please mention positive value in 'Days Since Last Order' field",raise_exception=1)
columns = get_columns()
customers = get_so_details()
data = []
for cust in customers:
if cint(cust[8]) >= cint(days_since_last_order):
|
return columns, data
def get_so_details():
return webnotes.conn.sql("""select
cust.name,
cust.customer_name,
cust.territory,
cust.customer_group,
count(distinct(so.name)) as 'num_of_order',
sum(net_total) as 'total_order_value',
sum(if(so.status = "Stopped",
so.net_total * so.per_delivered/100,
so.net_total)) as 'total_order_considered',
max(so.transaction_date) as 'last_sales_order_date',
DATEDIFF(CURDATE(), max(so.transaction_date)) as 'days_since_last_order'
from `tabCustomer` cust, `tabSales Order` so
where cust.name = so.customer and so.docstatus = 1
group by cust.name
order by 'days_since_last_order' desc """,as_list=1)
def get_last_so_amt(customer):
res = webnotes.conn.sql("""select net_total from `tabSales Order`
where customer ='%(customer)s' and docstatus = 1 order by transaction_date desc
limit 1""" % {'customer':customer})
return res and res[0][0] or 0
def get_columns():
return [
"Customer:Link/Customer:120",
"Customer Name:Data:120",
"Territory::120",
"Customer Group::120",
"Number of Order::120",
"Total Order Value:Currency:120",
"Total Order Considered:Currency:160",
"Last Order Amount:Currency:160",
"Last Sales Order Date:Date:160",
"Days Since Last Order::160"
] | cust.insert(7,get_last_so_amt(cust[0]))
data.append(cust) | conditional_block |
pmf.rs | use std::slice;
use itertools::Itertools;
use bio::stats::LogProb;
#[derive(Clone, Debug)]
pub struct Entry<T: Clone> {
pub value: T,
pub prob: LogProb,
}
#[derive(Clone, Debug)]
pub struct PMF<T: Clone> {
inner: Vec<Entry<T>>,
}
impl<T: Clone + Sized> PMF<T> {
/// Create a new PMF from sorted vector.
pub fn new(inner: Vec<Entry<T>>) -> Self {
PMF { inner }
}
pub fn iter(&self) -> slice::Iter<Entry<T>> {
self.inner.iter()
}
pub fn cdf(&self) -> Vec<LogProb> {
LogProb::ln_cumsum_exp(self.inner.iter().map(|e| e.prob)).collect_vec()
}
/// Return maximum a posteriori probability estimate (MAP).
pub fn map(&self) -> &T {
let mut max = self.iter().next().unwrap();
for e in self.iter() {
if e.prob >= max.prob {
max = e;
}
}
&max.value
}
}
impl<T: Clone + Sized + Copy> PMF<T> {
/// Return the 95% credible interval.
pub fn credible_interval(&self) -> (T, T) |
}
impl PMF<f32> {
pub fn scale(&mut self, scale: f32) {
for e in &mut self.inner {
e.value *= scale;
}
}
}
| {
let cdf = self.cdf();
let lower = cdf
.binary_search_by(|p| p.partial_cmp(&LogProb(0.025f64.ln())).unwrap())
.unwrap_or_else(|i| i);
let upper = cdf
.binary_search_by(|p| p.partial_cmp(&LogProb(0.975f64.ln())).unwrap())
.unwrap_or_else(|i| i);
(self.inner[lower].value, self.inner[upper].value)
} | identifier_body |
pmf.rs | use std::slice;
use itertools::Itertools;
use bio::stats::LogProb;
#[derive(Clone, Debug)]
pub struct Entry<T: Clone> {
pub value: T,
pub prob: LogProb,
}
#[derive(Clone, Debug)]
pub struct PMF<T: Clone> {
inner: Vec<Entry<T>>,
}
impl<T: Clone + Sized> PMF<T> {
/// Create a new PMF from sorted vector.
pub fn new(inner: Vec<Entry<T>>) -> Self {
PMF { inner }
}
pub fn iter(&self) -> slice::Iter<Entry<T>> {
self.inner.iter()
}
pub fn cdf(&self) -> Vec<LogProb> { | }
/// Return maximum a posteriori probability estimate (MAP).
pub fn map(&self) -> &T {
let mut max = self.iter().next().unwrap();
for e in self.iter() {
if e.prob >= max.prob {
max = e;
}
}
&max.value
}
}
impl<T: Clone + Sized + Copy> PMF<T> {
/// Return the 95% credible interval.
pub fn credible_interval(&self) -> (T, T) {
let cdf = self.cdf();
let lower = cdf
.binary_search_by(|p| p.partial_cmp(&LogProb(0.025f64.ln())).unwrap())
.unwrap_or_else(|i| i);
let upper = cdf
.binary_search_by(|p| p.partial_cmp(&LogProb(0.975f64.ln())).unwrap())
.unwrap_or_else(|i| i);
(self.inner[lower].value, self.inner[upper].value)
}
}
impl PMF<f32> {
pub fn scale(&mut self, scale: f32) {
for e in &mut self.inner {
e.value *= scale;
}
}
} | LogProb::ln_cumsum_exp(self.inner.iter().map(|e| e.prob)).collect_vec() | random_line_split |
pmf.rs | use std::slice;
use itertools::Itertools;
use bio::stats::LogProb;
#[derive(Clone, Debug)]
pub struct Entry<T: Clone> {
pub value: T,
pub prob: LogProb,
}
#[derive(Clone, Debug)]
pub struct PMF<T: Clone> {
inner: Vec<Entry<T>>,
}
impl<T: Clone + Sized> PMF<T> {
/// Create a new PMF from sorted vector.
pub fn new(inner: Vec<Entry<T>>) -> Self {
PMF { inner }
}
pub fn | (&self) -> slice::Iter<Entry<T>> {
self.inner.iter()
}
pub fn cdf(&self) -> Vec<LogProb> {
LogProb::ln_cumsum_exp(self.inner.iter().map(|e| e.prob)).collect_vec()
}
/// Return maximum a posteriori probability estimate (MAP).
pub fn map(&self) -> &T {
let mut max = self.iter().next().unwrap();
for e in self.iter() {
if e.prob >= max.prob {
max = e;
}
}
&max.value
}
}
impl<T: Clone + Sized + Copy> PMF<T> {
/// Return the 95% credible interval.
pub fn credible_interval(&self) -> (T, T) {
let cdf = self.cdf();
let lower = cdf
.binary_search_by(|p| p.partial_cmp(&LogProb(0.025f64.ln())).unwrap())
.unwrap_or_else(|i| i);
let upper = cdf
.binary_search_by(|p| p.partial_cmp(&LogProb(0.975f64.ln())).unwrap())
.unwrap_or_else(|i| i);
(self.inner[lower].value, self.inner[upper].value)
}
}
impl PMF<f32> {
pub fn scale(&mut self, scale: f32) {
for e in &mut self.inner {
e.value *= scale;
}
}
}
| iter | identifier_name |
pmf.rs | use std::slice;
use itertools::Itertools;
use bio::stats::LogProb;
#[derive(Clone, Debug)]
pub struct Entry<T: Clone> {
pub value: T,
pub prob: LogProb,
}
#[derive(Clone, Debug)]
pub struct PMF<T: Clone> {
inner: Vec<Entry<T>>,
}
impl<T: Clone + Sized> PMF<T> {
/// Create a new PMF from sorted vector.
pub fn new(inner: Vec<Entry<T>>) -> Self {
PMF { inner }
}
pub fn iter(&self) -> slice::Iter<Entry<T>> {
self.inner.iter()
}
pub fn cdf(&self) -> Vec<LogProb> {
LogProb::ln_cumsum_exp(self.inner.iter().map(|e| e.prob)).collect_vec()
}
/// Return maximum a posteriori probability estimate (MAP).
pub fn map(&self) -> &T {
let mut max = self.iter().next().unwrap();
for e in self.iter() {
if e.prob >= max.prob |
}
&max.value
}
}
impl<T: Clone + Sized + Copy> PMF<T> {
/// Return the 95% credible interval.
pub fn credible_interval(&self) -> (T, T) {
let cdf = self.cdf();
let lower = cdf
.binary_search_by(|p| p.partial_cmp(&LogProb(0.025f64.ln())).unwrap())
.unwrap_or_else(|i| i);
let upper = cdf
.binary_search_by(|p| p.partial_cmp(&LogProb(0.975f64.ln())).unwrap())
.unwrap_or_else(|i| i);
(self.inner[lower].value, self.inner[upper].value)
}
}
impl PMF<f32> {
pub fn scale(&mut self, scale: f32) {
for e in &mut self.inner {
e.value *= scale;
}
}
}
| {
max = e;
} | conditional_block |
urls.py | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
|
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'DjangoApplication1.views.home', name='home'),
# url(r'^DjangoApplication1/', include('DjangoApplication1.fob.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^Oar/$', 'Oar.views.index'),
url(r'^/$', 'oar.views.main'),
url(r'^loop_nobom/$', 'Oar.views.loop_nobom'),
url(r'^loop/$', 'Oar.views.loop'),
url(r'^loop2/$', 'Oar.views.loop2'),
) | admin.autodiscover()
| random_line_split |
index.d.ts | // Type definitions for electron-winstaller 2.6
// Project: https://github.com/electron/windows-installer
// Definitions by: Brendan Forster <https://github.com/shiftkey>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
export function convertVersion(version: string): string;
export function createWindowsInstaller(options: Options): Promise<void>;
export interface Options {
/**
* The folder path of your Electron app
*/
appDirectory: string;
/**
* The folder path to create the .exe installer in.
*
* Defaults to the installer folder at the project root.
*/
outputDirectory?: string;
/**
* The local path to a `.gif` file to display during install.
*/
loadingGif?: string;
/**
* The authors value for the nuget package metadata.
*
* Defaults to the `author` field from your app's package.json file when unspecified.
*/
authors: string;
/**
* The owners value for the nuget package metadata.
*
* Defaults to the `authors` field when unspecified.
*/
owners?: string;
/**
* The name of your app's main `.exe` file.
*
* This uses the `name` field in your app's package.json file with an added `.exe` extension when unspecified.
*/
exe?: string;
/**
* The description value for the nuget package metadata.
*
* Defaults to the `description` field from your app's package.json file when unspecified.
*/
description?: string;
/**
* The version value for the nuget package metadata.
*
* Defaults to the `version` field from your app's package.json file when unspecified.
*/
version?: string;
/**
* The title value for the nuget package metadata.
*
* Defaults to the `productName` field and then the `name` field from your app's package.json file when unspecified.
*/
title?: string;
/**
* Windows Application Model ID (appId).
*
* Defaults to the name field in your app's package.json file.
*/
name?: string;
/**
* The path to an Authenticode Code Signing Certificate
*/
certificateFile?: string;
/**
* The password to decrypt the certificate given in `certificateFile`
*/
certificatePassword?: string;
/**
* Params to pass to signtool. | */
signWithParams?: string;
/**
* A URL to an ICO file to use as the application icon (displayed in Control Panel > Programs and Features).
*
* Defaults to the Atom icon.
*/
iconUrl?: string;
/**
* The ICO file to use as the icon for the generated Setup.exe
*/
setupIcon?: string;
/**
* The name to use for the generated Setup.exe file
*/
setupExe?: string;
/**
* The name to use for the generated Setup.msi file
*/
setupMsi?: string;
/**
* Should Squirrel.Windows create an MSI installer?
*/
noMsi?: boolean;
/**
* Should Squirrel.Windows delta packages? (disable only if necessary, they are a Good Thing)
*/
noDelta?: boolean;
/**
* A URL to your existing updates. If given, these will be downloaded to create delta updates
*/
remoteReleases?: string;
/**
* Authentication token for remote updates
*/
remoteToken?: string;
} | *
* Overrides `certificateFile` and `certificatePassword`. | random_line_split |
android-fingerprint-auth.ts | import { Cordova, Plugin } from './plugin';
export interface AndroidFingerprintAuthOptions {
/**
* Required
* Used as the alias for your key in the Android Key Store.
*/
clientId: string;
/**
* Used to create credential string for encrypted token and as alias to retrieve the cipher.
*/
username?: string;
/**
* Used to create credential string for encrypted token
*/
password?: string;
/**
* Required for decrypt()
* Encrypted user credentials to decrypt upon successful authentication.
*/
token?: string;
/**
* Set to true to remove the "USE BACKUP" button
*/
disableBackup?: boolean;
/**
* Change the language. (en_US or es)
*/
locale?: string;
/**
* The device max is 5 attempts. Set this parameter if you want to allow fewer than 5 attempts.
*/
maxAttempts?: number;
/**
* Require the user to authenticate with a fingerprint to authorize every use of the key.
* New fingerprint enrollment will invalidate key and require backup authenticate to
* re-enable the fingerprint authentication dialog.
*/
userAuthRequired?: boolean;
/**
* Set the title of the fingerprint authentication dialog.
*/
dialogTitle?: string;
/**
* Set the message of the fingerprint authentication dialog.
*/
dialogMessage?: string;
/**
* Set the hint displayed by the fingerprint icon on the fingerprint authentication dialog.
*/
dialogHint?: string;
}
/**
* @name Android Fingerprint Auth
* @description
* This plugin will open a native dialog fragment prompting the user to authenticate using their fingerprint. If the device has a secure lockscreen (pattern, PIN, or password), the user may opt to authenticate using that method as a backup.
* @usage
* ```typescript
* import { AndroidFingerprintAuth } from 'ionic-native';
*
* AndroidFingerprintAuth.isAvailable()
* .then((result)=> {
* if(result.isAvailable){
* // it is available
*
* AndroidFingerprintAuth.encrypt({ clientId: "myAppName", username: "myUsername", password: "myPassword" })
* .then(result => {
* if (result.withFingerprint) {
* console.log("Successfully encrypted credentials.");
* console.log("Encrypted credentials: " + result.token);
* } else if (result.withBackup) {
* console.log('Successfully authenticated with backup password!');
* } else console.log('Didn\'t authenticate!');
* })
* .catch(error => {
* if (error === "Cancelled") {
* console.log("Fingerprint authentication cancelled");
* } else console.error(error)
* });
*
* } else {
* // fingerprint auth isn't available
* }
* })
* .catch(error => console.error(error));
* ```
* @interfaces | * AndroidFingerprintAuthOptions
*/
@Plugin({
pluginName: 'AndroidFingerprintAuth',
plugin: 'cordova-plugin-android-fingerprint-auth',
pluginRef: 'FingerprintAuth',
repo: 'https://github.com/mjwheatley/cordova-plugin-android-fingerprint-auth'
})
export class AndroidFingerprintAuth {
/**
* Opens a native dialog fragment to use the device hardware fingerprint scanner to authenticate against fingerprints registered for the device.
* @param options {AndroidFingerprintAuthOptions} Options
* @returns {Promise<any>}
*/
@Cordova()
static encrypt(options: AndroidFingerprintAuthOptions): Promise<{
/**
* Biometric authentication
*/
withFingerprint: boolean;
/**
* Authentication using backup credential activity
*/
withBackup: boolean;
/**
* base64encoded string representation of user credentials
*/
token: string;
}> {return; }
/**
* Opens a native dialog fragment to use the device hardware fingerprint scanner to authenticate against fingerprints registered for the device.
* @param options {AndroidFingerprintAuthOptions} Options
* @returns {Promise<any>}
*/
@Cordova()
static decrypt(options: AndroidFingerprintAuthOptions): Promise<{
/**
* Biometric authentication
*/
withFingerprint: boolean;
/**
* Authentication using backup credential activity
*/
withBackup: boolean;
/**
* FingerprintAuth.CipherMode.DECRYPT
* Decrypted password
*/
password: string;
}> {return; }
/**
* Check if service is available
* @returns {Promise<any>} Returns a Promise that resolves if fingerprint auth is available on the device
*/
@Cordova()
static isAvailable(): Promise<{isAvailable: boolean}> { return; }
/**
* Delete the cipher used for encryption and decryption by username
* @returns {Promise<any>} Returns a Promise that resolves if the cipher was successfully deleted
*/
@Cordova()
static delete(options: {clientId: string; username: string; }): Promise<{deleted: boolean}> { return; }
} | random_line_split |
|
android-fingerprint-auth.ts | import { Cordova, Plugin } from './plugin';
export interface AndroidFingerprintAuthOptions {
/**
* Required
* Used as the alias for your key in the Android Key Store.
*/
clientId: string;
/**
* Used to create credential string for encrypted token and as alias to retrieve the cipher.
*/
username?: string;
/**
* Used to create credential string for encrypted token
*/
password?: string;
/**
* Required for decrypt()
* Encrypted user credentials to decrypt upon successful authentication.
*/
token?: string;
/**
* Set to true to remove the "USE BACKUP" button
*/
disableBackup?: boolean;
/**
* Change the language. (en_US or es)
*/
locale?: string;
/**
* The device max is 5 attempts. Set this parameter if you want to allow fewer than 5 attempts.
*/
maxAttempts?: number;
/**
* Require the user to authenticate with a fingerprint to authorize every use of the key.
* New fingerprint enrollment will invalidate key and require backup authenticate to
* re-enable the fingerprint authentication dialog.
*/
userAuthRequired?: boolean;
/**
* Set the title of the fingerprint authentication dialog.
*/
dialogTitle?: string;
/**
* Set the message of the fingerprint authentication dialog.
*/
dialogMessage?: string;
/**
* Set the hint displayed by the fingerprint icon on the fingerprint authentication dialog.
*/
dialogHint?: string;
}
/**
* @name Android Fingerprint Auth
* @description
* This plugin will open a native dialog fragment prompting the user to authenticate using their fingerprint. If the device has a secure lockscreen (pattern, PIN, or password), the user may opt to authenticate using that method as a backup.
* @usage
* ```typescript
* import { AndroidFingerprintAuth } from 'ionic-native';
*
* AndroidFingerprintAuth.isAvailable()
* .then((result)=> {
* if(result.isAvailable){
* // it is available
*
* AndroidFingerprintAuth.encrypt({ clientId: "myAppName", username: "myUsername", password: "myPassword" })
* .then(result => {
* if (result.withFingerprint) {
* console.log("Successfully encrypted credentials.");
* console.log("Encrypted credentials: " + result.token);
* } else if (result.withBackup) {
* console.log('Successfully authenticated with backup password!');
* } else console.log('Didn\'t authenticate!');
* })
* .catch(error => {
* if (error === "Cancelled") {
* console.log("Fingerprint authentication cancelled");
* } else console.error(error)
* });
*
* } else {
* // fingerprint auth isn't available
* }
* })
* .catch(error => console.error(error));
* ```
* @interfaces
* AndroidFingerprintAuthOptions
*/
@Plugin({
pluginName: 'AndroidFingerprintAuth',
plugin: 'cordova-plugin-android-fingerprint-auth',
pluginRef: 'FingerprintAuth',
repo: 'https://github.com/mjwheatley/cordova-plugin-android-fingerprint-auth'
})
export class AndroidFingerprintAuth {
/**
* Opens a native dialog fragment to use the device hardware fingerprint scanner to authenticate against fingerprints registered for the device.
* @param options {AndroidFingerprintAuthOptions} Options
* @returns {Promise<any>}
*/
@Cordova()
static encrypt(options: AndroidFingerprintAuthOptions): Promise<{
/**
* Biometric authentication
*/
withFingerprint: boolean;
/**
* Authentication using backup credential activity
*/
withBackup: boolean;
/**
* base64encoded string representation of user credentials
*/
token: string;
}> {return; }
/**
* Opens a native dialog fragment to use the device hardware fingerprint scanner to authenticate against fingerprints registered for the device.
* @param options {AndroidFingerprintAuthOptions} Options
* @returns {Promise<any>}
*/
@Cordova()
static decrypt(options: AndroidFingerprintAuthOptions): Promise<{
/**
* Biometric authentication
*/
withFingerprint: boolean;
/**
* Authentication using backup credential activity
*/
withBackup: boolean;
/**
* FingerprintAuth.CipherMode.DECRYPT
* Decrypted password
*/
password: string;
}> {return; }
/**
* Check if service is available
* @returns {Promise<any>} Returns a Promise that resolves if fingerprint auth is available on the device
*/
@Cordova()
static isAvailable(): Promise<{isAvailable: boolean}> |
/**
* Delete the cipher used for encryption and decryption by username
* @returns {Promise<any>} Returns a Promise that resolves if the cipher was successfully deleted
*/
@Cordova()
static delete(options: {clientId: string; username: string; }): Promise<{deleted: boolean}> { return; }
}
| { return; } | identifier_body |
android-fingerprint-auth.ts | import { Cordova, Plugin } from './plugin';
export interface AndroidFingerprintAuthOptions {
/**
* Required
* Used as the alias for your key in the Android Key Store.
*/
clientId: string;
/**
* Used to create credential string for encrypted token and as alias to retrieve the cipher.
*/
username?: string;
/**
* Used to create credential string for encrypted token
*/
password?: string;
/**
* Required for decrypt()
* Encrypted user credentials to decrypt upon successful authentication.
*/
token?: string;
/**
* Set to true to remove the "USE BACKUP" button
*/
disableBackup?: boolean;
/**
* Change the language. (en_US or es)
*/
locale?: string;
/**
* The device max is 5 attempts. Set this parameter if you want to allow fewer than 5 attempts.
*/
maxAttempts?: number;
/**
* Require the user to authenticate with a fingerprint to authorize every use of the key.
* New fingerprint enrollment will invalidate key and require backup authenticate to
* re-enable the fingerprint authentication dialog.
*/
userAuthRequired?: boolean;
/**
* Set the title of the fingerprint authentication dialog.
*/
dialogTitle?: string;
/**
* Set the message of the fingerprint authentication dialog.
*/
dialogMessage?: string;
/**
* Set the hint displayed by the fingerprint icon on the fingerprint authentication dialog.
*/
dialogHint?: string;
}
/**
* @name Android Fingerprint Auth
* @description
* This plugin will open a native dialog fragment prompting the user to authenticate using their fingerprint. If the device has a secure lockscreen (pattern, PIN, or password), the user may opt to authenticate using that method as a backup.
* @usage
* ```typescript
* import { AndroidFingerprintAuth } from 'ionic-native';
*
* AndroidFingerprintAuth.isAvailable()
* .then((result)=> {
* if(result.isAvailable){
* // it is available
*
* AndroidFingerprintAuth.encrypt({ clientId: "myAppName", username: "myUsername", password: "myPassword" })
* .then(result => {
* if (result.withFingerprint) {
* console.log("Successfully encrypted credentials.");
* console.log("Encrypted credentials: " + result.token);
* } else if (result.withBackup) {
* console.log('Successfully authenticated with backup password!');
* } else console.log('Didn\'t authenticate!');
* })
* .catch(error => {
* if (error === "Cancelled") {
* console.log("Fingerprint authentication cancelled");
* } else console.error(error)
* });
*
* } else {
* // fingerprint auth isn't available
* }
* })
* .catch(error => console.error(error));
* ```
* @interfaces
* AndroidFingerprintAuthOptions
*/
@Plugin({
pluginName: 'AndroidFingerprintAuth',
plugin: 'cordova-plugin-android-fingerprint-auth',
pluginRef: 'FingerprintAuth',
repo: 'https://github.com/mjwheatley/cordova-plugin-android-fingerprint-auth'
})
export class AndroidFingerprintAuth {
/**
* Opens a native dialog fragment to use the device hardware fingerprint scanner to authenticate against fingerprints registered for the device.
* @param options {AndroidFingerprintAuthOptions} Options
* @returns {Promise<any>}
*/
@Cordova()
static | (options: AndroidFingerprintAuthOptions): Promise<{
/**
* Biometric authentication
*/
withFingerprint: boolean;
/**
* Authentication using backup credential activity
*/
withBackup: boolean;
/**
* base64encoded string representation of user credentials
*/
token: string;
}> {return; }
/**
* Opens a native dialog fragment to use the device hardware fingerprint scanner to authenticate against fingerprints registered for the device.
* @param options {AndroidFingerprintAuthOptions} Options
* @returns {Promise<any>}
*/
@Cordova()
static decrypt(options: AndroidFingerprintAuthOptions): Promise<{
/**
* Biometric authentication
*/
withFingerprint: boolean;
/**
* Authentication using backup credential activity
*/
withBackup: boolean;
/**
* FingerprintAuth.CipherMode.DECRYPT
* Decrypted password
*/
password: string;
}> {return; }
/**
* Check if service is available
* @returns {Promise<any>} Returns a Promise that resolves if fingerprint auth is available on the device
*/
@Cordova()
static isAvailable(): Promise<{isAvailable: boolean}> { return; }
/**
* Delete the cipher used for encryption and decryption by username
* @returns {Promise<any>} Returns a Promise that resolves if the cipher was successfully deleted
*/
@Cordova()
static delete(options: {clientId: string; username: string; }): Promise<{deleted: boolean}> { return; }
}
| encrypt | identifier_name |
lib.rs | #![feature(test)]
#![feature(plugin)]
#![feature(asm)]
#![feature(naked_functions)]
#![plugin(dynasm)]
#![allow(unused_features)]
#![allow(unknown_lints)]
#![allow(new_without_default)]
#![allow(match_same_arms)]
#[cfg(target_arch = "x86_64")]
extern crate dynasmrt;
#[macro_use]
extern crate bitflags;
#[macro_use]
extern crate quick_error;
#[macro_use]
extern crate nom;
pub extern crate sdl2;
extern crate blip_buf;
extern crate memmap;
extern crate fnv;
#[cfg(feature = "vectorize")]
extern crate simd;
pub mod cart;
pub mod memory;
pub mod mappers;
pub mod ppu;
pub mod apu;
pub mod io;
pub mod cpu;
pub mod screen;
pub mod audio;
mod util;
#[cfg(test)]
mod tests;
use apu::APU;
use cart::Cart;
use cpu::CPU;
use io::IO;
use ppu::PPU;
use std::cell::RefCell;
use std::cell::UnsafeCell;
use std::rc::Rc;
#[derive(Debug, Clone)]
pub struct Settings {
pub jit: bool,
pub graphics_enabled: bool,
pub sound_enabled: bool,
// The following will only be used if compiled with the debug_features feature
pub trace_cpu: bool,
pub disassemble_functions: bool,
}
impl Default for Settings {
fn default() -> Settings {
Settings {
jit: false,
graphics_enabled: true,
sound_enabled: true,
trace_cpu: false,
disassemble_functions: false,
}
}
}
pub struct EmulatorBuilder {
cart: Cart,
settings: Settings,
pub screen: Box<screen::Screen>,
pub audio_out: Box<audio::AudioOut>,
pub io: Box<IO>,
}
impl EmulatorBuilder {
pub fn new(cart: Cart, settings: Settings) -> EmulatorBuilder {
EmulatorBuilder {
cart: cart,
settings: settings,
screen: Box::new(screen::DummyScreen::default()),
audio_out: Box::new(audio::DummyAudioOut),
io: Box::new(io::DummyIO::Dummy),
}
}
pub fn new_sdl(
cart: Cart,
settings: Settings,
sdl: &sdl2::Sdl,
event_pump: &Rc<RefCell<sdl2::EventPump>>,
) -> EmulatorBuilder {
let sound_enabled = settings.sound_enabled;
let mut builder = EmulatorBuilder::new(cart, settings);
builder.screen = Box::new(screen::sdl::SDLScreen::new(sdl));
if sound_enabled {
builder.audio_out = Box::new(audio::sdl::SDLAudioOut::new(sdl));
}
builder.io = Box::new(io::sdl::SdlIO::new(event_pump.clone()));
builder
}
pub fn build(self) -> Emulator {
let settings = Rc::new(self.settings);
let dispatcher = cpu::dispatcher::Dispatcher::new();
let cart: Rc<UnsafeCell<Cart>> = Rc::new(UnsafeCell::new(self.cart));
let ppu = PPU::new(settings.clone(), cart.clone(), self.screen);
let apu = APU::new(settings.clone(), self.audio_out);
let mut cpu = CPU::new(settings, ppu, apu, self.io, cart, dispatcher);
cpu.init();
Emulator { cpu: cpu }
}
}
pub struct Emulator {
cpu: CPU,
}
impl Emulator {
pub fn run_frame(&mut self) {
self.cpu.run_frame();
}
pub fn halted(&self) -> bool {
self.cpu.halted()
}
#[cfg(feature = "debug_features")]
pub fn mouse_pick(&self, px_x: i32, px_y: i32) {
self.cpu.ppu.mouse_pick(px_x, px_y);
}
pub fn rendering_enabled(&self) -> bool |
}
| {
self.cpu.ppu.rendering_enabled()
} | identifier_body |
lib.rs | #![feature(test)]
#![feature(plugin)]
#![feature(asm)]
#![feature(naked_functions)]
#![plugin(dynasm)]
#![allow(unused_features)]
#![allow(unknown_lints)]
#![allow(new_without_default)]
#![allow(match_same_arms)]
#[cfg(target_arch = "x86_64")]
extern crate dynasmrt;
#[macro_use]
extern crate bitflags;
#[macro_use]
extern crate quick_error;
#[macro_use]
extern crate nom;
pub extern crate sdl2;
extern crate blip_buf;
extern crate memmap;
extern crate fnv;
#[cfg(feature = "vectorize")]
extern crate simd;
pub mod cart;
pub mod memory;
pub mod mappers;
pub mod ppu;
pub mod apu;
pub mod io;
pub mod cpu;
pub mod screen;
pub mod audio;
mod util;
#[cfg(test)]
mod tests;
use apu::APU;
use cart::Cart;
use cpu::CPU;
use io::IO;
use ppu::PPU;
use std::cell::RefCell;
use std::cell::UnsafeCell;
use std::rc::Rc;
#[derive(Debug, Clone)]
pub struct Settings {
pub jit: bool,
pub graphics_enabled: bool,
pub sound_enabled: bool,
// The following will only be used if compiled with the debug_features feature
pub trace_cpu: bool,
pub disassemble_functions: bool,
}
impl Default for Settings {
fn default() -> Settings {
Settings {
jit: false,
graphics_enabled: true,
sound_enabled: true,
| }
}
}
pub struct EmulatorBuilder {
cart: Cart,
settings: Settings,
pub screen: Box<screen::Screen>,
pub audio_out: Box<audio::AudioOut>,
pub io: Box<IO>,
}
impl EmulatorBuilder {
pub fn new(cart: Cart, settings: Settings) -> EmulatorBuilder {
EmulatorBuilder {
cart: cart,
settings: settings,
screen: Box::new(screen::DummyScreen::default()),
audio_out: Box::new(audio::DummyAudioOut),
io: Box::new(io::DummyIO::Dummy),
}
}
pub fn new_sdl(
cart: Cart,
settings: Settings,
sdl: &sdl2::Sdl,
event_pump: &Rc<RefCell<sdl2::EventPump>>,
) -> EmulatorBuilder {
let sound_enabled = settings.sound_enabled;
let mut builder = EmulatorBuilder::new(cart, settings);
builder.screen = Box::new(screen::sdl::SDLScreen::new(sdl));
if sound_enabled {
builder.audio_out = Box::new(audio::sdl::SDLAudioOut::new(sdl));
}
builder.io = Box::new(io::sdl::SdlIO::new(event_pump.clone()));
builder
}
pub fn build(self) -> Emulator {
let settings = Rc::new(self.settings);
let dispatcher = cpu::dispatcher::Dispatcher::new();
let cart: Rc<UnsafeCell<Cart>> = Rc::new(UnsafeCell::new(self.cart));
let ppu = PPU::new(settings.clone(), cart.clone(), self.screen);
let apu = APU::new(settings.clone(), self.audio_out);
let mut cpu = CPU::new(settings, ppu, apu, self.io, cart, dispatcher);
cpu.init();
Emulator { cpu: cpu }
}
}
pub struct Emulator {
cpu: CPU,
}
impl Emulator {
pub fn run_frame(&mut self) {
self.cpu.run_frame();
}
pub fn halted(&self) -> bool {
self.cpu.halted()
}
#[cfg(feature = "debug_features")]
pub fn mouse_pick(&self, px_x: i32, px_y: i32) {
self.cpu.ppu.mouse_pick(px_x, px_y);
}
pub fn rendering_enabled(&self) -> bool {
self.cpu.ppu.rendering_enabled()
}
} | trace_cpu: false,
disassemble_functions: false,
| random_line_split |
lib.rs | #![feature(test)]
#![feature(plugin)]
#![feature(asm)]
#![feature(naked_functions)]
#![plugin(dynasm)]
#![allow(unused_features)]
#![allow(unknown_lints)]
#![allow(new_without_default)]
#![allow(match_same_arms)]
#[cfg(target_arch = "x86_64")]
extern crate dynasmrt;
#[macro_use]
extern crate bitflags;
#[macro_use]
extern crate quick_error;
#[macro_use]
extern crate nom;
pub extern crate sdl2;
extern crate blip_buf;
extern crate memmap;
extern crate fnv;
#[cfg(feature = "vectorize")]
extern crate simd;
pub mod cart;
pub mod memory;
pub mod mappers;
pub mod ppu;
pub mod apu;
pub mod io;
pub mod cpu;
pub mod screen;
pub mod audio;
mod util;
#[cfg(test)]
mod tests;
use apu::APU;
use cart::Cart;
use cpu::CPU;
use io::IO;
use ppu::PPU;
use std::cell::RefCell;
use std::cell::UnsafeCell;
use std::rc::Rc;
#[derive(Debug, Clone)]
pub struct Settings {
pub jit: bool,
pub graphics_enabled: bool,
pub sound_enabled: bool,
// The following will only be used if compiled with the debug_features feature
pub trace_cpu: bool,
pub disassemble_functions: bool,
}
impl Default for Settings {
fn default() -> Settings {
Settings {
jit: false,
graphics_enabled: true,
sound_enabled: true,
trace_cpu: false,
disassemble_functions: false,
}
}
}
pub struct EmulatorBuilder {
cart: Cart,
settings: Settings,
pub screen: Box<screen::Screen>,
pub audio_out: Box<audio::AudioOut>,
pub io: Box<IO>,
}
impl EmulatorBuilder {
pub fn new(cart: Cart, settings: Settings) -> EmulatorBuilder {
EmulatorBuilder {
cart: cart,
settings: settings,
screen: Box::new(screen::DummyScreen::default()),
audio_out: Box::new(audio::DummyAudioOut),
io: Box::new(io::DummyIO::Dummy),
}
}
pub fn new_sdl(
cart: Cart,
settings: Settings,
sdl: &sdl2::Sdl,
event_pump: &Rc<RefCell<sdl2::EventPump>>,
) -> EmulatorBuilder {
let sound_enabled = settings.sound_enabled;
let mut builder = EmulatorBuilder::new(cart, settings);
builder.screen = Box::new(screen::sdl::SDLScreen::new(sdl));
if sound_enabled |
builder.io = Box::new(io::sdl::SdlIO::new(event_pump.clone()));
builder
}
pub fn build(self) -> Emulator {
let settings = Rc::new(self.settings);
let dispatcher = cpu::dispatcher::Dispatcher::new();
let cart: Rc<UnsafeCell<Cart>> = Rc::new(UnsafeCell::new(self.cart));
let ppu = PPU::new(settings.clone(), cart.clone(), self.screen);
let apu = APU::new(settings.clone(), self.audio_out);
let mut cpu = CPU::new(settings, ppu, apu, self.io, cart, dispatcher);
cpu.init();
Emulator { cpu: cpu }
}
}
pub struct Emulator {
cpu: CPU,
}
impl Emulator {
pub fn run_frame(&mut self) {
self.cpu.run_frame();
}
pub fn halted(&self) -> bool {
self.cpu.halted()
}
#[cfg(feature = "debug_features")]
pub fn mouse_pick(&self, px_x: i32, px_y: i32) {
self.cpu.ppu.mouse_pick(px_x, px_y);
}
pub fn rendering_enabled(&self) -> bool {
self.cpu.ppu.rendering_enabled()
}
}
| {
builder.audio_out = Box::new(audio::sdl::SDLAudioOut::new(sdl));
} | conditional_block |
lib.rs | #![feature(test)]
#![feature(plugin)]
#![feature(asm)]
#![feature(naked_functions)]
#![plugin(dynasm)]
#![allow(unused_features)]
#![allow(unknown_lints)]
#![allow(new_without_default)]
#![allow(match_same_arms)]
#[cfg(target_arch = "x86_64")]
extern crate dynasmrt;
#[macro_use]
extern crate bitflags;
#[macro_use]
extern crate quick_error;
#[macro_use]
extern crate nom;
pub extern crate sdl2;
extern crate blip_buf;
extern crate memmap;
extern crate fnv;
#[cfg(feature = "vectorize")]
extern crate simd;
pub mod cart;
pub mod memory;
pub mod mappers;
pub mod ppu;
pub mod apu;
pub mod io;
pub mod cpu;
pub mod screen;
pub mod audio;
mod util;
#[cfg(test)]
mod tests;
use apu::APU;
use cart::Cart;
use cpu::CPU;
use io::IO;
use ppu::PPU;
use std::cell::RefCell;
use std::cell::UnsafeCell;
use std::rc::Rc;
#[derive(Debug, Clone)]
pub struct Settings {
pub jit: bool,
pub graphics_enabled: bool,
pub sound_enabled: bool,
// The following will only be used if compiled with the debug_features feature
pub trace_cpu: bool,
pub disassemble_functions: bool,
}
impl Default for Settings {
fn default() -> Settings {
Settings {
jit: false,
graphics_enabled: true,
sound_enabled: true,
trace_cpu: false,
disassemble_functions: false,
}
}
}
pub struct EmulatorBuilder {
cart: Cart,
settings: Settings,
pub screen: Box<screen::Screen>,
pub audio_out: Box<audio::AudioOut>,
pub io: Box<IO>,
}
impl EmulatorBuilder {
pub fn new(cart: Cart, settings: Settings) -> EmulatorBuilder {
EmulatorBuilder {
cart: cart,
settings: settings,
screen: Box::new(screen::DummyScreen::default()),
audio_out: Box::new(audio::DummyAudioOut),
io: Box::new(io::DummyIO::Dummy),
}
}
pub fn new_sdl(
cart: Cart,
settings: Settings,
sdl: &sdl2::Sdl,
event_pump: &Rc<RefCell<sdl2::EventPump>>,
) -> EmulatorBuilder {
let sound_enabled = settings.sound_enabled;
let mut builder = EmulatorBuilder::new(cart, settings);
builder.screen = Box::new(screen::sdl::SDLScreen::new(sdl));
if sound_enabled {
builder.audio_out = Box::new(audio::sdl::SDLAudioOut::new(sdl));
}
builder.io = Box::new(io::sdl::SdlIO::new(event_pump.clone()));
builder
}
pub fn build(self) -> Emulator {
let settings = Rc::new(self.settings);
let dispatcher = cpu::dispatcher::Dispatcher::new();
let cart: Rc<UnsafeCell<Cart>> = Rc::new(UnsafeCell::new(self.cart));
let ppu = PPU::new(settings.clone(), cart.clone(), self.screen);
let apu = APU::new(settings.clone(), self.audio_out);
let mut cpu = CPU::new(settings, ppu, apu, self.io, cart, dispatcher);
cpu.init();
Emulator { cpu: cpu }
}
}
pub struct Emulator {
cpu: CPU,
}
impl Emulator {
pub fn | (&mut self) {
self.cpu.run_frame();
}
pub fn halted(&self) -> bool {
self.cpu.halted()
}
#[cfg(feature = "debug_features")]
pub fn mouse_pick(&self, px_x: i32, px_y: i32) {
self.cpu.ppu.mouse_pick(px_x, px_y);
}
pub fn rendering_enabled(&self) -> bool {
self.cpu.ppu.rendering_enabled()
}
}
| run_frame | identifier_name |
0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
| dependencies = [
]
operations = [
migrations.CreateModel(
name='SkipRequest',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('key', models.CharField(max_length=64, verbose_name='Sender Key')),
],
options={
'verbose_name': 'Skip request',
'verbose_name_plural': 'Skip requests',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Video',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('description', models.TextField(help_text='Description text for the video', verbose_name='Description', blank=True)),
('youtube_url', models.URLField(help_text='URL to a youtube video', verbose_name='Youtube URL')),
('key', models.CharField(max_length=64, null=True, verbose_name='Sender Key', blank=True)),
('deleted', models.IntegerField(default=False, verbose_name='Deleted')),
('playing', models.BooleanField(default=False, verbose_name='Playing')),
('duration', models.IntegerField(default=0, verbose_name='Duration')),
],
options={
'verbose_name': 'Video',
'verbose_name_plural': 'Videos',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='skiprequest',
name='event',
field=models.ForeignKey(verbose_name='Video', to='manager.Video'),
preserve_default=True,
),
] | identifier_body |
|
0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='SkipRequest',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('key', models.CharField(max_length=64, verbose_name='Sender Key')),
],
options={
'verbose_name': 'Skip request', | },
bases=(models.Model,),
),
migrations.CreateModel(
name='Video',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('description', models.TextField(help_text='Description text for the video', verbose_name='Description', blank=True)),
('youtube_url', models.URLField(help_text='URL to a youtube video', verbose_name='Youtube URL')),
('key', models.CharField(max_length=64, null=True, verbose_name='Sender Key', blank=True)),
('deleted', models.IntegerField(default=False, verbose_name='Deleted')),
('playing', models.BooleanField(default=False, verbose_name='Playing')),
('duration', models.IntegerField(default=0, verbose_name='Duration')),
],
options={
'verbose_name': 'Video',
'verbose_name_plural': 'Videos',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='skiprequest',
name='event',
field=models.ForeignKey(verbose_name='Video', to='manager.Video'),
preserve_default=True,
),
] | 'verbose_name_plural': 'Skip requests', | random_line_split |
0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class | (migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='SkipRequest',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('key', models.CharField(max_length=64, verbose_name='Sender Key')),
],
options={
'verbose_name': 'Skip request',
'verbose_name_plural': 'Skip requests',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Video',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('description', models.TextField(help_text='Description text for the video', verbose_name='Description', blank=True)),
('youtube_url', models.URLField(help_text='URL to a youtube video', verbose_name='Youtube URL')),
('key', models.CharField(max_length=64, null=True, verbose_name='Sender Key', blank=True)),
('deleted', models.IntegerField(default=False, verbose_name='Deleted')),
('playing', models.BooleanField(default=False, verbose_name='Playing')),
('duration', models.IntegerField(default=0, verbose_name='Duration')),
],
options={
'verbose_name': 'Video',
'verbose_name_plural': 'Videos',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='skiprequest',
name='event',
field=models.ForeignKey(verbose_name='Video', to='manager.Video'),
preserve_default=True,
),
]
| Migration | identifier_name |
pygame.py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import division
from __future__ import print_function
'''
Collected utilities for pygame
It is difficult to write pixels directly in python.
There's some way to get a framebuffer back from Tk, but it is
cumberosme.
The module pygame supports sending pixel buffers,
which is wrapped for convneinece in this module.
example usage
import neurotools.graphics.pygame as npg
import time
import numpy as np
import pygame
K = 128
screen = npg.start(K,K,'Image data')
dt = 1/20
wait_til = time.time() + dt
print('Animating..')
for i in neurotools.tools.progress_bar(range(100)):
t = time.time()
if t<wait_til: time.sleep(wait_til-t)
wait_til = t + dt
npg.draw_array(screen, np.random.rand(K,K,3))
pygame.quit()
'''
import sys
import numpy as np
try:
import pygame as pg
except:
print('pygame package is missing; it is obsolete so this is not unusual')
print('pygame graphics will not work')
pg = None
def | ():
if sys.platform != 'darwin':
return
try:
import ctypes
import ctypes.util
ogl = ctypes.cdll.LoadLibrary(ctypes.util.find_library("OpenGL"))
# set v to 1 to enable vsync, 0 to disable vsync
v = ctypes.c_int(1)
ogl.CGLSetParameter(ogl.CGLGetCurrentContext(), ctypes.c_int(222), ctypes.pointer(v))
except:
print("Unable to set vsync mode, using driver defaults")
def start(W,H,name='untitled'):
# Get things going
pg.quit()
pg.init()
enable_vsync()
window = pg.display.set_mode((W,H))
pg.display.set_caption(name)
return window
def draw_array(screen,rgbdata,doshow=True):
'''
Send array data to a PyGame window.
PyGame is BRG order which is unusual -- reorder it.
Parameters
----------
screen : object
Object returned by neurotools.graphics.pygame.start
rgbdata :
RGB image data with color values in [0,1]
'''
# Cast to int
rgbdata = np.int32(rgbdata*255)
# clip bytes to 0..255 range
rgbdata[rgbdata<0]=0
rgbdata[rgbdata>255]=255
# get color dimension
if len(rgbdata.shape)==3:
w,h,d = rgbdata.shape
else:
w,h = rgbdata.shape
d=1
# repack color data in screen format
draw = np.zeros((w,h,4),'uint8')
if d==1:
draw[...,0]=rgbdata
draw[...,1]=rgbdata
draw[...,2]=rgbdata
draw[...,3]=255 # alpha channel
if d==3:
draw[...,:3]=rgbdata[...,::-1]
draw[...,-1]=255 # alpha channel
if d==4:
draw[...,:3]=rgbdata[...,-2::-1]
draw[...,-1]=rgbdata[...,-1]
# get surface and copy data to sceeen
surface = pg.Surface((w,h))
numpy_surface = np.frombuffer(surface.get_buffer())
numpy_surface[...] = np.frombuffer(draw)
del numpy_surface
screen.blit(surface,(0,0))
if doshow:
pg.display.update()
| enable_vsync | identifier_name |
pygame.py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import division
from __future__ import print_function
'''
Collected utilities for pygame
It is difficult to write pixels directly in python.
There's some way to get a framebuffer back from Tk, but it is
cumberosme.
The module pygame supports sending pixel buffers,
which is wrapped for convneinece in this module.
example usage
import neurotools.graphics.pygame as npg
import time
import numpy as np
import pygame
K = 128
screen = npg.start(K,K,'Image data')
dt = 1/20
wait_til = time.time() + dt
print('Animating..')
for i in neurotools.tools.progress_bar(range(100)):
t = time.time()
if t<wait_til: time.sleep(wait_til-t)
wait_til = t + dt
npg.draw_array(screen, np.random.rand(K,K,3))
pygame.quit()
'''
import sys
import numpy as np
try:
import pygame as pg
except:
print('pygame package is missing; it is obsolete so this is not unusual')
print('pygame graphics will not work')
pg = None
def enable_vsync():
if sys.platform != 'darwin':
return
try:
import ctypes
import ctypes.util
ogl = ctypes.cdll.LoadLibrary(ctypes.util.find_library("OpenGL"))
# set v to 1 to enable vsync, 0 to disable vsync
v = ctypes.c_int(1)
ogl.CGLSetParameter(ogl.CGLGetCurrentContext(), ctypes.c_int(222), ctypes.pointer(v))
except:
print("Unable to set vsync mode, using driver defaults")
def start(W,H,name='untitled'):
# Get things going
pg.quit()
pg.init()
enable_vsync()
window = pg.display.set_mode((W,H))
pg.display.set_caption(name)
return window
def draw_array(screen,rgbdata,doshow=True):
'''
Send array data to a PyGame window.
PyGame is BRG order which is unusual -- reorder it.
Parameters
----------
screen : object
Object returned by neurotools.graphics.pygame.start
rgbdata :
RGB image data with color values in [0,1]
'''
# Cast to int
rgbdata = np.int32(rgbdata*255)
# clip bytes to 0..255 range
rgbdata[rgbdata<0]=0
rgbdata[rgbdata>255]=255
# get color dimension
if len(rgbdata.shape)==3:
w,h,d = rgbdata.shape
else:
w,h = rgbdata.shape
d=1
# repack color data in screen format
draw = np.zeros((w,h,4),'uint8')
if d==1:
|
if d==3:
draw[...,:3]=rgbdata[...,::-1]
draw[...,-1]=255 # alpha channel
if d==4:
draw[...,:3]=rgbdata[...,-2::-1]
draw[...,-1]=rgbdata[...,-1]
# get surface and copy data to sceeen
surface = pg.Surface((w,h))
numpy_surface = np.frombuffer(surface.get_buffer())
numpy_surface[...] = np.frombuffer(draw)
del numpy_surface
screen.blit(surface,(0,0))
if doshow:
pg.display.update()
| draw[...,0]=rgbdata
draw[...,1]=rgbdata
draw[...,2]=rgbdata
draw[...,3]=255 # alpha channel | conditional_block |
pygame.py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import division
from __future__ import print_function
'''
Collected utilities for pygame
It is difficult to write pixels directly in python.
There's some way to get a framebuffer back from Tk, but it is
cumberosme.
The module pygame supports sending pixel buffers,
which is wrapped for convneinece in this module.
example usage
import neurotools.graphics.pygame as npg
import time
import numpy as np
import pygame
K = 128
screen = npg.start(K,K,'Image data')
dt = 1/20
wait_til = time.time() + dt
print('Animating..')
for i in neurotools.tools.progress_bar(range(100)):
t = time.time()
if t<wait_til: time.sleep(wait_til-t)
wait_til = t + dt
npg.draw_array(screen, np.random.rand(K,K,3))
pygame.quit()
'''
import sys
import numpy as np
try: | print('pygame graphics will not work')
pg = None
def enable_vsync():
if sys.platform != 'darwin':
return
try:
import ctypes
import ctypes.util
ogl = ctypes.cdll.LoadLibrary(ctypes.util.find_library("OpenGL"))
# set v to 1 to enable vsync, 0 to disable vsync
v = ctypes.c_int(1)
ogl.CGLSetParameter(ogl.CGLGetCurrentContext(), ctypes.c_int(222), ctypes.pointer(v))
except:
print("Unable to set vsync mode, using driver defaults")
def start(W,H,name='untitled'):
# Get things going
pg.quit()
pg.init()
enable_vsync()
window = pg.display.set_mode((W,H))
pg.display.set_caption(name)
return window
def draw_array(screen,rgbdata,doshow=True):
'''
Send array data to a PyGame window.
PyGame is BRG order which is unusual -- reorder it.
Parameters
----------
screen : object
Object returned by neurotools.graphics.pygame.start
rgbdata :
RGB image data with color values in [0,1]
'''
# Cast to int
rgbdata = np.int32(rgbdata*255)
# clip bytes to 0..255 range
rgbdata[rgbdata<0]=0
rgbdata[rgbdata>255]=255
# get color dimension
if len(rgbdata.shape)==3:
w,h,d = rgbdata.shape
else:
w,h = rgbdata.shape
d=1
# repack color data in screen format
draw = np.zeros((w,h,4),'uint8')
if d==1:
draw[...,0]=rgbdata
draw[...,1]=rgbdata
draw[...,2]=rgbdata
draw[...,3]=255 # alpha channel
if d==3:
draw[...,:3]=rgbdata[...,::-1]
draw[...,-1]=255 # alpha channel
if d==4:
draw[...,:3]=rgbdata[...,-2::-1]
draw[...,-1]=rgbdata[...,-1]
# get surface and copy data to sceeen
surface = pg.Surface((w,h))
numpy_surface = np.frombuffer(surface.get_buffer())
numpy_surface[...] = np.frombuffer(draw)
del numpy_surface
screen.blit(surface,(0,0))
if doshow:
pg.display.update() | import pygame as pg
except:
print('pygame package is missing; it is obsolete so this is not unusual') | random_line_split |
pygame.py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
from __future__ import absolute_import
from __future__ import with_statement
from __future__ import division
from __future__ import print_function
'''
Collected utilities for pygame
It is difficult to write pixels directly in python.
There's some way to get a framebuffer back from Tk, but it is
cumberosme.
The module pygame supports sending pixel buffers,
which is wrapped for convneinece in this module.
example usage
import neurotools.graphics.pygame as npg
import time
import numpy as np
import pygame
K = 128
screen = npg.start(K,K,'Image data')
dt = 1/20
wait_til = time.time() + dt
print('Animating..')
for i in neurotools.tools.progress_bar(range(100)):
t = time.time()
if t<wait_til: time.sleep(wait_til-t)
wait_til = t + dt
npg.draw_array(screen, np.random.rand(K,K,3))
pygame.quit()
'''
import sys
import numpy as np
try:
import pygame as pg
except:
print('pygame package is missing; it is obsolete so this is not unusual')
print('pygame graphics will not work')
pg = None
def enable_vsync():
|
def start(W,H,name='untitled'):
# Get things going
pg.quit()
pg.init()
enable_vsync()
window = pg.display.set_mode((W,H))
pg.display.set_caption(name)
return window
def draw_array(screen,rgbdata,doshow=True):
'''
Send array data to a PyGame window.
PyGame is BRG order which is unusual -- reorder it.
Parameters
----------
screen : object
Object returned by neurotools.graphics.pygame.start
rgbdata :
RGB image data with color values in [0,1]
'''
# Cast to int
rgbdata = np.int32(rgbdata*255)
# clip bytes to 0..255 range
rgbdata[rgbdata<0]=0
rgbdata[rgbdata>255]=255
# get color dimension
if len(rgbdata.shape)==3:
w,h,d = rgbdata.shape
else:
w,h = rgbdata.shape
d=1
# repack color data in screen format
draw = np.zeros((w,h,4),'uint8')
if d==1:
draw[...,0]=rgbdata
draw[...,1]=rgbdata
draw[...,2]=rgbdata
draw[...,3]=255 # alpha channel
if d==3:
draw[...,:3]=rgbdata[...,::-1]
draw[...,-1]=255 # alpha channel
if d==4:
draw[...,:3]=rgbdata[...,-2::-1]
draw[...,-1]=rgbdata[...,-1]
# get surface and copy data to sceeen
surface = pg.Surface((w,h))
numpy_surface = np.frombuffer(surface.get_buffer())
numpy_surface[...] = np.frombuffer(draw)
del numpy_surface
screen.blit(surface,(0,0))
if doshow:
pg.display.update()
| if sys.platform != 'darwin':
return
try:
import ctypes
import ctypes.util
ogl = ctypes.cdll.LoadLibrary(ctypes.util.find_library("OpenGL"))
# set v to 1 to enable vsync, 0 to disable vsync
v = ctypes.c_int(1)
ogl.CGLSetParameter(ogl.CGLGetCurrentContext(), ctypes.c_int(222), ctypes.pointer(v))
except:
print("Unable to set vsync mode, using driver defaults") | identifier_body |
lib.rs | //! [Experimental] Generic programming with SIMD
#![cfg_attr(test, feature(test))]
#![cfg_attr(test, plugin(quickcheck_macros))]
#![deny(missing_docs)]
#![deny(warnings)]
#![feature(core)]
#![feature(plugin)]
#![feature(simd)]
#[cfg(test)] extern crate test as stdtest;
#[cfg(test)] extern crate approx;
#[cfg(test)] extern crate quickcheck;
#[cfg(test)] extern crate rand;
use traits::Simd;
#[cfg(test)]
mod test;
#[cfg(test)]
mod bench;
mod f32;
mod f64;
pub mod traits;
#[allow(missing_docs, non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
#[simd]
pub struct f32x4(pub f32, pub f32, pub f32, pub f32);
#[allow(missing_docs, non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
#[simd]
pub struct f64x2(pub f64, pub f64);
/// Sum the elements of a slice using SIMD ops
///
/// # Benchmarks
///
/// Size: 1 million elements
///
/// ``` ignore
/// test bench::f32::plain::sum ... bench: 962914 ns/iter (+/- 64354)
/// test bench::f32::simd::sum ... bench: 242686 ns/iter (+/- 11847)
/// test bench::f64::plain::sum ... bench: 995390 ns/iter (+/- 27834)
/// test bench::f64::simd::sum ... bench: 504943 ns/iter (+/- 22928)
/// ```
pub fn sum<T>(slice: &[T]) -> T where
T: Simd,
{
use std::ops::Add;
use traits::Vector;
let (head, body, tail) = T::Vector::cast(slice);
let sum = body.iter().map(|&x| x).fold(T::Vector::zeroed(), Add::add).sum();
let sum = head.iter().map(|&x| x).fold(sum, Add::add);
tail.iter().map(|&x| x).fold(sum, Add::add)
}
/// "Casts" a `&[A]` into an aligned `&[B]`, the elements (both in the front and in the back) that
/// don't fit in the aligned slice, will be returned as slices.
unsafe fn cast<'a, A, B>(slice: &'a [A]) -> (&'a [A], &'a [B], &'a [A]) {
use std::raw::Repr;
use std::{raw, mem};
/// Rounds down `n` to the nearest multiple of `k`
fn round_down(n: usize, k: usize) -> usize |
/// Rounds up `n` to the nearest multiple of `k`
fn round_up(n: usize, k: usize) -> usize {
let r = n % k;
if r == 0 {
n
} else {
n + k - r
}
}
let align_of_b = mem::align_of::<B>();
let size_of_a = mem::size_of::<A>();
let size_of_b = mem::size_of::<B>();
let raw::Slice { data: start, len } = slice.repr();
let end = start.offset(len as isize);
let (head_start, tail_end) = (start as usize, end as usize);
let body_start = round_up(head_start, align_of_b);
let body_end = round_down(tail_end, align_of_b);
if body_start >= body_end {
(slice, &[], &[])
} else {
let head_end = body_start;
let head_len = (head_end - head_start) / size_of_a;
let body_len = (body_end - body_start) / size_of_b;
let tail_start = body_end;
let tail_len = (tail_end - tail_start) / size_of_a;
let head = mem::transmute(raw::Slice { data: head_start as *const A, len: head_len });
let body = mem::transmute(raw::Slice { data: body_start as *const B, len: body_len });
let tail = mem::transmute(raw::Slice { data: tail_start as *const A, len: tail_len });
(head, body, tail)
}
}
| {
n - n % k
} | identifier_body |
lib.rs | //! [Experimental] Generic programming with SIMD
#![cfg_attr(test, feature(test))]
#![cfg_attr(test, plugin(quickcheck_macros))]
#![deny(missing_docs)]
#![deny(warnings)]
#![feature(core)]
#![feature(plugin)]
#![feature(simd)]
#[cfg(test)] extern crate test as stdtest;
#[cfg(test)] extern crate approx;
#[cfg(test)] extern crate quickcheck;
#[cfg(test)] extern crate rand;
use traits::Simd;
#[cfg(test)]
mod test;
#[cfg(test)]
mod bench;
mod f32;
mod f64;
| pub struct f32x4(pub f32, pub f32, pub f32, pub f32);
#[allow(missing_docs, non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
#[simd]
pub struct f64x2(pub f64, pub f64);
/// Sum the elements of a slice using SIMD ops
///
/// # Benchmarks
///
/// Size: 1 million elements
///
/// ``` ignore
/// test bench::f32::plain::sum ... bench: 962914 ns/iter (+/- 64354)
/// test bench::f32::simd::sum ... bench: 242686 ns/iter (+/- 11847)
/// test bench::f64::plain::sum ... bench: 995390 ns/iter (+/- 27834)
/// test bench::f64::simd::sum ... bench: 504943 ns/iter (+/- 22928)
/// ```
pub fn sum<T>(slice: &[T]) -> T where
T: Simd,
{
use std::ops::Add;
use traits::Vector;
let (head, body, tail) = T::Vector::cast(slice);
let sum = body.iter().map(|&x| x).fold(T::Vector::zeroed(), Add::add).sum();
let sum = head.iter().map(|&x| x).fold(sum, Add::add);
tail.iter().map(|&x| x).fold(sum, Add::add)
}
/// "Casts" a `&[A]` into an aligned `&[B]`, the elements (both in the front and in the back) that
/// don't fit in the aligned slice, will be returned as slices.
unsafe fn cast<'a, A, B>(slice: &'a [A]) -> (&'a [A], &'a [B], &'a [A]) {
use std::raw::Repr;
use std::{raw, mem};
/// Rounds down `n` to the nearest multiple of `k`
fn round_down(n: usize, k: usize) -> usize {
n - n % k
}
/// Rounds up `n` to the nearest multiple of `k`
fn round_up(n: usize, k: usize) -> usize {
let r = n % k;
if r == 0 {
n
} else {
n + k - r
}
}
let align_of_b = mem::align_of::<B>();
let size_of_a = mem::size_of::<A>();
let size_of_b = mem::size_of::<B>();
let raw::Slice { data: start, len } = slice.repr();
let end = start.offset(len as isize);
let (head_start, tail_end) = (start as usize, end as usize);
let body_start = round_up(head_start, align_of_b);
let body_end = round_down(tail_end, align_of_b);
if body_start >= body_end {
(slice, &[], &[])
} else {
let head_end = body_start;
let head_len = (head_end - head_start) / size_of_a;
let body_len = (body_end - body_start) / size_of_b;
let tail_start = body_end;
let tail_len = (tail_end - tail_start) / size_of_a;
let head = mem::transmute(raw::Slice { data: head_start as *const A, len: head_len });
let body = mem::transmute(raw::Slice { data: body_start as *const B, len: body_len });
let tail = mem::transmute(raw::Slice { data: tail_start as *const A, len: tail_len });
(head, body, tail)
}
} | pub mod traits;
#[allow(missing_docs, non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
#[simd] | random_line_split |
lib.rs | //! [Experimental] Generic programming with SIMD
#![cfg_attr(test, feature(test))]
#![cfg_attr(test, plugin(quickcheck_macros))]
#![deny(missing_docs)]
#![deny(warnings)]
#![feature(core)]
#![feature(plugin)]
#![feature(simd)]
#[cfg(test)] extern crate test as stdtest;
#[cfg(test)] extern crate approx;
#[cfg(test)] extern crate quickcheck;
#[cfg(test)] extern crate rand;
use traits::Simd;
#[cfg(test)]
mod test;
#[cfg(test)]
mod bench;
mod f32;
mod f64;
pub mod traits;
#[allow(missing_docs, non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
#[simd]
pub struct f32x4(pub f32, pub f32, pub f32, pub f32);
#[allow(missing_docs, non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
#[simd]
pub struct f64x2(pub f64, pub f64);
/// Sum the elements of a slice using SIMD ops
///
/// # Benchmarks
///
/// Size: 1 million elements
///
/// ``` ignore
/// test bench::f32::plain::sum ... bench: 962914 ns/iter (+/- 64354)
/// test bench::f32::simd::sum ... bench: 242686 ns/iter (+/- 11847)
/// test bench::f64::plain::sum ... bench: 995390 ns/iter (+/- 27834)
/// test bench::f64::simd::sum ... bench: 504943 ns/iter (+/- 22928)
/// ```
pub fn | <T>(slice: &[T]) -> T where
T: Simd,
{
use std::ops::Add;
use traits::Vector;
let (head, body, tail) = T::Vector::cast(slice);
let sum = body.iter().map(|&x| x).fold(T::Vector::zeroed(), Add::add).sum();
let sum = head.iter().map(|&x| x).fold(sum, Add::add);
tail.iter().map(|&x| x).fold(sum, Add::add)
}
/// "Casts" a `&[A]` into an aligned `&[B]`, the elements (both in the front and in the back) that
/// don't fit in the aligned slice, will be returned as slices.
unsafe fn cast<'a, A, B>(slice: &'a [A]) -> (&'a [A], &'a [B], &'a [A]) {
use std::raw::Repr;
use std::{raw, mem};
/// Rounds down `n` to the nearest multiple of `k`
fn round_down(n: usize, k: usize) -> usize {
n - n % k
}
/// Rounds up `n` to the nearest multiple of `k`
fn round_up(n: usize, k: usize) -> usize {
let r = n % k;
if r == 0 {
n
} else {
n + k - r
}
}
let align_of_b = mem::align_of::<B>();
let size_of_a = mem::size_of::<A>();
let size_of_b = mem::size_of::<B>();
let raw::Slice { data: start, len } = slice.repr();
let end = start.offset(len as isize);
let (head_start, tail_end) = (start as usize, end as usize);
let body_start = round_up(head_start, align_of_b);
let body_end = round_down(tail_end, align_of_b);
if body_start >= body_end {
(slice, &[], &[])
} else {
let head_end = body_start;
let head_len = (head_end - head_start) / size_of_a;
let body_len = (body_end - body_start) / size_of_b;
let tail_start = body_end;
let tail_len = (tail_end - tail_start) / size_of_a;
let head = mem::transmute(raw::Slice { data: head_start as *const A, len: head_len });
let body = mem::transmute(raw::Slice { data: body_start as *const B, len: body_len });
let tail = mem::transmute(raw::Slice { data: tail_start as *const A, len: tail_len });
(head, body, tail)
}
}
| sum | identifier_name |
lib.rs | //! [Experimental] Generic programming with SIMD
#![cfg_attr(test, feature(test))]
#![cfg_attr(test, plugin(quickcheck_macros))]
#![deny(missing_docs)]
#![deny(warnings)]
#![feature(core)]
#![feature(plugin)]
#![feature(simd)]
#[cfg(test)] extern crate test as stdtest;
#[cfg(test)] extern crate approx;
#[cfg(test)] extern crate quickcheck;
#[cfg(test)] extern crate rand;
use traits::Simd;
#[cfg(test)]
mod test;
#[cfg(test)]
mod bench;
mod f32;
mod f64;
pub mod traits;
#[allow(missing_docs, non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
#[simd]
pub struct f32x4(pub f32, pub f32, pub f32, pub f32);
#[allow(missing_docs, non_camel_case_types)]
#[derive(Clone, Copy, Debug)]
#[simd]
pub struct f64x2(pub f64, pub f64);
/// Sum the elements of a slice using SIMD ops
///
/// # Benchmarks
///
/// Size: 1 million elements
///
/// ``` ignore
/// test bench::f32::plain::sum ... bench: 962914 ns/iter (+/- 64354)
/// test bench::f32::simd::sum ... bench: 242686 ns/iter (+/- 11847)
/// test bench::f64::plain::sum ... bench: 995390 ns/iter (+/- 27834)
/// test bench::f64::simd::sum ... bench: 504943 ns/iter (+/- 22928)
/// ```
pub fn sum<T>(slice: &[T]) -> T where
T: Simd,
{
use std::ops::Add;
use traits::Vector;
let (head, body, tail) = T::Vector::cast(slice);
let sum = body.iter().map(|&x| x).fold(T::Vector::zeroed(), Add::add).sum();
let sum = head.iter().map(|&x| x).fold(sum, Add::add);
tail.iter().map(|&x| x).fold(sum, Add::add)
}
/// "Casts" a `&[A]` into an aligned `&[B]`, the elements (both in the front and in the back) that
/// don't fit in the aligned slice, will be returned as slices.
unsafe fn cast<'a, A, B>(slice: &'a [A]) -> (&'a [A], &'a [B], &'a [A]) {
use std::raw::Repr;
use std::{raw, mem};
/// Rounds down `n` to the nearest multiple of `k`
fn round_down(n: usize, k: usize) -> usize {
n - n % k
}
/// Rounds up `n` to the nearest multiple of `k`
fn round_up(n: usize, k: usize) -> usize {
let r = n % k;
if r == 0 {
n
} else {
n + k - r
}
}
let align_of_b = mem::align_of::<B>();
let size_of_a = mem::size_of::<A>();
let size_of_b = mem::size_of::<B>();
let raw::Slice { data: start, len } = slice.repr();
let end = start.offset(len as isize);
let (head_start, tail_end) = (start as usize, end as usize);
let body_start = round_up(head_start, align_of_b);
let body_end = round_down(tail_end, align_of_b);
if body_start >= body_end | else {
let head_end = body_start;
let head_len = (head_end - head_start) / size_of_a;
let body_len = (body_end - body_start) / size_of_b;
let tail_start = body_end;
let tail_len = (tail_end - tail_start) / size_of_a;
let head = mem::transmute(raw::Slice { data: head_start as *const A, len: head_len });
let body = mem::transmute(raw::Slice { data: body_start as *const B, len: body_len });
let tail = mem::transmute(raw::Slice { data: tail_start as *const A, len: tail_len });
(head, body, tail)
}
}
| {
(slice, &[], &[])
} | conditional_block |
setup.py | from setuptools import setup, find_packages
setup( | name='zeit.content.gallery',
version='2.9.2.dev0',
author='gocept, Zeit Online',
author_email='[email protected]',
url='http://www.zeit.de/',
description="vivi Content-Type Portraitbox",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit', 'zeit.content'],
install_requires=[
'cssselect',
'Pillow',
'gocept.form',
'setuptools',
'zeit.cms >= 3.0.dev0',
'zeit.connector>=2.4.0.dev0',
'zeit.imp>=0.15.0.dev0',
'zeit.content.image',
'zeit.push>=1.21.0.dev0',
'zeit.wysiwyg',
'zope.app.appsetup',
'zope.app.testing',
'zope.component',
'zope.formlib',
'zope.interface',
'zope.publisher',
'zope.security',
'zope.testing',
],
entry_points={
'fanstatic.libraries': [
'zeit_content_gallery=zeit.content.gallery.browser.resources:lib',
],
},
) | random_line_split |
|
version.rs | use std::collections::HashMap;
use conduit::{Request, Response};
use conduit_router::RequestParams;
use diesel;
use diesel::pg::Pg;
use diesel::pg::upsert::*;
use diesel::prelude::*;
use semver;
use serde_json;
use time::{Duration, Timespec, now_utc, strptime};
use url;
use Crate;
use app::RequestApp;
use db::RequestTransaction;
use dependency::{Dependency, EncodableDependency};
use download::{VersionDownload, EncodableVersionDownload};
use git;
use owner::{rights, Rights};
use schema::*;
use user::RequestUser;
use util::errors::CargoError;
use util::{RequestUtils, CargoResult, human};
use license_exprs;
// This is necessary to allow joining version to both crates and readme_rendering
// in the render-readmes script.
enable_multi_table_joins!(crates, readme_rendering);
// Queryable has a custom implementation below
#[derive(Clone, Identifiable, Associations, Debug)]
#[belongs_to(Crate)]
pub struct Version {
pub id: i32,
pub crate_id: i32,
pub num: semver::Version,
pub updated_at: Timespec,
pub created_at: Timespec,
pub downloads: i32,
pub features: HashMap<String, Vec<String>>,
pub yanked: bool,
pub license: Option<String>,
}
#[derive(Insertable, Debug)]
#[table_name = "versions"]
pub struct NewVersion {
crate_id: i32,
num: String,
features: String,
license: Option<String>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct EncodableVersion {
pub id: i32,
#[serde(rename = "crate")]
pub krate: String,
pub num: String,
pub dl_path: String,
pub readme_path: String,
pub updated_at: String,
pub created_at: String,
pub downloads: i32,
pub features: HashMap<String, Vec<String>>,
pub yanked: bool,
pub license: Option<String>,
pub links: VersionLinks,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct VersionLinks {
pub dependencies: String,
pub version_downloads: String,
pub authors: String,
}
#[derive(Insertable, Identifiable, Queryable, Associations, Debug, Clone, Copy)]
#[belongs_to(Version)]
#[table_name = "readme_rendering"]
#[primary_key(version_id)]
struct ReadmeRendering {
version_id: i32,
rendered_at: Timespec,
}
impl Version {
pub fn encodable(self, crate_name: &str) -> EncodableVersion {
let Version {
id,
num,
updated_at,
created_at,
downloads,
features,
yanked,
license,
..
} = self;
let num = num.to_string();
EncodableVersion {
dl_path: format!("/api/v1/crates/{}/{}/download", crate_name, num),
readme_path: format!("/api/v1/crates/{}/{}/readme", crate_name, num),
num: num.clone(),
id: id,
krate: crate_name.to_string(),
updated_at: ::encode_time(updated_at),
created_at: ::encode_time(created_at),
downloads: downloads,
features: features,
yanked: yanked,
license: license,
links: VersionLinks {
dependencies: format!("/api/v1/crates/{}/{}/dependencies", crate_name, num),
version_downloads: format!("/api/v1/crates/{}/{}/downloads", crate_name, num),
authors: format!("/api/v1/crates/{}/{}/authors", crate_name, num),
},
}
}
/// Returns (dependency, crate dependency name) | .select((dependencies::all_columns, crates::name))
.order((dependencies::optional, crates::name))
.load(conn)
}
pub fn max<T>(versions: T) -> semver::Version
where
T: IntoIterator<Item = semver::Version>,
{
versions.into_iter().max().unwrap_or_else(|| {
semver::Version {
major: 0,
minor: 0,
patch: 0,
pre: vec![],
build: vec![],
}
})
}
pub fn record_readme_rendering(&self, conn: &PgConnection) -> CargoResult<()> {
let rendered = ReadmeRendering {
version_id: self.id,
rendered_at: ::now(),
};
diesel::insert(&rendered.on_conflict(
readme_rendering::version_id,
do_update().set(readme_rendering::rendered_at.eq(
excluded(
readme_rendering::rendered_at,
),
)),
)).into(readme_rendering::table)
.execute(&*conn)?;
Ok(())
}
}
impl NewVersion {
pub fn new(
crate_id: i32,
num: &semver::Version,
features: &HashMap<String, Vec<String>>,
license: Option<String>,
license_file: Option<&str>,
) -> CargoResult<Self> {
let features = serde_json::to_string(features)?;
let mut new_version = NewVersion {
crate_id: crate_id,
num: num.to_string(),
features: features,
license: license,
};
new_version.validate_license(license_file)?;
Ok(new_version)
}
pub fn save(&self, conn: &PgConnection, authors: &[String]) -> CargoResult<Version> {
use diesel::{select, insert};
use diesel::expression::dsl::exists;
use schema::versions::dsl::*;
let already_uploaded = versions.filter(crate_id.eq(self.crate_id)).filter(
num.eq(&self.num),
);
if select(exists(already_uploaded)).get_result(conn)? {
return Err(human(&format_args!(
"crate version `{}` is already \
uploaded",
self.num
)));
}
conn.transaction(|| {
let version = insert(self).into(versions).get_result::<Version>(conn)?;
let new_authors = authors
.iter()
.map(|s| {
NewAuthor {
version_id: version.id,
name: &*s,
}
})
.collect::<Vec<_>>();
insert(&new_authors).into(version_authors::table).execute(
conn,
)?;
Ok(version)
})
}
fn validate_license(&mut self, license_file: Option<&str>) -> CargoResult<()> {
if let Some(ref license) = self.license {
for part in license.split('/') {
license_exprs::validate_license_expr(part).map_err(|e| {
human(&format_args!(
"{}; see http://opensource.org/licenses \
for options, and http://spdx.org/licenses/ \
for their identifiers",
e
))
})?;
}
} else if license_file.is_some() {
// If no license is given, but a license file is given, flag this
// crate as having a nonstandard license. Note that we don't
// actually do anything else with license_file currently.
self.license = Some(String::from("non-standard"));
}
Ok(())
}
}
#[derive(Insertable, Debug)]
#[table_name = "version_authors"]
struct NewAuthor<'a> {
version_id: i32,
name: &'a str,
}
impl Queryable<versions::SqlType, Pg> for Version {
type Row = (i32, i32, String, Timespec, Timespec, i32, Option<String>, bool, Option<String>);
fn build(row: Self::Row) -> Self {
let features = row.6
.map(|s| serde_json::from_str(&s).unwrap())
.unwrap_or_else(HashMap::new);
Version {
id: row.0,
crate_id: row.1,
num: semver::Version::parse(&row.2).unwrap(),
updated_at: row.3,
created_at: row.4,
downloads: row.5,
features: features,
yanked: row.7,
license: row.8,
}
}
}
/// Handles the `GET /versions` route.
// FIXME: where/how is this used?
pub fn index(req: &mut Request) -> CargoResult<Response> {
use diesel::expression::dsl::any;
let conn = req.db_conn()?;
// Extract all ids requested.
let query = url::form_urlencoded::parse(req.query_string().unwrap_or("").as_bytes());
let ids = query
.filter_map(|(ref a, ref b)| if *a == "ids[]" {
b.parse().ok()
} else {
None
})
.collect::<Vec<i32>>();
let versions = versions::table
.inner_join(crates::table)
.select((versions::all_columns, crates::name))
.filter(versions::id.eq(any(ids)))
.load::<(Version, String)>(&*conn)?
.into_iter()
.map(|(version, crate_name)| version.encodable(&crate_name))
.collect();
#[derive(Serialize)]
struct R {
versions: Vec<EncodableVersion>,
}
Ok(req.json(&R { versions: versions }))
}
/// Handles the `GET /versions/:version_id` route.
pub fn show(req: &mut Request) -> CargoResult<Response> {
let (version, krate) = match req.params().find("crate_id") {
Some(..) => version_and_crate(req)?,
None => {
let id = &req.params()["version_id"];
let id = id.parse().unwrap_or(0);
let conn = req.db_conn()?;
versions::table
.find(id)
.inner_join(crates::table)
.select((versions::all_columns, ::krate::ALL_COLUMNS))
.first(&*conn)?
}
};
#[derive(Serialize)]
struct R {
version: EncodableVersion,
}
Ok(req.json(&R { version: version.encodable(&krate.name) }))
}
fn version_and_crate(req: &mut Request) -> CargoResult<(Version, Crate)> {
let crate_name = &req.params()["crate_id"];
let semver = &req.params()["version"];
if semver::Version::parse(semver).is_err() {
return Err(human(&format_args!("invalid semver: {}", semver)));
};
let conn = req.db_conn()?;
let krate = Crate::by_name(crate_name).first::<Crate>(&*conn)?;
let version = Version::belonging_to(&krate)
.filter(versions::num.eq(semver))
.first(&*conn)
.map_err(|_| {
human(&format_args!(
"crate `{}` does not have a version `{}`",
crate_name,
semver
))
})?;
Ok((version, krate))
}
/// Handles the `GET /crates/:crate_id/:version/dependencies` route.
pub fn dependencies(req: &mut Request) -> CargoResult<Response> {
let (version, _) = version_and_crate(req)?;
let conn = req.db_conn()?;
let deps = version.dependencies(&*conn)?;
let deps = deps.into_iter()
.map(|(dep, crate_name)| dep.encodable(&crate_name, None))
.collect();
#[derive(Serialize)]
struct R {
dependencies: Vec<EncodableDependency>,
}
Ok(req.json(&R { dependencies: deps }))
}
/// Handles the `GET /crates/:crate_id/:version/downloads` route.
pub fn downloads(req: &mut Request) -> CargoResult<Response> {
use diesel::expression::dsl::date;
let (version, _) = version_and_crate(req)?;
let conn = req.db_conn()?;
let cutoff_end_date = req.query()
.get("before_date")
.and_then(|d| strptime(d, "%Y-%m-%d").ok())
.unwrap_or_else(now_utc)
.to_timespec();
let cutoff_start_date = cutoff_end_date + Duration::days(-89);
let downloads = VersionDownload::belonging_to(&version)
.filter(version_downloads::date.between(
date(cutoff_start_date)..
date(cutoff_end_date),
))
.order(version_downloads::date)
.load(&*conn)?
.into_iter()
.map(VersionDownload::encodable)
.collect();
#[derive(Serialize)]
struct R {
version_downloads: Vec<EncodableVersionDownload>,
}
Ok(req.json(&R { version_downloads: downloads }))
}
/// Handles the `GET /crates/:crate_id/:version/authors` route.
pub fn authors(req: &mut Request) -> CargoResult<Response> {
let (version, _) = version_and_crate(req)?;
let conn = req.db_conn()?;
let names = version_authors::table
.filter(version_authors::version_id.eq(version.id))
.select(version_authors::name)
.order(version_authors::name)
.load(&*conn)?;
// It was imagined that we wold associate authors with users.
// This was never implemented. This complicated return struct
// is all that is left, hear for backwards compatibility.
#[derive(Serialize)]
struct R {
users: Vec<::user::EncodablePublicUser>,
meta: Meta,
}
#[derive(Serialize)]
struct Meta {
names: Vec<String>,
}
Ok(req.json(&R {
users: vec![],
meta: Meta { names: names },
}))
}
/// Handles the `DELETE /crates/:crate_id/:version/yank` route.
/// This does not delete a crate version, it makes the crate
/// version accessible only to crates that already have a
/// `Cargo.lock` containing this version.
///
/// Notes:
/// Crate deletion is not implemented to avoid breaking builds,
/// and the goal of yanking a crate is to prevent crates
/// beginning to depend on the yanked crate version.
pub fn yank(req: &mut Request) -> CargoResult<Response> {
modify_yank(req, true)
}
/// Handles the `PUT /crates/:crate_id/:version/unyank` route.
pub fn unyank(req: &mut Request) -> CargoResult<Response> {
modify_yank(req, false)
}
/// Changes `yanked` flag on a crate version record
fn modify_yank(req: &mut Request, yanked: bool) -> CargoResult<Response> {
let (version, krate) = version_and_crate(req)?;
let user = req.user()?;
let conn = req.db_conn()?;
let owners = krate.owners(&conn)?;
if rights(req.app(), &owners, user)? < Rights::Publish {
return Err(human("must already be an owner to yank or unyank"));
}
if version.yanked != yanked {
conn.transaction::<_, Box<CargoError>, _>(|| {
diesel::update(&version)
.set(versions::yanked.eq(yanked))
.execute(&*conn)?;
git::yank(&**req.app(), &krate.name, &version.num, yanked)?;
Ok(())
})?;
}
#[derive(Serialize)]
struct R {
ok: bool,
}
Ok(req.json(&R { ok: true }))
} | pub fn dependencies(&self, conn: &PgConnection) -> QueryResult<Vec<(Dependency, String)>> {
Dependency::belonging_to(self)
.inner_join(crates::table) | random_line_split |
version.rs | use std::collections::HashMap;
use conduit::{Request, Response};
use conduit_router::RequestParams;
use diesel;
use diesel::pg::Pg;
use diesel::pg::upsert::*;
use diesel::prelude::*;
use semver;
use serde_json;
use time::{Duration, Timespec, now_utc, strptime};
use url;
use Crate;
use app::RequestApp;
use db::RequestTransaction;
use dependency::{Dependency, EncodableDependency};
use download::{VersionDownload, EncodableVersionDownload};
use git;
use owner::{rights, Rights};
use schema::*;
use user::RequestUser;
use util::errors::CargoError;
use util::{RequestUtils, CargoResult, human};
use license_exprs;
// This is necessary to allow joining version to both crates and readme_rendering
// in the render-readmes script.
enable_multi_table_joins!(crates, readme_rendering);
// Queryable has a custom implementation below
#[derive(Clone, Identifiable, Associations, Debug)]
#[belongs_to(Crate)]
pub struct Version {
pub id: i32,
pub crate_id: i32,
pub num: semver::Version,
pub updated_at: Timespec,
pub created_at: Timespec,
pub downloads: i32,
pub features: HashMap<String, Vec<String>>,
pub yanked: bool,
pub license: Option<String>,
}
#[derive(Insertable, Debug)]
#[table_name = "versions"]
pub struct NewVersion {
crate_id: i32,
num: String,
features: String,
license: Option<String>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct EncodableVersion {
pub id: i32,
#[serde(rename = "crate")]
pub krate: String,
pub num: String,
pub dl_path: String,
pub readme_path: String,
pub updated_at: String,
pub created_at: String,
pub downloads: i32,
pub features: HashMap<String, Vec<String>>,
pub yanked: bool,
pub license: Option<String>,
pub links: VersionLinks,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct VersionLinks {
pub dependencies: String,
pub version_downloads: String,
pub authors: String,
}
#[derive(Insertable, Identifiable, Queryable, Associations, Debug, Clone, Copy)]
#[belongs_to(Version)]
#[table_name = "readme_rendering"]
#[primary_key(version_id)]
struct ReadmeRendering {
version_id: i32,
rendered_at: Timespec,
}
impl Version {
pub fn encodable(self, crate_name: &str) -> EncodableVersion {
let Version {
id,
num,
updated_at,
created_at,
downloads,
features,
yanked,
license,
..
} = self;
let num = num.to_string();
EncodableVersion {
dl_path: format!("/api/v1/crates/{}/{}/download", crate_name, num),
readme_path: format!("/api/v1/crates/{}/{}/readme", crate_name, num),
num: num.clone(),
id: id,
krate: crate_name.to_string(),
updated_at: ::encode_time(updated_at),
created_at: ::encode_time(created_at),
downloads: downloads,
features: features,
yanked: yanked,
license: license,
links: VersionLinks {
dependencies: format!("/api/v1/crates/{}/{}/dependencies", crate_name, num),
version_downloads: format!("/api/v1/crates/{}/{}/downloads", crate_name, num),
authors: format!("/api/v1/crates/{}/{}/authors", crate_name, num),
},
}
}
/// Returns (dependency, crate dependency name)
pub fn dependencies(&self, conn: &PgConnection) -> QueryResult<Vec<(Dependency, String)>> {
Dependency::belonging_to(self)
.inner_join(crates::table)
.select((dependencies::all_columns, crates::name))
.order((dependencies::optional, crates::name))
.load(conn)
}
pub fn max<T>(versions: T) -> semver::Version
where
T: IntoIterator<Item = semver::Version>,
{
versions.into_iter().max().unwrap_or_else(|| {
semver::Version {
major: 0,
minor: 0,
patch: 0,
pre: vec![],
build: vec![],
}
})
}
pub fn record_readme_rendering(&self, conn: &PgConnection) -> CargoResult<()> {
let rendered = ReadmeRendering {
version_id: self.id,
rendered_at: ::now(),
};
diesel::insert(&rendered.on_conflict(
readme_rendering::version_id,
do_update().set(readme_rendering::rendered_at.eq(
excluded(
readme_rendering::rendered_at,
),
)),
)).into(readme_rendering::table)
.execute(&*conn)?;
Ok(())
}
}
impl NewVersion {
pub fn new(
crate_id: i32,
num: &semver::Version,
features: &HashMap<String, Vec<String>>,
license: Option<String>,
license_file: Option<&str>,
) -> CargoResult<Self> {
let features = serde_json::to_string(features)?;
let mut new_version = NewVersion {
crate_id: crate_id,
num: num.to_string(),
features: features,
license: license,
};
new_version.validate_license(license_file)?;
Ok(new_version)
}
pub fn save(&self, conn: &PgConnection, authors: &[String]) -> CargoResult<Version> {
use diesel::{select, insert};
use diesel::expression::dsl::exists;
use schema::versions::dsl::*;
let already_uploaded = versions.filter(crate_id.eq(self.crate_id)).filter(
num.eq(&self.num),
);
if select(exists(already_uploaded)).get_result(conn)? {
return Err(human(&format_args!(
"crate version `{}` is already \
uploaded",
self.num
)));
}
conn.transaction(|| {
let version = insert(self).into(versions).get_result::<Version>(conn)?;
let new_authors = authors
.iter()
.map(|s| {
NewAuthor {
version_id: version.id,
name: &*s,
}
})
.collect::<Vec<_>>();
insert(&new_authors).into(version_authors::table).execute(
conn,
)?;
Ok(version)
})
}
fn validate_license(&mut self, license_file: Option<&str>) -> CargoResult<()> {
if let Some(ref license) = self.license {
for part in license.split('/') {
license_exprs::validate_license_expr(part).map_err(|e| {
human(&format_args!(
"{}; see http://opensource.org/licenses \
for options, and http://spdx.org/licenses/ \
for their identifiers",
e
))
})?;
}
} else if license_file.is_some() {
// If no license is given, but a license file is given, flag this
// crate as having a nonstandard license. Note that we don't
// actually do anything else with license_file currently.
self.license = Some(String::from("non-standard"));
}
Ok(())
}
}
#[derive(Insertable, Debug)]
#[table_name = "version_authors"]
struct NewAuthor<'a> {
version_id: i32,
name: &'a str,
}
impl Queryable<versions::SqlType, Pg> for Version {
type Row = (i32, i32, String, Timespec, Timespec, i32, Option<String>, bool, Option<String>);
fn build(row: Self::Row) -> Self {
let features = row.6
.map(|s| serde_json::from_str(&s).unwrap())
.unwrap_or_else(HashMap::new);
Version {
id: row.0,
crate_id: row.1,
num: semver::Version::parse(&row.2).unwrap(),
updated_at: row.3,
created_at: row.4,
downloads: row.5,
features: features,
yanked: row.7,
license: row.8,
}
}
}
/// Handles the `GET /versions` route.
// FIXME: where/how is this used?
pub fn index(req: &mut Request) -> CargoResult<Response> {
use diesel::expression::dsl::any;
let conn = req.db_conn()?;
// Extract all ids requested.
let query = url::form_urlencoded::parse(req.query_string().unwrap_or("").as_bytes());
let ids = query
.filter_map(|(ref a, ref b)| if *a == "ids[]" {
b.parse().ok()
} else {
None
})
.collect::<Vec<i32>>();
let versions = versions::table
.inner_join(crates::table)
.select((versions::all_columns, crates::name))
.filter(versions::id.eq(any(ids)))
.load::<(Version, String)>(&*conn)?
.into_iter()
.map(|(version, crate_name)| version.encodable(&crate_name))
.collect();
#[derive(Serialize)]
struct R {
versions: Vec<EncodableVersion>,
}
Ok(req.json(&R { versions: versions }))
}
/// Handles the `GET /versions/:version_id` route.
pub fn show(req: &mut Request) -> CargoResult<Response> {
let (version, krate) = match req.params().find("crate_id") {
Some(..) => version_and_crate(req)?,
None => {
let id = &req.params()["version_id"];
let id = id.parse().unwrap_or(0);
let conn = req.db_conn()?;
versions::table
.find(id)
.inner_join(crates::table)
.select((versions::all_columns, ::krate::ALL_COLUMNS))
.first(&*conn)?
}
};
#[derive(Serialize)]
struct R {
version: EncodableVersion,
}
Ok(req.json(&R { version: version.encodable(&krate.name) }))
}
fn version_and_crate(req: &mut Request) -> CargoResult<(Version, Crate)> {
let crate_name = &req.params()["crate_id"];
let semver = &req.params()["version"];
if semver::Version::parse(semver).is_err() | ;
let conn = req.db_conn()?;
let krate = Crate::by_name(crate_name).first::<Crate>(&*conn)?;
let version = Version::belonging_to(&krate)
.filter(versions::num.eq(semver))
.first(&*conn)
.map_err(|_| {
human(&format_args!(
"crate `{}` does not have a version `{}`",
crate_name,
semver
))
})?;
Ok((version, krate))
}
/// Handles the `GET /crates/:crate_id/:version/dependencies` route.
pub fn dependencies(req: &mut Request) -> CargoResult<Response> {
let (version, _) = version_and_crate(req)?;
let conn = req.db_conn()?;
let deps = version.dependencies(&*conn)?;
let deps = deps.into_iter()
.map(|(dep, crate_name)| dep.encodable(&crate_name, None))
.collect();
#[derive(Serialize)]
struct R {
dependencies: Vec<EncodableDependency>,
}
Ok(req.json(&R { dependencies: deps }))
}
/// Handles the `GET /crates/:crate_id/:version/downloads` route.
pub fn downloads(req: &mut Request) -> CargoResult<Response> {
use diesel::expression::dsl::date;
let (version, _) = version_and_crate(req)?;
let conn = req.db_conn()?;
let cutoff_end_date = req.query()
.get("before_date")
.and_then(|d| strptime(d, "%Y-%m-%d").ok())
.unwrap_or_else(now_utc)
.to_timespec();
let cutoff_start_date = cutoff_end_date + Duration::days(-89);
let downloads = VersionDownload::belonging_to(&version)
.filter(version_downloads::date.between(
date(cutoff_start_date)..
date(cutoff_end_date),
))
.order(version_downloads::date)
.load(&*conn)?
.into_iter()
.map(VersionDownload::encodable)
.collect();
#[derive(Serialize)]
struct R {
version_downloads: Vec<EncodableVersionDownload>,
}
Ok(req.json(&R { version_downloads: downloads }))
}
/// Handles the `GET /crates/:crate_id/:version/authors` route.
pub fn authors(req: &mut Request) -> CargoResult<Response> {
let (version, _) = version_and_crate(req)?;
let conn = req.db_conn()?;
let names = version_authors::table
.filter(version_authors::version_id.eq(version.id))
.select(version_authors::name)
.order(version_authors::name)
.load(&*conn)?;
// It was imagined that we wold associate authors with users.
// This was never implemented. This complicated return struct
// is all that is left, hear for backwards compatibility.
#[derive(Serialize)]
struct R {
users: Vec<::user::EncodablePublicUser>,
meta: Meta,
}
#[derive(Serialize)]
struct Meta {
names: Vec<String>,
}
Ok(req.json(&R {
users: vec![],
meta: Meta { names: names },
}))
}
/// Handles the `DELETE /crates/:crate_id/:version/yank` route.
/// This does not delete a crate version, it makes the crate
/// version accessible only to crates that already have a
/// `Cargo.lock` containing this version.
///
/// Notes:
/// Crate deletion is not implemented to avoid breaking builds,
/// and the goal of yanking a crate is to prevent crates
/// beginning to depend on the yanked crate version.
pub fn yank(req: &mut Request) -> CargoResult<Response> {
modify_yank(req, true)
}
/// Handles the `PUT /crates/:crate_id/:version/unyank` route.
pub fn unyank(req: &mut Request) -> CargoResult<Response> {
modify_yank(req, false)
}
/// Changes `yanked` flag on a crate version record
fn modify_yank(req: &mut Request, yanked: bool) -> CargoResult<Response> {
let (version, krate) = version_and_crate(req)?;
let user = req.user()?;
let conn = req.db_conn()?;
let owners = krate.owners(&conn)?;
if rights(req.app(), &owners, user)? < Rights::Publish {
return Err(human("must already be an owner to yank or unyank"));
}
if version.yanked != yanked {
conn.transaction::<_, Box<CargoError>, _>(|| {
diesel::update(&version)
.set(versions::yanked.eq(yanked))
.execute(&*conn)?;
git::yank(&**req.app(), &krate.name, &version.num, yanked)?;
Ok(())
})?;
}
#[derive(Serialize)]
struct R {
ok: bool,
}
Ok(req.json(&R { ok: true }))
}
| {
return Err(human(&format_args!("invalid semver: {}", semver)));
} | conditional_block |
version.rs | use std::collections::HashMap;
use conduit::{Request, Response};
use conduit_router::RequestParams;
use diesel;
use diesel::pg::Pg;
use diesel::pg::upsert::*;
use diesel::prelude::*;
use semver;
use serde_json;
use time::{Duration, Timespec, now_utc, strptime};
use url;
use Crate;
use app::RequestApp;
use db::RequestTransaction;
use dependency::{Dependency, EncodableDependency};
use download::{VersionDownload, EncodableVersionDownload};
use git;
use owner::{rights, Rights};
use schema::*;
use user::RequestUser;
use util::errors::CargoError;
use util::{RequestUtils, CargoResult, human};
use license_exprs;
// This is necessary to allow joining version to both crates and readme_rendering
// in the render-readmes script.
enable_multi_table_joins!(crates, readme_rendering);
// Queryable has a custom implementation below
#[derive(Clone, Identifiable, Associations, Debug)]
#[belongs_to(Crate)]
pub struct Version {
pub id: i32,
pub crate_id: i32,
pub num: semver::Version,
pub updated_at: Timespec,
pub created_at: Timespec,
pub downloads: i32,
pub features: HashMap<String, Vec<String>>,
pub yanked: bool,
pub license: Option<String>,
}
#[derive(Insertable, Debug)]
#[table_name = "versions"]
pub struct NewVersion {
crate_id: i32,
num: String,
features: String,
license: Option<String>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct EncodableVersion {
pub id: i32,
#[serde(rename = "crate")]
pub krate: String,
pub num: String,
pub dl_path: String,
pub readme_path: String,
pub updated_at: String,
pub created_at: String,
pub downloads: i32,
pub features: HashMap<String, Vec<String>>,
pub yanked: bool,
pub license: Option<String>,
pub links: VersionLinks,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct VersionLinks {
pub dependencies: String,
pub version_downloads: String,
pub authors: String,
}
#[derive(Insertable, Identifiable, Queryable, Associations, Debug, Clone, Copy)]
#[belongs_to(Version)]
#[table_name = "readme_rendering"]
#[primary_key(version_id)]
struct ReadmeRendering {
version_id: i32,
rendered_at: Timespec,
}
impl Version {
pub fn encodable(self, crate_name: &str) -> EncodableVersion {
let Version {
id,
num,
updated_at,
created_at,
downloads,
features,
yanked,
license,
..
} = self;
let num = num.to_string();
EncodableVersion {
dl_path: format!("/api/v1/crates/{}/{}/download", crate_name, num),
readme_path: format!("/api/v1/crates/{}/{}/readme", crate_name, num),
num: num.clone(),
id: id,
krate: crate_name.to_string(),
updated_at: ::encode_time(updated_at),
created_at: ::encode_time(created_at),
downloads: downloads,
features: features,
yanked: yanked,
license: license,
links: VersionLinks {
dependencies: format!("/api/v1/crates/{}/{}/dependencies", crate_name, num),
version_downloads: format!("/api/v1/crates/{}/{}/downloads", crate_name, num),
authors: format!("/api/v1/crates/{}/{}/authors", crate_name, num),
},
}
}
/// Returns (dependency, crate dependency name)
pub fn dependencies(&self, conn: &PgConnection) -> QueryResult<Vec<(Dependency, String)>> {
Dependency::belonging_to(self)
.inner_join(crates::table)
.select((dependencies::all_columns, crates::name))
.order((dependencies::optional, crates::name))
.load(conn)
}
pub fn max<T>(versions: T) -> semver::Version
where
T: IntoIterator<Item = semver::Version>,
{
versions.into_iter().max().unwrap_or_else(|| {
semver::Version {
major: 0,
minor: 0,
patch: 0,
pre: vec![],
build: vec![],
}
})
}
pub fn record_readme_rendering(&self, conn: &PgConnection) -> CargoResult<()> {
let rendered = ReadmeRendering {
version_id: self.id,
rendered_at: ::now(),
};
diesel::insert(&rendered.on_conflict(
readme_rendering::version_id,
do_update().set(readme_rendering::rendered_at.eq(
excluded(
readme_rendering::rendered_at,
),
)),
)).into(readme_rendering::table)
.execute(&*conn)?;
Ok(())
}
}
impl NewVersion {
pub fn new(
crate_id: i32,
num: &semver::Version,
features: &HashMap<String, Vec<String>>,
license: Option<String>,
license_file: Option<&str>,
) -> CargoResult<Self> {
let features = serde_json::to_string(features)?;
let mut new_version = NewVersion {
crate_id: crate_id,
num: num.to_string(),
features: features,
license: license,
};
new_version.validate_license(license_file)?;
Ok(new_version)
}
pub fn save(&self, conn: &PgConnection, authors: &[String]) -> CargoResult<Version> {
use diesel::{select, insert};
use diesel::expression::dsl::exists;
use schema::versions::dsl::*;
let already_uploaded = versions.filter(crate_id.eq(self.crate_id)).filter(
num.eq(&self.num),
);
if select(exists(already_uploaded)).get_result(conn)? {
return Err(human(&format_args!(
"crate version `{}` is already \
uploaded",
self.num
)));
}
conn.transaction(|| {
let version = insert(self).into(versions).get_result::<Version>(conn)?;
let new_authors = authors
.iter()
.map(|s| {
NewAuthor {
version_id: version.id,
name: &*s,
}
})
.collect::<Vec<_>>();
insert(&new_authors).into(version_authors::table).execute(
conn,
)?;
Ok(version)
})
}
fn validate_license(&mut self, license_file: Option<&str>) -> CargoResult<()> {
if let Some(ref license) = self.license {
for part in license.split('/') {
license_exprs::validate_license_expr(part).map_err(|e| {
human(&format_args!(
"{}; see http://opensource.org/licenses \
for options, and http://spdx.org/licenses/ \
for their identifiers",
e
))
})?;
}
} else if license_file.is_some() {
// If no license is given, but a license file is given, flag this
// crate as having a nonstandard license. Note that we don't
// actually do anything else with license_file currently.
self.license = Some(String::from("non-standard"));
}
Ok(())
}
}
#[derive(Insertable, Debug)]
#[table_name = "version_authors"]
struct NewAuthor<'a> {
version_id: i32,
name: &'a str,
}
impl Queryable<versions::SqlType, Pg> for Version {
type Row = (i32, i32, String, Timespec, Timespec, i32, Option<String>, bool, Option<String>);
fn build(row: Self::Row) -> Self {
let features = row.6
.map(|s| serde_json::from_str(&s).unwrap())
.unwrap_or_else(HashMap::new);
Version {
id: row.0,
crate_id: row.1,
num: semver::Version::parse(&row.2).unwrap(),
updated_at: row.3,
created_at: row.4,
downloads: row.5,
features: features,
yanked: row.7,
license: row.8,
}
}
}
/// Handles the `GET /versions` route.
// FIXME: where/how is this used?
pub fn index(req: &mut Request) -> CargoResult<Response> {
use diesel::expression::dsl::any;
let conn = req.db_conn()?;
// Extract all ids requested.
let query = url::form_urlencoded::parse(req.query_string().unwrap_or("").as_bytes());
let ids = query
.filter_map(|(ref a, ref b)| if *a == "ids[]" {
b.parse().ok()
} else {
None
})
.collect::<Vec<i32>>();
let versions = versions::table
.inner_join(crates::table)
.select((versions::all_columns, crates::name))
.filter(versions::id.eq(any(ids)))
.load::<(Version, String)>(&*conn)?
.into_iter()
.map(|(version, crate_name)| version.encodable(&crate_name))
.collect();
#[derive(Serialize)]
struct R {
versions: Vec<EncodableVersion>,
}
Ok(req.json(&R { versions: versions }))
}
/// Handles the `GET /versions/:version_id` route.
pub fn | (req: &mut Request) -> CargoResult<Response> {
let (version, krate) = match req.params().find("crate_id") {
Some(..) => version_and_crate(req)?,
None => {
let id = &req.params()["version_id"];
let id = id.parse().unwrap_or(0);
let conn = req.db_conn()?;
versions::table
.find(id)
.inner_join(crates::table)
.select((versions::all_columns, ::krate::ALL_COLUMNS))
.first(&*conn)?
}
};
#[derive(Serialize)]
struct R {
version: EncodableVersion,
}
Ok(req.json(&R { version: version.encodable(&krate.name) }))
}
fn version_and_crate(req: &mut Request) -> CargoResult<(Version, Crate)> {
let crate_name = &req.params()["crate_id"];
let semver = &req.params()["version"];
if semver::Version::parse(semver).is_err() {
return Err(human(&format_args!("invalid semver: {}", semver)));
};
let conn = req.db_conn()?;
let krate = Crate::by_name(crate_name).first::<Crate>(&*conn)?;
let version = Version::belonging_to(&krate)
.filter(versions::num.eq(semver))
.first(&*conn)
.map_err(|_| {
human(&format_args!(
"crate `{}` does not have a version `{}`",
crate_name,
semver
))
})?;
Ok((version, krate))
}
/// Handles the `GET /crates/:crate_id/:version/dependencies` route.
pub fn dependencies(req: &mut Request) -> CargoResult<Response> {
let (version, _) = version_and_crate(req)?;
let conn = req.db_conn()?;
let deps = version.dependencies(&*conn)?;
let deps = deps.into_iter()
.map(|(dep, crate_name)| dep.encodable(&crate_name, None))
.collect();
#[derive(Serialize)]
struct R {
dependencies: Vec<EncodableDependency>,
}
Ok(req.json(&R { dependencies: deps }))
}
/// Handles the `GET /crates/:crate_id/:version/downloads` route.
pub fn downloads(req: &mut Request) -> CargoResult<Response> {
use diesel::expression::dsl::date;
let (version, _) = version_and_crate(req)?;
let conn = req.db_conn()?;
let cutoff_end_date = req.query()
.get("before_date")
.and_then(|d| strptime(d, "%Y-%m-%d").ok())
.unwrap_or_else(now_utc)
.to_timespec();
let cutoff_start_date = cutoff_end_date + Duration::days(-89);
let downloads = VersionDownload::belonging_to(&version)
.filter(version_downloads::date.between(
date(cutoff_start_date)..
date(cutoff_end_date),
))
.order(version_downloads::date)
.load(&*conn)?
.into_iter()
.map(VersionDownload::encodable)
.collect();
#[derive(Serialize)]
struct R {
version_downloads: Vec<EncodableVersionDownload>,
}
Ok(req.json(&R { version_downloads: downloads }))
}
/// Handles the `GET /crates/:crate_id/:version/authors` route.
pub fn authors(req: &mut Request) -> CargoResult<Response> {
let (version, _) = version_and_crate(req)?;
let conn = req.db_conn()?;
let names = version_authors::table
.filter(version_authors::version_id.eq(version.id))
.select(version_authors::name)
.order(version_authors::name)
.load(&*conn)?;
// It was imagined that we wold associate authors with users.
// This was never implemented. This complicated return struct
// is all that is left, hear for backwards compatibility.
#[derive(Serialize)]
struct R {
users: Vec<::user::EncodablePublicUser>,
meta: Meta,
}
#[derive(Serialize)]
struct Meta {
names: Vec<String>,
}
Ok(req.json(&R {
users: vec![],
meta: Meta { names: names },
}))
}
/// Handles the `DELETE /crates/:crate_id/:version/yank` route.
/// This does not delete a crate version, it makes the crate
/// version accessible only to crates that already have a
/// `Cargo.lock` containing this version.
///
/// Notes:
/// Crate deletion is not implemented to avoid breaking builds,
/// and the goal of yanking a crate is to prevent crates
/// beginning to depend on the yanked crate version.
pub fn yank(req: &mut Request) -> CargoResult<Response> {
modify_yank(req, true)
}
/// Handles the `PUT /crates/:crate_id/:version/unyank` route.
pub fn unyank(req: &mut Request) -> CargoResult<Response> {
modify_yank(req, false)
}
/// Changes `yanked` flag on a crate version record
fn modify_yank(req: &mut Request, yanked: bool) -> CargoResult<Response> {
let (version, krate) = version_and_crate(req)?;
let user = req.user()?;
let conn = req.db_conn()?;
let owners = krate.owners(&conn)?;
if rights(req.app(), &owners, user)? < Rights::Publish {
return Err(human("must already be an owner to yank or unyank"));
}
if version.yanked != yanked {
conn.transaction::<_, Box<CargoError>, _>(|| {
diesel::update(&version)
.set(versions::yanked.eq(yanked))
.execute(&*conn)?;
git::yank(&**req.app(), &krate.name, &version.num, yanked)?;
Ok(())
})?;
}
#[derive(Serialize)]
struct R {
ok: bool,
}
Ok(req.json(&R { ok: true }))
}
| show | identifier_name |
version.rs | use std::collections::HashMap;
use conduit::{Request, Response};
use conduit_router::RequestParams;
use diesel;
use diesel::pg::Pg;
use diesel::pg::upsert::*;
use diesel::prelude::*;
use semver;
use serde_json;
use time::{Duration, Timespec, now_utc, strptime};
use url;
use Crate;
use app::RequestApp;
use db::RequestTransaction;
use dependency::{Dependency, EncodableDependency};
use download::{VersionDownload, EncodableVersionDownload};
use git;
use owner::{rights, Rights};
use schema::*;
use user::RequestUser;
use util::errors::CargoError;
use util::{RequestUtils, CargoResult, human};
use license_exprs;
// This is necessary to allow joining version to both crates and readme_rendering
// in the render-readmes script.
enable_multi_table_joins!(crates, readme_rendering);
// Queryable has a custom implementation below
#[derive(Clone, Identifiable, Associations, Debug)]
#[belongs_to(Crate)]
pub struct Version {
pub id: i32,
pub crate_id: i32,
pub num: semver::Version,
pub updated_at: Timespec,
pub created_at: Timespec,
pub downloads: i32,
pub features: HashMap<String, Vec<String>>,
pub yanked: bool,
pub license: Option<String>,
}
#[derive(Insertable, Debug)]
#[table_name = "versions"]
pub struct NewVersion {
crate_id: i32,
num: String,
features: String,
license: Option<String>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct EncodableVersion {
pub id: i32,
#[serde(rename = "crate")]
pub krate: String,
pub num: String,
pub dl_path: String,
pub readme_path: String,
pub updated_at: String,
pub created_at: String,
pub downloads: i32,
pub features: HashMap<String, Vec<String>>,
pub yanked: bool,
pub license: Option<String>,
pub links: VersionLinks,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct VersionLinks {
pub dependencies: String,
pub version_downloads: String,
pub authors: String,
}
#[derive(Insertable, Identifiable, Queryable, Associations, Debug, Clone, Copy)]
#[belongs_to(Version)]
#[table_name = "readme_rendering"]
#[primary_key(version_id)]
struct ReadmeRendering {
version_id: i32,
rendered_at: Timespec,
}
impl Version {
pub fn encodable(self, crate_name: &str) -> EncodableVersion {
let Version {
id,
num,
updated_at,
created_at,
downloads,
features,
yanked,
license,
..
} = self;
let num = num.to_string();
EncodableVersion {
dl_path: format!("/api/v1/crates/{}/{}/download", crate_name, num),
readme_path: format!("/api/v1/crates/{}/{}/readme", crate_name, num),
num: num.clone(),
id: id,
krate: crate_name.to_string(),
updated_at: ::encode_time(updated_at),
created_at: ::encode_time(created_at),
downloads: downloads,
features: features,
yanked: yanked,
license: license,
links: VersionLinks {
dependencies: format!("/api/v1/crates/{}/{}/dependencies", crate_name, num),
version_downloads: format!("/api/v1/crates/{}/{}/downloads", crate_name, num),
authors: format!("/api/v1/crates/{}/{}/authors", crate_name, num),
},
}
}
/// Returns (dependency, crate dependency name)
pub fn dependencies(&self, conn: &PgConnection) -> QueryResult<Vec<(Dependency, String)>> {
Dependency::belonging_to(self)
.inner_join(crates::table)
.select((dependencies::all_columns, crates::name))
.order((dependencies::optional, crates::name))
.load(conn)
}
pub fn max<T>(versions: T) -> semver::Version
where
T: IntoIterator<Item = semver::Version>,
{
versions.into_iter().max().unwrap_or_else(|| {
semver::Version {
major: 0,
minor: 0,
patch: 0,
pre: vec![],
build: vec![],
}
})
}
pub fn record_readme_rendering(&self, conn: &PgConnection) -> CargoResult<()> {
let rendered = ReadmeRendering {
version_id: self.id,
rendered_at: ::now(),
};
diesel::insert(&rendered.on_conflict(
readme_rendering::version_id,
do_update().set(readme_rendering::rendered_at.eq(
excluded(
readme_rendering::rendered_at,
),
)),
)).into(readme_rendering::table)
.execute(&*conn)?;
Ok(())
}
}
impl NewVersion {
pub fn new(
crate_id: i32,
num: &semver::Version,
features: &HashMap<String, Vec<String>>,
license: Option<String>,
license_file: Option<&str>,
) -> CargoResult<Self> {
let features = serde_json::to_string(features)?;
let mut new_version = NewVersion {
crate_id: crate_id,
num: num.to_string(),
features: features,
license: license,
};
new_version.validate_license(license_file)?;
Ok(new_version)
}
pub fn save(&self, conn: &PgConnection, authors: &[String]) -> CargoResult<Version> {
use diesel::{select, insert};
use diesel::expression::dsl::exists;
use schema::versions::dsl::*;
let already_uploaded = versions.filter(crate_id.eq(self.crate_id)).filter(
num.eq(&self.num),
);
if select(exists(already_uploaded)).get_result(conn)? {
return Err(human(&format_args!(
"crate version `{}` is already \
uploaded",
self.num
)));
}
conn.transaction(|| {
let version = insert(self).into(versions).get_result::<Version>(conn)?;
let new_authors = authors
.iter()
.map(|s| {
NewAuthor {
version_id: version.id,
name: &*s,
}
})
.collect::<Vec<_>>();
insert(&new_authors).into(version_authors::table).execute(
conn,
)?;
Ok(version)
})
}
fn validate_license(&mut self, license_file: Option<&str>) -> CargoResult<()> {
if let Some(ref license) = self.license {
for part in license.split('/') {
license_exprs::validate_license_expr(part).map_err(|e| {
human(&format_args!(
"{}; see http://opensource.org/licenses \
for options, and http://spdx.org/licenses/ \
for their identifiers",
e
))
})?;
}
} else if license_file.is_some() {
// If no license is given, but a license file is given, flag this
// crate as having a nonstandard license. Note that we don't
// actually do anything else with license_file currently.
self.license = Some(String::from("non-standard"));
}
Ok(())
}
}
#[derive(Insertable, Debug)]
#[table_name = "version_authors"]
struct NewAuthor<'a> {
version_id: i32,
name: &'a str,
}
impl Queryable<versions::SqlType, Pg> for Version {
type Row = (i32, i32, String, Timespec, Timespec, i32, Option<String>, bool, Option<String>);
fn build(row: Self::Row) -> Self |
}
/// Handles the `GET /versions` route.
// FIXME: where/how is this used?
pub fn index(req: &mut Request) -> CargoResult<Response> {
use diesel::expression::dsl::any;
let conn = req.db_conn()?;
// Extract all ids requested.
let query = url::form_urlencoded::parse(req.query_string().unwrap_or("").as_bytes());
let ids = query
.filter_map(|(ref a, ref b)| if *a == "ids[]" {
b.parse().ok()
} else {
None
})
.collect::<Vec<i32>>();
let versions = versions::table
.inner_join(crates::table)
.select((versions::all_columns, crates::name))
.filter(versions::id.eq(any(ids)))
.load::<(Version, String)>(&*conn)?
.into_iter()
.map(|(version, crate_name)| version.encodable(&crate_name))
.collect();
#[derive(Serialize)]
struct R {
versions: Vec<EncodableVersion>,
}
Ok(req.json(&R { versions: versions }))
}
/// Handles the `GET /versions/:version_id` route.
pub fn show(req: &mut Request) -> CargoResult<Response> {
let (version, krate) = match req.params().find("crate_id") {
Some(..) => version_and_crate(req)?,
None => {
let id = &req.params()["version_id"];
let id = id.parse().unwrap_or(0);
let conn = req.db_conn()?;
versions::table
.find(id)
.inner_join(crates::table)
.select((versions::all_columns, ::krate::ALL_COLUMNS))
.first(&*conn)?
}
};
#[derive(Serialize)]
struct R {
version: EncodableVersion,
}
Ok(req.json(&R { version: version.encodable(&krate.name) }))
}
fn version_and_crate(req: &mut Request) -> CargoResult<(Version, Crate)> {
let crate_name = &req.params()["crate_id"];
let semver = &req.params()["version"];
if semver::Version::parse(semver).is_err() {
return Err(human(&format_args!("invalid semver: {}", semver)));
};
let conn = req.db_conn()?;
let krate = Crate::by_name(crate_name).first::<Crate>(&*conn)?;
let version = Version::belonging_to(&krate)
.filter(versions::num.eq(semver))
.first(&*conn)
.map_err(|_| {
human(&format_args!(
"crate `{}` does not have a version `{}`",
crate_name,
semver
))
})?;
Ok((version, krate))
}
/// Handles the `GET /crates/:crate_id/:version/dependencies` route.
pub fn dependencies(req: &mut Request) -> CargoResult<Response> {
let (version, _) = version_and_crate(req)?;
let conn = req.db_conn()?;
let deps = version.dependencies(&*conn)?;
let deps = deps.into_iter()
.map(|(dep, crate_name)| dep.encodable(&crate_name, None))
.collect();
#[derive(Serialize)]
struct R {
dependencies: Vec<EncodableDependency>,
}
Ok(req.json(&R { dependencies: deps }))
}
/// Handles the `GET /crates/:crate_id/:version/downloads` route.
pub fn downloads(req: &mut Request) -> CargoResult<Response> {
use diesel::expression::dsl::date;
let (version, _) = version_and_crate(req)?;
let conn = req.db_conn()?;
let cutoff_end_date = req.query()
.get("before_date")
.and_then(|d| strptime(d, "%Y-%m-%d").ok())
.unwrap_or_else(now_utc)
.to_timespec();
let cutoff_start_date = cutoff_end_date + Duration::days(-89);
let downloads = VersionDownload::belonging_to(&version)
.filter(version_downloads::date.between(
date(cutoff_start_date)..
date(cutoff_end_date),
))
.order(version_downloads::date)
.load(&*conn)?
.into_iter()
.map(VersionDownload::encodable)
.collect();
#[derive(Serialize)]
struct R {
version_downloads: Vec<EncodableVersionDownload>,
}
Ok(req.json(&R { version_downloads: downloads }))
}
/// Handles the `GET /crates/:crate_id/:version/authors` route.
pub fn authors(req: &mut Request) -> CargoResult<Response> {
let (version, _) = version_and_crate(req)?;
let conn = req.db_conn()?;
let names = version_authors::table
.filter(version_authors::version_id.eq(version.id))
.select(version_authors::name)
.order(version_authors::name)
.load(&*conn)?;
// It was imagined that we wold associate authors with users.
// This was never implemented. This complicated return struct
// is all that is left, hear for backwards compatibility.
#[derive(Serialize)]
struct R {
users: Vec<::user::EncodablePublicUser>,
meta: Meta,
}
#[derive(Serialize)]
struct Meta {
names: Vec<String>,
}
Ok(req.json(&R {
users: vec![],
meta: Meta { names: names },
}))
}
/// Handles the `DELETE /crates/:crate_id/:version/yank` route.
/// This does not delete a crate version, it makes the crate
/// version accessible only to crates that already have a
/// `Cargo.lock` containing this version.
///
/// Notes:
/// Crate deletion is not implemented to avoid breaking builds,
/// and the goal of yanking a crate is to prevent crates
/// beginning to depend on the yanked crate version.
pub fn yank(req: &mut Request) -> CargoResult<Response> {
modify_yank(req, true)
}
/// Handles the `PUT /crates/:crate_id/:version/unyank` route.
pub fn unyank(req: &mut Request) -> CargoResult<Response> {
modify_yank(req, false)
}
/// Changes `yanked` flag on a crate version record
fn modify_yank(req: &mut Request, yanked: bool) -> CargoResult<Response> {
let (version, krate) = version_and_crate(req)?;
let user = req.user()?;
let conn = req.db_conn()?;
let owners = krate.owners(&conn)?;
if rights(req.app(), &owners, user)? < Rights::Publish {
return Err(human("must already be an owner to yank or unyank"));
}
if version.yanked != yanked {
conn.transaction::<_, Box<CargoError>, _>(|| {
diesel::update(&version)
.set(versions::yanked.eq(yanked))
.execute(&*conn)?;
git::yank(&**req.app(), &krate.name, &version.num, yanked)?;
Ok(())
})?;
}
#[derive(Serialize)]
struct R {
ok: bool,
}
Ok(req.json(&R { ok: true }))
}
| {
let features = row.6
.map(|s| serde_json::from_str(&s).unwrap())
.unwrap_or_else(HashMap::new);
Version {
id: row.0,
crate_id: row.1,
num: semver::Version::parse(&row.2).unwrap(),
updated_at: row.3,
created_at: row.4,
downloads: row.5,
features: features,
yanked: row.7,
license: row.8,
}
} | identifier_body |
calc_itc_ali.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 30 20:51:31 2017
@author: mje
"""
import numpy as np
import mne
import matplotlib.pyplot as plt
from mne.stats import permutation_cluster_test
from my_settings import (subjects_select, tf_folder, epochs_folder)
d_ali_ent_right = []
for subject in subjects_select:
data_right_ent = np.load(tf_folder + "%s_ent_right-4-itc.npy" % subject)
data_left_ent = np.load(tf_folder + "%s_ent_left-4-itc.npy" % subject)
data_right_ctl = np.load(tf_folder + "%s_ctl_right-4-itc.npy" % subject)
data_left_ctl = np.load(tf_folder + "%s_ctl_left-4-itc.npy" % subject)
epochs = mne.read_epochs(
epochs_folder + "%s_trial_start-epo.fif" % subject, preload=False)
selection = mne.read_selection("Left-occipital")
selection = [f.replace(' ', '') for f in selection]
left_idx = mne.pick_types(
epochs.info,
meg='grad',
eeg=False,
eog=False,
stim=False,
exclude=[],
selection=selection)
selection = mne.read_selection("Right-occipital")
selection = [f.replace(' ', '') for f in selection]
right_idx = mne.pick_types(
epochs.info,
meg='grad',
eeg=False,
eog=False,
stim=False,
exclude=[],
selection=selection)
d_right_ent = (
data_left_ent[right_idx, :, :] - data_right_ent[right_idx, :, :])
d_left_ent = (
data_left_ent[left_idx, :, :] - data_right_ent[left_idx, :, :])
d_right_ctl = (
data_left_ctl[right_idx, :, :] - data_right_ctl[right_idx, :, :])
d_left_ctl = (
data_left_ctl[left_idx, :, :] - data_right_ctl[left_idx, :, :])
d_ali_ent_right = np.asarray(d_right_ent).mean(axis=1)
d_ali_ent_left = np.asarray(d_left_ent).mean(axis=1)
d_ali_ctl_right = np.asarray(d_right_ctl).mean(axis=1)
d_ali_ctl_left = np.asarray(d_left_ctl).mean(axis=1)
T_obs, clusters, cluster_pv, H0 = permutation_cluster_test(
[d_ali_ent_left, d_ali_ent_right], n_permutations=5000)
times = (epochs.times[::4][:-1]) * 1e3
plt.close('all')
plt.subplot(211)
plt.title("Ctl left v right")
plt.plot(
times,
d_ali_ent_left.mean(axis=0) - d_ali_ent_right.mean(axis=0),
label="ERF Contrast (Event 1 - Event 2)")
plt.ylabel("MEG (T / m)")
plt.legend()
plt.subplot(212)
for i_c, c in enumerate(clusters):
c = c[0]
if cluster_pv[i_c] <= 0.05:
h = plt.axvspan(
times[c.start], times[c.stop - 1], color='r', alpha=0.3)
else:
|
hf = plt.plot(times, T_obs, 'g')
plt.legend((h, ), ('cluster p-value < 0.05', ))
plt.xlabel("time (ms)")
plt.ylabel("f-values")
plt.show()
| plt.axvspan(
times[c.start],
times[c.stop - 1],
color=(0.3, 0.3, 0.3),
alpha=0.3) | conditional_block |
calc_itc_ali.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 30 20:51:31 2017
@author: mje
"""
import numpy as np
import mne
import matplotlib.pyplot as plt
from mne.stats import permutation_cluster_test
from my_settings import (subjects_select, tf_folder, epochs_folder)
d_ali_ent_right = []
for subject in subjects_select:
data_right_ent = np.load(tf_folder + "%s_ent_right-4-itc.npy" % subject)
data_left_ent = np.load(tf_folder + "%s_ent_left-4-itc.npy" % subject)
data_right_ctl = np.load(tf_folder + "%s_ctl_right-4-itc.npy" % subject)
data_left_ctl = np.load(tf_folder + "%s_ctl_left-4-itc.npy" % subject)
epochs = mne.read_epochs(
epochs_folder + "%s_trial_start-epo.fif" % subject, preload=False)
selection = mne.read_selection("Left-occipital")
selection = [f.replace(' ', '') for f in selection]
left_idx = mne.pick_types(
epochs.info,
meg='grad',
eeg=False,
eog=False,
stim=False,
exclude=[],
selection=selection)
selection = mne.read_selection("Right-occipital")
selection = [f.replace(' ', '') for f in selection]
right_idx = mne.pick_types(
epochs.info,
meg='grad',
eeg=False, |
d_right_ent = (
data_left_ent[right_idx, :, :] - data_right_ent[right_idx, :, :])
d_left_ent = (
data_left_ent[left_idx, :, :] - data_right_ent[left_idx, :, :])
d_right_ctl = (
data_left_ctl[right_idx, :, :] - data_right_ctl[right_idx, :, :])
d_left_ctl = (
data_left_ctl[left_idx, :, :] - data_right_ctl[left_idx, :, :])
d_ali_ent_right = np.asarray(d_right_ent).mean(axis=1)
d_ali_ent_left = np.asarray(d_left_ent).mean(axis=1)
d_ali_ctl_right = np.asarray(d_right_ctl).mean(axis=1)
d_ali_ctl_left = np.asarray(d_left_ctl).mean(axis=1)
T_obs, clusters, cluster_pv, H0 = permutation_cluster_test(
[d_ali_ent_left, d_ali_ent_right], n_permutations=5000)
times = (epochs.times[::4][:-1]) * 1e3
plt.close('all')
plt.subplot(211)
plt.title("Ctl left v right")
plt.plot(
times,
d_ali_ent_left.mean(axis=0) - d_ali_ent_right.mean(axis=0),
label="ERF Contrast (Event 1 - Event 2)")
plt.ylabel("MEG (T / m)")
plt.legend()
plt.subplot(212)
for i_c, c in enumerate(clusters):
c = c[0]
if cluster_pv[i_c] <= 0.05:
h = plt.axvspan(
times[c.start], times[c.stop - 1], color='r', alpha=0.3)
else:
plt.axvspan(
times[c.start],
times[c.stop - 1],
color=(0.3, 0.3, 0.3),
alpha=0.3)
hf = plt.plot(times, T_obs, 'g')
plt.legend((h, ), ('cluster p-value < 0.05', ))
plt.xlabel("time (ms)")
plt.ylabel("f-values")
plt.show() | eog=False,
stim=False,
exclude=[],
selection=selection) | random_line_split |
netc.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub type in_addr_t = u32;
pub type in_port_t = u16;
pub type socklen_t = u32;
pub type sa_family_t = u16;
pub const AF_INET: sa_family_t = 2;
pub const AF_INET6: sa_family_t = 23;
#[derive(Copy, Clone)]
#[repr(C)]
pub struct in_addr {
pub s_addr: in_addr_t,
}
#[derive(Copy, Clone)]
#[repr(align(4))]
#[repr(C)]
pub struct in6_addr {
pub s6_addr: [u8; 16],
}
#[derive(Copy, Clone)]
#[repr(C)]
pub struct sockaddr {
pub sa_family: sa_family_t,
pub sa_data: [u8; 14],
}
#[derive(Copy, Clone)]
#[repr(C)]
pub struct | {
pub sin_family: sa_family_t,
pub sin_port: in_port_t,
pub sin_addr: in_addr,
pub sin_zero: [u8; 8],
}
#[derive(Copy, Clone)]
#[repr(C)]
pub struct sockaddr_in6 {
pub sin6_family: sa_family_t,
pub sin6_port: in_port_t,
pub sin6_flowinfo: u32,
pub sin6_addr: in6_addr,
pub sin6_scope_id: u32,
}
| sockaddr_in | identifier_name |
netc.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub type in_addr_t = u32;
pub type in_port_t = u16;
pub type socklen_t = u32;
pub type sa_family_t = u16;
pub const AF_INET: sa_family_t = 2;
pub const AF_INET6: sa_family_t = 23;
| pub s_addr: in_addr_t,
}
#[derive(Copy, Clone)]
#[repr(align(4))]
#[repr(C)]
pub struct in6_addr {
pub s6_addr: [u8; 16],
}
#[derive(Copy, Clone)]
#[repr(C)]
pub struct sockaddr {
pub sa_family: sa_family_t,
pub sa_data: [u8; 14],
}
#[derive(Copy, Clone)]
#[repr(C)]
pub struct sockaddr_in {
pub sin_family: sa_family_t,
pub sin_port: in_port_t,
pub sin_addr: in_addr,
pub sin_zero: [u8; 8],
}
#[derive(Copy, Clone)]
#[repr(C)]
pub struct sockaddr_in6 {
pub sin6_family: sa_family_t,
pub sin6_port: in_port_t,
pub sin6_flowinfo: u32,
pub sin6_addr: in6_addr,
pub sin6_scope_id: u32,
} | #[derive(Copy, Clone)]
#[repr(C)]
pub struct in_addr { | random_line_split |
psar.rs | use std::f64::NAN;
use error::Err;
/// Parabolic SAR
/// iaf : increment acceleration factor / starting acceleration. Usually 0.02
/// maxaf : max acceleration factor. Usually 0.2
/// Hypothesis : assuming long for initial conditions
/// Formula :
pub fn | (high: &[f64], low: &[f64], iaf: f64, maxaf: f64) -> Result<Vec<f64>, Err> {
let mut psar = vec![NAN; high.len()];
if high.len() < 2 {
return Err(Err::NotEnoughtData);
};
let mut long = false;
if high[0] + low[0] <= high[1] + low[1] {
long = true;
}
let mut sar;
let mut extreme;
if long {
extreme = high[0];
sar = low[0];
} else {
extreme = low[0];
sar = high[0];
}
psar[0] = sar;
let mut af = iaf;
for i in 1..high.len() {
sar = (extreme - sar) * af + sar;
if long {
if i >= 2 && (sar > low[i - 2]) {
sar = low[i - 2]
};
if sar > low[i - 1] {
sar = low[i - 1]
};
if af < maxaf && high[i] > extreme {
af += iaf;
if af > maxaf {
af = maxaf
};
}
if high[i] > extreme {
extreme = high[i];
}
} else {
if i >= 2 && sar < high[i - 2] {
sar = high[i - 2]
};
if sar < high[i - 1] {
sar = high[i - 1]
};
if af < maxaf && low[i] < extreme {
af += iaf;
if af > maxaf {
af = maxaf
};
}
if low[i] < extreme {
extreme = low[i]
};
}
if long && low[i] < sar || !long && high[i] > sar {
af = iaf;
sar = extreme;
long = !long;
if !long {
extreme = low[i];
} else {
extreme = high[i];
}
}
psar[i] = sar;
}
Ok(psar)
}
| psar | identifier_name |
psar.rs | use std::f64::NAN;
use error::Err;
/// Parabolic SAR
/// iaf : increment acceleration factor / starting acceleration. Usually 0.02
/// maxaf : max acceleration factor. Usually 0.2
/// Hypothesis : assuming long for initial conditions
/// Formula :
pub fn psar(high: &[f64], low: &[f64], iaf: f64, maxaf: f64) -> Result<Vec<f64>, Err> | {
let mut psar = vec![NAN; high.len()];
if high.len() < 2 {
return Err(Err::NotEnoughtData);
};
let mut long = false;
if high[0] + low[0] <= high[1] + low[1] {
long = true;
}
let mut sar;
let mut extreme;
if long {
extreme = high[0];
sar = low[0];
} else {
extreme = low[0];
sar = high[0];
}
psar[0] = sar;
let mut af = iaf;
for i in 1..high.len() {
sar = (extreme - sar) * af + sar;
if long {
if i >= 2 && (sar > low[i - 2]) {
sar = low[i - 2]
};
if sar > low[i - 1] {
sar = low[i - 1]
};
if af < maxaf && high[i] > extreme {
af += iaf;
if af > maxaf {
af = maxaf
};
}
if high[i] > extreme {
extreme = high[i];
}
} else {
if i >= 2 && sar < high[i - 2] {
sar = high[i - 2]
};
if sar < high[i - 1] {
sar = high[i - 1]
};
if af < maxaf && low[i] < extreme {
af += iaf;
if af > maxaf {
af = maxaf
};
}
if low[i] < extreme {
extreme = low[i]
};
}
if long && low[i] < sar || !long && high[i] > sar {
af = iaf;
sar = extreme;
long = !long;
if !long {
extreme = low[i];
} else {
extreme = high[i];
}
}
psar[i] = sar;
}
Ok(psar)
} | identifier_body |
|
psar.rs | use std::f64::NAN;
use error::Err;
/// Parabolic SAR
/// iaf : increment acceleration factor / starting acceleration. Usually 0.02
/// maxaf : max acceleration factor. Usually 0.2
/// Hypothesis : assuming long for initial conditions
/// Formula :
pub fn psar(high: &[f64], low: &[f64], iaf: f64, maxaf: f64) -> Result<Vec<f64>, Err> {
let mut psar = vec![NAN; high.len()];
if high.len() < 2 {
return Err(Err::NotEnoughtData);
};
let mut long = false;
if high[0] + low[0] <= high[1] + low[1] {
long = true;
}
let mut sar;
let mut extreme;
if long {
extreme = high[0];
sar = low[0];
} else {
extreme = low[0];
sar = high[0];
}
psar[0] = sar;
let mut af = iaf;
for i in 1..high.len() {
sar = (extreme - sar) * af + sar;
if long {
if i >= 2 && (sar > low[i - 2]) {
sar = low[i - 2]
};
if sar > low[i - 1] {
sar = low[i - 1]
};
if af < maxaf && high[i] > extreme {
af += iaf;
if af > maxaf {
af = maxaf
};
}
if high[i] > extreme {
extreme = high[i];
}
} else {
if i >= 2 && sar < high[i - 2] {
sar = high[i - 2]
};
if sar < high[i - 1] {
sar = high[i - 1]
};
if af < maxaf && low[i] < extreme {
af += iaf;
if af > maxaf {
af = maxaf
};
}
if low[i] < extreme {
extreme = low[i]
};
}
if long && low[i] < sar || !long && high[i] > sar |
psar[i] = sar;
}
Ok(psar)
}
| {
af = iaf;
sar = extreme;
long = !long;
if !long {
extreme = low[i];
} else {
extreme = high[i];
}
} | conditional_block |
psar.rs | use std::f64::NAN;
use error::Err;
/// Parabolic SAR
/// iaf : increment acceleration factor / starting acceleration. Usually 0.02
/// maxaf : max acceleration factor. Usually 0.2
/// Hypothesis : assuming long for initial conditions
/// Formula :
pub fn psar(high: &[f64], low: &[f64], iaf: f64, maxaf: f64) -> Result<Vec<f64>, Err> {
let mut psar = vec![NAN; high.len()];
if high.len() < 2 {
return Err(Err::NotEnoughtData);
};
let mut long = false;
if high[0] + low[0] <= high[1] + low[1] {
long = true;
}
let mut sar;
let mut extreme;
if long {
extreme = high[0];
sar = low[0];
} else {
extreme = low[0];
sar = high[0];
}
psar[0] = sar;
let mut af = iaf;
for i in 1..high.len() {
sar = (extreme - sar) * af + sar;
if long {
if i >= 2 && (sar > low[i - 2]) {
sar = low[i - 2]
};
if sar > low[i - 1] {
sar = low[i - 1]
};
if af < maxaf && high[i] > extreme {
af += iaf;
if af > maxaf {
af = maxaf
};
}
if high[i] > extreme { | extreme = high[i];
}
} else {
if i >= 2 && sar < high[i - 2] {
sar = high[i - 2]
};
if sar < high[i - 1] {
sar = high[i - 1]
};
if af < maxaf && low[i] < extreme {
af += iaf;
if af > maxaf {
af = maxaf
};
}
if low[i] < extreme {
extreme = low[i]
};
}
if long && low[i] < sar || !long && high[i] > sar {
af = iaf;
sar = extreme;
long = !long;
if !long {
extreme = low[i];
} else {
extreme = high[i];
}
}
psar[i] = sar;
}
Ok(psar)
} | random_line_split |
|
StatisticsBenchmarkSuite.ts | import { BenchmarkSuite } from 'pip-benchmark-node';
import { Parameter } from 'pip-benchmark-node';
import { IncrementMongoDbStatisticsBenchmark } from './IncrementMongoDbStatisticsBenchmark';
export class StatisticsBenchmarkSuite extends BenchmarkSuite {
public constructor() {
super("Statistics", "Statistics benchmark");
this.addParameter(new Parameter('InitialRecordNumber', 'Number of records at start', '0'));
this.addParameter(new Parameter('CounterNumber', 'Number of counters', '10'));
this.addParameter(new Parameter('IterationNumber', 'Number of iterations', '10'));
this.addParameter(new Parameter('MongoUri', 'MongoDB URI', null));
this.addParameter(new Parameter('MongoHost', 'MongoDB Hostname', 'localhost'));
this.addParameter(new Parameter('MongoPort', 'MongoDB Port', '27017'));
this.addParameter(new Parameter('MongoDb', 'MongoDB Database', 'benchmark')); | }
} |
this.addBenchmark(new IncrementMongoDbStatisticsBenchmark()); | random_line_split |
StatisticsBenchmarkSuite.ts | import { BenchmarkSuite } from 'pip-benchmark-node';
import { Parameter } from 'pip-benchmark-node';
import { IncrementMongoDbStatisticsBenchmark } from './IncrementMongoDbStatisticsBenchmark';
export class | extends BenchmarkSuite {
public constructor() {
super("Statistics", "Statistics benchmark");
this.addParameter(new Parameter('InitialRecordNumber', 'Number of records at start', '0'));
this.addParameter(new Parameter('CounterNumber', 'Number of counters', '10'));
this.addParameter(new Parameter('IterationNumber', 'Number of iterations', '10'));
this.addParameter(new Parameter('MongoUri', 'MongoDB URI', null));
this.addParameter(new Parameter('MongoHost', 'MongoDB Hostname', 'localhost'));
this.addParameter(new Parameter('MongoPort', 'MongoDB Port', '27017'));
this.addParameter(new Parameter('MongoDb', 'MongoDB Database', 'benchmark'));
this.addBenchmark(new IncrementMongoDbStatisticsBenchmark());
}
} | StatisticsBenchmarkSuite | identifier_name |
pl-help.component.ts | import {Component, Input, OnInit} from '@angular/core';
import {Account} from "@model/Account";
import {Gift, GiftStatus} from "@model/Gift";
@Component({
selector: 'pl-help',
templateUrl: './pl-help.component.html',
styleUrls: ['../help.component.css']
})
export class PlHelpComponent implements OnInit {
fragment: string;
@Input() isAdmin: boolean;
@Input() user: Account;
@Input() admins: Account[];
gift: Gift;
gift_claimed: Gift;
gift_realized: Gift;
constructor() |
ngOnInit() {
this.createGifts();
}
private createGifts() {
this.gift = {
id: -1,
name: 'Kolejka elektryczna',
category: {name: 'Zabawki'},
description: 'Najlepiej drewniania, ew metalowa',
hidden: true,
hasImage: true,
links: ['#'],
createdBy: this.user.id,
engines: [
{
name: 'google',
icon: 'fa-google',
id: 1,
searchString: '#'
}
],
created: new Date()
};
this.gift_claimed = {
id: -1,
name: 'Piłka do nogi',
category: {name: 'Zabawki'},
description: 'Biało czarna',
claimed: this.user,
links: ['#'],
engines: [
{
name: 'google',
icon: 'fa-google',
id: 1,
searchString: '#'
}
],
created: new Date()
};
this.gift_realized = {
id: -1,
name: 'Samochodzik',
status: GiftStatus.REALISED,
category: {name: 'Zabawki'},
realised: new Date(),
description: 'Zdalnie sterowany, koniecznie czerwony',
links: ['#'],
engines: [
{
name: 'google',
icon: 'fa-google',
id: 1,
searchString: '#'
}
],
created: new Date()
}
}
menuClick(event: Event) {
event.stopPropagation();
}
}
| {
} | identifier_body |
pl-help.component.ts | import {Component, Input, OnInit} from '@angular/core';
import {Account} from "@model/Account";
import {Gift, GiftStatus} from "@model/Gift";
@Component({
selector: 'pl-help',
templateUrl: './pl-help.component.html',
styleUrls: ['../help.component.css']
})
export class | implements OnInit {
fragment: string;
@Input() isAdmin: boolean;
@Input() user: Account;
@Input() admins: Account[];
gift: Gift;
gift_claimed: Gift;
gift_realized: Gift;
constructor() {
}
ngOnInit() {
this.createGifts();
}
private createGifts() {
this.gift = {
id: -1,
name: 'Kolejka elektryczna',
category: {name: 'Zabawki'},
description: 'Najlepiej drewniania, ew metalowa',
hidden: true,
hasImage: true,
links: ['#'],
createdBy: this.user.id,
engines: [
{
name: 'google',
icon: 'fa-google',
id: 1,
searchString: '#'
}
],
created: new Date()
};
this.gift_claimed = {
id: -1,
name: 'Piłka do nogi',
category: {name: 'Zabawki'},
description: 'Biało czarna',
claimed: this.user,
links: ['#'],
engines: [
{
name: 'google',
icon: 'fa-google',
id: 1,
searchString: '#'
}
],
created: new Date()
};
this.gift_realized = {
id: -1,
name: 'Samochodzik',
status: GiftStatus.REALISED,
category: {name: 'Zabawki'},
realised: new Date(),
description: 'Zdalnie sterowany, koniecznie czerwony',
links: ['#'],
engines: [
{
name: 'google',
icon: 'fa-google',
id: 1,
searchString: '#'
}
],
created: new Date()
}
}
menuClick(event: Event) {
event.stopPropagation();
}
}
| PlHelpComponent | identifier_name |
pl-help.component.ts | import {Component, Input, OnInit} from '@angular/core';
import {Account} from "@model/Account";
import {Gift, GiftStatus} from "@model/Gift";
@Component({
selector: 'pl-help',
templateUrl: './pl-help.component.html',
styleUrls: ['../help.component.css']
})
export class PlHelpComponent implements OnInit {
fragment: string;
@Input() isAdmin: boolean;
@Input() user: Account;
@Input() admins: Account[];
gift: Gift;
gift_claimed: Gift;
gift_realized: Gift;
constructor() {
}
ngOnInit() {
this.createGifts();
}
private createGifts() {
this.gift = {
id: -1,
name: 'Kolejka elektryczna',
category: {name: 'Zabawki'},
description: 'Najlepiej drewniania, ew metalowa',
hidden: true,
hasImage: true,
links: ['#'],
createdBy: this.user.id,
engines: [
{
name: 'google',
icon: 'fa-google',
id: 1,
searchString: '#'
}
],
created: new Date()
};
this.gift_claimed = {
id: -1,
name: 'Piłka do nogi',
category: {name: 'Zabawki'},
description: 'Biało czarna',
claimed: this.user,
links: ['#'],
engines: [
{
name: 'google',
icon: 'fa-google',
id: 1,
searchString: '#'
}
],
created: new Date()
};
this.gift_realized = {
id: -1,
name: 'Samochodzik',
status: GiftStatus.REALISED,
category: {name: 'Zabawki'}, | {
name: 'google',
icon: 'fa-google',
id: 1,
searchString: '#'
}
],
created: new Date()
}
}
menuClick(event: Event) {
event.stopPropagation();
}
} | realised: new Date(),
description: 'Zdalnie sterowany, koniecznie czerwony',
links: ['#'],
engines: [ | random_line_split |
angle.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Specified angles.
use cssparser::{Parser, Token};
use parser::{ParserContext, Parse};
#[allow(unused_imports)] use std::ascii::AsciiExt;
use std::fmt::{self, Write};
use style_traits::{CssWriter, ParseError, ToCss};
use values::CSSFloat;
use values::computed::{Context, ToComputedValue};
use values::computed::angle::Angle as ComputedAngle;
use values::specified::calc::CalcNode;
/// A specified angle.
///
/// Computed angles are essentially same as specified ones except for `calc()`
/// value serialization. Therefore we are storing a computed angle inside
/// to hold the actual value and its unit.
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq)]
pub struct Angle {
value: ComputedAngle,
was_calc: bool,
}
impl ToCss for Angle {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.was_calc {
dest.write_str("calc(")?;
}
self.value.to_css(dest)?;
if self.was_calc {
dest.write_str(")")?;
}
Ok(())
}
}
impl ToComputedValue for Angle {
type ComputedValue = ComputedAngle;
fn to_computed_value(&self, _context: &Context) -> Self::ComputedValue {
self.value
}
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
Angle {
value: *computed,
was_calc: false,
}
}
}
impl Angle {
/// Creates an angle with the given value in degrees.
pub fn from_degrees(value: CSSFloat, was_calc: bool) -> Self {
Angle { value: ComputedAngle::Deg(value), was_calc }
}
/// Creates an angle with the given value in gradians.
pub fn from_gradians(value: CSSFloat, was_calc: bool) -> Self {
Angle { value: ComputedAngle::Grad(value), was_calc }
}
/// Creates an angle with the given value in turns.
pub fn from_turns(value: CSSFloat, was_calc: bool) -> Self {
Angle { value: ComputedAngle::Turn(value), was_calc }
}
/// Creates an angle with the given value in radians.
pub fn from_radians(value: CSSFloat, was_calc: bool) -> Self {
Angle { value: ComputedAngle::Rad(value), was_calc }
}
/// Returns the amount of radians this angle represents.
#[inline]
pub fn radians(self) -> f32 {
self.value.radians()
}
/// Returns the amount of degrees this angle represents.
#[inline]
pub fn | (self) -> f32 {
use std::f32::consts::PI;
self.radians() * 360. / (2. * PI)
}
/// Returns `0deg`.
pub fn zero() -> Self {
Self::from_degrees(0.0, false)
}
/// Returns an `Angle` parsed from a `calc()` expression.
pub fn from_calc(radians: CSSFloat) -> Self {
Angle {
value: ComputedAngle::Rad(radians),
was_calc: true,
}
}
}
impl AsRef<ComputedAngle> for Angle {
#[inline]
fn as_ref(&self) -> &ComputedAngle {
&self.value
}
}
/// Whether to allow parsing an unitless zero as a valid angle.
///
/// This should always be `No`, except for exceptions like:
///
/// https://github.com/w3c/fxtf-drafts/issues/228
///
/// See also: https://github.com/w3c/csswg-drafts/issues/1162.
enum AllowUnitlessZeroAngle {
Yes,
No,
}
impl Parse for Angle {
/// Parses an angle according to CSS-VALUES § 6.1.
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
Self::parse_internal(context, input, AllowUnitlessZeroAngle::No)
}
}
impl Angle {
/// Parse an `<angle>` value given a value and an unit.
pub fn parse_dimension(
value: CSSFloat,
unit: &str,
from_calc: bool,
) -> Result<Angle, ()> {
let angle = match_ignore_ascii_case! { unit,
"deg" => Angle::from_degrees(value, from_calc),
"grad" => Angle::from_gradians(value, from_calc),
"turn" => Angle::from_turns(value, from_calc),
"rad" => Angle::from_radians(value, from_calc),
_ => return Err(())
};
Ok(angle)
}
/// Parse an `<angle>` allowing unitless zero to represent a zero angle.
///
/// See the comment in `AllowUnitlessZeroAngle` for why.
pub fn parse_with_unitless<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
Self::parse_internal(context, input, AllowUnitlessZeroAngle::Yes)
}
fn parse_internal<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
allow_unitless_zero: AllowUnitlessZeroAngle,
) -> Result<Self, ParseError<'i>> {
// FIXME: remove clone() when lifetimes are non-lexical
let token = input.next()?.clone();
match token {
Token::Dimension { value, ref unit, .. } => {
Angle::parse_dimension(value, unit, /* from_calc = */ false)
}
Token::Number { value, .. } if value == 0. => {
match allow_unitless_zero {
AllowUnitlessZeroAngle::Yes => Ok(Angle::zero()),
AllowUnitlessZeroAngle::No => Err(()),
}
},
Token::Function(ref name) if name.eq_ignore_ascii_case("calc") => {
return input.parse_nested_block(|i| CalcNode::parse_angle(context, i))
}
_ => Err(())
}.map_err(|()| input.new_unexpected_token_error(token.clone()))
}
}
| degrees | identifier_name |
angle.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Specified angles.
use cssparser::{Parser, Token};
use parser::{ParserContext, Parse};
#[allow(unused_imports)] use std::ascii::AsciiExt;
use std::fmt::{self, Write};
use style_traits::{CssWriter, ParseError, ToCss};
use values::CSSFloat;
use values::computed::{Context, ToComputedValue};
use values::computed::angle::Angle as ComputedAngle;
use values::specified::calc::CalcNode;
/// A specified angle.
///
/// Computed angles are essentially same as specified ones except for `calc()`
/// value serialization. Therefore we are storing a computed angle inside
/// to hold the actual value and its unit.
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq)]
pub struct Angle {
value: ComputedAngle,
was_calc: bool,
}
impl ToCss for Angle {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.was_calc {
dest.write_str("calc(")?;
}
self.value.to_css(dest)?;
if self.was_calc {
dest.write_str(")")?;
}
Ok(())
}
}
impl ToComputedValue for Angle {
type ComputedValue = ComputedAngle;
fn to_computed_value(&self, _context: &Context) -> Self::ComputedValue |
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
Angle {
value: *computed,
was_calc: false,
}
}
}
impl Angle {
/// Creates an angle with the given value in degrees.
pub fn from_degrees(value: CSSFloat, was_calc: bool) -> Self {
Angle { value: ComputedAngle::Deg(value), was_calc }
}
/// Creates an angle with the given value in gradians.
pub fn from_gradians(value: CSSFloat, was_calc: bool) -> Self {
Angle { value: ComputedAngle::Grad(value), was_calc }
}
/// Creates an angle with the given value in turns.
pub fn from_turns(value: CSSFloat, was_calc: bool) -> Self {
Angle { value: ComputedAngle::Turn(value), was_calc }
}
/// Creates an angle with the given value in radians.
pub fn from_radians(value: CSSFloat, was_calc: bool) -> Self {
Angle { value: ComputedAngle::Rad(value), was_calc }
}
/// Returns the amount of radians this angle represents.
#[inline]
pub fn radians(self) -> f32 {
self.value.radians()
}
/// Returns the amount of degrees this angle represents.
#[inline]
pub fn degrees(self) -> f32 {
use std::f32::consts::PI;
self.radians() * 360. / (2. * PI)
}
/// Returns `0deg`.
pub fn zero() -> Self {
Self::from_degrees(0.0, false)
}
/// Returns an `Angle` parsed from a `calc()` expression.
pub fn from_calc(radians: CSSFloat) -> Self {
Angle {
value: ComputedAngle::Rad(radians),
was_calc: true,
}
}
}
impl AsRef<ComputedAngle> for Angle {
#[inline]
fn as_ref(&self) -> &ComputedAngle {
&self.value
}
}
/// Whether to allow parsing an unitless zero as a valid angle.
///
/// This should always be `No`, except for exceptions like:
///
/// https://github.com/w3c/fxtf-drafts/issues/228
///
/// See also: https://github.com/w3c/csswg-drafts/issues/1162.
enum AllowUnitlessZeroAngle {
Yes,
No,
}
impl Parse for Angle {
/// Parses an angle according to CSS-VALUES § 6.1.
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
Self::parse_internal(context, input, AllowUnitlessZeroAngle::No)
}
}
impl Angle {
/// Parse an `<angle>` value given a value and an unit.
pub fn parse_dimension(
value: CSSFloat,
unit: &str,
from_calc: bool,
) -> Result<Angle, ()> {
let angle = match_ignore_ascii_case! { unit,
"deg" => Angle::from_degrees(value, from_calc),
"grad" => Angle::from_gradians(value, from_calc),
"turn" => Angle::from_turns(value, from_calc),
"rad" => Angle::from_radians(value, from_calc),
_ => return Err(())
};
Ok(angle)
}
/// Parse an `<angle>` allowing unitless zero to represent a zero angle.
///
/// See the comment in `AllowUnitlessZeroAngle` for why.
pub fn parse_with_unitless<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
Self::parse_internal(context, input, AllowUnitlessZeroAngle::Yes)
}
fn parse_internal<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
allow_unitless_zero: AllowUnitlessZeroAngle,
) -> Result<Self, ParseError<'i>> {
// FIXME: remove clone() when lifetimes are non-lexical
let token = input.next()?.clone();
match token {
Token::Dimension { value, ref unit, .. } => {
Angle::parse_dimension(value, unit, /* from_calc = */ false)
}
Token::Number { value, .. } if value == 0. => {
match allow_unitless_zero {
AllowUnitlessZeroAngle::Yes => Ok(Angle::zero()),
AllowUnitlessZeroAngle::No => Err(()),
}
},
Token::Function(ref name) if name.eq_ignore_ascii_case("calc") => {
return input.parse_nested_block(|i| CalcNode::parse_angle(context, i))
}
_ => Err(())
}.map_err(|()| input.new_unexpected_token_error(token.clone()))
}
}
| {
self.value
} | identifier_body |
angle.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Specified angles.
use cssparser::{Parser, Token};
use parser::{ParserContext, Parse};
#[allow(unused_imports)] use std::ascii::AsciiExt;
use std::fmt::{self, Write};
use style_traits::{CssWriter, ParseError, ToCss};
use values::CSSFloat;
use values::computed::{Context, ToComputedValue};
use values::computed::angle::Angle as ComputedAngle;
use values::specified::calc::CalcNode;
/// A specified angle.
///
/// Computed angles are essentially same as specified ones except for `calc()`
/// value serialization. Therefore we are storing a computed angle inside
/// to hold the actual value and its unit.
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq)]
pub struct Angle {
value: ComputedAngle,
was_calc: bool,
}
impl ToCss for Angle {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.was_calc {
dest.write_str("calc(")?;
}
self.value.to_css(dest)?;
if self.was_calc {
dest.write_str(")")?;
}
Ok(())
}
}
impl ToComputedValue for Angle {
type ComputedValue = ComputedAngle;
fn to_computed_value(&self, _context: &Context) -> Self::ComputedValue {
self.value
}
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
Angle {
value: *computed,
was_calc: false,
}
}
}
impl Angle {
/// Creates an angle with the given value in degrees.
pub fn from_degrees(value: CSSFloat, was_calc: bool) -> Self {
Angle { value: ComputedAngle::Deg(value), was_calc }
}
/// Creates an angle with the given value in gradians.
pub fn from_gradians(value: CSSFloat, was_calc: bool) -> Self {
Angle { value: ComputedAngle::Grad(value), was_calc }
}
/// Creates an angle with the given value in turns.
pub fn from_turns(value: CSSFloat, was_calc: bool) -> Self {
Angle { value: ComputedAngle::Turn(value), was_calc }
}
/// Creates an angle with the given value in radians.
pub fn from_radians(value: CSSFloat, was_calc: bool) -> Self { | Angle { value: ComputedAngle::Rad(value), was_calc }
}
/// Returns the amount of radians this angle represents.
#[inline]
pub fn radians(self) -> f32 {
self.value.radians()
}
/// Returns the amount of degrees this angle represents.
#[inline]
pub fn degrees(self) -> f32 {
use std::f32::consts::PI;
self.radians() * 360. / (2. * PI)
}
/// Returns `0deg`.
pub fn zero() -> Self {
Self::from_degrees(0.0, false)
}
/// Returns an `Angle` parsed from a `calc()` expression.
pub fn from_calc(radians: CSSFloat) -> Self {
Angle {
value: ComputedAngle::Rad(radians),
was_calc: true,
}
}
}
impl AsRef<ComputedAngle> for Angle {
#[inline]
fn as_ref(&self) -> &ComputedAngle {
&self.value
}
}
/// Whether to allow parsing an unitless zero as a valid angle.
///
/// This should always be `No`, except for exceptions like:
///
/// https://github.com/w3c/fxtf-drafts/issues/228
///
/// See also: https://github.com/w3c/csswg-drafts/issues/1162.
enum AllowUnitlessZeroAngle {
Yes,
No,
}
impl Parse for Angle {
/// Parses an angle according to CSS-VALUES § 6.1.
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
Self::parse_internal(context, input, AllowUnitlessZeroAngle::No)
}
}
impl Angle {
/// Parse an `<angle>` value given a value and an unit.
pub fn parse_dimension(
value: CSSFloat,
unit: &str,
from_calc: bool,
) -> Result<Angle, ()> {
let angle = match_ignore_ascii_case! { unit,
"deg" => Angle::from_degrees(value, from_calc),
"grad" => Angle::from_gradians(value, from_calc),
"turn" => Angle::from_turns(value, from_calc),
"rad" => Angle::from_radians(value, from_calc),
_ => return Err(())
};
Ok(angle)
}
/// Parse an `<angle>` allowing unitless zero to represent a zero angle.
///
/// See the comment in `AllowUnitlessZeroAngle` for why.
pub fn parse_with_unitless<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
Self::parse_internal(context, input, AllowUnitlessZeroAngle::Yes)
}
fn parse_internal<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
allow_unitless_zero: AllowUnitlessZeroAngle,
) -> Result<Self, ParseError<'i>> {
// FIXME: remove clone() when lifetimes are non-lexical
let token = input.next()?.clone();
match token {
Token::Dimension { value, ref unit, .. } => {
Angle::parse_dimension(value, unit, /* from_calc = */ false)
}
Token::Number { value, .. } if value == 0. => {
match allow_unitless_zero {
AllowUnitlessZeroAngle::Yes => Ok(Angle::zero()),
AllowUnitlessZeroAngle::No => Err(()),
}
},
Token::Function(ref name) if name.eq_ignore_ascii_case("calc") => {
return input.parse_nested_block(|i| CalcNode::parse_angle(context, i))
}
_ => Err(())
}.map_err(|()| input.new_unexpected_token_error(token.clone()))
}
} | random_line_split |
|
analytics.js | function generate_analytics_alert() {
var result =
'<div class="alert alert-warning">' +
novaideo_translate("Aucune valeur n'est trouvée!") +
"</div>"
return result
}
function generate_analytics_study(values) {
var result =
'<div class="alert alert-info">' +
novaideo_translate(
"Ces graphiques sont produits à partir d'un échantillon présentant:"
) +
'<ul class="list-unstyled">'
for (key in values) {
result += "<li>"
if (values[key] > 1) {
result += values[key] + " " + key
} else {
result += values[key] + " " + key
}
result += "</li>"
}
result += "</ul></div>"
return result
}
function get_ | {
return '<canvas id="' + id + '"></canvas>'
}
$(document).on("submit", ".analytics-form", function(event) {
var button = $(this).find("button").last()
var parent = $($(this).parents(".tab-pane").first())
var url = $(event.target).data("url")
$(button).addClass("disabled")
var values = $(this).serialize() + "&" + button.val() + "=" + button.val()
loading_progress()
$.post(url, values, function(data) {
if (data) {
$(parent.find("#chart-script")).html(data["body"])
var script = $(parent.find("#chart-script script").first())
if (script.data("has_value")) {
$(parent.find(".analytics-container .canvas-title")).removeClass(
"hide-bloc"
)
} else {
$(parent.find(".analytics-container .canvas-title")).addClass(
"hide-bloc"
)
$(parent.find(".analytics-container .legend")).html("")
var charts = $(parent.find(".chart-container"))
charts.removeClass("object-well")
charts.removeClass("well")
for (var i = 0; i < charts.length; i++) {
var chart = $(charts[i])
var canvas = chart.find("canvas")
var ctx = canvas.get(0).getContext("2d")
ctx.clearRect(0, 0, 1500, 1500)
var new_canvas = get_new_canvas($(canvas).attr("id"))
$(canvas).replaceWith(new_canvas)
}
}
}
$(button).removeClass("disabled")
finish_progress()
})
event.preventDefault()
})
| new_canvas(id) | identifier_name |
analytics.js | function generate_analytics_alert() {
var result =
'<div class="alert alert-warning">' +
novaideo_translate("Aucune valeur n'est trouvée!") +
"</div>"
return result
}
function generate_analytics_study(values) {
var result =
'<div class="alert alert-info">' +
novaideo_translate(
"Ces graphiques sont produits à partir d'un échantillon présentant:"
) +
'<ul class="list-unstyled">'
for (key in values) {
result += "<li>"
if (values[key] > 1) {
| e {
result += values[key] + " " + key
}
result += "</li>"
}
result += "</ul></div>"
return result
}
function get_new_canvas(id) {
return '<canvas id="' + id + '"></canvas>'
}
$(document).on("submit", ".analytics-form", function(event) {
var button = $(this).find("button").last()
var parent = $($(this).parents(".tab-pane").first())
var url = $(event.target).data("url")
$(button).addClass("disabled")
var values = $(this).serialize() + "&" + button.val() + "=" + button.val()
loading_progress()
$.post(url, values, function(data) {
if (data) {
$(parent.find("#chart-script")).html(data["body"])
var script = $(parent.find("#chart-script script").first())
if (script.data("has_value")) {
$(parent.find(".analytics-container .canvas-title")).removeClass(
"hide-bloc"
)
} else {
$(parent.find(".analytics-container .canvas-title")).addClass(
"hide-bloc"
)
$(parent.find(".analytics-container .legend")).html("")
var charts = $(parent.find(".chart-container"))
charts.removeClass("object-well")
charts.removeClass("well")
for (var i = 0; i < charts.length; i++) {
var chart = $(charts[i])
var canvas = chart.find("canvas")
var ctx = canvas.get(0).getContext("2d")
ctx.clearRect(0, 0, 1500, 1500)
var new_canvas = get_new_canvas($(canvas).attr("id"))
$(canvas).replaceWith(new_canvas)
}
}
}
$(button).removeClass("disabled")
finish_progress()
})
event.preventDefault()
})
| result += values[key] + " " + key
} els | conditional_block |
analytics.js | function generate_analytics_alert() {
var result =
'<div class="alert alert-warning">' +
novaideo_translate("Aucune valeur n'est trouvée!") +
"</div>"
return result
}
function generate_analytics_study(values) { | nction get_new_canvas(id) {
return '<canvas id="' + id + '"></canvas>'
}
$(document).on("submit", ".analytics-form", function(event) {
var button = $(this).find("button").last()
var parent = $($(this).parents(".tab-pane").first())
var url = $(event.target).data("url")
$(button).addClass("disabled")
var values = $(this).serialize() + "&" + button.val() + "=" + button.val()
loading_progress()
$.post(url, values, function(data) {
if (data) {
$(parent.find("#chart-script")).html(data["body"])
var script = $(parent.find("#chart-script script").first())
if (script.data("has_value")) {
$(parent.find(".analytics-container .canvas-title")).removeClass(
"hide-bloc"
)
} else {
$(parent.find(".analytics-container .canvas-title")).addClass(
"hide-bloc"
)
$(parent.find(".analytics-container .legend")).html("")
var charts = $(parent.find(".chart-container"))
charts.removeClass("object-well")
charts.removeClass("well")
for (var i = 0; i < charts.length; i++) {
var chart = $(charts[i])
var canvas = chart.find("canvas")
var ctx = canvas.get(0).getContext("2d")
ctx.clearRect(0, 0, 1500, 1500)
var new_canvas = get_new_canvas($(canvas).attr("id"))
$(canvas).replaceWith(new_canvas)
}
}
}
$(button).removeClass("disabled")
finish_progress()
})
event.preventDefault()
})
|
var result =
'<div class="alert alert-info">' +
novaideo_translate(
"Ces graphiques sont produits à partir d'un échantillon présentant:"
) +
'<ul class="list-unstyled">'
for (key in values) {
result += "<li>"
if (values[key] > 1) {
result += values[key] + " " + key
} else {
result += values[key] + " " + key
}
result += "</li>"
}
result += "</ul></div>"
return result
}
fu | identifier_body |
analytics.js | function generate_analytics_alert() {
var result =
'<div class="alert alert-warning">' +
novaideo_translate("Aucune valeur n'est trouvée!") +
"</div>"
return result
}
function generate_analytics_study(values) {
var result =
'<div class="alert alert-info">' + | novaideo_translate(
"Ces graphiques sont produits à partir d'un échantillon présentant:"
) +
'<ul class="list-unstyled">'
for (key in values) {
result += "<li>"
if (values[key] > 1) {
result += values[key] + " " + key
} else {
result += values[key] + " " + key
}
result += "</li>"
}
result += "</ul></div>"
return result
}
function get_new_canvas(id) {
return '<canvas id="' + id + '"></canvas>'
}
$(document).on("submit", ".analytics-form", function(event) {
var button = $(this).find("button").last()
var parent = $($(this).parents(".tab-pane").first())
var url = $(event.target).data("url")
$(button).addClass("disabled")
var values = $(this).serialize() + "&" + button.val() + "=" + button.val()
loading_progress()
$.post(url, values, function(data) {
if (data) {
$(parent.find("#chart-script")).html(data["body"])
var script = $(parent.find("#chart-script script").first())
if (script.data("has_value")) {
$(parent.find(".analytics-container .canvas-title")).removeClass(
"hide-bloc"
)
} else {
$(parent.find(".analytics-container .canvas-title")).addClass(
"hide-bloc"
)
$(parent.find(".analytics-container .legend")).html("")
var charts = $(parent.find(".chart-container"))
charts.removeClass("object-well")
charts.removeClass("well")
for (var i = 0; i < charts.length; i++) {
var chart = $(charts[i])
var canvas = chart.find("canvas")
var ctx = canvas.get(0).getContext("2d")
ctx.clearRect(0, 0, 1500, 1500)
var new_canvas = get_new_canvas($(canvas).attr("id"))
$(canvas).replaceWith(new_canvas)
}
}
}
$(button).removeClass("disabled")
finish_progress()
})
event.preventDefault()
}) | random_line_split |
|
lasdiff.py | try:
import traceback
import argparse
import textwrap
import glob
import os
import logging
import datetime
import multiprocessing
from libs import LasPyConverter
except ImportError as err:
print('Error {0} import module: {1}'.format(__name__, err))
traceback.print_exc()
exit(128)
script_path = __file__
header = textwrap.dedent('''LAS Diff''')
class LasPyParameters:
def __init__(self):
# predefinied paths
self.parser = argparse.ArgumentParser(prog="lasdiff",
formatter_class=argparse.RawDescriptionHelpFormatter,
description='',
epilog=textwrap.dedent('''
example:
'''))
# reguired parameters
self.parser.add_argument('-i', type=str, dest='input', required=True,
help='required: input file or folder')
self.parser.add_argument('-o', type=str, dest='output', required=True,
help='required: output file or folder (d:\lasfiles\\tests\\results)')
# optional parameters
self.parser.add_argument('-input_format', type=str, dest='input_format', required=False, choices=['las', 'laz'],
help='optional: input format (default=las, laz is not implemented (yet))')
self.parser.add_argument('-cores', type=int, dest='cores', required=False, default=1,
help='optional: cores (default=1)')
self.parser.add_argument('-v', dest='verbose', required=False,
help='optional: verbose toggle (-v=on, nothing=off)', action='store_true')
self.parser.add_argument('-version', action='version', version=self.parser.prog)
def parse(self):
self.args = self.parser.parse_args()
##defaults
if self.args.verbose:
self.args.verbose = ' -v'
else:
self.args.verbose = ''
if self.args.input_format == None:
self.args.input_format = 'las'
if self.args.cores == None:
self.args.cores = 1
# ---------PUBLIC METHODS--------------------
def | (self):
return self.args.output
def get_input(self):
return self.args.input
def get_input_format(self):
return self.args.input_format
def get_verbose(self):
return self.args.verbose
def get_cores(self):
return self.args.cores
def DiffLas(parameters):
# Parse incoming parameters
source_file = parameters[0]
destination_file = parameters[1]
# Get name for this process
current = multiprocessing.current_proces()
proc_name = current.name
logging.info('[%s] Starting ...' % (proc_name))
logging.info(
'[%s] Creating diff of %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
# Opening source LAS files for read and write
lasFiles = LasPyConverter.LasPyCompare(source_file, destination_file)
# Opening destination LAS file
logging.info('[%s] Opening %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
lasFiles.OpenReanOnly()
logging.info('[%s] Comparing %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
lasFiles.ComparePointCloud()
logging.info('[%s] Closing %s LAS PointCloud.' % (proc_name, destination_file))
lasFiles.Close()
logging.info('[%s] %s LAS PointCloud has closed.' % (proc_name, destination_file))
return 0
def SetLogging(logfilename):
logging.basicConfig(
filename=logfilename,
filemode='w',
format='%(asctime)s %(name)s %(levelname)s %(message)s', datefmt='%d-%m-%Y %H:%M:%S',
level=logging.DEBUG)
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setLevel(logging.INFO)
# set a format which is simpler for console use
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', datefmt='%d-%m-%Y %H:%M:%S')
# tell the handler to use this format
console.setFormatter(formatter)
# add the handler to the root logger
logging.getLogger('').addHandler(console)
def main():
logfilename = 'lasdiff_' + datetime.datetime.today().strftime('%Y%m%d_%H%M%S') + '.log'
SetLogging(logfilename)
logging.info(header)
lasconverterworkflow = LasPyParameters()
lasconverterworkflow.parse()
# File/Directory handler
inputfiles = lasconverterworkflow.get_input()
inputformat = lasconverterworkflow.get_input_format()
outputfiles = lasconverterworkflow.get_output()
outputpath = os.path.normpath(outputfiles)
cores = lasconverterworkflow.get_cores()
inputisdir = False
doing = []
if os.path.isdir(inputfiles):
inputisdir = True
inputfiles = glob.glob(os.path.join(inputfiles, '*' + inputformat))
if not os.path.exists(outputfiles):
os.makedirs(outputfiles)
for workfile in inputfiles:
if os.path.isfile(workfile) and os.path.isfile(os.path.join(outputpath, os.path.basename(workfile))):
logging.info('Adding %s to the queue.' % (workfile))
doing.append([workfile, os.path.join(outputpath, os.path.basename(workfile))])
else:
logging.info('The %s is not file, or pair of comparable files. Skipping.' % (workfile))
elif os.path.isfile(inputfiles):
inputisdir = False
workfile = inputfiles
if os.path.basename(outputfiles) is not "":
doing.append([workfile, outputfiles])
else:
doing.append([workfile, os.path.join(outputpath, os.path.basename(workfile))])
logging.info('Adding %s to the queue.' % (workfile))
else:
# Not a file, not a dir
logging.error('Cannot found input LAS PointCloud file: %s' % (inputfiles))
exit(1)
# If we got one file, start only one process
if inputisdir is False:
cores = 1
if cores != 1:
pool = multiprocessing.Pool(processes=cores)
results = pool.map_async(DiffLas, doing)
pool.close()
pool.join()
else:
for d in doing:
DiffLas(d)
logging.info('Finished, exiting and go home ...')
if __name__ == '__main__':
main()
| get_output | identifier_name |
lasdiff.py | try:
import traceback
import argparse
import textwrap
import glob
import os
import logging
import datetime
import multiprocessing
from libs import LasPyConverter
except ImportError as err:
print('Error {0} import module: {1}'.format(__name__, err))
traceback.print_exc()
exit(128)
script_path = __file__
header = textwrap.dedent('''LAS Diff''')
class LasPyParameters:
def __init__(self):
# predefinied paths
self.parser = argparse.ArgumentParser(prog="lasdiff",
formatter_class=argparse.RawDescriptionHelpFormatter,
description='',
epilog=textwrap.dedent('''
example:
'''))
# reguired parameters
self.parser.add_argument('-i', type=str, dest='input', required=True,
help='required: input file or folder')
self.parser.add_argument('-o', type=str, dest='output', required=True,
help='required: output file or folder (d:\lasfiles\\tests\\results)')
# optional parameters
self.parser.add_argument('-input_format', type=str, dest='input_format', required=False, choices=['las', 'laz'],
help='optional: input format (default=las, laz is not implemented (yet))')
self.parser.add_argument('-cores', type=int, dest='cores', required=False, default=1,
help='optional: cores (default=1)')
self.parser.add_argument('-v', dest='verbose', required=False,
help='optional: verbose toggle (-v=on, nothing=off)', action='store_true')
self.parser.add_argument('-version', action='version', version=self.parser.prog)
def parse(self):
self.args = self.parser.parse_args()
##defaults
if self.args.verbose:
self.args.verbose = ' -v'
else:
self.args.verbose = ''
if self.args.input_format == None:
self.args.input_format = 'las'
if self.args.cores == None:
self.args.cores = 1
# ---------PUBLIC METHODS--------------------
def get_output(self):
return self.args.output
def get_input(self):
return self.args.input
def get_input_format(self):
return self.args.input_format
def get_verbose(self):
return self.args.verbose
def get_cores(self):
return self.args.cores
def DiffLas(parameters):
# Parse incoming parameters
source_file = parameters[0]
destination_file = parameters[1]
# Get name for this process
current = multiprocessing.current_proces()
proc_name = current.name
logging.info('[%s] Starting ...' % (proc_name))
logging.info(
'[%s] Creating diff of %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
# Opening source LAS files for read and write
lasFiles = LasPyConverter.LasPyCompare(source_file, destination_file)
# Opening destination LAS file
logging.info('[%s] Opening %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
lasFiles.OpenReanOnly()
logging.info('[%s] Comparing %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
lasFiles.ComparePointCloud()
logging.info('[%s] Closing %s LAS PointCloud.' % (proc_name, destination_file))
lasFiles.Close()
logging.info('[%s] %s LAS PointCloud has closed.' % (proc_name, destination_file))
return 0
def SetLogging(logfilename):
logging.basicConfig(
filename=logfilename,
filemode='w',
format='%(asctime)s %(name)s %(levelname)s %(message)s', datefmt='%d-%m-%Y %H:%M:%S',
level=logging.DEBUG)
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setLevel(logging.INFO)
# set a format which is simpler for console use
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', datefmt='%d-%m-%Y %H:%M:%S')
# tell the handler to use this format
console.setFormatter(formatter)
# add the handler to the root logger
logging.getLogger('').addHandler(console)
def main():
logfilename = 'lasdiff_' + datetime.datetime.today().strftime('%Y%m%d_%H%M%S') + '.log'
SetLogging(logfilename)
logging.info(header)
lasconverterworkflow = LasPyParameters()
lasconverterworkflow.parse()
# File/Directory handler
inputfiles = lasconverterworkflow.get_input()
inputformat = lasconverterworkflow.get_input_format()
outputfiles = lasconverterworkflow.get_output()
outputpath = os.path.normpath(outputfiles)
cores = lasconverterworkflow.get_cores()
inputisdir = False
doing = []
if os.path.isdir(inputfiles):
inputisdir = True
inputfiles = glob.glob(os.path.join(inputfiles, '*' + inputformat))
if not os.path.exists(outputfiles):
|
for workfile in inputfiles:
if os.path.isfile(workfile) and os.path.isfile(os.path.join(outputpath, os.path.basename(workfile))):
logging.info('Adding %s to the queue.' % (workfile))
doing.append([workfile, os.path.join(outputpath, os.path.basename(workfile))])
else:
logging.info('The %s is not file, or pair of comparable files. Skipping.' % (workfile))
elif os.path.isfile(inputfiles):
inputisdir = False
workfile = inputfiles
if os.path.basename(outputfiles) is not "":
doing.append([workfile, outputfiles])
else:
doing.append([workfile, os.path.join(outputpath, os.path.basename(workfile))])
logging.info('Adding %s to the queue.' % (workfile))
else:
# Not a file, not a dir
logging.error('Cannot found input LAS PointCloud file: %s' % (inputfiles))
exit(1)
# If we got one file, start only one process
if inputisdir is False:
cores = 1
if cores != 1:
pool = multiprocessing.Pool(processes=cores)
results = pool.map_async(DiffLas, doing)
pool.close()
pool.join()
else:
for d in doing:
DiffLas(d)
logging.info('Finished, exiting and go home ...')
if __name__ == '__main__':
main()
| os.makedirs(outputfiles) | conditional_block |
lasdiff.py | try:
import traceback
import argparse
import textwrap
import glob
import os
import logging
import datetime
import multiprocessing
from libs import LasPyConverter
except ImportError as err:
print('Error {0} import module: {1}'.format(__name__, err))
traceback.print_exc()
exit(128)
script_path = __file__
header = textwrap.dedent('''LAS Diff''')
class LasPyParameters:
def __init__(self):
# predefinied paths
self.parser = argparse.ArgumentParser(prog="lasdiff",
formatter_class=argparse.RawDescriptionHelpFormatter,
description='',
epilog=textwrap.dedent('''
example:
'''))
# reguired parameters
self.parser.add_argument('-i', type=str, dest='input', required=True,
help='required: input file or folder')
self.parser.add_argument('-o', type=str, dest='output', required=True,
help='required: output file or folder (d:\lasfiles\\tests\\results)')
# optional parameters
self.parser.add_argument('-input_format', type=str, dest='input_format', required=False, choices=['las', 'laz'],
help='optional: input format (default=las, laz is not implemented (yet))')
self.parser.add_argument('-cores', type=int, dest='cores', required=False, default=1,
help='optional: cores (default=1)')
self.parser.add_argument('-v', dest='verbose', required=False,
help='optional: verbose toggle (-v=on, nothing=off)', action='store_true')
self.parser.add_argument('-version', action='version', version=self.parser.prog)
def parse(self):
self.args = self.parser.parse_args()
##defaults
if self.args.verbose:
self.args.verbose = ' -v'
else:
self.args.verbose = ''
if self.args.input_format == None:
self.args.input_format = 'las'
if self.args.cores == None:
self.args.cores = 1
# ---------PUBLIC METHODS--------------------
def get_output(self):
return self.args.output
def get_input(self):
return self.args.input
def get_input_format(self):
return self.args.input_format
def get_verbose(self):
|
def get_cores(self):
return self.args.cores
def DiffLas(parameters):
# Parse incoming parameters
source_file = parameters[0]
destination_file = parameters[1]
# Get name for this process
current = multiprocessing.current_proces()
proc_name = current.name
logging.info('[%s] Starting ...' % (proc_name))
logging.info(
'[%s] Creating diff of %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
# Opening source LAS files for read and write
lasFiles = LasPyConverter.LasPyCompare(source_file, destination_file)
# Opening destination LAS file
logging.info('[%s] Opening %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
lasFiles.OpenReanOnly()
logging.info('[%s] Comparing %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
lasFiles.ComparePointCloud()
logging.info('[%s] Closing %s LAS PointCloud.' % (proc_name, destination_file))
lasFiles.Close()
logging.info('[%s] %s LAS PointCloud has closed.' % (proc_name, destination_file))
return 0
def SetLogging(logfilename):
logging.basicConfig(
filename=logfilename,
filemode='w',
format='%(asctime)s %(name)s %(levelname)s %(message)s', datefmt='%d-%m-%Y %H:%M:%S',
level=logging.DEBUG)
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setLevel(logging.INFO)
# set a format which is simpler for console use
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', datefmt='%d-%m-%Y %H:%M:%S')
# tell the handler to use this format
console.setFormatter(formatter)
# add the handler to the root logger
logging.getLogger('').addHandler(console)
def main():
logfilename = 'lasdiff_' + datetime.datetime.today().strftime('%Y%m%d_%H%M%S') + '.log'
SetLogging(logfilename)
logging.info(header)
lasconverterworkflow = LasPyParameters()
lasconverterworkflow.parse()
# File/Directory handler
inputfiles = lasconverterworkflow.get_input()
inputformat = lasconverterworkflow.get_input_format()
outputfiles = lasconverterworkflow.get_output()
outputpath = os.path.normpath(outputfiles)
cores = lasconverterworkflow.get_cores()
inputisdir = False
doing = []
if os.path.isdir(inputfiles):
inputisdir = True
inputfiles = glob.glob(os.path.join(inputfiles, '*' + inputformat))
if not os.path.exists(outputfiles):
os.makedirs(outputfiles)
for workfile in inputfiles:
if os.path.isfile(workfile) and os.path.isfile(os.path.join(outputpath, os.path.basename(workfile))):
logging.info('Adding %s to the queue.' % (workfile))
doing.append([workfile, os.path.join(outputpath, os.path.basename(workfile))])
else:
logging.info('The %s is not file, or pair of comparable files. Skipping.' % (workfile))
elif os.path.isfile(inputfiles):
inputisdir = False
workfile = inputfiles
if os.path.basename(outputfiles) is not "":
doing.append([workfile, outputfiles])
else:
doing.append([workfile, os.path.join(outputpath, os.path.basename(workfile))])
logging.info('Adding %s to the queue.' % (workfile))
else:
# Not a file, not a dir
logging.error('Cannot found input LAS PointCloud file: %s' % (inputfiles))
exit(1)
# If we got one file, start only one process
if inputisdir is False:
cores = 1
if cores != 1:
pool = multiprocessing.Pool(processes=cores)
results = pool.map_async(DiffLas, doing)
pool.close()
pool.join()
else:
for d in doing:
DiffLas(d)
logging.info('Finished, exiting and go home ...')
if __name__ == '__main__':
main()
| return self.args.verbose | identifier_body |
lasdiff.py | try:
import traceback
import argparse
import textwrap
import glob
import os
import logging
import datetime
import multiprocessing
from libs import LasPyConverter
except ImportError as err:
print('Error {0} import module: {1}'.format(__name__, err))
traceback.print_exc()
exit(128)
script_path = __file__
header = textwrap.dedent('''LAS Diff''')
class LasPyParameters:
def __init__(self):
# predefinied paths
self.parser = argparse.ArgumentParser(prog="lasdiff",
formatter_class=argparse.RawDescriptionHelpFormatter,
description='',
epilog=textwrap.dedent('''
example:
'''))
# reguired parameters
self.parser.add_argument('-i', type=str, dest='input', required=True,
help='required: input file or folder')
self.parser.add_argument('-o', type=str, dest='output', required=True,
help='required: output file or folder (d:\lasfiles\\tests\\results)')
# optional parameters
self.parser.add_argument('-input_format', type=str, dest='input_format', required=False, choices=['las', 'laz'],
help='optional: input format (default=las, laz is not implemented (yet))')
self.parser.add_argument('-cores', type=int, dest='cores', required=False, default=1,
help='optional: cores (default=1)')
self.parser.add_argument('-v', dest='verbose', required=False,
help='optional: verbose toggle (-v=on, nothing=off)', action='store_true')
self.parser.add_argument('-version', action='version', version=self.parser.prog)
def parse(self):
self.args = self.parser.parse_args()
| else:
self.args.verbose = ''
if self.args.input_format == None:
self.args.input_format = 'las'
if self.args.cores == None:
self.args.cores = 1
# ---------PUBLIC METHODS--------------------
def get_output(self):
return self.args.output
def get_input(self):
return self.args.input
def get_input_format(self):
return self.args.input_format
def get_verbose(self):
return self.args.verbose
def get_cores(self):
return self.args.cores
def DiffLas(parameters):
# Parse incoming parameters
source_file = parameters[0]
destination_file = parameters[1]
# Get name for this process
current = multiprocessing.current_proces()
proc_name = current.name
logging.info('[%s] Starting ...' % (proc_name))
logging.info(
'[%s] Creating diff of %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
# Opening source LAS files for read and write
lasFiles = LasPyConverter.LasPyCompare(source_file, destination_file)
# Opening destination LAS file
logging.info('[%s] Opening %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
lasFiles.OpenReanOnly()
logging.info('[%s] Comparing %s LAS PointCloud file and %s LAS PointCloud file ...' % (
proc_name, source_file, destination_file))
lasFiles.ComparePointCloud()
logging.info('[%s] Closing %s LAS PointCloud.' % (proc_name, destination_file))
lasFiles.Close()
logging.info('[%s] %s LAS PointCloud has closed.' % (proc_name, destination_file))
return 0
def SetLogging(logfilename):
logging.basicConfig(
filename=logfilename,
filemode='w',
format='%(asctime)s %(name)s %(levelname)s %(message)s', datefmt='%d-%m-%Y %H:%M:%S',
level=logging.DEBUG)
# define a Handler which writes INFO messages or higher to the sys.stderr
console = logging.StreamHandler()
console.setLevel(logging.INFO)
# set a format which is simpler for console use
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', datefmt='%d-%m-%Y %H:%M:%S')
# tell the handler to use this format
console.setFormatter(formatter)
# add the handler to the root logger
logging.getLogger('').addHandler(console)
def main():
logfilename = 'lasdiff_' + datetime.datetime.today().strftime('%Y%m%d_%H%M%S') + '.log'
SetLogging(logfilename)
logging.info(header)
lasconverterworkflow = LasPyParameters()
lasconverterworkflow.parse()
# File/Directory handler
inputfiles = lasconverterworkflow.get_input()
inputformat = lasconverterworkflow.get_input_format()
outputfiles = lasconverterworkflow.get_output()
outputpath = os.path.normpath(outputfiles)
cores = lasconverterworkflow.get_cores()
inputisdir = False
doing = []
if os.path.isdir(inputfiles):
inputisdir = True
inputfiles = glob.glob(os.path.join(inputfiles, '*' + inputformat))
if not os.path.exists(outputfiles):
os.makedirs(outputfiles)
for workfile in inputfiles:
if os.path.isfile(workfile) and os.path.isfile(os.path.join(outputpath, os.path.basename(workfile))):
logging.info('Adding %s to the queue.' % (workfile))
doing.append([workfile, os.path.join(outputpath, os.path.basename(workfile))])
else:
logging.info('The %s is not file, or pair of comparable files. Skipping.' % (workfile))
elif os.path.isfile(inputfiles):
inputisdir = False
workfile = inputfiles
if os.path.basename(outputfiles) is not "":
doing.append([workfile, outputfiles])
else:
doing.append([workfile, os.path.join(outputpath, os.path.basename(workfile))])
logging.info('Adding %s to the queue.' % (workfile))
else:
# Not a file, not a dir
logging.error('Cannot found input LAS PointCloud file: %s' % (inputfiles))
exit(1)
# If we got one file, start only one process
if inputisdir is False:
cores = 1
if cores != 1:
pool = multiprocessing.Pool(processes=cores)
results = pool.map_async(DiffLas, doing)
pool.close()
pool.join()
else:
for d in doing:
DiffLas(d)
logging.info('Finished, exiting and go home ...')
if __name__ == '__main__':
main() | ##defaults
if self.args.verbose:
self.args.verbose = ' -v' | random_line_split |
helper.rs | use std::time::Duration;
use xpath_reader::Reader;
/// Note that the requirement of the `var` (variant) token is rather ugly but
/// required,
/// which is a limitation of the current Rust macro implementation.
///
/// Note that the macro wont expand if you miss ommit the last comma.
/// If this macro is ever extracted into a library this should be fixed.
///
/// - https://github.com/rust-lang/rust/issues/24189
/// - https://github.com/rust-lang/rust/issues/42838
macro_rules! enum_mb_xml
{
(
$(#[$attr:meta])* pub enum $enum:ident {
$(
$(#[$attr2:meta])*
var $variant:ident = $str:expr
),+
,
}
)
=>
{
$(#[$attr])*
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum $enum {
$(
$(#[$attr2])* $variant ,
)+
}
impl FromXml for $enum {
fn from_xml<'d>(reader: &'d Reader<'d>) -> Result<Self, ::xpath_reader::Error>
{
match String::from_xml(reader)?.as_str() {
$(
$str => Ok($enum::$variant),
)+
s => Err(
::xpath_reader::Error::custom_msg(
format!("Unknown `{}` value: '{}'", stringify!($enum), s)
)
)
}
}
}
impl ::std::fmt::Display for $enum {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result
{
let s = match *self {
$(
$enum::$variant => $str,
)+
};
write!(f, "{}", s)
}
}
}
}
macro_rules! enum_mb_xml_optional
{
(
$(#[$attr:meta])* pub enum $enum:ident {
$(
$(#[$attr2:meta])*
var $variant:ident = $str:expr
),+
,
}
)
=>
{
$(#[$attr])*
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum $enum {
$(
$(#[$attr2])* $variant ,
)+
}
impl FromXmlOptional for $enum {
fn from_xml_optional<'d>(reader: &'d Reader<'d>) -> Result<Option<Self>, ::xpath_reader::Error>
{
let s = Option::<String>::from_xml(reader)?;
if let Some(s) = s {
match s.as_ref() {
$(
$str => Ok(Some($enum::$variant)),
)+
s => Err(
::xpath_reader::Error::custom_msg(
format!("Unknown `{}` value: '{}'", stringify!($enum), s)
)
)
}
} else {
Ok(None)
}
}
}
impl ::std::fmt::Display for $enum {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result
{
let s = match *self {
$(
$enum::$variant => $str,
)+
};
write!(f, "{}", s)
}
}
}
}
pub fn | <'d>(
reader: &'d Reader<'d>,
path: &str,
) -> Result<Option<Duration>, ::xpath_reader::Error> {
let s: Option<String> = reader.read(path)?;
match s {
Some(millis) => Ok(Some(Duration::from_millis(
millis.parse().map_err(::xpath_reader::Error::custom_err)?,
))),
None => Ok(None),
}
}
| read_mb_duration | identifier_name |
helper.rs | use std::time::Duration;
use xpath_reader::Reader;
/// Note that the requirement of the `var` (variant) token is rather ugly but
/// required,
/// which is a limitation of the current Rust macro implementation.
///
/// Note that the macro wont expand if you miss ommit the last comma.
/// If this macro is ever extracted into a library this should be fixed.
///
/// - https://github.com/rust-lang/rust/issues/24189
/// - https://github.com/rust-lang/rust/issues/42838
macro_rules! enum_mb_xml
{
(
$(#[$attr:meta])* pub enum $enum:ident {
$(
$(#[$attr2:meta])*
var $variant:ident = $str:expr
),+
,
}
)
=>
{
$(#[$attr])*
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum $enum {
$(
$(#[$attr2])* $variant ,
)+
}
impl FromXml for $enum {
fn from_xml<'d>(reader: &'d Reader<'d>) -> Result<Self, ::xpath_reader::Error>
{
match String::from_xml(reader)?.as_str() {
$(
$str => Ok($enum::$variant),
)+
s => Err(
::xpath_reader::Error::custom_msg(
format!("Unknown `{}` value: '{}'", stringify!($enum), s) | }
impl ::std::fmt::Display for $enum {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result
{
let s = match *self {
$(
$enum::$variant => $str,
)+
};
write!(f, "{}", s)
}
}
}
}
macro_rules! enum_mb_xml_optional
{
(
$(#[$attr:meta])* pub enum $enum:ident {
$(
$(#[$attr2:meta])*
var $variant:ident = $str:expr
),+
,
}
)
=>
{
$(#[$attr])*
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum $enum {
$(
$(#[$attr2])* $variant ,
)+
}
impl FromXmlOptional for $enum {
fn from_xml_optional<'d>(reader: &'d Reader<'d>) -> Result<Option<Self>, ::xpath_reader::Error>
{
let s = Option::<String>::from_xml(reader)?;
if let Some(s) = s {
match s.as_ref() {
$(
$str => Ok(Some($enum::$variant)),
)+
s => Err(
::xpath_reader::Error::custom_msg(
format!("Unknown `{}` value: '{}'", stringify!($enum), s)
)
)
}
} else {
Ok(None)
}
}
}
impl ::std::fmt::Display for $enum {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result
{
let s = match *self {
$(
$enum::$variant => $str,
)+
};
write!(f, "{}", s)
}
}
}
}
pub fn read_mb_duration<'d>(
reader: &'d Reader<'d>,
path: &str,
) -> Result<Option<Duration>, ::xpath_reader::Error> {
let s: Option<String> = reader.read(path)?;
match s {
Some(millis) => Ok(Some(Duration::from_millis(
millis.parse().map_err(::xpath_reader::Error::custom_err)?,
))),
None => Ok(None),
}
} | )
)
}
} | random_line_split |
helper.rs | use std::time::Duration;
use xpath_reader::Reader;
/// Note that the requirement of the `var` (variant) token is rather ugly but
/// required,
/// which is a limitation of the current Rust macro implementation.
///
/// Note that the macro wont expand if you miss ommit the last comma.
/// If this macro is ever extracted into a library this should be fixed.
///
/// - https://github.com/rust-lang/rust/issues/24189
/// - https://github.com/rust-lang/rust/issues/42838
macro_rules! enum_mb_xml
{
(
$(#[$attr:meta])* pub enum $enum:ident {
$(
$(#[$attr2:meta])*
var $variant:ident = $str:expr
),+
,
}
)
=>
{
$(#[$attr])*
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum $enum {
$(
$(#[$attr2])* $variant ,
)+
}
impl FromXml for $enum {
fn from_xml<'d>(reader: &'d Reader<'d>) -> Result<Self, ::xpath_reader::Error>
{
match String::from_xml(reader)?.as_str() {
$(
$str => Ok($enum::$variant),
)+
s => Err(
::xpath_reader::Error::custom_msg(
format!("Unknown `{}` value: '{}'", stringify!($enum), s)
)
)
}
}
}
impl ::std::fmt::Display for $enum {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result
{
let s = match *self {
$(
$enum::$variant => $str,
)+
};
write!(f, "{}", s)
}
}
}
}
macro_rules! enum_mb_xml_optional
{
(
$(#[$attr:meta])* pub enum $enum:ident {
$(
$(#[$attr2:meta])*
var $variant:ident = $str:expr
),+
,
}
)
=>
{
$(#[$attr])*
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum $enum {
$(
$(#[$attr2])* $variant ,
)+
}
impl FromXmlOptional for $enum {
fn from_xml_optional<'d>(reader: &'d Reader<'d>) -> Result<Option<Self>, ::xpath_reader::Error>
{
let s = Option::<String>::from_xml(reader)?;
if let Some(s) = s {
match s.as_ref() {
$(
$str => Ok(Some($enum::$variant)),
)+
s => Err(
::xpath_reader::Error::custom_msg(
format!("Unknown `{}` value: '{}'", stringify!($enum), s)
)
)
}
} else {
Ok(None)
}
}
}
impl ::std::fmt::Display for $enum {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result
{
let s = match *self {
$(
$enum::$variant => $str,
)+
};
write!(f, "{}", s)
}
}
}
}
pub fn read_mb_duration<'d>(
reader: &'d Reader<'d>,
path: &str,
) -> Result<Option<Duration>, ::xpath_reader::Error> | {
let s: Option<String> = reader.read(path)?;
match s {
Some(millis) => Ok(Some(Duration::from_millis(
millis.parse().map_err(::xpath_reader::Error::custom_err)?,
))),
None => Ok(None),
}
} | identifier_body |
|
all_61.js | var searchData=
[
['argumentdefinition',['argumentDefinition',['../ezpgenerateautoloads_8php.html#ab9a0a1710f335bc48a13e8a94802be23',1,'argumentDefinition(): ezpgenerateautoloads.php'],['../ezpgenerateautoloads_8php.html#aed0ba19f5204d9659e823271642a6594',1,'argumentDefinition(): ezpgenerateautoloads.php']]],
['autoload_2eini_2eappend_2ephp',['autoload.ini.append.php',['../autoload_8ini_8append_8php.html',1,'']]], | ['autoloadgeneratorenum',['autoloadGeneratorEnum',['../classextension_1_1ezadvancedautoload_1_1classes_1_1enums_1_1autoload_generator_enum.html',1,'extension::ezadvancedautoload::classes::enums']]],
['autoloadgeneratorenum_2ephp',['autoloadgeneratorenum.php',['../autoloadgeneratorenum_8php.html',1,'']]]
]; | random_line_split |
|
course-format.ts | // (C) Copyright 2015 Moodle Pty Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { Component, OnChanges, Input, ViewChild, Output, EventEmitter } from '@angular/core';
import { CoreSitePluginsProvider } from '../../providers/siteplugins';
import { CoreSitePluginsPluginContentComponent } from '../plugin-content/plugin-content';
import { CoreCourseFormatComponent } from '@core/course/components/format/format';
import { CoreCourseFormatDelegate } from '@core/course/providers/format-delegate';
/**
* Component that displays the index of a course format site plugin.
*/
@Component({
selector: 'core-site-plugins-course-format',
templateUrl: 'core-siteplugins-course-format.html',
})
export class CoreSitePluginsCourseFormatComponent implements OnChanges {
@Input() course: any; // The course to render.
@Input() sections: any[]; // List of course sections.
@Input() downloadEnabled?: boolean; // Whether the download of sections and modules is enabled.
@Input() initialSectionId?: number; // The section to load first (by ID).
@Input() initialSectionNumber?: number; // The section to load first (by number).
@Input() moduleId?: number; // The module ID to scroll to. Must be inside the initial selected section.
@Output() completionChanged?: EventEmitter<void>; // Will emit an event when any module completion changes.
// Special input, allows access to the parent instance properties and methods.
// Please notice that all the other inputs/outputs are also accessible through this instance, so they could be removed.
// However, we decided to keep them to support ngOnChanges and to make templates easier to read.
@Input() coreCourseFormatComponent: CoreCourseFormatComponent;
@ViewChild(CoreSitePluginsPluginContentComponent) content: CoreSitePluginsPluginContentComponent;
component: string;
method: string;
args: any;
initResult: any;
data: any;
constructor(protected sitePluginsProvider: CoreSitePluginsProvider,
protected courseFormatDelegate: CoreCourseFormatDelegate) { }
/**
* Detect changes on input properties.
*/
ngOnChanges(): void {
if (this.course && this.course.format) {
if (!this.component) {
// Initialize the data.
const handlerName = this.courseFormatDelegate.getHandlerName(this.course.format),
handler = this.sitePluginsProvider.getSitePluginHandler(handlerName);
if (handler) {
this.component = handler.plugin.component;
this.method = handler.handlerSchema.method;
this.args = {
courseid: this.course.id,
downloadenabled: this.downloadEnabled
};
this.initResult = handler.initResult;
}
}
// Pass input data to the component.
this.data = {
course: this.course,
sections: this.sections,
downloadEnabled: this.downloadEnabled,
initialSectionId: this.initialSectionId,
initialSectionNumber: this.initialSectionNumber,
moduleId: this.moduleId,
completionChanged: this.completionChanged,
coreCourseFormatComponent: this.coreCourseFormatComponent
};
}
}
/** | * @param refresher Refresher.
* @param done Function to call when done.
* @param afterCompletionChange Whether the refresh is due to a completion change.
* @return Promise resolved when done.
*/
doRefresh(refresher?: any, done?: () => void, afterCompletionChange?: boolean): Promise<any> {
return Promise.resolve(this.content.refreshContent(afterCompletionChange));
}
} | * Refresh the data.
* | random_line_split |
course-format.ts | // (C) Copyright 2015 Moodle Pty Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { Component, OnChanges, Input, ViewChild, Output, EventEmitter } from '@angular/core';
import { CoreSitePluginsProvider } from '../../providers/siteplugins';
import { CoreSitePluginsPluginContentComponent } from '../plugin-content/plugin-content';
import { CoreCourseFormatComponent } from '@core/course/components/format/format';
import { CoreCourseFormatDelegate } from '@core/course/providers/format-delegate';
/**
* Component that displays the index of a course format site plugin.
*/
@Component({
selector: 'core-site-plugins-course-format',
templateUrl: 'core-siteplugins-course-format.html',
})
export class CoreSitePluginsCourseFormatComponent implements OnChanges {
@Input() course: any; // The course to render.
@Input() sections: any[]; // List of course sections.
@Input() downloadEnabled?: boolean; // Whether the download of sections and modules is enabled.
@Input() initialSectionId?: number; // The section to load first (by ID).
@Input() initialSectionNumber?: number; // The section to load first (by number).
@Input() moduleId?: number; // The module ID to scroll to. Must be inside the initial selected section.
@Output() completionChanged?: EventEmitter<void>; // Will emit an event when any module completion changes.
// Special input, allows access to the parent instance properties and methods.
// Please notice that all the other inputs/outputs are also accessible through this instance, so they could be removed.
// However, we decided to keep them to support ngOnChanges and to make templates easier to read.
@Input() coreCourseFormatComponent: CoreCourseFormatComponent;
@ViewChild(CoreSitePluginsPluginContentComponent) content: CoreSitePluginsPluginContentComponent;
component: string;
method: string;
args: any;
initResult: any;
data: any;
constructor(protected sitePluginsProvider: CoreSitePluginsProvider,
protected courseFormatDelegate: CoreCourseFormatDelegate) { }
/**
* Detect changes on input properties.
*/
ngOnChanges(): void {
if (this.course && this.course.format) |
}
/**
* Refresh the data.
*
* @param refresher Refresher.
* @param done Function to call when done.
* @param afterCompletionChange Whether the refresh is due to a completion change.
* @return Promise resolved when done.
*/
doRefresh(refresher?: any, done?: () => void, afterCompletionChange?: boolean): Promise<any> {
return Promise.resolve(this.content.refreshContent(afterCompletionChange));
}
}
| {
if (!this.component) {
// Initialize the data.
const handlerName = this.courseFormatDelegate.getHandlerName(this.course.format),
handler = this.sitePluginsProvider.getSitePluginHandler(handlerName);
if (handler) {
this.component = handler.plugin.component;
this.method = handler.handlerSchema.method;
this.args = {
courseid: this.course.id,
downloadenabled: this.downloadEnabled
};
this.initResult = handler.initResult;
}
}
// Pass input data to the component.
this.data = {
course: this.course,
sections: this.sections,
downloadEnabled: this.downloadEnabled,
initialSectionId: this.initialSectionId,
initialSectionNumber: this.initialSectionNumber,
moduleId: this.moduleId,
completionChanged: this.completionChanged,
coreCourseFormatComponent: this.coreCourseFormatComponent
};
} | conditional_block |
course-format.ts | // (C) Copyright 2015 Moodle Pty Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { Component, OnChanges, Input, ViewChild, Output, EventEmitter } from '@angular/core';
import { CoreSitePluginsProvider } from '../../providers/siteplugins';
import { CoreSitePluginsPluginContentComponent } from '../plugin-content/plugin-content';
import { CoreCourseFormatComponent } from '@core/course/components/format/format';
import { CoreCourseFormatDelegate } from '@core/course/providers/format-delegate';
/**
* Component that displays the index of a course format site plugin.
*/
@Component({
selector: 'core-site-plugins-course-format',
templateUrl: 'core-siteplugins-course-format.html',
})
export class CoreSitePluginsCourseFormatComponent implements OnChanges {
@Input() course: any; // The course to render.
@Input() sections: any[]; // List of course sections.
@Input() downloadEnabled?: boolean; // Whether the download of sections and modules is enabled.
@Input() initialSectionId?: number; // The section to load first (by ID).
@Input() initialSectionNumber?: number; // The section to load first (by number).
@Input() moduleId?: number; // The module ID to scroll to. Must be inside the initial selected section.
@Output() completionChanged?: EventEmitter<void>; // Will emit an event when any module completion changes.
// Special input, allows access to the parent instance properties and methods.
// Please notice that all the other inputs/outputs are also accessible through this instance, so they could be removed.
// However, we decided to keep them to support ngOnChanges and to make templates easier to read.
@Input() coreCourseFormatComponent: CoreCourseFormatComponent;
@ViewChild(CoreSitePluginsPluginContentComponent) content: CoreSitePluginsPluginContentComponent;
component: string;
method: string;
args: any;
initResult: any;
data: any;
| (protected sitePluginsProvider: CoreSitePluginsProvider,
protected courseFormatDelegate: CoreCourseFormatDelegate) { }
/**
* Detect changes on input properties.
*/
ngOnChanges(): void {
if (this.course && this.course.format) {
if (!this.component) {
// Initialize the data.
const handlerName = this.courseFormatDelegate.getHandlerName(this.course.format),
handler = this.sitePluginsProvider.getSitePluginHandler(handlerName);
if (handler) {
this.component = handler.plugin.component;
this.method = handler.handlerSchema.method;
this.args = {
courseid: this.course.id,
downloadenabled: this.downloadEnabled
};
this.initResult = handler.initResult;
}
}
// Pass input data to the component.
this.data = {
course: this.course,
sections: this.sections,
downloadEnabled: this.downloadEnabled,
initialSectionId: this.initialSectionId,
initialSectionNumber: this.initialSectionNumber,
moduleId: this.moduleId,
completionChanged: this.completionChanged,
coreCourseFormatComponent: this.coreCourseFormatComponent
};
}
}
/**
* Refresh the data.
*
* @param refresher Refresher.
* @param done Function to call when done.
* @param afterCompletionChange Whether the refresh is due to a completion change.
* @return Promise resolved when done.
*/
doRefresh(refresher?: any, done?: () => void, afterCompletionChange?: boolean): Promise<any> {
return Promise.resolve(this.content.refreshContent(afterCompletionChange));
}
}
| constructor | identifier_name |
checklist-nested-tree-demo.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ChangeDetectorRef, ChangeDetectionStrategy, Component} from '@angular/core';
import {SelectionModel} from '@angular/cdk/collections';
import {NestedTreeControl} from '@angular/cdk/tree';
import {Observable} from 'rxjs';
import {ChecklistDatabase, TodoItemNode} from './checklist-database';
/**
* Checklist demo with nested tree
*/
@Component({
moduleId: module.id,
selector: 'checklist-nested-tree-demo',
templateUrl: 'checklist-nested-tree-demo.html',
styleUrls: ['checklist-tree-demo.css'],
providers: [ChecklistDatabase],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class ChecklistNestedTreeDemo {
treeControl: NestedTreeControl<TodoItemNode>;
dataSource: TodoItemNode[];
/** The selection for checklist */
checklistSelection = new SelectionModel<TodoItemNode>(true /* multiple */);
constructor(private _database: ChecklistDatabase, private _changeDetectorRef: ChangeDetectorRef) |
getChildren = (node: TodoItemNode): Observable<TodoItemNode[]> => node.children;
hasNoContent = (_nodeData: TodoItemNode) => { return _nodeData.item === ''; };
/** Whether all the descendants of the node are selected */
descendantsAllSelected(node: TodoItemNode): boolean {
const descendants = this.treeControl.getDescendants(node);
if (!descendants.length) {
return this.checklistSelection.isSelected(node);
}
const selected = this.checklistSelection.isSelected(node);
const allSelected = descendants.every(child => this.checklistSelection.isSelected(child));
if (!selected && allSelected) {
this.checklistSelection.select(node);
this._changeDetectorRef.markForCheck();
}
return allSelected;
}
/** Whether part of the descendants are selected */
descendantsPartiallySelected(node: TodoItemNode): boolean {
const descendants = this.treeControl.getDescendants(node);
if (!descendants.length) {
return false;
}
const result = descendants.some(child => this.checklistSelection.isSelected(child));
return result && !this.descendantsAllSelected(node);
}
/** Toggle the to-do item selection. Select/deselect all the descendants node */
todoItemSelectionToggle(node: TodoItemNode): void {
this.checklistSelection.toggle(node);
const descendants = this.treeControl.getDescendants(node);
this.checklistSelection.isSelected(node)
? this.checklistSelection.select(...descendants, node)
: this.checklistSelection.deselect(...descendants, node);
this._changeDetectorRef.markForCheck();
}
/** Select the category so we can insert the new item. */
addNewItem(node: TodoItemNode) {
this._database.insertItem(node, '');
this.treeControl.expand(node);
}
/** Save the node to database */
saveNode(node: TodoItemNode, itemValue: string) {
this._database.updateItem(node, itemValue);
}
}
| {
this.treeControl = new NestedTreeControl<TodoItemNode>(this.getChildren);
this.dataSource = _database.data;
} | identifier_body |
checklist-nested-tree-demo.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ChangeDetectorRef, ChangeDetectionStrategy, Component} from '@angular/core';
import {SelectionModel} from '@angular/cdk/collections';
import {NestedTreeControl} from '@angular/cdk/tree';
import {Observable} from 'rxjs';
import {ChecklistDatabase, TodoItemNode} from './checklist-database';
/**
* Checklist demo with nested tree
*/
@Component({
moduleId: module.id,
selector: 'checklist-nested-tree-demo',
templateUrl: 'checklist-nested-tree-demo.html',
styleUrls: ['checklist-tree-demo.css'],
providers: [ChecklistDatabase],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class ChecklistNestedTreeDemo {
treeControl: NestedTreeControl<TodoItemNode>;
dataSource: TodoItemNode[];
/** The selection for checklist */
checklistSelection = new SelectionModel<TodoItemNode>(true /* multiple */);
| (private _database: ChecklistDatabase, private _changeDetectorRef: ChangeDetectorRef) {
this.treeControl = new NestedTreeControl<TodoItemNode>(this.getChildren);
this.dataSource = _database.data;
}
getChildren = (node: TodoItemNode): Observable<TodoItemNode[]> => node.children;
hasNoContent = (_nodeData: TodoItemNode) => { return _nodeData.item === ''; };
/** Whether all the descendants of the node are selected */
descendantsAllSelected(node: TodoItemNode): boolean {
const descendants = this.treeControl.getDescendants(node);
if (!descendants.length) {
return this.checklistSelection.isSelected(node);
}
const selected = this.checklistSelection.isSelected(node);
const allSelected = descendants.every(child => this.checklistSelection.isSelected(child));
if (!selected && allSelected) {
this.checklistSelection.select(node);
this._changeDetectorRef.markForCheck();
}
return allSelected;
}
/** Whether part of the descendants are selected */
descendantsPartiallySelected(node: TodoItemNode): boolean {
const descendants = this.treeControl.getDescendants(node);
if (!descendants.length) {
return false;
}
const result = descendants.some(child => this.checklistSelection.isSelected(child));
return result && !this.descendantsAllSelected(node);
}
/** Toggle the to-do item selection. Select/deselect all the descendants node */
todoItemSelectionToggle(node: TodoItemNode): void {
this.checklistSelection.toggle(node);
const descendants = this.treeControl.getDescendants(node);
this.checklistSelection.isSelected(node)
? this.checklistSelection.select(...descendants, node)
: this.checklistSelection.deselect(...descendants, node);
this._changeDetectorRef.markForCheck();
}
/** Select the category so we can insert the new item. */
addNewItem(node: TodoItemNode) {
this._database.insertItem(node, '');
this.treeControl.expand(node);
}
/** Save the node to database */
saveNode(node: TodoItemNode, itemValue: string) {
this._database.updateItem(node, itemValue);
}
}
| constructor | identifier_name |
checklist-nested-tree-demo.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ChangeDetectorRef, ChangeDetectionStrategy, Component} from '@angular/core';
import {SelectionModel} from '@angular/cdk/collections';
import {NestedTreeControl} from '@angular/cdk/tree';
import {Observable} from 'rxjs';
import {ChecklistDatabase, TodoItemNode} from './checklist-database';
/**
* Checklist demo with nested tree
*/
@Component({
moduleId: module.id,
selector: 'checklist-nested-tree-demo',
templateUrl: 'checklist-nested-tree-demo.html',
styleUrls: ['checklist-tree-demo.css'],
providers: [ChecklistDatabase],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class ChecklistNestedTreeDemo {
treeControl: NestedTreeControl<TodoItemNode>;
dataSource: TodoItemNode[];
/** The selection for checklist */
checklistSelection = new SelectionModel<TodoItemNode>(true /* multiple */);
constructor(private _database: ChecklistDatabase, private _changeDetectorRef: ChangeDetectorRef) {
this.treeControl = new NestedTreeControl<TodoItemNode>(this.getChildren);
this.dataSource = _database.data;
}
getChildren = (node: TodoItemNode): Observable<TodoItemNode[]> => node.children;
hasNoContent = (_nodeData: TodoItemNode) => { return _nodeData.item === ''; };
/** Whether all the descendants of the node are selected */
descendantsAllSelected(node: TodoItemNode): boolean {
const descendants = this.treeControl.getDescendants(node);
if (!descendants.length) {
return this.checklistSelection.isSelected(node);
}
const selected = this.checklistSelection.isSelected(node);
const allSelected = descendants.every(child => this.checklistSelection.isSelected(child));
if (!selected && allSelected) {
this.checklistSelection.select(node);
this._changeDetectorRef.markForCheck();
}
return allSelected;
}
| /** Whether part of the descendants are selected */
descendantsPartiallySelected(node: TodoItemNode): boolean {
const descendants = this.treeControl.getDescendants(node);
if (!descendants.length) {
return false;
}
const result = descendants.some(child => this.checklistSelection.isSelected(child));
return result && !this.descendantsAllSelected(node);
}
/** Toggle the to-do item selection. Select/deselect all the descendants node */
todoItemSelectionToggle(node: TodoItemNode): void {
this.checklistSelection.toggle(node);
const descendants = this.treeControl.getDescendants(node);
this.checklistSelection.isSelected(node)
? this.checklistSelection.select(...descendants, node)
: this.checklistSelection.deselect(...descendants, node);
this._changeDetectorRef.markForCheck();
}
/** Select the category so we can insert the new item. */
addNewItem(node: TodoItemNode) {
this._database.insertItem(node, '');
this.treeControl.expand(node);
}
/** Save the node to database */
saveNode(node: TodoItemNode, itemValue: string) {
this._database.updateItem(node, itemValue);
}
} | random_line_split |
|
checklist-nested-tree-demo.ts | /**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ChangeDetectorRef, ChangeDetectionStrategy, Component} from '@angular/core';
import {SelectionModel} from '@angular/cdk/collections';
import {NestedTreeControl} from '@angular/cdk/tree';
import {Observable} from 'rxjs';
import {ChecklistDatabase, TodoItemNode} from './checklist-database';
/**
* Checklist demo with nested tree
*/
@Component({
moduleId: module.id,
selector: 'checklist-nested-tree-demo',
templateUrl: 'checklist-nested-tree-demo.html',
styleUrls: ['checklist-tree-demo.css'],
providers: [ChecklistDatabase],
changeDetection: ChangeDetectionStrategy.OnPush,
})
export class ChecklistNestedTreeDemo {
treeControl: NestedTreeControl<TodoItemNode>;
dataSource: TodoItemNode[];
/** The selection for checklist */
checklistSelection = new SelectionModel<TodoItemNode>(true /* multiple */);
constructor(private _database: ChecklistDatabase, private _changeDetectorRef: ChangeDetectorRef) {
this.treeControl = new NestedTreeControl<TodoItemNode>(this.getChildren);
this.dataSource = _database.data;
}
getChildren = (node: TodoItemNode): Observable<TodoItemNode[]> => node.children;
hasNoContent = (_nodeData: TodoItemNode) => { return _nodeData.item === ''; };
/** Whether all the descendants of the node are selected */
descendantsAllSelected(node: TodoItemNode): boolean {
const descendants = this.treeControl.getDescendants(node);
if (!descendants.length) {
return this.checklistSelection.isSelected(node);
}
const selected = this.checklistSelection.isSelected(node);
const allSelected = descendants.every(child => this.checklistSelection.isSelected(child));
if (!selected && allSelected) {
this.checklistSelection.select(node);
this._changeDetectorRef.markForCheck();
}
return allSelected;
}
/** Whether part of the descendants are selected */
descendantsPartiallySelected(node: TodoItemNode): boolean {
const descendants = this.treeControl.getDescendants(node);
if (!descendants.length) |
const result = descendants.some(child => this.checklistSelection.isSelected(child));
return result && !this.descendantsAllSelected(node);
}
/** Toggle the to-do item selection. Select/deselect all the descendants node */
todoItemSelectionToggle(node: TodoItemNode): void {
this.checklistSelection.toggle(node);
const descendants = this.treeControl.getDescendants(node);
this.checklistSelection.isSelected(node)
? this.checklistSelection.select(...descendants, node)
: this.checklistSelection.deselect(...descendants, node);
this._changeDetectorRef.markForCheck();
}
/** Select the category so we can insert the new item. */
addNewItem(node: TodoItemNode) {
this._database.insertItem(node, '');
this.treeControl.expand(node);
}
/** Save the node to database */
saveNode(node: TodoItemNode, itemValue: string) {
this._database.updateItem(node, itemValue);
}
}
| {
return false;
} | conditional_block |
geonetwork.py | #########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from geonode.catalogue.backends.generic import CatalogueBackend \
as GenericCatalogueBackend
class CatalogueBackend(GenericCatalogueBackend):
"""GeoNetwork CSW Backend"""
def __init__(self, *args, **kwargs):
| super(CatalogueBackend, self).__init__(*args, **kwargs)
self.catalogue.formats = ['Dublin Core', 'ISO'] | identifier_body |
|
geonetwork.py | #########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by | # This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from geonode.catalogue.backends.generic import CatalogueBackend \
as GenericCatalogueBackend
class CatalogueBackend(GenericCatalogueBackend):
"""GeoNetwork CSW Backend"""
def __init__(self, *args, **kwargs):
super(CatalogueBackend, self).__init__(*args, **kwargs)
self.catalogue.formats = ['Dublin Core', 'ISO'] | # the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# | random_line_split |
geonetwork.py | #########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from geonode.catalogue.backends.generic import CatalogueBackend \
as GenericCatalogueBackend
class CatalogueBackend(GenericCatalogueBackend):
"""GeoNetwork CSW Backend"""
def | (self, *args, **kwargs):
super(CatalogueBackend, self).__init__(*args, **kwargs)
self.catalogue.formats = ['Dublin Core', 'ISO']
| __init__ | identifier_name |
biotestflavor.py | from fabric.api import *
from fabric.contrib.files import *
from cloudbio.flavor import Flavor
from cloudbio.custom.shared import (_fetch_and_unpack)
| """
def __init__(self, env):
Flavor.__init__(self,env)
self.name = "Bio* cross-lang flavor"
def rewrite_config_items(self, name, items):
if name == "packages":
# list.remove('screen')
# list.append('test')
return items
elif name == "python":
return [ 'biopython' ]
elif name == "perl":
return [ 'bioperl' ]
elif name == "ruby":
return [ 'bio' ]
elif name == "custom":
return []
else:
return items
def post_install(self):
env.logger.info("Starting post-install")
env.logger.info("Load Scalability tests")
if exists('Scalability'):
with cd('Scalability'):
run('git pull')
else:
_fetch_and_unpack("git clone git://github.com/pjotrp/Scalability.git")
# Now run a post installation routine (for the heck of it)
run('./Scalability/scripts/hello.sh')
env.logger.info("Load Cross-language tests")
if exists('Cross-language-interfacing'):
with cd('Cross-language-interfacing'):
run('git pull')
else:
_fetch_and_unpack("git clone git://github.com/pjotrp/Cross-language-interfacing.git")
# Special installs for the tests
with cd('Cross-language-interfacing'):
sudo('./scripts/install-packages-root.sh ')
run('./scripts/install-packages.sh')
run('./scripts/create_test_files.rb')
env.flavor = BioTestFlavor(env) | class BioTestFlavor(Flavor):
"""A Flavor for cross Bio* tests | random_line_split |
biotestflavor.py | from fabric.api import *
from fabric.contrib.files import *
from cloudbio.flavor import Flavor
from cloudbio.custom.shared import (_fetch_and_unpack)
class BioTestFlavor(Flavor):
"""A Flavor for cross Bio* tests
"""
def __init__(self, env):
Flavor.__init__(self,env)
self.name = "Bio* cross-lang flavor"
def rewrite_config_items(self, name, items):
if name == "packages":
# list.remove('screen')
# list.append('test')
return items
elif name == "python":
return [ 'biopython' ]
elif name == "perl":
return [ 'bioperl' ]
elif name == "ruby":
return [ 'bio' ]
elif name == "custom":
return []
else:
return items
def post_install(self):
env.logger.info("Starting post-install")
env.logger.info("Load Scalability tests")
if exists('Scalability'):
with cd('Scalability'):
run('git pull')
else:
_fetch_and_unpack("git clone git://github.com/pjotrp/Scalability.git")
# Now run a post installation routine (for the heck of it)
run('./Scalability/scripts/hello.sh')
env.logger.info("Load Cross-language tests")
if exists('Cross-language-interfacing'):
with cd('Cross-language-interfacing'):
run('git pull')
else:
|
# Special installs for the tests
with cd('Cross-language-interfacing'):
sudo('./scripts/install-packages-root.sh ')
run('./scripts/install-packages.sh')
run('./scripts/create_test_files.rb')
env.flavor = BioTestFlavor(env)
| _fetch_and_unpack("git clone git://github.com/pjotrp/Cross-language-interfacing.git") | conditional_block |
biotestflavor.py | from fabric.api import *
from fabric.contrib.files import *
from cloudbio.flavor import Flavor
from cloudbio.custom.shared import (_fetch_and_unpack)
class BioTestFlavor(Flavor):
"""A Flavor for cross Bio* tests
"""
def __init__(self, env):
Flavor.__init__(self,env)
self.name = "Bio* cross-lang flavor"
def rewrite_config_items(self, name, items):
if name == "packages":
# list.remove('screen')
# list.append('test')
return items
elif name == "python":
return [ 'biopython' ]
elif name == "perl":
return [ 'bioperl' ]
elif name == "ruby":
return [ 'bio' ]
elif name == "custom":
return []
else:
return items
def post_install(self):
|
env.flavor = BioTestFlavor(env)
| env.logger.info("Starting post-install")
env.logger.info("Load Scalability tests")
if exists('Scalability'):
with cd('Scalability'):
run('git pull')
else:
_fetch_and_unpack("git clone git://github.com/pjotrp/Scalability.git")
# Now run a post installation routine (for the heck of it)
run('./Scalability/scripts/hello.sh')
env.logger.info("Load Cross-language tests")
if exists('Cross-language-interfacing'):
with cd('Cross-language-interfacing'):
run('git pull')
else:
_fetch_and_unpack("git clone git://github.com/pjotrp/Cross-language-interfacing.git")
# Special installs for the tests
with cd('Cross-language-interfacing'):
sudo('./scripts/install-packages-root.sh ')
run('./scripts/install-packages.sh')
run('./scripts/create_test_files.rb') | identifier_body |
biotestflavor.py | from fabric.api import *
from fabric.contrib.files import *
from cloudbio.flavor import Flavor
from cloudbio.custom.shared import (_fetch_and_unpack)
class BioTestFlavor(Flavor):
"""A Flavor for cross Bio* tests
"""
def __init__(self, env):
Flavor.__init__(self,env)
self.name = "Bio* cross-lang flavor"
def rewrite_config_items(self, name, items):
if name == "packages":
# list.remove('screen')
# list.append('test')
return items
elif name == "python":
return [ 'biopython' ]
elif name == "perl":
return [ 'bioperl' ]
elif name == "ruby":
return [ 'bio' ]
elif name == "custom":
return []
else:
return items
def | (self):
env.logger.info("Starting post-install")
env.logger.info("Load Scalability tests")
if exists('Scalability'):
with cd('Scalability'):
run('git pull')
else:
_fetch_and_unpack("git clone git://github.com/pjotrp/Scalability.git")
# Now run a post installation routine (for the heck of it)
run('./Scalability/scripts/hello.sh')
env.logger.info("Load Cross-language tests")
if exists('Cross-language-interfacing'):
with cd('Cross-language-interfacing'):
run('git pull')
else:
_fetch_and_unpack("git clone git://github.com/pjotrp/Cross-language-interfacing.git")
# Special installs for the tests
with cd('Cross-language-interfacing'):
sudo('./scripts/install-packages-root.sh ')
run('./scripts/install-packages.sh')
run('./scripts/create_test_files.rb')
env.flavor = BioTestFlavor(env)
| post_install | identifier_name |
core.py | """
Core classes for the XBlock family.
This code is in the Runtime layer, because it is authored once by edX
and used by all runtimes.
"""
import inspect
import pkg_resources
import warnings
from collections import defaultdict
from xblock.exceptions import DisallowedFileError
from xblock.fields import String, List, Scope
from xblock.internal import class_lazy
import xblock.mixins
from xblock.mixins import (
ScopedStorageMixin,
HierarchyMixin,
RuntimeServicesMixin,
HandlersMixin,
XmlSerializationMixin,
IndexInfoMixin,
ViewsMixin,
)
from xblock.plugin import Plugin
from xblock.validation import Validation
# exposing XML_NAMESPACES as a member of core, in order to avoid importing mixins where
# XML_NAMESPACES are needed (e.g. runtime.py).
XML_NAMESPACES = xblock.mixins.XML_NAMESPACES
# __all__ controls what classes end up in the docs.
__all__ = ['XBlock']
UNSET = object()
class XBlockMixin(ScopedStorageMixin):
"""
Base class for XBlock Mixin classes.
XBlockMixin classes can add new fields and new properties to all XBlocks
created by a particular runtime.
"""
pass
class SharedBlockBase(Plugin):
"""
Behaviors and attrs which all XBlock like things should share
"""
@classmethod
def open_local_resource(cls, uri):
"""Open a local resource.
The container calls this method when it receives a request for a
resource on a URL which was generated by Runtime.local_resource_url().
It will pass the URI from the original call to local_resource_url()
back to this method. The XBlock must parse this URI and return an open
file-like object for the resource.
For security reasons, the default implementation will return only a
very restricted set of file types, which must be located in a folder
called "public". XBlock authors who want to override this behavior will
need to take care to ensure that the method only serves legitimate
public resources. At the least, the URI should be matched against a
whitelist regex to ensure that you do not serve an unauthorized
resource.
"""
# Verify the URI is in whitelisted form before opening for serving.
# URI must begin with public/, and no file path component can start
# with a dot, which prevents ".." and ".hidden" files.
if not uri.startswith("public/"):
raise DisallowedFileError("Only files from public/ are allowed: %r" % uri)
if "/." in uri:
raise DisallowedFileError("Only safe file names are allowed: %r" % uri)
return pkg_resources.resource_stream(cls.__module__, uri)
# -- Base Block
class XBlock(XmlSerializationMixin, HierarchyMixin, ScopedStorageMixin, RuntimeServicesMixin, HandlersMixin,
IndexInfoMixin, ViewsMixin, SharedBlockBase):
"""Base class for XBlocks.
Derive from this class to create a new kind of XBlock. There are no
required methods, but you will probably need at least one view.
Don't provide the ``__init__`` method when deriving from this class.
"""
entry_point = 'xblock.v1'
name = String(help="Short name for the block", scope=Scope.settings)
tags = List(help="Tags for this block", scope=Scope.settings)
@class_lazy
def _class_tags(cls): # pylint: disable=no-self-argument
"""
Collect the tags from all base classes.
"""
class_tags = set()
for base in cls.mro()[1:]: # pylint: disable=no-member
class_tags.update(getattr(base, '_class_tags', set()))
return class_tags
@staticmethod
def tag(tags):
"""Returns a function that adds the words in `tags` as class tags to this class."""
def dec(cls):
"""Add the words in `tags` as class tags to this class."""
# Add in this class's tags
cls._class_tags.update(tags.replace(",", " ").split()) # pylint: disable=protected-access
return cls
return dec
@classmethod
def load_tagged_classes(cls, tag, fail_silently=True):
|
def __init__(self, runtime, field_data=None, scope_ids=UNSET, *args, **kwargs):
"""
Construct a new XBlock.
This class should only be instantiated by runtimes.
Arguments:
runtime (:class:`.Runtime`): Use it to access the environment.
It is available in XBlock code as ``self.runtime``.
field_data (:class:`.FieldData`): Interface used by the XBlock
fields to access their data from wherever it is persisted.
Deprecated.
scope_ids (:class:`.ScopeIds`): Identifiers needed to resolve
scopes.
"""
if scope_ids is UNSET:
raise TypeError('scope_ids are required')
# Provide backwards compatibility for external access through _field_data
super(XBlock, self).__init__(runtime=runtime, scope_ids=scope_ids, field_data=field_data, *args, **kwargs)
def render(self, view, context=None):
"""Render `view` with this block's runtime and the supplied `context`"""
return self.runtime.render(self, view, context)
def validate(self):
"""
Ask this xblock to validate itself. Subclasses are expected to override this
method, as there is currently only a no-op implementation. Any overriding method
should call super to collect validation results from its superclasses, and then
add any additional results as necessary.
"""
return Validation(self.scope_ids.usage_id)
class XBlockAside(XmlSerializationMixin, ScopedStorageMixin, RuntimeServicesMixin, HandlersMixin, SharedBlockBase):
"""
This mixin allows Xblock-like class to declare that it provides aside functionality.
"""
entry_point = "xblock_asides.v1"
@classmethod
def aside_for(cls, view_name):
"""
A decorator to indicate a function is the aside view for the given view_name.
Aside views should have a signature like:
@XBlockAside.aside_for('student_view')
def student_aside(self, block, context=None):
...
return Fragment(...)
"""
# pylint: disable=protected-access
def _decorator(func): # pylint: disable=missing-docstring
if not hasattr(func, '_aside_for'):
func._aside_for = []
func._aside_for.append(view_name) # pylint: disable=protected-access
return func
return _decorator
@class_lazy
def _combined_asides(cls): # pylint: disable=no-self-argument
"""
A dictionary mapping XBlock view names to the aside method that
decorates them (or None, if there is no decorator for the specified view).
"""
# The method declares what views it decorates. We rely on `dir`
# to handle subclasses and overrides.
combined_asides = defaultdict(None)
for _view_name, view_func in inspect.getmembers(cls, lambda attr: hasattr(attr, '_aside_for')):
aside_for = getattr(view_func, '_aside_for', [])
for view in aside_for:
combined_asides[view] = view_func.__name__
return combined_asides
def aside_view_declaration(self, view_name):
"""
Find and return a function object if one is an aside_view for the given view_name
Aside methods declare their view provision via @XBlockAside.aside_for(view_name)
This function finds those declarations for a block.
Arguments:
view_name (string): the name of the view requested.
Returns:
either the function or None
"""
if view_name in self._combined_asides:
return getattr(self, self._combined_asides[view_name])
else:
return None
def needs_serialization(self):
"""
Return True if the aside has any data to serialize to XML.
If all of the aside's data is empty or a default value, then the aside shouldn't
be serialized as XML at all.
"""
return any([field.is_set_on(self) for field in self.fields.itervalues()])
# Maintain backwards compatibility
import xblock.exceptions
class KeyValueMultiSaveError(xblock.exceptions.KeyValueMultiSaveError):
"""
Backwards compatibility class wrapper around :class:`.KeyValueMultiSaveError`.
"""
def __init__(self, *args, **kwargs):
warnings.warn("Please use xblock.exceptions.KeyValueMultiSaveError", DeprecationWarning, stacklevel=2)
super(KeyValueMultiSaveError, self).__init__(*args, **kwargs)
class XBlockSaveError(xblock.exceptions.XBlockSaveError):
"""
Backwards compatibility class wrapper around :class:`.XBlockSaveError`.
"""
def __init__(self, *args, **kwargs):
warnings.warn("Please use xblock.exceptions.XBlockSaveError", DeprecationWarning, stacklevel=2)
super(XBlockSaveError, self).__init__(*args, **kwargs)
| """
Produce a sequence of all XBlock classes tagged with `tag`.
fail_silently causes the code to simply log warnings if a
plugin cannot import. The goal is to be able to use part of
libraries from an XBlock (and thus have it installed), even if
the overall XBlock cannot be used (e.g. depends on Django in a
non-Django application). There is diagreement about whether
this is a good idea, or whether we should see failures early
(e.g. on startup or first page load), and in what
contexts. Hence, the flag.
"""
# Allow this method to access the `_class_tags`
# pylint: disable=W0212
for name, class_ in cls.load_classes(fail_silently):
if tag in class_._class_tags:
yield name, class_ | identifier_body |
core.py | """
Core classes for the XBlock family.
This code is in the Runtime layer, because it is authored once by edX
and used by all runtimes.
"""
import inspect
import pkg_resources
import warnings
from collections import defaultdict
from xblock.exceptions import DisallowedFileError
from xblock.fields import String, List, Scope
from xblock.internal import class_lazy
import xblock.mixins
from xblock.mixins import (
ScopedStorageMixin,
HierarchyMixin,
RuntimeServicesMixin,
HandlersMixin,
XmlSerializationMixin,
IndexInfoMixin,
ViewsMixin,
)
from xblock.plugin import Plugin
from xblock.validation import Validation
# exposing XML_NAMESPACES as a member of core, in order to avoid importing mixins where
# XML_NAMESPACES are needed (e.g. runtime.py).
XML_NAMESPACES = xblock.mixins.XML_NAMESPACES
# __all__ controls what classes end up in the docs.
__all__ = ['XBlock']
UNSET = object()
class XBlockMixin(ScopedStorageMixin):
"""
Base class for XBlock Mixin classes.
XBlockMixin classes can add new fields and new properties to all XBlocks
created by a particular runtime.
"""
pass
class SharedBlockBase(Plugin):
"""
Behaviors and attrs which all XBlock like things should share
"""
@classmethod
def open_local_resource(cls, uri):
"""Open a local resource.
The container calls this method when it receives a request for a
resource on a URL which was generated by Runtime.local_resource_url().
It will pass the URI from the original call to local_resource_url()
back to this method. The XBlock must parse this URI and return an open
file-like object for the resource.
For security reasons, the default implementation will return only a
very restricted set of file types, which must be located in a folder
called "public". XBlock authors who want to override this behavior will
need to take care to ensure that the method only serves legitimate
public resources. At the least, the URI should be matched against a
whitelist regex to ensure that you do not serve an unauthorized
resource.
"""
# Verify the URI is in whitelisted form before opening for serving.
# URI must begin with public/, and no file path component can start
# with a dot, which prevents ".." and ".hidden" files.
if not uri.startswith("public/"):
raise DisallowedFileError("Only files from public/ are allowed: %r" % uri)
if "/." in uri:
raise DisallowedFileError("Only safe file names are allowed: %r" % uri)
return pkg_resources.resource_stream(cls.__module__, uri)
# -- Base Block
class XBlock(XmlSerializationMixin, HierarchyMixin, ScopedStorageMixin, RuntimeServicesMixin, HandlersMixin,
IndexInfoMixin, ViewsMixin, SharedBlockBase):
"""Base class for XBlocks.
Derive from this class to create a new kind of XBlock. There are no
required methods, but you will probably need at least one view.
Don't provide the ``__init__`` method when deriving from this class.
"""
entry_point = 'xblock.v1'
name = String(help="Short name for the block", scope=Scope.settings)
tags = List(help="Tags for this block", scope=Scope.settings)
@class_lazy
def _class_tags(cls): # pylint: disable=no-self-argument
"""
Collect the tags from all base classes.
"""
class_tags = set()
for base in cls.mro()[1:]: # pylint: disable=no-member
class_tags.update(getattr(base, '_class_tags', set()))
return class_tags
@staticmethod
def tag(tags):
"""Returns a function that adds the words in `tags` as class tags to this class."""
def dec(cls):
"""Add the words in `tags` as class tags to this class."""
# Add in this class's tags
cls._class_tags.update(tags.replace(",", " ").split()) # pylint: disable=protected-access
return cls
return dec
@classmethod
def load_tagged_classes(cls, tag, fail_silently=True):
"""
Produce a sequence of all XBlock classes tagged with `tag`.
fail_silently causes the code to simply log warnings if a
plugin cannot import. The goal is to be able to use part of
libraries from an XBlock (and thus have it installed), even if
the overall XBlock cannot be used (e.g. depends on Django in a
non-Django application). There is diagreement about whether
this is a good idea, or whether we should see failures early | contexts. Hence, the flag.
"""
# Allow this method to access the `_class_tags`
# pylint: disable=W0212
for name, class_ in cls.load_classes(fail_silently):
if tag in class_._class_tags:
yield name, class_
def __init__(self, runtime, field_data=None, scope_ids=UNSET, *args, **kwargs):
"""
Construct a new XBlock.
This class should only be instantiated by runtimes.
Arguments:
runtime (:class:`.Runtime`): Use it to access the environment.
It is available in XBlock code as ``self.runtime``.
field_data (:class:`.FieldData`): Interface used by the XBlock
fields to access their data from wherever it is persisted.
Deprecated.
scope_ids (:class:`.ScopeIds`): Identifiers needed to resolve
scopes.
"""
if scope_ids is UNSET:
raise TypeError('scope_ids are required')
# Provide backwards compatibility for external access through _field_data
super(XBlock, self).__init__(runtime=runtime, scope_ids=scope_ids, field_data=field_data, *args, **kwargs)
def render(self, view, context=None):
"""Render `view` with this block's runtime and the supplied `context`"""
return self.runtime.render(self, view, context)
def validate(self):
"""
Ask this xblock to validate itself. Subclasses are expected to override this
method, as there is currently only a no-op implementation. Any overriding method
should call super to collect validation results from its superclasses, and then
add any additional results as necessary.
"""
return Validation(self.scope_ids.usage_id)
class XBlockAside(XmlSerializationMixin, ScopedStorageMixin, RuntimeServicesMixin, HandlersMixin, SharedBlockBase):
"""
This mixin allows Xblock-like class to declare that it provides aside functionality.
"""
entry_point = "xblock_asides.v1"
@classmethod
def aside_for(cls, view_name):
"""
A decorator to indicate a function is the aside view for the given view_name.
Aside views should have a signature like:
@XBlockAside.aside_for('student_view')
def student_aside(self, block, context=None):
...
return Fragment(...)
"""
# pylint: disable=protected-access
def _decorator(func): # pylint: disable=missing-docstring
if not hasattr(func, '_aside_for'):
func._aside_for = []
func._aside_for.append(view_name) # pylint: disable=protected-access
return func
return _decorator
@class_lazy
def _combined_asides(cls): # pylint: disable=no-self-argument
"""
A dictionary mapping XBlock view names to the aside method that
decorates them (or None, if there is no decorator for the specified view).
"""
# The method declares what views it decorates. We rely on `dir`
# to handle subclasses and overrides.
combined_asides = defaultdict(None)
for _view_name, view_func in inspect.getmembers(cls, lambda attr: hasattr(attr, '_aside_for')):
aside_for = getattr(view_func, '_aside_for', [])
for view in aside_for:
combined_asides[view] = view_func.__name__
return combined_asides
def aside_view_declaration(self, view_name):
"""
Find and return a function object if one is an aside_view for the given view_name
Aside methods declare their view provision via @XBlockAside.aside_for(view_name)
This function finds those declarations for a block.
Arguments:
view_name (string): the name of the view requested.
Returns:
either the function or None
"""
if view_name in self._combined_asides:
return getattr(self, self._combined_asides[view_name])
else:
return None
def needs_serialization(self):
"""
Return True if the aside has any data to serialize to XML.
If all of the aside's data is empty or a default value, then the aside shouldn't
be serialized as XML at all.
"""
return any([field.is_set_on(self) for field in self.fields.itervalues()])
# Maintain backwards compatibility
import xblock.exceptions
class KeyValueMultiSaveError(xblock.exceptions.KeyValueMultiSaveError):
"""
Backwards compatibility class wrapper around :class:`.KeyValueMultiSaveError`.
"""
def __init__(self, *args, **kwargs):
warnings.warn("Please use xblock.exceptions.KeyValueMultiSaveError", DeprecationWarning, stacklevel=2)
super(KeyValueMultiSaveError, self).__init__(*args, **kwargs)
class XBlockSaveError(xblock.exceptions.XBlockSaveError):
"""
Backwards compatibility class wrapper around :class:`.XBlockSaveError`.
"""
def __init__(self, *args, **kwargs):
warnings.warn("Please use xblock.exceptions.XBlockSaveError", DeprecationWarning, stacklevel=2)
super(XBlockSaveError, self).__init__(*args, **kwargs) | (e.g. on startup or first page load), and in what | random_line_split |
core.py | """
Core classes for the XBlock family.
This code is in the Runtime layer, because it is authored once by edX
and used by all runtimes.
"""
import inspect
import pkg_resources
import warnings
from collections import defaultdict
from xblock.exceptions import DisallowedFileError
from xblock.fields import String, List, Scope
from xblock.internal import class_lazy
import xblock.mixins
from xblock.mixins import (
ScopedStorageMixin,
HierarchyMixin,
RuntimeServicesMixin,
HandlersMixin,
XmlSerializationMixin,
IndexInfoMixin,
ViewsMixin,
)
from xblock.plugin import Plugin
from xblock.validation import Validation
# exposing XML_NAMESPACES as a member of core, in order to avoid importing mixins where
# XML_NAMESPACES are needed (e.g. runtime.py).
XML_NAMESPACES = xblock.mixins.XML_NAMESPACES
# __all__ controls what classes end up in the docs.
__all__ = ['XBlock']
UNSET = object()
class XBlockMixin(ScopedStorageMixin):
"""
Base class for XBlock Mixin classes.
XBlockMixin classes can add new fields and new properties to all XBlocks
created by a particular runtime.
"""
pass
class SharedBlockBase(Plugin):
"""
Behaviors and attrs which all XBlock like things should share
"""
@classmethod
def open_local_resource(cls, uri):
"""Open a local resource.
The container calls this method when it receives a request for a
resource on a URL which was generated by Runtime.local_resource_url().
It will pass the URI from the original call to local_resource_url()
back to this method. The XBlock must parse this URI and return an open
file-like object for the resource.
For security reasons, the default implementation will return only a
very restricted set of file types, which must be located in a folder
called "public". XBlock authors who want to override this behavior will
need to take care to ensure that the method only serves legitimate
public resources. At the least, the URI should be matched against a
whitelist regex to ensure that you do not serve an unauthorized
resource.
"""
# Verify the URI is in whitelisted form before opening for serving.
# URI must begin with public/, and no file path component can start
# with a dot, which prevents ".." and ".hidden" files.
if not uri.startswith("public/"):
raise DisallowedFileError("Only files from public/ are allowed: %r" % uri)
if "/." in uri:
raise DisallowedFileError("Only safe file names are allowed: %r" % uri)
return pkg_resources.resource_stream(cls.__module__, uri)
# -- Base Block
class | (XmlSerializationMixin, HierarchyMixin, ScopedStorageMixin, RuntimeServicesMixin, HandlersMixin,
IndexInfoMixin, ViewsMixin, SharedBlockBase):
"""Base class for XBlocks.
Derive from this class to create a new kind of XBlock. There are no
required methods, but you will probably need at least one view.
Don't provide the ``__init__`` method when deriving from this class.
"""
entry_point = 'xblock.v1'
name = String(help="Short name for the block", scope=Scope.settings)
tags = List(help="Tags for this block", scope=Scope.settings)
@class_lazy
def _class_tags(cls): # pylint: disable=no-self-argument
"""
Collect the tags from all base classes.
"""
class_tags = set()
for base in cls.mro()[1:]: # pylint: disable=no-member
class_tags.update(getattr(base, '_class_tags', set()))
return class_tags
@staticmethod
def tag(tags):
"""Returns a function that adds the words in `tags` as class tags to this class."""
def dec(cls):
"""Add the words in `tags` as class tags to this class."""
# Add in this class's tags
cls._class_tags.update(tags.replace(",", " ").split()) # pylint: disable=protected-access
return cls
return dec
@classmethod
def load_tagged_classes(cls, tag, fail_silently=True):
"""
Produce a sequence of all XBlock classes tagged with `tag`.
fail_silently causes the code to simply log warnings if a
plugin cannot import. The goal is to be able to use part of
libraries from an XBlock (and thus have it installed), even if
the overall XBlock cannot be used (e.g. depends on Django in a
non-Django application). There is diagreement about whether
this is a good idea, or whether we should see failures early
(e.g. on startup or first page load), and in what
contexts. Hence, the flag.
"""
# Allow this method to access the `_class_tags`
# pylint: disable=W0212
for name, class_ in cls.load_classes(fail_silently):
if tag in class_._class_tags:
yield name, class_
def __init__(self, runtime, field_data=None, scope_ids=UNSET, *args, **kwargs):
"""
Construct a new XBlock.
This class should only be instantiated by runtimes.
Arguments:
runtime (:class:`.Runtime`): Use it to access the environment.
It is available in XBlock code as ``self.runtime``.
field_data (:class:`.FieldData`): Interface used by the XBlock
fields to access their data from wherever it is persisted.
Deprecated.
scope_ids (:class:`.ScopeIds`): Identifiers needed to resolve
scopes.
"""
if scope_ids is UNSET:
raise TypeError('scope_ids are required')
# Provide backwards compatibility for external access through _field_data
super(XBlock, self).__init__(runtime=runtime, scope_ids=scope_ids, field_data=field_data, *args, **kwargs)
def render(self, view, context=None):
"""Render `view` with this block's runtime and the supplied `context`"""
return self.runtime.render(self, view, context)
def validate(self):
"""
Ask this xblock to validate itself. Subclasses are expected to override this
method, as there is currently only a no-op implementation. Any overriding method
should call super to collect validation results from its superclasses, and then
add any additional results as necessary.
"""
return Validation(self.scope_ids.usage_id)
class XBlockAside(XmlSerializationMixin, ScopedStorageMixin, RuntimeServicesMixin, HandlersMixin, SharedBlockBase):
"""
This mixin allows Xblock-like class to declare that it provides aside functionality.
"""
entry_point = "xblock_asides.v1"
@classmethod
def aside_for(cls, view_name):
"""
A decorator to indicate a function is the aside view for the given view_name.
Aside views should have a signature like:
@XBlockAside.aside_for('student_view')
def student_aside(self, block, context=None):
...
return Fragment(...)
"""
# pylint: disable=protected-access
def _decorator(func): # pylint: disable=missing-docstring
if not hasattr(func, '_aside_for'):
func._aside_for = []
func._aside_for.append(view_name) # pylint: disable=protected-access
return func
return _decorator
@class_lazy
def _combined_asides(cls): # pylint: disable=no-self-argument
"""
A dictionary mapping XBlock view names to the aside method that
decorates them (or None, if there is no decorator for the specified view).
"""
# The method declares what views it decorates. We rely on `dir`
# to handle subclasses and overrides.
combined_asides = defaultdict(None)
for _view_name, view_func in inspect.getmembers(cls, lambda attr: hasattr(attr, '_aside_for')):
aside_for = getattr(view_func, '_aside_for', [])
for view in aside_for:
combined_asides[view] = view_func.__name__
return combined_asides
def aside_view_declaration(self, view_name):
"""
Find and return a function object if one is an aside_view for the given view_name
Aside methods declare their view provision via @XBlockAside.aside_for(view_name)
This function finds those declarations for a block.
Arguments:
view_name (string): the name of the view requested.
Returns:
either the function or None
"""
if view_name in self._combined_asides:
return getattr(self, self._combined_asides[view_name])
else:
return None
def needs_serialization(self):
"""
Return True if the aside has any data to serialize to XML.
If all of the aside's data is empty or a default value, then the aside shouldn't
be serialized as XML at all.
"""
return any([field.is_set_on(self) for field in self.fields.itervalues()])
# Maintain backwards compatibility
import xblock.exceptions
class KeyValueMultiSaveError(xblock.exceptions.KeyValueMultiSaveError):
"""
Backwards compatibility class wrapper around :class:`.KeyValueMultiSaveError`.
"""
def __init__(self, *args, **kwargs):
warnings.warn("Please use xblock.exceptions.KeyValueMultiSaveError", DeprecationWarning, stacklevel=2)
super(KeyValueMultiSaveError, self).__init__(*args, **kwargs)
class XBlockSaveError(xblock.exceptions.XBlockSaveError):
"""
Backwards compatibility class wrapper around :class:`.XBlockSaveError`.
"""
def __init__(self, *args, **kwargs):
warnings.warn("Please use xblock.exceptions.XBlockSaveError", DeprecationWarning, stacklevel=2)
super(XBlockSaveError, self).__init__(*args, **kwargs)
| XBlock | identifier_name |
core.py | """
Core classes for the XBlock family.
This code is in the Runtime layer, because it is authored once by edX
and used by all runtimes.
"""
import inspect
import pkg_resources
import warnings
from collections import defaultdict
from xblock.exceptions import DisallowedFileError
from xblock.fields import String, List, Scope
from xblock.internal import class_lazy
import xblock.mixins
from xblock.mixins import (
ScopedStorageMixin,
HierarchyMixin,
RuntimeServicesMixin,
HandlersMixin,
XmlSerializationMixin,
IndexInfoMixin,
ViewsMixin,
)
from xblock.plugin import Plugin
from xblock.validation import Validation
# exposing XML_NAMESPACES as a member of core, in order to avoid importing mixins where
# XML_NAMESPACES are needed (e.g. runtime.py).
XML_NAMESPACES = xblock.mixins.XML_NAMESPACES
# __all__ controls what classes end up in the docs.
__all__ = ['XBlock']
UNSET = object()
class XBlockMixin(ScopedStorageMixin):
"""
Base class for XBlock Mixin classes.
XBlockMixin classes can add new fields and new properties to all XBlocks
created by a particular runtime.
"""
pass
class SharedBlockBase(Plugin):
"""
Behaviors and attrs which all XBlock like things should share
"""
@classmethod
def open_local_resource(cls, uri):
"""Open a local resource.
The container calls this method when it receives a request for a
resource on a URL which was generated by Runtime.local_resource_url().
It will pass the URI from the original call to local_resource_url()
back to this method. The XBlock must parse this URI and return an open
file-like object for the resource.
For security reasons, the default implementation will return only a
very restricted set of file types, which must be located in a folder
called "public". XBlock authors who want to override this behavior will
need to take care to ensure that the method only serves legitimate
public resources. At the least, the URI should be matched against a
whitelist regex to ensure that you do not serve an unauthorized
resource.
"""
# Verify the URI is in whitelisted form before opening for serving.
# URI must begin with public/, and no file path component can start
# with a dot, which prevents ".." and ".hidden" files.
if not uri.startswith("public/"):
raise DisallowedFileError("Only files from public/ are allowed: %r" % uri)
if "/." in uri:
raise DisallowedFileError("Only safe file names are allowed: %r" % uri)
return pkg_resources.resource_stream(cls.__module__, uri)
# -- Base Block
class XBlock(XmlSerializationMixin, HierarchyMixin, ScopedStorageMixin, RuntimeServicesMixin, HandlersMixin,
IndexInfoMixin, ViewsMixin, SharedBlockBase):
"""Base class for XBlocks.
Derive from this class to create a new kind of XBlock. There are no
required methods, but you will probably need at least one view.
Don't provide the ``__init__`` method when deriving from this class.
"""
entry_point = 'xblock.v1'
name = String(help="Short name for the block", scope=Scope.settings)
tags = List(help="Tags for this block", scope=Scope.settings)
@class_lazy
def _class_tags(cls): # pylint: disable=no-self-argument
"""
Collect the tags from all base classes.
"""
class_tags = set()
for base in cls.mro()[1:]: # pylint: disable=no-member
class_tags.update(getattr(base, '_class_tags', set()))
return class_tags
@staticmethod
def tag(tags):
"""Returns a function that adds the words in `tags` as class tags to this class."""
def dec(cls):
"""Add the words in `tags` as class tags to this class."""
# Add in this class's tags
cls._class_tags.update(tags.replace(",", " ").split()) # pylint: disable=protected-access
return cls
return dec
@classmethod
def load_tagged_classes(cls, tag, fail_silently=True):
"""
Produce a sequence of all XBlock classes tagged with `tag`.
fail_silently causes the code to simply log warnings if a
plugin cannot import. The goal is to be able to use part of
libraries from an XBlock (and thus have it installed), even if
the overall XBlock cannot be used (e.g. depends on Django in a
non-Django application). There is diagreement about whether
this is a good idea, or whether we should see failures early
(e.g. on startup or first page load), and in what
contexts. Hence, the flag.
"""
# Allow this method to access the `_class_tags`
# pylint: disable=W0212
for name, class_ in cls.load_classes(fail_silently):
if tag in class_._class_tags:
yield name, class_
def __init__(self, runtime, field_data=None, scope_ids=UNSET, *args, **kwargs):
"""
Construct a new XBlock.
This class should only be instantiated by runtimes.
Arguments:
runtime (:class:`.Runtime`): Use it to access the environment.
It is available in XBlock code as ``self.runtime``.
field_data (:class:`.FieldData`): Interface used by the XBlock
fields to access their data from wherever it is persisted.
Deprecated.
scope_ids (:class:`.ScopeIds`): Identifiers needed to resolve
scopes.
"""
if scope_ids is UNSET:
|
# Provide backwards compatibility for external access through _field_data
super(XBlock, self).__init__(runtime=runtime, scope_ids=scope_ids, field_data=field_data, *args, **kwargs)
def render(self, view, context=None):
"""Render `view` with this block's runtime and the supplied `context`"""
return self.runtime.render(self, view, context)
def validate(self):
"""
Ask this xblock to validate itself. Subclasses are expected to override this
method, as there is currently only a no-op implementation. Any overriding method
should call super to collect validation results from its superclasses, and then
add any additional results as necessary.
"""
return Validation(self.scope_ids.usage_id)
class XBlockAside(XmlSerializationMixin, ScopedStorageMixin, RuntimeServicesMixin, HandlersMixin, SharedBlockBase):
"""
This mixin allows Xblock-like class to declare that it provides aside functionality.
"""
entry_point = "xblock_asides.v1"
@classmethod
def aside_for(cls, view_name):
"""
A decorator to indicate a function is the aside view for the given view_name.
Aside views should have a signature like:
@XBlockAside.aside_for('student_view')
def student_aside(self, block, context=None):
...
return Fragment(...)
"""
# pylint: disable=protected-access
def _decorator(func): # pylint: disable=missing-docstring
if not hasattr(func, '_aside_for'):
func._aside_for = []
func._aside_for.append(view_name) # pylint: disable=protected-access
return func
return _decorator
@class_lazy
def _combined_asides(cls): # pylint: disable=no-self-argument
"""
A dictionary mapping XBlock view names to the aside method that
decorates them (or None, if there is no decorator for the specified view).
"""
# The method declares what views it decorates. We rely on `dir`
# to handle subclasses and overrides.
combined_asides = defaultdict(None)
for _view_name, view_func in inspect.getmembers(cls, lambda attr: hasattr(attr, '_aside_for')):
aside_for = getattr(view_func, '_aside_for', [])
for view in aside_for:
combined_asides[view] = view_func.__name__
return combined_asides
def aside_view_declaration(self, view_name):
"""
Find and return a function object if one is an aside_view for the given view_name
Aside methods declare their view provision via @XBlockAside.aside_for(view_name)
This function finds those declarations for a block.
Arguments:
view_name (string): the name of the view requested.
Returns:
either the function or None
"""
if view_name in self._combined_asides:
return getattr(self, self._combined_asides[view_name])
else:
return None
def needs_serialization(self):
"""
Return True if the aside has any data to serialize to XML.
If all of the aside's data is empty or a default value, then the aside shouldn't
be serialized as XML at all.
"""
return any([field.is_set_on(self) for field in self.fields.itervalues()])
# Maintain backwards compatibility
import xblock.exceptions
class KeyValueMultiSaveError(xblock.exceptions.KeyValueMultiSaveError):
"""
Backwards compatibility class wrapper around :class:`.KeyValueMultiSaveError`.
"""
def __init__(self, *args, **kwargs):
warnings.warn("Please use xblock.exceptions.KeyValueMultiSaveError", DeprecationWarning, stacklevel=2)
super(KeyValueMultiSaveError, self).__init__(*args, **kwargs)
class XBlockSaveError(xblock.exceptions.XBlockSaveError):
"""
Backwards compatibility class wrapper around :class:`.XBlockSaveError`.
"""
def __init__(self, *args, **kwargs):
warnings.warn("Please use xblock.exceptions.XBlockSaveError", DeprecationWarning, stacklevel=2)
super(XBlockSaveError, self).__init__(*args, **kwargs)
| raise TypeError('scope_ids are required') | conditional_block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.