isTimeSignatureString.spec.js
|
import isTimeSignatureString from '../../../../src/parser/matchers/isTimeSignatureString';
describe('isTimeSignatureString', () => {
test('Module', () => {
expect(isTimeSignatureString).toBeInstanceOf(Function);
});
});
describe.each([
['2/2', true],
['3/2', true],
['2/4', true],
['3/4', true],
['4/4', true],
['5/4', true],
['3/8', true],
['6/8', true],
['9/8', true],
['12/8', true],
[' 4/4', false],
['4/4 ', false],
[' 4/4 ', false],
['5/2', false],
	['3/3', false],
	['13/8', false],
])('Time signature string %s', (tsString, result) => {
	test('Correctly detect time signature', () => {
		expect(isTimeSignatureString(tsString)).toEqual(result);
	});
});
|
class.rs
|
/* automatically generated by rust-bindgen */
#![allow(dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals)]
#[repr(C)]
#[derive(Default)]
pub struct __IncompleteArrayField<T>(::std::marker::PhantomData<T>);
impl<T> __IncompleteArrayField<T> {
#[inline]
pub fn new() -> Self {
__IncompleteArrayField(::std::marker::PhantomData)
}
#[inline]
pub unsafe fn as_ptr(&self) -> *const T {
::std::mem::transmute(self)
}
#[inline]
pub unsafe fn as_mut_ptr(&mut self) -> *mut T {
::std::mem::transmute(self)
}
#[inline]
pub unsafe fn as_slice(&self, len: usize) -> &[T] {
::std::slice::from_raw_parts(self.as_ptr(), len)
}
#[inline]
pub unsafe fn as_mut_slice(&mut self, len: usize) -> &mut [T] {
::std::slice::from_raw_parts_mut(self.as_mut_ptr(), len)
}
}
impl<T> ::std::fmt::Debug for __IncompleteArrayField<T> {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
fmt.write_str("__IncompleteArrayField")
}
}
impl<T> ::std::clone::Clone for __IncompleteArrayField<T> {
#[inline]
fn clone(&self) -> Self {
Self::new()
}
}
impl<T> ::std::marker::Copy for __IncompleteArrayField<T> {}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct C {
pub a: ::std::os::raw::c_int,
pub big_array: [::std::os::raw::c_char; 33usize],
}
#[test]
fn bindgen_test_layout_C() {
assert_eq!(
::std::mem::size_of::<C>(),
40usize,
concat!("Size of: ", stringify!(C))
);
assert_eq!(
::std::mem::align_of::<C>(),
4usize,
concat!("Alignment of ", stringify!(C))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<C>())).a as *const _ as usize },
0usize,
concat!("Offset of field: ", stringify!(C), "::", stringify!(a))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<C>())).big_array as *const _ as usize },
4usize,
concat!(
"Offset of field: ",
stringify!(C),
"::",
stringify!(big_array)
)
);
}
impl Default for C {
fn default() -> Self {
unsafe { ::std::mem::zeroed() }
}
}
#[repr(C)]
pub struct C_with_zero_length_array {
pub a: ::std::os::raw::c_int,
pub big_array: [::std::os::raw::c_char; 33usize],
pub zero_length_array: __IncompleteArrayField<::std::os::raw::c_char>,
}
#[test]
fn bindgen_test_layout_C_with_zero_length_array() {
assert_eq!(
::std::mem::size_of::<C_with_zero_length_array>(),
40usize,
concat!("Size of: ", stringify!(C_with_zero_length_array))
);
assert_eq!(
::std::mem::align_of::<C_with_zero_length_array>(),
4usize,
concat!("Alignment of ", stringify!(C_with_zero_length_array))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<C_with_zero_length_array>())).a as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(C_with_zero_length_array),
"::",
stringify!(a)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<C_with_zero_length_array>())).big_array as *const _ as usize
},
4usize,
concat!(
"Offset of field: ",
stringify!(C_with_zero_length_array),
"::",
stringify!(big_array)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<C_with_zero_length_array>())).zero_length_array as *const _
as usize
},
37usize,
concat!(
"Offset of field: ",
stringify!(C_with_zero_length_array),
"::",
stringify!(zero_length_array)
)
);
}
impl Default for C_with_zero_length_array {
fn default() -> Self {
unsafe { ::std::mem::zeroed() }
}
}
#[repr(C)]
#[derive(Debug, Default)]
pub struct C_with_zero_length_array_2 {
pub a: ::std::os::raw::c_int,
pub zero_length_array: __IncompleteArrayField<::std::os::raw::c_char>,
}
#[test]
fn bindgen_test_layout_C_with_zero_length_array_2() {
assert_eq!(
::std::mem::size_of::<C_with_zero_length_array_2>(),
4usize,
concat!("Size of: ", stringify!(C_with_zero_length_array_2))
);
assert_eq!(
::std::mem::align_of::<C_with_zero_length_array_2>(),
4usize,
concat!("Alignment of ", stringify!(C_with_zero_length_array_2))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<C_with_zero_length_array_2>())).a as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(C_with_zero_length_array_2),
"::",
stringify!(a)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<C_with_zero_length_array_2>())).zero_length_array as *const _
as usize
},
4usize,
concat!(
"Offset of field: ",
stringify!(C_with_zero_length_array_2),
"::",
stringify!(zero_length_array)
)
);
}
#[repr(C)]
pub struct C_with_incomplete_array {
pub a: ::std::os::raw::c_int,
pub big_array: [::std::os::raw::c_char; 33usize],
pub incomplete_array: __IncompleteArrayField<::std::os::raw::c_char>,
}
#[test]
fn bindgen_test_layout_C_with_incomplete_array() {
assert_eq!(
::std::mem::size_of::<C_with_incomplete_array>(),
40usize,
concat!("Size of: ", stringify!(C_with_incomplete_array))
);
assert_eq!(
::std::mem::align_of::<C_with_incomplete_array>(),
4usize,
concat!("Alignment of ", stringify!(C_with_incomplete_array))
);
}
impl Default for C_with_incomplete_array {
fn default() -> Self {
unsafe { ::std::mem::zeroed() }
}
}
#[repr(C)]
#[derive(Debug, Default)]
pub struct C_with_incomplete_array_2 {
pub a: ::std::os::raw::c_int,
pub incomplete_array: __IncompleteArrayField<::std::os::raw::c_char>,
}
#[test]
fn bindgen_test_layout_C_with_incomplete_array_2() {
assert_eq!(
::std::mem::size_of::<C_with_incomplete_array_2>(),
4usize,
concat!("Size of: ", stringify!(C_with_incomplete_array_2))
);
assert_eq!(
::std::mem::align_of::<C_with_incomplete_array_2>(),
4usize,
concat!("Alignment of ", stringify!(C_with_incomplete_array_2))
);
}
#[repr(C)]
pub struct C_with_zero_length_array_and_incomplete_array {
pub a: ::std::os::raw::c_int,
pub big_array: [::std::os::raw::c_char; 33usize],
pub zero_length_array: __IncompleteArrayField<::std::os::raw::c_char>,
pub incomplete_array: __IncompleteArrayField<::std::os::raw::c_char>,
}
#[test]
fn bindgen_test_layout_C_with_zero_length_array_and_incomplete_array() {
assert_eq!(
::std::mem::size_of::<C_with_zero_length_array_and_incomplete_array>(),
40usize,
concat!(
"Size of: ",
stringify!(C_with_zero_length_array_and_incomplete_array)
)
);
assert_eq!(
::std::mem::align_of::<C_with_zero_length_array_and_incomplete_array>(),
4usize,
concat!(
"Alignment of ",
stringify!(C_with_zero_length_array_and_incomplete_array)
)
);
}
impl Default for C_with_zero_length_array_and_incomplete_array {
fn default() -> Self {
unsafe { ::std::mem::zeroed() }
}
}
#[repr(C)]
#[derive(Debug, Default)]
pub struct C_with_zero_length_array_and_incomplete_array_2 {
pub a: ::std::os::raw::c_int,
pub zero_length_array: __IncompleteArrayField<::std::os::raw::c_char>,
pub incomplete_array: __IncompleteArrayField<::std::os::raw::c_char>,
}
#[test]
fn bindgen_test_layout_C_with_zero_length_array_and_incomplete_array_2() {
assert_eq!(
::std::mem::size_of::<C_with_zero_length_array_and_incomplete_array_2>(),
4usize,
concat!(
"Size of: ",
stringify!(C_with_zero_length_array_and_incomplete_array_2)
)
);
assert_eq!(
::std::mem::align_of::<C_with_zero_length_array_and_incomplete_array_2>(),
4usize,
concat!(
"Alignment of ",
stringify!(C_with_zero_length_array_and_incomplete_array_2)
)
);
}
#[repr(C)]
#[derive(Debug, Default, Hash, PartialOrd, Ord, PartialEq, Eq)]
pub struct WithDtor {
pub b: ::std::os::raw::c_int,
}
#[test]
fn bindgen_test_layout_WithDtor() {
assert_eq!(
::std::mem::size_of::<WithDtor>(),
4usize,
concat!("Size of: ", stringify!(WithDtor))
);
assert_eq!(
::std::mem::align_of::<WithDtor>(),
4usize,
concat!("Alignment of ", stringify!(WithDtor))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<WithDtor>())).b as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(WithDtor),
"::",
stringify!(b)
)
);
}
#[repr(C)]
pub struct IncompleteArrayNonCopiable {
pub whatever: *mut ::std::os::raw::c_void,
pub incomplete_array: __IncompleteArrayField<C>,
}
#[test]
fn bindgen_test_layout_IncompleteArrayNonCopiable() {
assert_eq!(
::std::mem::size_of::<IncompleteArrayNonCopiable>(),
8usize,
concat!("Size of: ", stringify!(IncompleteArrayNonCopiable))
);
assert_eq!(
::std::mem::align_of::<IncompleteArrayNonCopiable>(),
8usize,
concat!("Alignment of ", stringify!(IncompleteArrayNonCopiable))
);
}
impl Default for IncompleteArrayNonCopiable {
fn default() -> Self {
unsafe { ::std::mem::zeroed() }
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union Union {
pub d: f32,
pub i: ::std::os::raw::c_int,
_bindgen_union_align: u32,
}
#[test]
fn bindgen_test_layout_Union() {
assert_eq!(
::std::mem::size_of::<Union>(),
4usize,
concat!("Size of: ", stringify!(Union))
);
assert_eq!(
::std::mem::align_of::<Union>(),
4usize,
concat!("Alignment of ", stringify!(Union))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<Union>())).d as *const _ as usize },
0usize,
concat!("Offset of field: ", stringify!(Union), "::", stringify!(d))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<Union>())).i as *const _ as usize },
0usize,
concat!("Offset of field: ", stringify!(Union), "::", stringify!(i))
);
}
impl Default for Union {
fn default() -> Self {
unsafe { ::std::mem::zeroed() }
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct WithUnion {
pub data: Union,
}
#[test]
fn bindgen_test_layout_WithUnion() {
assert_eq!(
::std::mem::size_of::<WithUnion>(),
4usize,
concat!("Size of: ", stringify!(WithUnion))
);
assert_eq!(
::std::mem::align_of::<WithUnion>(),
4usize,
concat!("Alignment of ", stringify!(WithUnion))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<WithUnion>())).data as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(WithUnion),
"::",
stringify!(data)
)
);
}
impl Default for WithUnion {
fn default() -> Self {
unsafe { ::std::mem::zeroed() }
}
}
#[repr(C)]
#[derive(Debug, Default, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)]
pub struct RealAbstractionWithTonsOfMethods {
pub _address: u8,
}
#[test]
fn bindgen_test_layout_RealAbstractionWithTonsOfMethods() {
assert_eq!(
::std::mem::size_of::<RealAbstractionWithTonsOfMethods>(),
1usize,
concat!("Size of: ", stringify!(RealAbstractionWithTonsOfMethods))
);
assert_eq!(
::std::mem::align_of::<RealAbstractionWithTonsOfMethods>(),
1usize,
concat!(
"Alignment of ",
stringify!(RealAbstractionWithTonsOfMethods)
)
);
}
extern "C" {
#[link_name = "\u{1}_ZNK32RealAbstractionWithTonsOfMethods3barEv"]
pub fn RealAbstractionWithTonsOfMethods_bar(this: *const RealAbstractionWithTonsOfMethods);
}
extern "C" {
#[link_name = "\u{1}_ZN32RealAbstractionWithTonsOfMethods3barEv"]
pub fn RealAbstractionWithTonsOfMethods_bar1(this: *mut RealAbstractionWithTonsOfMethods);
}
extern "C" {
#[link_name = "\u{1}_ZN32RealAbstractionWithTonsOfMethods3barEi"]
pub fn RealAbstractionWithTonsOfMethods_bar2(
this: *mut RealAbstractionWithTonsOfMethods,
foo: ::std::os::raw::c_int,
);
}
extern "C" {
#[link_name = "\u{1}_ZN32RealAbstractionWithTonsOfMethods3staEv"]
pub fn RealAbstractionWithTonsOfMethods_sta();
}
impl RealAbstractionWithTonsOfMethods {
#[inline]
pub unsafe fn bar(&self) {
RealAbstractionWithTonsOfMethods_bar(self)
}
#[inline]
pub unsafe fn bar1(&mut self) {
RealAbstractionWithTonsOfMethods_bar1(self)
}
#[inline]
pub unsafe fn bar2(&mut self, foo: ::std::os::raw::c_int) {
RealAbstractionWithTonsOfMethods_bar2(self, foo)
}
#[inline]
pub unsafe fn sta() {
RealAbstractionWithTonsOfMethods_sta()
}
}
|
repository-factory.data.ts
|
import { AxiosInstance } from 'axios';
import Repository from './repository.data';
import EntityHydrator from './entity-hydrator.data';
import ChangesetGenerator from './changeset-generator.data';
import EntityFactory from './entity-factory.data';
import ErrorResolver from './error-resolver.data';
export default class RepositoryFactory {
private hydrator: EntityHydrator;
private changesetGenerator: ChangesetGenerator;
private entityFactory: EntityFactory;
private httpClient: AxiosInstance;
private errorResolver: ErrorResolver;
constructor(
hydrator: EntityHydrator,
changesetGenerator: ChangesetGenerator,
entityFactory: EntityFactory,
httpClient: AxiosInstance,
errorResolver: ErrorResolver,
) {
this.hydrator = hydrator;
this.changesetGenerator = changesetGenerator;
this.entityFactory = entityFactory;
this.httpClient = httpClient;
this.errorResolver = errorResolver;
}
/**
* Creates a repository for the provided entity.
* The route parameter allows a custom route to be configured for the entity; it is used for association loading.
*/
create(entityName: string, route = '', options = {}): Repository {
if (!route) {
route = `/${entityName.replace(/_/g, '-')}`;
}
const definition = Shopware.EntityDefinition.get(entityName);
return new Repository(
route,
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
definition.entity,
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
this.httpClient,
this.hydrator,
this.changesetGenerator,
this.entityFactory,
this.errorResolver,
options,
);
}
}
|
lib.rs
|
// Copyright (c) 2018, The Tor Project, Inc.
// Copyright (c) 2018, isis agora lovecruft
// See LICENSE for licensing information
//! Common cryptographic functions and utilities.
//!
//! # Hash Digests and eXtendable Output Functions (XOFs)
//!
//! The `digests` module contains submodules for specific hash digests
//! and extendable output functions.
//!
//! ```rust,no_run
//! use crypto::digests::sha2::*;
//!
//! let mut hasher: Sha256 = Sha256::default();
//! let mut result: [u8; 32] = [0u8; 32];
//!
//! hasher.input(b"foo");
//! hasher.input(b"bar");
//! hasher.input(b"baz");
//!
//! result.copy_from_slice(hasher.result().as_slice());
//!
//! assert!(result == [b'X'; DIGEST256_LEN]);
//! ```
#[deny(missing_docs)]
// External crates from cargo or TOR_RUST_DEPENDENCIES.
extern crate digest;
extern crate libc;
extern crate rand_core;
// External dependencies for tests.
#[cfg(test)]
extern crate rand as rand_crate;
// Our local crates.
extern crate external;
#[cfg(not(test))]
#[macro_use]
extern crate tor_log;
pub mod digests; // Unfortunately named "digests" plural to avoid name conflict with the digest crate
pub mod rand;
|
dram_clk.rs
|
#[doc = "Register `DRAM_CLK` reader"]
pub struct R(crate::R<DRAM_CLK_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<DRAM_CLK_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<DRAM_CLK_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<DRAM_CLK_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `DRAM_CLK` writer"]
pub struct W(crate::W<DRAM_CLK_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<DRAM_CLK_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<DRAM_CLK_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<DRAM_CLK_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Gating Clock\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CLK_GATING_A {
#[doc = "0: `0`"]
OFF = 0,
#[doc = "1: `1`"]
ON = 1,
}
impl From<CLK_GATING_A> for bool {
#[inline(always)]
fn from(variant: CLK_GATING_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `CLK_GATING` reader - Gating Clock"]
pub struct CLK_GATING_R(crate::FieldReader<bool, CLK_GATING_A>);
impl CLK_GATING_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
CLK_GATING_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CLK_GATING_A {
match self.bits {
false => CLK_GATING_A::OFF,
true => CLK_GATING_A::ON,
}
}
#[doc = "Checks if the value of the field is `OFF`"]
#[inline(always)]
pub fn is_off(&self) -> bool {
**self == CLK_GATING_A::OFF
}
#[doc = "Checks if the value of the field is `ON`"]
#[inline(always)]
pub fn is_on(&self) -> bool {
**self == CLK_GATING_A::ON
}
}
impl core::ops::Deref for CLK_GATING_R {
type Target = crate::FieldReader<bool, CLK_GATING_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CLK_GATING` writer - Gating Clock"]
pub struct CLK_GATING_W<'a> {
w: &'a mut W,
}
impl<'a> CLK_GATING_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: CLK_GATING_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "`0`"]
#[inline(always)]
pub fn off(self) -> &'a mut W {
self.variant(CLK_GATING_A::OFF)
}
#[doc = "`1`"]
#[inline(always)]
pub fn on(self) -> &'a mut W {
self.variant(CLK_GATING_A::ON)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 31)) | ((value as u32 & 0x01) << 31);
self.w
}
}
#[doc = "SDRCLK Configuration 0 Update\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SDRCLK_UPD_A {
#[doc = "0: `0`"]
INVALID = 0,
#[doc = "1: `1`"]
VALID = 1,
}
impl From<SDRCLK_UPD_A> for bool {
#[inline(always)]
fn from(variant: SDRCLK_UPD_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SDRCLK_UPD` reader - SDRCLK Configuration 0 Update"]
pub struct SDRCLK_UPD_R(crate::FieldReader<bool, SDRCLK_UPD_A>);
impl SDRCLK_UPD_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SDRCLK_UPD_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SDRCLK_UPD_A {
match self.bits {
false => SDRCLK_UPD_A::INVALID,
true => SDRCLK_UPD_A::VALID,
}
}
#[doc = "Checks if the value of the field is `INVALID`"]
#[inline(always)]
pub fn is_invalid(&self) -> bool {
**self == SDRCLK_UPD_A::INVALID
}
#[doc = "Checks if the value of the field is `VALID`"]
#[inline(always)]
pub fn is_valid(&self) -> bool {
**self == SDRCLK_UPD_A::VALID
}
}
impl core::ops::Deref for SDRCLK_UPD_R {
type Target = crate::FieldReader<bool, SDRCLK_UPD_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SDRCLK_UPD` writer - SDRCLK Configuration 0 Update"]
pub struct SDRCLK_UPD_W<'a> {
w: &'a mut W,
}
impl<'a> SDRCLK_UPD_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SDRCLK_UPD_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "`0`"]
#[inline(always)]
pub fn invalid(self) -> &'a mut W {
self.variant(SDRCLK_UPD_A::INVALID)
}
#[doc = "`1`"]
#[inline(always)]
pub fn valid(self) -> &'a mut W {
self.variant(SDRCLK_UPD_A::VALID)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 27)) | ((value as u32 & 0x01) << 27);
self.w
}
}
#[doc = "Clock Source Select\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum CLK_SRC_SEL_A {
#[doc = "0: `0`"]
PLL_DDR = 0,
#[doc = "1: `1`"]
PLL_AUDIO1_DIV2 = 1,
#[doc = "2: `10`"]
PLL_PERI_2X = 2,
#[doc = "3: `11`"]
PLL_PERI_800M = 3,
}
impl From<CLK_SRC_SEL_A> for u8 {
#[inline(always)]
fn from(variant: CLK_SRC_SEL_A) -> Self {
variant as _
}
}
#[doc = "Field `CLK_SRC_SEL` reader - Clock Source Select"]
pub struct CLK_SRC_SEL_R(crate::FieldReader<u8, CLK_SRC_SEL_A>);
impl CLK_SRC_SEL_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
CLK_SRC_SEL_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> Option<CLK_SRC_SEL_A> {
match self.bits {
0 => Some(CLK_SRC_SEL_A::PLL_DDR),
1 => Some(CLK_SRC_SEL_A::PLL_AUDIO1_DIV2),
2 => Some(CLK_SRC_SEL_A::PLL_PERI_2X),
3 => Some(CLK_SRC_SEL_A::PLL_PERI_800M),
_ => None,
}
}
#[doc = "Checks if the value of the field is `PLL_DDR`"]
#[inline(always)]
pub fn is_pll_ddr(&self) -> bool {
**self == CLK_SRC_SEL_A::PLL_DDR
}
#[doc = "Checks if the value of the field is `PLL_AUDIO1_DIV2`"]
#[inline(always)]
pub fn is_pll_audio1_div2(&self) -> bool {
**self == CLK_SRC_SEL_A::PLL_AUDIO1_DIV2
}
#[doc = "Checks if the value of the field is `PLL_PERI_2X`"]
#[inline(always)]
pub fn is_pll_peri_2x(&self) -> bool {
**self == CLK_SRC_SEL_A::PLL_PERI_2X
}
#[doc = "Checks if the value of the field is `PLL_PERI_800M`"]
#[inline(always)]
pub fn is_pll_peri_800m(&self) -> bool {
**self == CLK_SRC_SEL_A::PLL_PERI_800M
}
}
impl core::ops::Deref for CLK_SRC_SEL_R {
type Target = crate::FieldReader<u8, CLK_SRC_SEL_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CLK_SRC_SEL` writer - Clock Source Select"]
pub struct CLK_SRC_SEL_W<'a> {
w: &'a mut W,
}
impl<'a> CLK_SRC_SEL_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: CLK_SRC_SEL_A) -> &'a mut W {
unsafe { self.bits(variant.into()) }
}
#[doc = "`0`"]
#[inline(always)]
pub fn pll_ddr(self) -> &'a mut W {
self.variant(CLK_SRC_SEL_A::PLL_DDR)
}
#[doc = "`1`"]
#[inline(always)]
pub fn pll_audio1_div2(self) -> &'a mut W {
self.variant(CLK_SRC_SEL_A::PLL_AUDIO1_DIV2)
}
#[doc = "`10`"]
#[inline(always)]
pub fn pll_peri_2x(self) -> &'a mut W {
self.variant(CLK_SRC_SEL_A::PLL_PERI_2X)
}
#[doc = "`11`"]
#[inline(always)]
pub fn pll_peri_800m(self) -> &'a mut W {
self.variant(CLK_SRC_SEL_A::PLL_PERI_800M)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x07 << 24)) | ((value as u32 & 0x07) << 24);
self.w
}
}
#[doc = "Factor N\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum DRAM_DIV2_A {
#[doc = "0: `0`"]
N1 = 0,
#[doc = "1: `1`"]
N2 = 1,
#[doc = "2: `10`"]
N4 = 2,
#[doc = "3: `11`"]
N8 = 3,
}
impl From<DRAM_DIV2_A> for u8 {
#[inline(always)]
fn from(variant: DRAM_DIV2_A) -> Self {
variant as _
}
}
#[doc = "Field `DRAM_DIV2` reader - Factor N"]
pub struct DRAM_DIV2_R(crate::FieldReader<u8, DRAM_DIV2_A>);
impl DRAM_DIV2_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
DRAM_DIV2_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> DRAM_DIV2_A {
match self.bits {
0 => DRAM_DIV2_A::N1,
1 => DRAM_DIV2_A::N2,
2 => DRAM_DIV2_A::N4,
3 => DRAM_DIV2_A::N8,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `N1`"]
#[inline(always)]
pub fn is_n1(&self) -> bool {
**self == DRAM_DIV2_A::N1
}
#[doc = "Checks if the value of the field is `N2`"]
#[inline(always)]
pub fn is_n2(&self) -> bool {
**self == DRAM_DIV2_A::N2
}
#[doc = "Checks if the value of the field is `N4`"]
#[inline(always)]
pub fn is_n4(&self) -> bool {
**self == DRAM_DIV2_A::N4
}
#[doc = "Checks if the value of the field is `N8`"]
#[inline(always)]
pub fn is_n8(&self) -> bool {
**self == DRAM_DIV2_A::N8
}
}
impl core::ops::Deref for DRAM_DIV2_R {
type Target = crate::FieldReader<u8, DRAM_DIV2_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `DRAM_DIV2` writer - Factor N"]
pub struct DRAM_DIV2_W<'a> {
w: &'a mut W,
}
impl<'a> DRAM_DIV2_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: DRAM_DIV2_A) -> &'a mut W {
self.bits(variant.into())
}
#[doc = "`0`"]
#[inline(always)]
pub fn n1(self) -> &'a mut W {
self.variant(DRAM_DIV2_A::N1)
}
#[doc = "`1`"]
#[inline(always)]
pub fn n2(self) -> &'a mut W {
self.variant(DRAM_DIV2_A::N2)
}
#[doc = "`10`"]
#[inline(always)]
pub fn n4(self) -> &'a mut W {
self.variant(DRAM_DIV2_A::N4)
}
#[doc = "`11`"]
#[inline(always)]
pub fn n8(self) -> &'a mut W {
self.variant(DRAM_DIV2_A::N8)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 8)) | ((value as u32 & 0x03) << 8);
self.w
}
}
#[doc = "Field `DRAM_DIV1` reader - Factor M"]
pub struct DRAM_DIV1_R(crate::FieldReader<u8, u8>);
impl DRAM_DIV1_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
DRAM_DIV1_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for DRAM_DIV1_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `DRAM_DIV1` writer - Factor M"]
pub struct DRAM_DIV1_W<'a> {
w: &'a mut W,
}
impl<'a> DRAM_DIV1_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x03) | (value as u32 & 0x03);
self.w
}
}
impl R {
#[doc = "Bit 31 - Gating Clock"]
#[inline(always)]
pub fn clk_gating(&self) -> CLK_GATING_R {
CLK_GATING_R::new(((self.bits >> 31) & 0x01) != 0)
}
#[doc = "Bit 27 - SDRCLK Configuration 0 Update"]
#[inline(always)]
pub fn sdrclk_upd(&self) -> SDRCLK_UPD_R {
SDRCLK_UPD_R::new(((self.bits >> 27) & 0x01) != 0)
}
#[doc = "Bits 24:26 - Clock Source Select"]
#[inline(always)]
pub fn clk_src_sel(&self) -> CLK_SRC_SEL_R {
CLK_SRC_SEL_R::new(((self.bits >> 24) & 0x07) as u8)
}
#[doc = "Bits 8:9 - Factor N"]
#[inline(always)]
pub fn dram_div2(&self) -> DRAM_DIV2_R {
DRAM_DIV2_R::new(((self.bits >> 8) & 0x03) as u8)
}
#[doc = "Bits 0:1 - Factor M"]
#[inline(always)]
pub fn dram_div1(&self) -> DRAM_DIV1_R {
DRAM_DIV1_R::new((self.bits & 0x03) as u8)
}
}
impl W {
#[doc = "Bit 31 - Gating Clock"]
#[inline(always)]
pub fn clk_gating(&mut self) -> CLK_GATING_W {
CLK_GATING_W { w: self }
}
#[doc = "Bit 27 - SDRCLK Configuration 0 Update"]
#[inline(always)]
pub fn sdrclk_upd(&mut self) -> SDRCLK_UPD_W {
    SDRCLK_UPD_W { w: self }
}
#[doc = "Bits 24:26 - Clock Source Select"]
#[inline(always)]
pub fn clk_src_sel(&mut self) -> CLK_SRC_SEL_W {
CLK_SRC_SEL_W { w: self }
}
#[doc = "Bits 8:9 - Factor N"]
#[inline(always)]
pub fn dram_div2(&mut self) -> DRAM_DIV2_W {
DRAM_DIV2_W { w: self }
}
#[doc = "Bits 0:1 - Factor M"]
#[inline(always)]
pub fn dram_div1(&mut self) -> DRAM_DIV1_W {
DRAM_DIV1_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "DRAM Clock Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dram_clk](index.html) module"]
pub struct DRAM_CLK_SPEC;
impl crate::RegisterSpec for DRAM_CLK_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [dram_clk::R](R) reader structure"]
impl crate::Readable for DRAM_CLK_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [dram_clk::W](W) writer structure"]
impl crate::Writable for DRAM_CLK_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets DRAM_CLK to value 0"]
impl crate::Resettable for DRAM_CLK_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
|
b_test.go
|
// Code generated by copypasta/template/leetcode/generator_test.go
package main
import (
"github.com/EndlessCheng/codeforces-go/leetcode/testutil"
"testing"
)
func Test(t *testing.T) {
	t.Log("Current test is [b]")
	examples := [][]string{
		{
			`[["Earth","North America","South America"],
["North America","United States","Canada"],
["United States","New York","Boston"],
["Canada","Ontario","Quebec"],
["South America","Brazil"]]`, `"Quebec"`, `"New York"`,
			`"North America"`,
		},
	}
	targetCaseNum := 0
	if err := testutil.RunLeetCodeFuncWithExamples(t, findSmallestRegion, examples, targetCaseNum); err != nil {
		t.Fatal(err)
	}
}

// https://leetcode-cn.com/contest/biweekly-contest-13/problems/smallest-common-region/
|
lib.rs
|
pub mod fifth;
pub mod first;
pub mod fourth;
pub mod second;
pub mod sixth;
pub mod third;
|
CNN.py
|
import os
import torch
import torch.nn as nn
import torch.utils.data as Data
import torchvision
EPOCH = 1
BATCH_SIZE = 50
LR = 0.001
DOWNLOAD_MNIST = False
if not(os.path.exists('./mnist/')) or not os.listdir('./mnist/'):
DOWNLOAD_MNIST = True
train_data = torchvision.datasets.MNIST(
root='./mnist/',
train=True,
transform=torchvision.transforms.ToTensor(),
download=DOWNLOAD_MNIST,
)
print(train_data.data.size())
print(train_data.targets.size())
train_loader = Data.DataLoader(dataset=train_data, batch_size=BATCH_SIZE, shuffle=True) # batch shape:(50,1,28,28)
test_data = torchvision.datasets.MNIST(root='./mnist/', train=False)
test_x = torch.unsqueeze(test_data.data, dim=1).type(torch.FloatTensor)[:2000]/255. # shape:(2000,1,28,28)
test_y = test_data.targets[:2000]
class CNN(nn.Module):
def __init__(self):
super(CNN, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(1, 16, 5, 1, 2),
nn.ReLU(),
nn.MaxPool2d(kernel_size=2),
)
self.conv2 = nn.Sequential(
nn.Conv2d(16, 32, 5, 1, 2),
nn.ReLU(),
nn.MaxPool2d(2),
)
self.out = nn.Linear(32*7*7, 10)
def forward(self, x):
x = self.conv1(x)
x = self.conv2(x)
x = x.view(x.size(0), -1)
out = self.out(x)
return out, x
cnn = CNN()
print(cnn)
optimizer = torch.optim.Adam(cnn.parameters(), lr=LR)
loss_func = nn.CrossEntropyLoss()
for epoch in range(EPOCH):
for step, (b_x, b_y) in enumerate(train_loader):
output = cnn(b_x)[0]
loss = loss_func(output, b_y)
optimizer.zero_grad()
loss.backward()
optimizer.step()
if step % 50 == 0:
test_output, last_layer = cnn(test_x)
pred_y = torch.max(test_output, 1)[1].data.numpy()
accuracy = float((pred_y == test_y.data.numpy()).astype(int).sum())/float(test_y.size(0))
print('Epoch: ', epoch, '| Training Loss: %.4f' % loss.data.numpy(), '| Test Accuracy: %.2f' % accuracy)
test_output, _ = cnn(test_x[:20])
pred_y = torch.max(test_output, 1)[1].data.numpy()
print(pred_y, 'Prediction Number')
print(test_y[:20].numpy(), 'Real Number')
|
AffineLayer.py
|
# -*- coding: utf-8 -*-
"""
Created on Fri May 06 14:54:11 2016
@author: Alexander Weaver
"""
"""
Performs an affine (fully connected) operation on its input
An affine layer with out_dim neurons takes a data array of size Nx(in_dim), x
and returns a linearly transformed Nx(out_dim) data array
The transformation result, z, is determined by a (in_dim)x(out_dim) weight matrix, W, and
a (out_dim) bias vector, b. The transformation of any one data point (one row in x) is given by:
z = Wx + b
Constructing this object initializes the parameters following a gaussian random distribution with
standard deviation given by weight_scale.
Forward propagating this object performs the affine transformation on the given array, X.
Backpropagating this object returns the derivatives of x, W, and b with respect to the final output of
the network.
"""
import numpy as np
class AffineLayer(object):
def __init__(self, in_dim, out_dim, weight_scale, data_type=np.float32):
self.in_dim = in_dim
self.out_dim = out_dim
self.weight_scale = weight_scale
self.data_type = data_type
self.W = np.random.randn(in_dim, out_dim) * weight_scale
self.W = self.W.astype(self.data_type)
self.b = np.zeros(out_dim)
self.b = self.b.astype(self.data_type)
def forward(self, x, W=None, b=None):
if W is None:
W = self.W
if b is None:
b = self.b
N = x.shape[0]
reshaped_x = x.reshape(N, np.prod(x.shape[1:]))
out = reshaped_x.dot(W) + b
self.cache_x = x
return out
def backward(self, dout):
x = self.cache_x
N = x.shape[0]
reshaped_x = x.reshape(N, np.prod(x.shape[1:]))
dx = dout.dot(np.transpose(self.W)).reshape(x.shape)
self.dW = np.transpose(reshaped_x).dot(dout)
self.db = np.sum(dout, axis=0)
return dx
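# A minimal usage sketch (illustrative only): the shapes and values below are assumptions, not
# part of the original module. It exercises the forward/backward round trip described in the
# module docstring above.
if __name__ == '__main__':
    layer = AffineLayer(in_dim=4, out_dim=3, weight_scale=1e-2)
    x = np.random.randn(5, 4).astype(np.float32)  # N = 5 data points of dimension in_dim = 4
    out = layer.forward(x)                        # shape (5, 3): out = xW + b
    dout = np.ones_like(out)                      # stand-in for the upstream gradient
    dx = layer.backward(dout)                     # shape (5, 4); layer.dW and layer.db are also populated
    print(out.shape, dx.shape, layer.dW.shape, layer.db.shape)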
|
get_cross_connect_group_request_response.go
|
// Copyright (c) 2016, 2018, 2021, Oracle and/or its affiliates. All rights reserved.
// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
// Code generated. DO NOT EDIT.
package core
import (
"github.com/oracle/oci-go-sdk/v46/common"
"net/http"
)
// GetCrossConnectGroupRequest wrapper for the GetCrossConnectGroup operation
//
// See also
//
// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/core/GetCrossConnectGroup.go.html to see an example of how to use GetCrossConnectGroupRequest.
type GetCrossConnectGroupRequest struct {
// The OCID (https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the cross-connect group.
CrossConnectGroupId *string `mandatory:"true" contributesTo:"path" name:"crossConnectGroupId"`
// Unique Oracle-assigned identifier for the request.
// If you need to contact Oracle about a particular request, please provide the request ID.
OpcRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-request-id"`
// Metadata about the request. This information will not be transmitted to the service, but
// represents information that the SDK will consume to drive retry behavior.
RequestMetadata common.RequestMetadata
}
func (request GetCrossConnectGroupRequest) String() string {
return common.PointerString(request)
}
// HTTPRequest implements the OCIRequest interface
func (request GetCrossConnectGroupRequest) HTTPRequest(method, path string, binaryRequestBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (http.Request, error) {
	return common.MakeDefaultHTTPRequestWithTaggedStructAndExtraHeaders(method, path, request, extraHeaders)
}
// BinaryRequestBody implements the OCIRequest interface
func (request GetCrossConnectGroupRequest) BinaryRequestBody() (*common.OCIReadSeekCloser, bool) {
return nil, false
}
// RetryPolicy implements the OCIRetryableRequest interface. This retrieves the specified retry policy.
func (request GetCrossConnectGroupRequest) RetryPolicy() *common.RetryPolicy {
return request.RequestMetadata.RetryPolicy
}
// GetCrossConnectGroupResponse wrapper for the GetCrossConnectGroup operation
type GetCrossConnectGroupResponse struct {
// The underlying http response
RawResponse *http.Response
// The CrossConnectGroup instance
CrossConnectGroup `presentIn:"body"`
// For optimistic concurrency control. See `if-match`.
Etag *string `presentIn:"header" name:"etag"`
// Unique Oracle-assigned identifier for the request. If you need to contact
// Oracle about a particular request, please provide the request ID.
OpcRequestId *string `presentIn:"header" name:"opc-request-id"`
}
func (response GetCrossConnectGroupResponse) String() string {
return common.PointerString(response)
}
// HTTPResponse implements the OCIResponse interface
func (response GetCrossConnectGroupResponse) HTTPResponse() *http.Response {
return response.RawResponse
}
|
utils.go
|
package common
import (
"os/user"
"github.com/pkg/errors"
)
func GetUidArgs() ([]string, error) {
u, err := user.Current()
if err != nil {
	return nil, errors.Wrap(err, "unable to get current user")
}
return []string{"--env", "THETOOL_UID=" + u.Uid, "--env", "THETOOL_GID=" + u.Gid}, nil
}
func GetSshKeyArgs(sshkey string) []string {
// mount user's key as read only, to guarantee it is unharmed
args := []string{"-v", sshkey + ":/etc/user-data/ssh-keys/id_rsa:ro"}
// prepare a tmp volume to hold the key, as we need to chmod \ chown it
args = append(args, "--mount", "type=tmpfs,destination=/etc/github/")
return args
}
func CreateUserTemplate(homedir string) string {
return `
if [ -n "$THETOOL_UID" ]; then
groupadd --gid $THETOOL_GID -f thetoolgroup
useradd -o --uid $THETOOL_UID --gid $THETOOL_GID --no-create-home --home-dir ` + homedir + ` thetool
fi
`
}
const PrepareKeyTemplate = `
if [ -f "/etc/user-data/ssh-keys/id_rsa" ];
then
cp /etc/user-data/ssh-keys/id_rsa /etc/github/id_rsa
chmod 400 /etc/github/id_rsa
chown thetool /etc/github/id_rsa
fi
`
|
test_bcc.py
|
# encoding=utf-8
# Author: Yu-Lun Chiang
# Description: Test NewsCrawler
import logging
import pytest
from collections import namedtuple
from src.crawler.media import bcc
from src.utils.struct import NewsStruct
logger = logging.getLogger(__name__)
TEST_DATA = namedtuple(
typename="TEST_DATA",
field_names=[
"name",
"link",
"expected_output",
],
)
TEST_DATA_1 = TEST_DATA(
name="中國廣播公司_1",
link="https://www.bcc.com.tw/newsView.6473942",
expected_output=NewsStruct(
title="「這家超商」6/23開賣快篩試劑 雙北2門市限量100盒",
content="\r\n 為了方便民眾居家檢測新冠肺炎,食藥署在19日公布核准5款家用快篩試劑,可就近到藥局、醫療器材販售業者,如藥妝店、醫療器材行、便利商店等商家選購。萊爾富位於雙北的2家門市明(23)日起將首度開賣家用快篩試劑,每店限量100盒,售完為止。萊爾富首度引進國產泰博科技的「福爾威創家用新型冠狀病毒抗原快速檢驗套組」,明天下午3點起,將在台北市迪化店、北縣五工店限量開賣,每盒5入售價1700元,每店限量100盒,不拆售。根據食藥署公布的指引,如果快篩陽性,居家檢疫或隔離者須先與衛生單位聯繫,一般民眾則到社區採檢院所採檢確認;如果是陰性,民眾仍要遵循防疫規範,做好個人防護,持續自我健康管理。(快篩試劑資料照)\r\n ",
keywords=None,
category=None,
media="中國廣播公司",
datetime="2021/06/22 18:49 報導",
link="https://www.bcc.com.tw/newsView.6473942",
),
)
TEST_DATA_2 = TEST_DATA(
name="中國廣播公司_2",
link="https://www.bcc.com.tw/newsView.4839712",
expected_output=NewsStruct(
title="台積電衝關未成 聯電ADR爆漲股價再登新高",
content="\r\n 半導體類股正當紅,台積電今天(24日)早盤衝關500元短暫達標後拉回,聯電延續昨天的強勢,在ADR飆漲超過20%助威下,股價漲幅超過7%,最高攻至39.7元,市值擠下股王大立光,繼續成為台股人氣王。因為聯電的狂飆,大盤儘管稍事休息,拉回的幅度也很有限。(張佳琪報導)台股週一的兩大支柱台積電、聯電,週二股價兩樣情,台積電挑戰500元大關,早盤開盤隨即攻頂,但是衝高後買盤追價謹慎,導致股價翻黑呈現小跌。聯電因週一股價漲停板鎖住,美國ADR強漲20.24%,帶動股價開盤後強勢走高,隨即衝過39元一路向上,攻至39.7元,股價又改寫18年新高,且追價買單積極,漲幅超過7%,市值擠下股王大立光。讓股價瞬間點火爆衝的關鍵是美系外資分析師最新出具的報告大力看好聯電。理由是受惠於5G、AI、高速運算等發展,聯電產用率將提高至90%到95%,因此,8吋晶圓價格調漲、12吋晶圓產用率提升,以及28奈米拓展有成,推估聯電明後年資本支出將達12億美元,重申「買進」評等,目標價由32元上調至54.5元。分析師表示,三大法人週一同步大買聯電,週二的漲勢,內外資應都有貢獻。至於是否漲到外資報告訂下的目標價,分析師認為,以今年聯電EPS預估2.25元推算,如果漲到54.5元,本益比落在24倍,雖然高但不至於離譜,因此認為如果外資買盤力道夠強,目標價就可能達標。(圖:雅虎奇摩)\r\n ",
keywords=None,
category=None,
media="中國廣播公司",
datetime="2020/11/24 11:26 報導",
link="https://www.bcc.com.tw/newsView.4839712",
),
)
TEST_DATA_LIST = [TEST_DATA_1, TEST_DATA_2]
@pytest.fixture(scope="module")
def newsCrawler():
logger.warning("Init News Crawler ...")
return bcc.BCCNewsCrawler()
@pytest.mark.parametrize(
argnames="name, link, expected_output",
argvalues=[tuple(t) for t in TEST_DATA_LIST],
ids=[
f"{t.name}, {t.link[:50]+'...' if len(t.link) > 50 else t.link}"
for t in TEST_DATA_LIST
],
)
def test_get_info(
newsCrawler,
name,
link,
expected_output,
):
output = newsCrawler.getInfo(link=link)
assert NewsStruct.__2dict__(output) == NewsStruct.__2dict__(expected_output)
|
extract_cds.py
|
#!/usr/bin/env python
from Bio import SeqIO
import argparse
import pathlib
def get_arguments():
parser = argparse.ArgumentParser(description='Extract CDS from a genbank to output a fasta',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('input', type=str,
help='Path to input genbank file')
parser.add_argument('output', type=str,help='Path to put file/folder output')
parser.add_argument('-i', '--ignore', type=str, metavar = 'KEY', default=None, help="if 'key' matches a CDS name it won't be included in the output")
parser.add_argument('-m', '--multi', action='store_true', help="Specify if the input file is a multi-record GenBank file, in which case the CDS of each record are extracted to separate FASTA files in an output directory created at the specified output path")
args = parser.parse_args()
return args
def get_features(record, key):
cds = {}
if key is None:
for i,ft in enumerate(record.features):
if ft.type == "CDS":
if "gene" in ft.qualifiers.keys():
gene = ft.qualifiers["gene"][0]
cds[gene] = ft.extract(record)
else:
for i,ft in enumerate(record.features):
if ft.type == "CDS":
if "gene" in ft.qualifiers.keys():
if key not in ft.qualifiers["gene"][0]:
gene = ft.qualifiers["gene"][0]
cds[gene] = ft.extract(record)
return cds
def reformat(cds):
for gene, record in cds.items():
record.id = gene
record.description = ""
return cds
def main():
args = get_arguments()
#if args.ignore == None:
# args.ignore == ""
if args.multi is True:
recs = SeqIO.parse(args.input,"gb")
taxa = {}
for rec in recs:
specie = rec.annotations["organism"].replace(" ","_")
taxa[specie] = reformat(get_features(rec, args.ignore))
## Create directory
pathlib.Path(args.output.rstrip("/")+'/extract_cds_output').mkdir(parents=True, exist_ok=True)
## Write fastas
for specie, genes in taxa.items():
filepath = args.output.rstrip("/")+'/extract_cds_output'+"/"+specie+".fasta"
SeqIO.write(genes.values(),filepath,"fasta")
else:
rec = SeqIO.read(args.input, "gb")
aux = get_features(rec, args.ignore)
cds = reformat(aux)
## Write filenames
filename = args.output.strip("/")
# filename = args.output.strip("/") + "/" + rec.annotations["organism"].replace(" ","_") + ".fasta"
SeqIO.write(cds.values(), filename, "fasta")
if __name__ == '__main__':
    main()
|
block-end.tsx
|
/** @jsx jsx */
import { Transforms } from 'slate'
import { jsx } from '../../..'
export const run = editor => {
Transforms.insertText(editor, 'a')
}
export const input = (
<editor>
<block>
word
<cursor />
</block>
  </editor>
)
export const output = (
  <editor>
    <block>
      worda
      <cursor />
    </block>
  </editor>
)
|
server.go
|
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package server
import (
"context"
"crypto/tls"
"fmt"
"io"
"net"
"net/http"
"net/http/pprof"
"net/url"
"reflect"
goruntime "runtime"
"strconv"
"strings"
"time"
"github.com/emicklei/go-restful"
cadvisormetrics "github.com/google/cadvisor/container"
cadvisorapi "github.com/google/cadvisor/info/v1"
cadvisorv2 "github.com/google/cadvisor/info/v2"
"github.com/google/cadvisor/metrics"
"google.golang.org/grpc"
"k8s.io/klog/v2"
"k8s.io/kubernetes/pkg/kubelet/metrics/collectors"
"k8s.io/utils/clock"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/types"
"k8s.io/apimachinery/pkg/util/proxy"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/apiserver/pkg/authentication/authenticator"
"k8s.io/apiserver/pkg/authorization/authorizer"
"k8s.io/apiserver/pkg/server/healthz"
"k8s.io/apiserver/pkg/server/httplog"
"k8s.io/apiserver/pkg/server/routes"
utilfeature "k8s.io/apiserver/pkg/util/feature"
"k8s.io/apiserver/pkg/util/flushwriter"
"k8s.io/component-base/configz"
"k8s.io/component-base/logs"
compbasemetrics "k8s.io/component-base/metrics"
"k8s.io/component-base/metrics/legacyregistry"
podresourcesapi "k8s.io/kubelet/pkg/apis/podresources/v1"
podresourcesapiv1alpha1 "k8s.io/kubelet/pkg/apis/podresources/v1alpha1"
"k8s.io/kubernetes/pkg/api/legacyscheme"
api "k8s.io/kubernetes/pkg/apis/core"
"k8s.io/kubernetes/pkg/apis/core/v1/validation"
"k8s.io/kubernetes/pkg/features"
"k8s.io/kubernetes/pkg/kubelet/apis/podresources"
kubecontainer "k8s.io/kubernetes/pkg/kubelet/container"
"k8s.io/kubernetes/pkg/kubelet/cri/streaming"
"k8s.io/kubernetes/pkg/kubelet/cri/streaming/portforward"
remotecommandserver "k8s.io/kubernetes/pkg/kubelet/cri/streaming/remotecommand"
"k8s.io/kubernetes/pkg/kubelet/prober"
servermetrics "k8s.io/kubernetes/pkg/kubelet/server/metrics"
"k8s.io/kubernetes/pkg/kubelet/server/stats"
kubelettypes "k8s.io/kubernetes/pkg/kubelet/types"
"k8s.io/kubernetes/pkg/kubelet/util"
)
const (
metricsPath = "/metrics"
cadvisorMetricsPath = "/metrics/cadvisor"
resourceMetricsPath = "/metrics/resource"
proberMetricsPath = "/metrics/probes"
specPath = "/spec/"
statsPath = "/stats/"
logsPath = "/logs/"
)
// Server is a http.Handler which exposes kubelet functionality over HTTP.
type Server struct {
auth AuthInterface
host HostInterface
restfulCont containerInterface
metricsBuckets sets.String
metricsMethodBuckets sets.String
resourceAnalyzer stats.ResourceAnalyzer
}
// TLSOptions holds the TLS options.
type TLSOptions struct {
Config *tls.Config
CertFile string
KeyFile string
}
// containerInterface defines the restful.Container functions used on the root container
type containerInterface interface {
Add(service *restful.WebService) *restful.Container
Handle(path string, handler http.Handler)
Filter(filter restful.FilterFunction)
ServeHTTP(w http.ResponseWriter, r *http.Request)
RegisteredWebServices() []*restful.WebService
// RegisteredHandlePaths returns the paths of handlers registered directly with the container (non-web-services)
// Used to test filters are being applied on non-web-service handlers
RegisteredHandlePaths() []string
}
// filteringContainer delegates all Handle(...) calls to Container.HandleWithFilter(...),
// so we can ensure restful.FilterFunctions are used for all handlers
type filteringContainer struct {
*restful.Container
registeredHandlePaths []string
}
func (a *filteringContainer) Handle(path string, handler http.Handler) {
a.HandleWithFilter(path, handler)
a.registeredHandlePaths = append(a.registeredHandlePaths, path)
}
func (a *filteringContainer) RegisteredHandlePaths() []string {
return a.registeredHandlePaths
}
// ListenAndServeKubeletServer initializes a server to respond to HTTP network requests on the Kubelet.
func ListenAndServeKubeletServer(
host HostInterface,
resourceAnalyzer stats.ResourceAnalyzer,
address net.IP,
port uint,
tlsOptions *TLSOptions,
auth AuthInterface,
enableCAdvisorJSONEndpoints,
enableDebuggingHandlers,
enableContentionProfiling,
enableSystemLogHandler bool) {
klog.Infof("Starting to listen on %s:%d", address, port)
handler := NewServer(host, resourceAnalyzer, auth, enableCAdvisorJSONEndpoints, enableDebuggingHandlers, enableContentionProfiling, enableSystemLogHandler)
s := &http.Server{
Addr: net.JoinHostPort(address.String(), strconv.FormatUint(uint64(port), 10)),
Handler: &handler,
ReadTimeout: 4 * 60 * time.Minute,
WriteTimeout: 4 * 60 * time.Minute,
MaxHeaderBytes: 1 << 20,
}
if tlsOptions != nil {
s.TLSConfig = tlsOptions.Config
// Passing empty strings as the cert and key files means no
// cert/keys are specified and GetCertificate in the TLSConfig
// should be called instead.
klog.Fatal(s.ListenAndServeTLS(tlsOptions.CertFile, tlsOptions.KeyFile))
} else {
klog.Fatal(s.ListenAndServe())
}
}
// ListenAndServeKubeletReadOnlyServer initializes a server to respond to HTTP network requests on the Kubelet.
func ListenAndServeKubeletReadOnlyServer(host HostInterface, resourceAnalyzer stats.ResourceAnalyzer, address net.IP, port uint, enableCAdvisorJSONEndpoints bool) {
klog.V(1).Infof("Starting to listen read-only on %s:%d", address, port)
s := NewServer(host, resourceAnalyzer, nil, enableCAdvisorJSONEndpoints, false, false, false)
server := &http.Server{
Addr: net.JoinHostPort(address.String(), strconv.FormatUint(uint64(port), 10)),
Handler: &s,
MaxHeaderBytes: 1 << 20,
}
klog.Fatal(server.ListenAndServe())
}
// ListenAndServePodResources initializes a gRPC server to serve the PodResources service
func ListenAndServePodResources(socket string, podsProvider podresources.PodsProvider, devicesProvider podresources.DevicesProvider, cpusProvider podresources.CPUsProvider) {
server := grpc.NewServer()
podresourcesapiv1alpha1.RegisterPodResourcesListerServer(server, podresources.NewV1alpha1PodResourcesServer(podsProvider, devicesProvider))
podresourcesapi.RegisterPodResourcesListerServer(server, podresources.NewV1PodResourcesServer(podsProvider, devicesProvider, cpusProvider))
l, err := util.CreateListener(socket)
if err != nil {
klog.Fatalf("Failed to create listener for podResources endpoint: %v", err)
}
klog.Fatal(server.Serve(l))
}
// AuthInterface contains all methods required by the auth filters
type AuthInterface interface {
authenticator.Request
authorizer.RequestAttributesGetter
authorizer.Authorizer
}
// HostInterface contains all the kubelet methods required by the server.
// For testability.
type HostInterface interface {
stats.Provider
GetVersionInfo() (*cadvisorapi.VersionInfo, error)
GetCachedMachineInfo() (*cadvisorapi.MachineInfo, error)
GetRunningPods() ([]*v1.Pod, error)
RunInContainer(name string, uid types.UID, container string, cmd []string) ([]byte, error)
GetKubeletContainerLogs(ctx context.Context, podFullName, containerName string, logOptions *v1.PodLogOptions, stdout, stderr io.Writer) error
ServeLogs(w http.ResponseWriter, req *http.Request)
ResyncInterval() time.Duration
GetHostname() string
LatestLoopEntryTime() time.Time
GetExec(podFullName string, podUID types.UID, containerName string, cmd []string, streamOpts remotecommandserver.Options) (*url.URL, error)
GetAttach(podFullName string, podUID types.UID, containerName string, streamOpts remotecommandserver.Options) (*url.URL, error)
GetPortForward(podName, podNamespace string, podUID types.UID, portForwardOpts portforward.V4Options) (*url.URL, error)
}
// NewServer initializes and configures a kubelet.Server object to handle HTTP requests.
func NewServer(
host HostInterface,
resourceAnalyzer stats.ResourceAnalyzer,
auth AuthInterface,
enableCAdvisorJSONEndpoints,
enableDebuggingHandlers,
enableContentionProfiling,
enableSystemLogHandler bool) Server {
server := Server{
host: host,
resourceAnalyzer: resourceAnalyzer,
auth: auth,
restfulCont: &filteringContainer{Container: restful.NewContainer()},
metricsBuckets: sets.NewString(),
metricsMethodBuckets: sets.NewString("OPTIONS", "GET", "HEAD", "POST", "PUT", "DELETE", "TRACE", "CONNECT"),
}
if auth != nil {
server.InstallAuthFilter()
}
server.InstallDefaultHandlers(enableCAdvisorJSONEndpoints)
if enableDebuggingHandlers {
server.InstallDebuggingHandlers()
// To maintain backward compatibility serve logs only when enableDebuggingHandlers is also enabled
// see https://github.com/kubernetes/kubernetes/pull/87273
server.InstallSystemLogHandler(enableSystemLogHandler)
if enableContentionProfiling {
goruntime.SetBlockProfileRate(1)
}
} else {
server.InstallDebuggingDisabledHandlers()
}
return server
}
// InstallAuthFilter installs authentication filters with the restful Container.
func (s *Server) InstallAuthFilter() {
s.restfulCont.Filter(func(req *restful.Request, resp *restful.Response, chain *restful.FilterChain) {
// Authenticate
info, ok, err := s.auth.AuthenticateRequest(req.Request)
if err != nil {
klog.Errorf("Unable to authenticate the request due to an error: %v", err)
resp.WriteErrorString(http.StatusUnauthorized, "Unauthorized")
return
}
if !ok {
resp.WriteErrorString(http.StatusUnauthorized, "Unauthorized")
return
}
// Get authorization attributes
attrs := s.auth.GetRequestAttributes(info.User, req.Request)
// Authorize
decision, _, err := s.auth.Authorize(req.Request.Context(), attrs)
if err != nil {
msg := fmt.Sprintf("Authorization error (user=%s, verb=%s, resource=%s, subresource=%s)", attrs.GetUser().GetName(), attrs.GetVerb(), attrs.GetResource(), attrs.GetSubresource())
klog.Errorf(msg, err)
resp.WriteErrorString(http.StatusInternalServerError, msg)
return
}
if decision != authorizer.DecisionAllow {
msg := fmt.Sprintf("Forbidden (user=%s, verb=%s, resource=%s, subresource=%s)", attrs.GetUser().GetName(), attrs.GetVerb(), attrs.GetResource(), attrs.GetSubresource())
klog.V(2).Info(msg)
resp.WriteErrorString(http.StatusForbidden, msg)
return
}
// Continue
chain.ProcessFilter(req, resp)
})
}
// addMetricsBucketMatcher adds a regexp matcher and the relevant bucket to use when
// it matches. Please be aware this is not thread safe and should not be used dynamically
func (s *Server) addMetricsBucketMatcher(bucket string) {
s.metricsBuckets.Insert(bucket)
}
// getMetricBucket find the appropriate metrics reporting bucket for the given path
func (s *Server) getMetricBucket(path string) string {
root := getURLRootPath(path)
if s.metricsBuckets.Has(root) {
return root
}
return "other"
}
// getMetricMethodBucket checks for unknown or invalid HTTP verbs
func (s *Server) getMetricMethodBucket(method string) string {
if s.metricsMethodBuckets.Has(method) {
return method
}
return "other"
}
// InstallDefaultHandlers registers the default set of supported HTTP request
// patterns with the restful Container.
func (s *Server) InstallDefaultHandlers(enableCAdvisorJSONEndpoints bool) {
s.addMetricsBucketMatcher("healthz")
healthz.InstallHandler(s.restfulCont,
healthz.PingHealthz,
healthz.LogHealthz,
healthz.NamedCheck("syncloop", s.syncLoopHealthCheck),
)
s.addMetricsBucketMatcher("pods")
ws := new(restful.WebService)
ws.
Path("/pods").
Produces(restful.MIME_JSON)
ws.Route(ws.GET("").
To(s.getPods).
Operation("getPods"))
s.restfulCont.Add(ws)
s.addMetricsBucketMatcher("stats")
s.restfulCont.Add(stats.CreateHandlers(statsPath, s.host, s.resourceAnalyzer, enableCAdvisorJSONEndpoints))
s.addMetricsBucketMatcher("metrics")
s.addMetricsBucketMatcher("metrics/cadvisor")
s.addMetricsBucketMatcher("metrics/probes")
s.addMetricsBucketMatcher("metrics/resource")
//lint:ignore SA1019 https://github.com/kubernetes/enhancements/issues/1206
s.restfulCont.Handle(metricsPath, legacyregistry.Handler())
// cAdvisor metrics are exposed under the secured handler as well
r := compbasemetrics.NewKubeRegistry()
includedMetrics := cadvisormetrics.MetricSet{
cadvisormetrics.CpuUsageMetrics: struct{}{},
cadvisormetrics.MemoryUsageMetrics: struct{}{},
cadvisormetrics.CpuLoadMetrics: struct{}{},
cadvisormetrics.DiskIOMetrics: struct{}{},
cadvisormetrics.DiskUsageMetrics: struct{}{},
cadvisormetrics.NetworkUsageMetrics: struct{}{},
cadvisormetrics.AppMetrics: struct{}{},
cadvisormetrics.ProcessMetrics: struct{}{},
}
// Only add the Accelerator metrics if the feature is inactive
// Note: Accelerator metrics will be removed in the future, hence the feature gate.
if !utilfeature.DefaultFeatureGate.Enabled(features.DisableAcceleratorUsageMetrics) {
includedMetrics.Add(cadvisormetrics.MetricKind(cadvisormetrics.AcceleratorUsageMetrics))
}
cadvisorOpts := cadvisorv2.RequestOptions{
IdType: cadvisorv2.TypeName,
Count: 1,
Recursive: true,
}
r.RawMustRegister(metrics.NewPrometheusCollector(prometheusHostAdapter{s.host}, containerPrometheusLabelsFunc(s.host), includedMetrics, clock.RealClock{}, cadvisorOpts))
r.RawMustRegister(metrics.NewPrometheusMachineCollector(prometheusHostAdapter{s.host}, includedMetrics))
s.restfulCont.Handle(cadvisorMetricsPath,
compbasemetrics.HandlerFor(r, compbasemetrics.HandlerOpts{ErrorHandling: compbasemetrics.ContinueOnError}),
)
s.addMetricsBucketMatcher("metrics/resource")
resourceRegistry := compbasemetrics.NewKubeRegistry()
resourceRegistry.CustomMustRegister(collectors.NewResourceMetricsCollector(s.resourceAnalyzer))
s.restfulCont.Handle(resourceMetricsPath,
compbasemetrics.HandlerFor(resourceRegistry, compbasemetrics.HandlerOpts{ErrorHandling: compbasemetrics.ContinueOnError}),
)
// prober metrics are exposed under a different endpoint
s.addMetricsBucketMatcher("metrics/probes")
p := compbasemetrics.NewKubeRegistry()
_ = compbasemetrics.RegisterProcessStartTime(p.Register)
p.MustRegister(prober.ProberResults)
s.restfulCont.Handle(proberMetricsPath,
compbasemetrics.HandlerFor(p, compbasemetrics.HandlerOpts{ErrorHandling: compbasemetrics.ContinueOnError}),
)
s.addMetricsBucketMatcher("spec")
if enableCAdvisorJSONEndpoints {
ws := new(restful.WebService)
ws.
Path(specPath).
Produces(restful.MIME_JSON)
ws.Route(ws.GET("").
To(s.getSpec).
Operation("getSpec").
Writes(cadvisorapi.MachineInfo{}))
s.restfulCont.Add(ws)
}
}
const pprofBasePath = "/debug/pprof/"
// InstallDebuggingHandlers registers the HTTP request patterns that serve logs or run commands/containers
func (s *Server) InstallDebuggingHandlers() {
klog.Infof("Adding debug handlers to kubelet server.")
s.addMetricsBucketMatcher("run")
ws := new(restful.WebService)
ws.
Path("/run")
ws.Route(ws.POST("/{podNamespace}/{podID}/{containerName}").
To(s.getRun).
Operation("getRun"))
ws.Route(ws.POST("/{podNamespace}/{podID}/{uid}/{containerName}").
To(s.getRun).
Operation("getRun"))
s.restfulCont.Add(ws)
s.addMetricsBucketMatcher("exec")
ws = new(restful.WebService)
ws.
Path("/exec")
ws.Route(ws.GET("/{podNamespace}/{podID}/{containerName}").
To(s.getExec).
Operation("getExec"))
ws.Route(ws.POST("/{podNamespace}/{podID}/{containerName}").
To(s.getExec).
Operation("getExec"))
ws.Route(ws.GET("/{podNamespace}/{podID}/{uid}/{containerName}").
To(s.getExec).
Operation("getExec"))
ws.Route(ws.POST("/{podNamespace}/{podID}/{uid}/{containerName}").
To(s.getExec).
Operation("getExec"))
s.restfulCont.Add(ws)
s.addMetricsBucketMatcher("attach")
ws = new(restful.WebService)
ws.
Path("/attach")
ws.Route(ws.GET("/{podNamespace}/{podID}/{containerName}").
To(s.getAttach).
Operation("getAttach"))
ws.Route(ws.POST("/{podNamespace}/{podID}/{containerName}").
To(s.getAttach).
Operation("getAttach"))
ws.Route(ws.GET("/{podNamespace}/{podID}/{uid}/{containerName}").
To(s.getAttach).
Operation("getAttach"))
ws.Route(ws.POST("/{podNamespace}/{podID}/{uid}/{containerName}").
To(s.getAttach).
Operation("getAttach"))
s.restfulCont.Add(ws)
s.addMetricsBucketMatcher("portForward")
ws = new(restful.WebService)
ws.
Path("/portForward")
ws.Route(ws.GET("/{podNamespace}/{podID}").
To(s.getPortForward).
Operation("getPortForward"))
ws.Route(ws.POST("/{podNamespace}/{podID}").
To(s.getPortForward).
Operation("getPortForward"))
ws.Route(ws.GET("/{podNamespace}/{podID}/{uid}").
To(s.getPortForward).
Operation("getPortForward"))
ws.Route(ws.POST("/{podNamespace}/{podID}/{uid}").
To(s.getPortForward).
Operation("getPortForward"))
s.restfulCont.Add(ws)
s.addMetricsBucketMatcher("containerLogs")
ws = new(restful.WebService)
ws.
Path("/containerLogs")
ws.Route(ws.GET("/{podNamespace}/{podID}/{containerName}").
To(s.getContainerLogs).
Operation("getContainerLogs"))
s.restfulCont.Add(ws)
s.addMetricsBucketMatcher("configz")
configz.InstallHandler(s.restfulCont)
s.addMetricsBucketMatcher("debug")
handlePprofEndpoint := func(req *restful.Request, resp *restful.Response) {
name := strings.TrimPrefix(req.Request.URL.Path, pprofBasePath)
switch name {
case "profile":
pprof.Profile(resp, req.Request)
case "symbol":
pprof.Symbol(resp, req.Request)
case "cmdline":
pprof.Cmdline(resp, req.Request)
case "trace":
pprof.Trace(resp, req.Request)
default:
pprof.Index(resp, req.Request)
}
}
// Setup pprof handlers.
ws = new(restful.WebService).Path(pprofBasePath)
ws.Route(ws.GET("/{subpath:*}").To(func(req *restful.Request, resp *restful.Response) {
handlePprofEndpoint(req, resp)
})).Doc("pprof endpoint")
s.restfulCont.Add(ws)
// Setup flags handlers.
// so far, only logging related endpoints are considered valid to add for these debug flags.
s.restfulCont.Handle("/debug/flags/v", routes.StringFlagPutHandler(logs.GlogSetter))
// The /runningpods endpoint is used for testing only.
s.addMetricsBucketMatcher("runningpods")
ws = new(restful.WebService)
ws.
Path("/runningpods/").
Produces(restful.MIME_JSON)
ws.Route(ws.GET("").
To(s.getRunningPods).
Operation("getRunningPods"))
s.restfulCont.Add(ws)
}
// InstallDebuggingDisabledHandlers registers the HTTP request patterns that provide better error messages
func (s *Server) InstallDebuggingDisabledHandlers() {
h := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
http.Error(w, "Debug endpoints are disabled.", http.StatusMethodNotAllowed)
})
s.addMetricsBucketMatcher("run")
|
s.addMetricsBucketMatcher("exec")
s.addMetricsBucketMatcher("attach")
s.addMetricsBucketMatcher("portForward")
s.addMetricsBucketMatcher("containerLogs")
s.addMetricsBucketMatcher("runningpods")
s.addMetricsBucketMatcher("pprof")
s.addMetricsBucketMatcher("logs")
paths := []string{
"/run/", "/exec/", "/attach/", "/portForward/", "/containerLogs/",
"/runningpods/", pprofBasePath, logsPath}
for _, p := range paths {
s.restfulCont.Handle(p, h)
}
}
// InstallSystemLogHandler registers the HTTP request patterns for logs endpoint.
func (s *Server) InstallSystemLogHandler(enableSystemLogHandler bool) {
s.addMetricsBucketMatcher("logs")
if enableSystemLogHandler {
ws := new(restful.WebService)
ws.Path(logsPath)
ws.Route(ws.GET("").
To(s.getLogs).
Operation("getLogs"))
ws.Route(ws.GET("/{logpath:*}").
To(s.getLogs).
Operation("getLogs").
Param(ws.PathParameter("logpath", "path to the log").DataType("string")))
s.restfulCont.Add(ws)
} else {
h := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
http.Error(w, "logs endpoint is disabled.", http.StatusMethodNotAllowed)
})
s.restfulCont.Handle(logsPath, h)
}
}
// Checks if kubelet's sync loop that updates containers is working.
func (s *Server) syncLoopHealthCheck(req *http.Request) error {
duration := s.host.ResyncInterval() * 2
minDuration := time.Minute * 5
if duration < minDuration {
duration = minDuration
}
enterLoopTime := s.host.LatestLoopEntryTime()
if !enterLoopTime.IsZero() && time.Now().After(enterLoopTime.Add(duration)) {
return fmt.Errorf("sync Loop took longer than expected")
}
return nil
}
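// Worked example for the check above (illustrative only; the 10s resync interval is an
// arbitrary assumption): duration = max(2*10s, 5m) = 5m, so the health check only fails
// once the latest sync-loop entry time is more than five minutes in the past.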
// getContainerLogs handles containerLogs request against the Kubelet
func (s *Server) getContainerLogs(request *restful.Request, response *restful.Response) {
podNamespace := request.PathParameter("podNamespace")
podID := request.PathParameter("podID")
containerName := request.PathParameter("containerName")
ctx := request.Request.Context()
if len(podID) == 0 {
// TODO: Why return JSON when the rest return plaintext errors?
// TODO: Why return plaintext errors?
response.WriteError(http.StatusBadRequest, fmt.Errorf(`{"message": "Missing podID."}`))
return
}
if len(containerName) == 0 {
// TODO: Why return JSON when the rest return plaintext errors?
response.WriteError(http.StatusBadRequest, fmt.Errorf(`{"message": "Missing container name."}`))
return
}
if len(podNamespace) == 0 {
// TODO: Why return JSON when the rest return plaintext errors?
response.WriteError(http.StatusBadRequest, fmt.Errorf(`{"message": "Missing podNamespace."}`))
return
}
query := request.Request.URL.Query()
// backwards compatibility for the "tail" query parameter
if tail := request.QueryParameter("tail"); len(tail) > 0 {
query["tailLines"] = []string{tail}
// "all" is the same as omitting tail
if tail == "all" {
delete(query, "tailLines")
}
}
// container logs on the kubelet are locked to the v1 API version of PodLogOptions
logOptions := &v1.PodLogOptions{}
if err := legacyscheme.ParameterCodec.DecodeParameters(query, v1.SchemeGroupVersion, logOptions); err != nil {
response.WriteError(http.StatusBadRequest, fmt.Errorf(`{"message": "Unable to decode query."}`))
return
}
logOptions.TypeMeta = metav1.TypeMeta{}
if errs := validation.ValidatePodLogOptions(logOptions); len(errs) > 0 {
response.WriteError(http.StatusUnprocessableEntity, fmt.Errorf(`{"message": "Invalid request."}`))
return
}
pod, ok := s.host.GetPodByName(podNamespace, podID)
if !ok {
response.WriteError(http.StatusNotFound, fmt.Errorf("pod %q does not exist", podID))
return
}
// Check if containerName is valid.
if kubecontainer.GetContainerSpec(pod, containerName) == nil {
response.WriteError(http.StatusNotFound, fmt.Errorf("container %q not found in pod %q", containerName, podID))
return
}
if _, ok := response.ResponseWriter.(http.Flusher); !ok {
response.WriteError(http.StatusInternalServerError, fmt.Errorf("unable to convert %v into http.Flusher, cannot show logs", reflect.TypeOf(response)))
return
}
fw := flushwriter.Wrap(response.ResponseWriter)
response.Header().Set("Transfer-Encoding", "chunked")
if err := s.host.GetKubeletContainerLogs(ctx, kubecontainer.GetPodFullName(pod), containerName, logOptions, fw, fw); err != nil {
response.WriteError(http.StatusBadRequest, err)
return
}
}
// encodePods creates a v1.PodList object from pods and returns the encoded
// PodList.
func encodePods(pods []*v1.Pod) (data []byte, err error) {
podList := new(v1.PodList)
for _, pod := range pods {
podList.Items = append(podList.Items, *pod)
}
// TODO: this needs to be parameterized to the kubelet, not hardcoded. Depends on Kubelet
// as API server refactor.
// TODO: Locked to v1, needs to be made generic
codec := legacyscheme.Codecs.LegacyCodec(schema.GroupVersion{Group: v1.GroupName, Version: "v1"})
return runtime.Encode(codec, podList)
}
// getPods returns a list of pods bound to the Kubelet and their spec.
func (s *Server) getPods(request *restful.Request, response *restful.Response) {
pods := s.host.GetPods()
data, err := encodePods(pods)
if err != nil {
response.WriteError(http.StatusInternalServerError, err)
return
}
writeJSONResponse(response, data)
}
// getRunningPods returns a list of pods running on Kubelet. The list is
// provided by the container runtime, and is different from the list returned
// by getPods, which is a set of desired pods to run.
func (s *Server) getRunningPods(request *restful.Request, response *restful.Response) {
pods, err := s.host.GetRunningPods()
if err != nil {
response.WriteError(http.StatusInternalServerError, err)
return
}
data, err := encodePods(pods)
if err != nil {
response.WriteError(http.StatusInternalServerError, err)
return
}
writeJSONResponse(response, data)
}
// getLogs handles logs requests against the Kubelet.
func (s *Server) getLogs(request *restful.Request, response *restful.Response) {
s.host.ServeLogs(response, request.Request)
}
// getSpec handles spec requests against the Kubelet.
func (s *Server) getSpec(request *restful.Request, response *restful.Response) {
info, err := s.host.GetCachedMachineInfo()
if err != nil {
response.WriteError(http.StatusInternalServerError, err)
return
}
response.WriteEntity(info)
}
type execRequestParams struct {
podNamespace string
podName string
podUID types.UID
containerName string
cmd []string
}
func getExecRequestParams(req *restful.Request) execRequestParams {
return execRequestParams{
podNamespace: req.PathParameter("podNamespace"),
podName: req.PathParameter("podID"),
podUID: types.UID(req.PathParameter("uid")),
containerName: req.PathParameter("containerName"),
cmd: req.Request.URL.Query()[api.ExecCommandParam],
}
}
type portForwardRequestParams struct {
podNamespace string
podName string
podUID types.UID
}
func getPortForwardRequestParams(req *restful.Request) portForwardRequestParams {
return portForwardRequestParams{
podNamespace: req.PathParameter("podNamespace"),
podName: req.PathParameter("podID"),
podUID: types.UID(req.PathParameter("uid")),
}
}
type responder struct{}
func (r *responder) Error(w http.ResponseWriter, req *http.Request, err error) {
klog.Errorf("Error while proxying request: %v", err)
http.Error(w, err.Error(), http.StatusInternalServerError)
}
// proxyStream proxies stream to url.
func proxyStream(w http.ResponseWriter, r *http.Request, url *url.URL) {
// TODO(random-liu): Set MaxBytesPerSec to throttle the stream.
handler := proxy.NewUpgradeAwareHandler(url, nil /*transport*/, false /*wrapTransport*/, true /*upgradeRequired*/, &responder{})
handler.ServeHTTP(w, r)
}
// getAttach handles requests to attach to a container.
func (s *Server) getAttach(request *restful.Request, response *restful.Response) {
params := getExecRequestParams(request)
streamOpts, err := remotecommandserver.NewOptions(request.Request)
if err != nil {
utilruntime.HandleError(err)
response.WriteError(http.StatusBadRequest, err)
return
}
pod, ok := s.host.GetPodByName(params.podNamespace, params.podName)
if !ok {
response.WriteError(http.StatusNotFound, fmt.Errorf("pod does not exist"))
return
}
podFullName := kubecontainer.GetPodFullName(pod)
url, err := s.host.GetAttach(podFullName, params.podUID, params.containerName, *streamOpts)
if err != nil {
streaming.WriteError(err, response.ResponseWriter)
return
}
proxyStream(response.ResponseWriter, request.Request, url)
}
// getExec handles requests to run a command inside a container.
func (s *Server) getExec(request *restful.Request, response *restful.Response) {
params := getExecRequestParams(request)
streamOpts, err := remotecommandserver.NewOptions(request.Request)
if err != nil {
utilruntime.HandleError(err)
response.WriteError(http.StatusBadRequest, err)
return
}
pod, ok := s.host.GetPodByName(params.podNamespace, params.podName)
if !ok {
response.WriteError(http.StatusNotFound, fmt.Errorf("pod does not exist"))
return
}
podFullName := kubecontainer.GetPodFullName(pod)
url, err := s.host.GetExec(podFullName, params.podUID, params.containerName, params.cmd, *streamOpts)
if err != nil {
streaming.WriteError(err, response.ResponseWriter)
return
}
proxyStream(response.ResponseWriter, request.Request, url)
}
// getRun handles requests to run a command inside a container.
func (s *Server) getRun(request *restful.Request, response *restful.Response) {
params := getExecRequestParams(request)
pod, ok := s.host.GetPodByName(params.podNamespace, params.podName)
if !ok {
response.WriteError(http.StatusNotFound, fmt.Errorf("pod does not exist"))
return
}
// For legacy reasons, run uses different query param than exec.
params.cmd = strings.Split(request.QueryParameter("cmd"), " ")
data, err := s.host.RunInContainer(kubecontainer.GetPodFullName(pod), params.podUID, params.containerName, params.cmd)
if err != nil {
response.WriteError(http.StatusInternalServerError, err)
return
}
writeJSONResponse(response, data)
}
// Derived from go-restful writeJSON.
func writeJSONResponse(response *restful.Response, data []byte) {
if data == nil {
response.WriteHeader(http.StatusOK)
// do not write a nil representation
return
}
response.Header().Set(restful.HEADER_ContentType, restful.MIME_JSON)
response.WriteHeader(http.StatusOK)
if _, err := response.Write(data); err != nil {
klog.Errorf("Error writing response: %v", err)
}
}
// getPortForward handles a new restful port forward request. It determines the
// pod name and uid and then calls ServePortForward.
func (s *Server) getPortForward(request *restful.Request, response *restful.Response) {
params := getPortForwardRequestParams(request)
portForwardOptions, err := portforward.NewV4Options(request.Request)
if err != nil {
utilruntime.HandleError(err)
response.WriteError(http.StatusBadRequest, err)
return
}
pod, ok := s.host.GetPodByName(params.podNamespace, params.podName)
if !ok {
response.WriteError(http.StatusNotFound, fmt.Errorf("pod does not exist"))
return
}
if len(params.podUID) > 0 && pod.UID != params.podUID {
response.WriteError(http.StatusNotFound, fmt.Errorf("pod not found"))
return
}
url, err := s.host.GetPortForward(pod.Name, pod.Namespace, pod.UID, *portForwardOptions)
if err != nil {
streaming.WriteError(err, response.ResponseWriter)
return
}
proxyStream(response.ResponseWriter, request.Request, url)
}
// getURLRootPath trims a URL path.
// For paths in the format of "/metrics/xxx", "metrics/xxx" is returned;
// For all other paths, the first part of the path is returned.
func getURLRootPath(path string) string {
parts := strings.SplitN(strings.TrimPrefix(path, "/"), "/", 3)
if len(parts) == 0 {
return path
}
if parts[0] == "metrics" && len(parts) > 1 {
return fmt.Sprintf("%s/%s", parts[0], parts[1])
}
return parts[0]
}
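// Illustrative examples of the mapping above (derived directly from the code, for clarity):
//   getURLRootPath("/metrics/cadvisor/foo") -> "metrics/cadvisor"
//   getURLRootPath("/pods/")                -> "pods"
//   getURLRootPath("/")                     -> ""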
var longRunningRequestPathMap = map[string]bool{
"exec": true,
"attach": true,
"portforward": true,
"debug": true,
}
// isLongRunningRequest determines whether the request is long-running or not.
func isLongRunningRequest(path string) bool {
_, ok := longRunningRequestPathMap[path]
return ok
}
var statusesNoTracePred = httplog.StatusIsNot(
http.StatusOK,
http.StatusFound,
http.StatusMovedPermanently,
http.StatusTemporaryRedirect,
http.StatusBadRequest,
http.StatusNotFound,
http.StatusSwitchingProtocols,
)
// ServeHTTP responds to HTTP requests on the Kubelet.
func (s *Server) ServeHTTP(w http.ResponseWriter, req *http.Request) {
handler := httplog.WithLogging(s.restfulCont, statusesNoTracePred, nil)
// monitor http requests
var serverType string
if s.auth == nil {
serverType = "readonly"
} else {
serverType = "readwrite"
}
method, path := s.getMetricMethodBucket(req.Method), s.getMetricBucket(req.URL.Path)
longRunning := strconv.FormatBool(isLongRunningRequest(path))
servermetrics.HTTPRequests.WithLabelValues(method, path, serverType, longRunning).Inc()
servermetrics.HTTPInflightRequests.WithLabelValues(method, path, serverType, longRunning).Inc()
defer servermetrics.HTTPInflightRequests.WithLabelValues(method, path, serverType, longRunning).Dec()
startTime := time.Now()
defer servermetrics.HTTPRequestsDuration.WithLabelValues(method, path, serverType, longRunning).Observe(servermetrics.SinceInSeconds(startTime))
handler.ServeHTTP(w, req)
}
// prometheusHostAdapter adapts the HostInterface to the interface expected by the
// cAdvisor prometheus collector.
type prometheusHostAdapter struct {
host HostInterface
}
func (a prometheusHostAdapter) GetRequestedContainersInfo(containerName string, options cadvisorv2.RequestOptions) (map[string]*cadvisorapi.ContainerInfo, error) {
return a.host.GetRequestedContainersInfo(containerName, options)
}
func (a prometheusHostAdapter) GetVersionInfo() (*cadvisorapi.VersionInfo, error) {
return a.host.GetVersionInfo()
}
func (a prometheusHostAdapter) GetMachineInfo() (*cadvisorapi.MachineInfo, error) {
return a.host.GetCachedMachineInfo()
}
func containerPrometheusLabelsFunc(s stats.Provider) metrics.ContainerLabelsFunc {
// containerPrometheusLabels maps cAdvisor labels to prometheus labels.
return func(c *cadvisorapi.ContainerInfo) map[string]string {
// Prometheus requires that all metrics in the same family have the same labels,
// so we arrange to supply blank strings for missing labels
var name, image, podName, namespace, containerName string
if len(c.Aliases) > 0 {
name = c.Aliases[0]
}
image = c.Spec.Image
if v, ok := c.Spec.Labels[kubelettypes.KubernetesPodNameLabel]; ok {
podName = v
}
if v, ok := c.Spec.Labels[kubelettypes.KubernetesPodNamespaceLabel]; ok {
namespace = v
}
if v, ok := c.Spec.Labels[kubelettypes.KubernetesContainerNameLabel]; ok {
containerName = v
}
// Associate pod cgroup with pod so we have an accurate accounting of sandbox
if podName == "" && namespace == "" {
if pod, found := s.GetPodByCgroupfs(c.Name); found {
podName = pod.Name
namespace = pod.Namespace
}
}
set := map[string]string{
metrics.LabelID: c.Name,
metrics.LabelName: name,
metrics.LabelImage: image,
"pod": podName,
"namespace": namespace,
"container": containerName,
}
return set
}
}
| |
reducer.js
|
switch (action.type) {
case types.SET_ETA_VIEW:
return {
...state,
etaView: action.etaView
};
case types.SET_DESTINATION_DATASET:
return {
...state,
destinationDataset: action.destinationDataset
};
case types.SET_TIME_LIMIT:
return {
...state,
timeLimit: action.timeLimit
};
case types.SET_TIME_AT_DESTINATION:
return {
...state,
timeAtDestination: action.time
};
case types.SET_MAP_OPACITY:
return {
...state,
mapOpacity: action.mapOpacity
};
case types.SET_MAP_MIN_VALUE:
return {
...state,
mapMinValue: action.mapMinValue
};
case types.SET_MAP_MAX_VALUE:
return {
...state,
mapMaxValue: action.mapMaxValue
};
case types.SET_MAP_COLOR_SCHEME:
return {
...state,
mapColorScheme: action.mapColorScheme
};
case types.SET_MAP_VIEW_STATE:
return {
...state,
mapViewState: action.mapViewState
};
case types.SET_SELECTED_DATA_ZONE:
return {
...state,
selectedDataZone: action.selectedDataZone
};
case types.SET_MAP_TOOLTIP:
return {
...state,
mapTooltip: action.mapTooltip
};
case types.SET_ETA:
return {
...state,
eta: action.eta
};
case types.SET_LOCATIONDT:
return {
...state,
locationDT: action.locationDT
};
default:
return { ...state }
}
}
|
import * as types from './actionTypes';
import initialState from './initialState';
export default function reducer(state = initialState, action) {
|
|
model.py
|
from __future__ import print_function
import config
from config import *
from utils import *
# INPUT:
# VGG16 - block5_pool (MaxPooling2D) (None, 7, 7, 512)
# OUTPUT:
# Branch1 - Class Prediction
# Branch2 - IOU Prediction
# NOTE: Both models in create_model_train() and create_model_predict() should be exactly the same
def create_model(is_input_bottleneck, is_load_weights, input_shape, output_classes, optimizer='Adagrad', learn_rate=None, decay=0.0, momentum=0.0, activation='relu', dropout_rate=0.5):
logging.debug('input_shape {}'.format(input_shape))
logging.debug('input_shape {}'.format(type(input_shape)))
# Optimizer
optimizer, learn_rate = get_optimizer(optimizer, learn_rate, decay, momentum)
# Train
if is_input_bottleneck is True:
model_inputs = Input(shape=(input_shape))
common_inputs = model_inputs
# Predict
else: #input_shape = (img_width, img_height, 3)
base_model = applications.VGG16(weights='imagenet', include_top=False, input_shape=input_shape)
#base_model = applications.inception_v3.InceptionV3(include_top=False, weights='imagenet', input_shape=input_shape)
logging.debug('base_model inputs {}'.format(base_model.input)) # shape=(?, 224, 224, 3)
logging.debug('base_model outputs {}'.format(base_model.output)) # shape=(?, 7, 7, 512)
model_inputs = base_model.input
common_inputs = base_model.output
## Model Classification
|
x = Dropout(dropout_rate)(x)
predictions_class = Dense(output_classes, activation='softmax', name='predictions_class')(x)
## Model (Regression) IOU score
x = Flatten()(common_inputs)
x = Dense(256, activation='tanh')(x)
x = Dropout(dropout_rate)(x)
x = Dense(256, activation='tanh')(x)
x = Dropout(dropout_rate)(x)
predictions_iou = Dense(1, activation='sigmoid', name='predictions_iou')(x)
## Create Model
model = Model(inputs=model_inputs, outputs=[predictions_class, predictions_iou])
# logging.debug('model summary {}'.format(model.summary()))
## Load weights
if is_load_weights is True:
model.load_weights(top_model_weights_path_load, by_name=True)
## Compile
model.compile(optimizer=optimizer,
loss={'predictions_class': 'sparse_categorical_crossentropy', 'predictions_iou': 'mean_squared_error'}, metrics=['accuracy'],
loss_weights={'predictions_class': predictions_class_weight, 'predictions_iou': predictions_iou_weight})
logging.info('optimizer:{} learn_rate:{} decay:{} momentum:{} activation:{} dropout_rate:{}'.format(
optimizer, learn_rate, decay, momentum, activation, dropout_rate))
return model
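# Illustrative call (a sketch only; the bottleneck shape and class count below are assumptions
# based on the VGG16 block5_pool note at the top of this file):
# model = create_model(is_input_bottleneck=True, is_load_weights=False, input_shape=(7, 7, 512), output_classes=10)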
|
x = Flatten()(common_inputs)
x = Dense(256, activation='tanh')(x)
|
volume_efficiency_policy_get_responses.go
|
// Code generated by go-swagger; DO NOT EDIT.
package storage
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"
"github.com/go-openapi/runtime"
"github.com/go-openapi/strfmt"
"github.com/netapp/trident/storage_drivers/ontap/api/rest/models"
)
// VolumeEfficiencyPolicyGetReader is a Reader for the VolumeEfficiencyPolicyGet structure.
type VolumeEfficiencyPolicyGetReader struct {
formats strfmt.Registry
}
// ReadResponse reads a server response into the received o.
func (o *VolumeEfficiencyPolicyGetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
switch response.Code() {
case 200:
result := NewVolumeEfficiencyPolicyGetOK()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return result, nil
default:
result := NewVolumeEfficiencyPolicyGetDefault(response.Code())
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
if response.Code()/100 == 2 {
return result, nil
}
return nil, result
}
}
// NewVolumeEfficiencyPolicyGetOK creates a VolumeEfficiencyPolicyGetOK with default headers values
func
|
() *VolumeEfficiencyPolicyGetOK {
return &VolumeEfficiencyPolicyGetOK{}
}
/* VolumeEfficiencyPolicyGetOK describes a response with status code 200, with default header values.
OK
*/
type VolumeEfficiencyPolicyGetOK struct {
Payload *models.VolumeEfficiencyPolicy
}
func (o *VolumeEfficiencyPolicyGetOK) Error() string {
return fmt.Sprintf("[GET /storage/volume-efficiency-policies/{uuid}][%d] volumeEfficiencyPolicyGetOK %+v", 200, o.Payload)
}
func (o *VolumeEfficiencyPolicyGetOK) GetPayload() *models.VolumeEfficiencyPolicy {
return o.Payload
}
func (o *VolumeEfficiencyPolicyGetOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.VolumeEfficiencyPolicy)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewVolumeEfficiencyPolicyGetDefault creates a VolumeEfficiencyPolicyGetDefault with default headers values
func NewVolumeEfficiencyPolicyGetDefault(code int) *VolumeEfficiencyPolicyGetDefault {
return &VolumeEfficiencyPolicyGetDefault{
_statusCode: code,
}
}
/* VolumeEfficiencyPolicyGetDefault describes a response with status code -1, with default header values.
Error
*/
type VolumeEfficiencyPolicyGetDefault struct {
_statusCode int
Payload *models.ErrorResponse
}
// Code gets the status code for the volume efficiency policy get default response
func (o *VolumeEfficiencyPolicyGetDefault) Code() int {
return o._statusCode
}
func (o *VolumeEfficiencyPolicyGetDefault) Error() string {
return fmt.Sprintf("[GET /storage/volume-efficiency-policies/{uuid}][%d] volume_efficiency_policy_get default %+v", o._statusCode, o.Payload)
}
func (o *VolumeEfficiencyPolicyGetDefault) GetPayload() *models.ErrorResponse {
return o.Payload
}
func (o *VolumeEfficiencyPolicyGetDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorResponse)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
|
NewVolumeEfficiencyPolicyGetOK
|
mod.rs
|
pub use component_behaviour_manager_impl::*;
pub use component_manager_impl::*;
pub use entity_behaviour_manager_impl::*;
pub use entity_instance_manager_impl::*;
pub use entity_type_manager_impl::*;
pub use entity_vertex_manager_impl::*;
pub use flow_manager_impl::*;
pub use graph_database_impl::*;
pub use graphql_server_impl::*;
pub use plugin_registry_impl::*;
pub use reactive_entity_instance_manager_impl::*;
pub use reactive_flow_manager_impl::*;
pub use reactive_relation_instance_manager_impl::*;
pub use relation_behaviour_manager_impl::*;
pub use relation_edge_manager_impl::*;
pub use relation_instance_manager_impl::*;
pub use relation_type_manager_impl::*;
pub use shutdown_manager_impl::*;
pub use web_resource_manager_impl::*;
pub mod component_behaviour_manager_impl;
pub mod component_manager_impl;
pub mod entity_behaviour_manager_impl;
pub mod entity_instance_manager_impl;
pub mod entity_type_manager_impl;
pub mod entity_vertex_manager_impl;
|
pub mod graph_database_impl;
pub mod graphql_server_impl;
pub mod plugin_registry_impl;
pub mod reactive_entity_instance_manager_impl;
pub mod reactive_flow_manager_impl;
pub mod reactive_relation_instance_manager_impl;
pub mod relation_behaviour_manager_impl;
pub mod relation_edge_manager_impl;
pub mod relation_instance_manager_impl;
pub mod relation_type_manager_impl;
pub mod shutdown_manager_impl;
pub mod web_resource_manager_impl;
|
pub mod flow_manager_impl;
|
region_name.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use borrow_check::nll::region_infer::RegionInferenceContext;
use borrow_check::nll::ToRegionVid;
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::infer::InferCtxt;
use rustc::mir::Mir;
use rustc::ty::subst::{Substs, UnpackedKind};
use rustc::ty::{self, RegionKind, RegionVid, Ty, TyCtxt};
use rustc::util::ppaux::with_highlight_region;
use rustc_errors::DiagnosticBuilder;
use syntax::ast::{Name, DUMMY_NODE_ID};
use syntax::symbol::keywords;
use syntax_pos::symbol::InternedString;
impl<'tcx> RegionInferenceContext<'tcx> {
/// Maps from an internal MIR region vid to something that we can
/// report to the user. In some cases, the region vids will map
/// directly to lifetimes that the user has a name for (e.g.,
/// `'static`). But frequently they will not, in which case we
/// have to find some way to identify the lifetime to the user. To
/// that end, this function takes a "diagnostic" so that it can
/// create auxiliary notes as needed.
///
/// Example (function arguments):
///
/// Suppose we are trying to give a name to the lifetime of the
/// reference `x`:
///
/// ```
/// fn foo(x: &u32) { .. }
/// ```
///
/// This function would create a label like this:
///
/// ```
/// | fn foo(x: &u32) { .. }
/// ------- fully elaborated type of `x` is `&'1 u32`
/// ```
///
/// and then return the name `'1` for us to use.
crate fn give_region_a_name(
&self,
infcx: &InferCtxt<'_, '_, 'tcx>,
mir: &Mir<'tcx>,
mir_def_id: DefId,
fr: RegionVid,
counter: &mut usize,
diag: &mut DiagnosticBuilder,
) -> InternedString
|
/// Check for the case where `fr` maps to something that the
/// *user* has a name for. In that case, we'll be able to map
/// `fr` to a `Region<'tcx>`, and that region will be one of
/// named variants.
fn give_name_from_error_region(
&self,
tcx: TyCtxt<'_, '_, 'tcx>,
mir_def_id: DefId,
fr: RegionVid,
counter: &mut usize,
diag: &mut DiagnosticBuilder<'_>,
) -> Option<InternedString> {
let error_region = self.to_error_region(fr)?;
debug!("give_region_a_name: error_region = {:?}", error_region);
match error_region {
ty::ReEarlyBound(ebr) => {
self.highlight_named_span(tcx, error_region, &ebr.name, diag);
Some(ebr.name)
},
ty::ReStatic => Some(keywords::StaticLifetime.name().as_interned_str()),
ty::ReFree(free_region) => match free_region.bound_region {
ty::BoundRegion::BrNamed(_, name) => {
self.highlight_named_span(tcx, error_region, &name, diag);
Some(name)
},
ty::BoundRegion::BrEnv => {
let closure_span = tcx.hir.span_if_local(mir_def_id).unwrap();
let region_name = self.synthesize_region_name(counter);
diag.span_label(
closure_span,
format!("lifetime `{}` represents the closure body", region_name),
);
Some(region_name)
}
ty::BoundRegion::BrAnon(_) | ty::BoundRegion::BrFresh(_) => None,
},
ty::ReLateBound(..)
| ty::ReScope(..)
| ty::ReVar(..)
| ty::ReSkolemized(..)
| ty::ReEmpty
| ty::ReErased
| ty::ReClosureBound(..)
| ty::ReCanonical(..) => None,
}
}
/// Highlight a named span to provide context for error messages that
/// mention that span, for example:
///
/// ```
/// |
/// | fn two_regions<'a, 'b, T>(cell: Cell<&'a ()>, t: T)
/// | -- -- lifetime `'b` defined here
/// | |
/// | lifetime `'a` defined here
/// |
/// | with_signature(cell, t, |cell, t| require(cell, t));
/// | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ argument requires that `'b` must
/// | outlive `'a`
/// ```
fn highlight_named_span(
&self,
tcx: TyCtxt<'_, '_, 'tcx>,
error_region: &RegionKind,
name: &InternedString,
diag: &mut DiagnosticBuilder<'_>,
) {
let cm = tcx.sess.codemap();
let scope = error_region.free_region_binding_scope(tcx);
let node = tcx.hir.as_local_node_id(scope).unwrap_or(DUMMY_NODE_ID);
let mut sp = cm.def_span(tcx.hir.span(node));
if let Some(param) = tcx.hir.get_generics(scope).and_then(|generics| {
generics.get_named(name)
}) {
sp = param.span;
}
diag.span_label(
sp,
format!("lifetime `{}` defined here", name),
);
}
/// Find an argument that contains `fr` and label it with a fully
/// elaborated type, returning something like `'1`. Result looks
/// like:
///
/// ```
/// | fn foo(x: &u32) { .. }
/// ------- fully elaborated type of `x` is `&'1 u32`
/// ```
fn give_name_if_anonymous_region_appears_in_arguments(
&self,
infcx: &InferCtxt<'_, '_, 'tcx>,
mir: &Mir<'tcx>,
mir_def_id: DefId,
fr: RegionVid,
counter: &mut usize,
diag: &mut DiagnosticBuilder<'_>,
) -> Option<InternedString> {
let implicit_inputs = self.universal_regions.defining_ty.implicit_inputs();
let argument_index = self.get_argument_index_for_region(infcx.tcx, fr)?;
let arg_ty =
self.universal_regions.unnormalized_input_tys[implicit_inputs + argument_index];
if let Some(region_name) = self.give_name_if_we_can_match_hir_ty_from_argument(
infcx,
mir,
mir_def_id,
fr,
arg_ty,
argument_index,
counter,
diag,
) {
return Some(region_name);
}
let (_argument_name, argument_span) = self.get_argument_name_and_span_for_region(
mir, argument_index);
let region_name = self.synthesize_region_name(counter);
diag.span_label(
argument_span,
format!("lifetime `{}` appears in this argument", region_name,),
);
Some(region_name)
}
fn give_name_if_we_can_match_hir_ty_from_argument(
&self,
infcx: &InferCtxt<'_, '_, 'tcx>,
mir: &Mir<'tcx>,
mir_def_id: DefId,
needle_fr: RegionVid,
argument_ty: Ty<'tcx>,
argument_index: usize,
counter: &mut usize,
diag: &mut DiagnosticBuilder<'_>,
) -> Option<InternedString> {
let mir_node_id = infcx.tcx.hir.as_local_node_id(mir_def_id)?;
let fn_decl = infcx.tcx.hir.fn_decl(mir_node_id)?;
let argument_hir_ty: &hir::Ty = &fn_decl.inputs[argument_index];
match argument_hir_ty.node {
// This indicates a variable with no type annotation, like
// `|x|`... in that case, we can't highlight the type but
// must highlight the variable.
hir::TyKind::Infer => self.give_name_if_we_cannot_match_hir_ty(
infcx,
mir,
needle_fr,
argument_ty,
counter,
diag,
),
_ => self.give_name_if_we_can_match_hir_ty(
infcx.tcx,
needle_fr,
argument_ty,
argument_hir_ty,
counter,
diag,
),
}
}
/// Attempts to highlight the specific part of a type in an argument
/// that has no type annotation.
/// For example, we might produce an annotation like this:
///
/// ```
/// | foo(|a, b| b)
/// | - -
/// | | |
/// | | has type `&'1 u32`
/// | has type `&'2 u32`
/// ```
fn give_name_if_we_cannot_match_hir_ty(
&self,
infcx: &InferCtxt<'_, '_, 'tcx>,
mir: &Mir<'tcx>,
needle_fr: RegionVid,
argument_ty: Ty<'tcx>,
counter: &mut usize,
diag: &mut DiagnosticBuilder<'_>,
) -> Option<InternedString> {
let type_name = with_highlight_region(needle_fr, *counter, || {
infcx.extract_type_name(&argument_ty)
});
debug!("give_name_if_we_cannot_match_hir_ty: type_name={:?} needle_fr={:?}",
type_name, needle_fr);
let assigned_region_name = if type_name.find(&format!("'{}", counter)).is_some() {
// Only add a label if we can confirm that a region was labelled.
let argument_index = self.get_argument_index_for_region(infcx.tcx, needle_fr)?;
let (_, span) = self.get_argument_name_and_span_for_region(mir, argument_index);
diag.span_label(span, format!("has type `{}`", type_name));
// The current counter value has already been embedded in the type name above, so
// synthesize_region_name both advances the counter and returns the region name
// that was just used.
Some(self.synthesize_region_name(counter))
} else {
None
};
assigned_region_name
}
/// Attempts to highlight the specific part of a type annotation
/// that contains the anonymous reference we want to give a name
/// to. For example, we might produce an annotation like this:
///
/// ```
/// | fn a<T>(items: &[T]) -> Box<dyn Iterator<Item=&T>> {
/// | - let's call the lifetime of this reference `'1`
/// ```
///
/// the way this works is that we match up `argument_ty`, which is
/// a `Ty<'tcx>` (the internal form of the type) with
/// `argument_hir_ty`, a `hir::Ty` (the syntax of the type
/// annotation). We are descending through the types stepwise,
/// looking in to find the region `needle_fr` in the internal
/// type. Once we find that, we can use the span of the `hir::Ty`
/// to add the highlight.
///
/// This is a somewhat imperfect process, so along the way we also
/// keep track of the **closest** type we've found. If we fail to
/// find the exact `&` or `'_` to highlight, then we may fall back
/// to highlighting that closest type instead.
fn give_name_if_we_can_match_hir_ty(
&self,
tcx: TyCtxt<'_, '_, 'tcx>,
needle_fr: RegionVid,
argument_ty: Ty<'tcx>,
argument_hir_ty: &hir::Ty,
counter: &mut usize,
diag: &mut DiagnosticBuilder<'_>,
) -> Option<InternedString> {
let search_stack: &mut Vec<(Ty<'tcx>, &hir::Ty)> = &mut Vec::new();
search_stack.push((argument_ty, argument_hir_ty));
let mut closest_match: &hir::Ty = argument_hir_ty;
while let Some((ty, hir_ty)) = search_stack.pop() {
// While we search, also track the closest match.
if tcx.any_free_region_meets(&ty, |r| r.to_region_vid() == needle_fr) {
closest_match = hir_ty;
}
match (&ty.sty, &hir_ty.node) {
// Check if the `argument_ty` is `&'X ..` where `'X`
// is the region we are looking for -- if so, and we have a `&T`
// on the RHS, then we want to highlight the `&` like so:
//
// &
// - let's call the lifetime of this reference `'1`
(
ty::TyRef(region, referent_ty, _),
hir::TyKind::Rptr(_lifetime, referent_hir_ty),
) => {
if region.to_region_vid() == needle_fr {
let region_name = self.synthesize_region_name(counter);
// Just grab the first character, the `&`.
let codemap = tcx.sess.codemap();
let ampersand_span = codemap.start_point(hir_ty.span);
diag.span_label(
ampersand_span,
format!(
"let's call the lifetime of this reference `{}`",
region_name
),
);
return Some(region_name);
}
// Otherwise, let's descend into the referent types.
search_stack.push((referent_ty, &referent_hir_ty.ty));
}
// Match up something like `Foo<'1>`
(
ty::TyAdt(_adt_def, substs),
hir::TyKind::Path(hir::QPath::Resolved(None, path)),
) => {
if let Some(last_segment) = path.segments.last() {
if let Some(name) = self.match_adt_and_segment(
substs,
needle_fr,
last_segment,
counter,
diag,
search_stack,
) {
return Some(name);
}
}
}
// The following cases don't have lifetimes, so we
// just worry about trying to match up the rustc type
// with the HIR types:
(ty::TyTuple(elem_tys), hir::TyKind::Tup(elem_hir_tys)) => {
search_stack.extend(elem_tys.iter().cloned().zip(elem_hir_tys));
}
(ty::TySlice(elem_ty), hir::TyKind::Slice(elem_hir_ty))
| (ty::TyArray(elem_ty, _), hir::TyKind::Array(elem_hir_ty, _)) => {
search_stack.push((elem_ty, elem_hir_ty));
}
(ty::TyRawPtr(mut_ty), hir::TyKind::Ptr(mut_hir_ty)) => {
search_stack.push((mut_ty.ty, &mut_hir_ty.ty));
}
_ => {
// FIXME there are other cases that we could trace
}
}
}
let region_name = self.synthesize_region_name(counter);
diag.span_label(
closest_match.span,
format!("lifetime `{}` appears in this type", region_name),
);
return Some(region_name);
}
/// We've found an enum/struct/union type with the substitutions
/// `substs` and -- in the HIR -- a path type with the final
/// segment `last_segment`. Try to find a `'_` to highlight in
/// the generic args (or, if not, to produce new zipped pairs of
/// types+hir to search through).
fn match_adt_and_segment<'hir>(
&self,
substs: &'tcx Substs<'tcx>,
needle_fr: RegionVid,
last_segment: &'hir hir::PathSegment,
counter: &mut usize,
diag: &mut DiagnosticBuilder<'_>,
search_stack: &mut Vec<(Ty<'tcx>, &'hir hir::Ty)>,
) -> Option<InternedString> {
// Did the user give explicit arguments? (e.g., `Foo<..>`)
let args = last_segment.args.as_ref()?;
let lifetime = self.try_match_adt_and_generic_args(substs, needle_fr, args, search_stack)?;
match lifetime.name {
hir::LifetimeName::Param(_)
| hir::LifetimeName::Static
| hir::LifetimeName::Underscore => {
let region_name = self.synthesize_region_name(counter);
let ampersand_span = lifetime.span;
diag.span_label(ampersand_span, format!("let's call this `{}`", region_name));
return Some(region_name);
}
hir::LifetimeName::Implicit => {
// In this case, the user left off the lifetime; so
// they wrote something like:
//
// ```
// x: Foo<T>
// ```
//
// where the fully elaborated form is `Foo<'_, '1,
// T>`. We don't consider this a match; instead we let
// the "fully elaborated" type fallback above handle
// it.
return None;
}
}
}
/// We've found an enum/struct/union type with the substitutions
/// `substs` and -- in the HIR -- a path with the generic
/// arguments `args`. If `needle_fr` appears in the args, return
/// the `hir::Lifetime` that corresponds to it. If not, push onto
/// `search_stack` the types+hir to search through.
fn try_match_adt_and_generic_args<'hir>(
&self,
substs: &'tcx Substs<'tcx>,
needle_fr: RegionVid,
args: &'hir hir::GenericArgs,
search_stack: &mut Vec<(Ty<'tcx>, &'hir hir::Ty)>,
) -> Option<&'hir hir::Lifetime> {
for (kind, hir_arg) in substs.iter().zip(&args.args) {
match (kind.unpack(), hir_arg) {
(UnpackedKind::Lifetime(r), hir::GenericArg::Lifetime(lt)) => {
if r.to_region_vid() == needle_fr {
return Some(lt);
}
}
(UnpackedKind::Type(ty), hir::GenericArg::Type(hir_ty)) => {
search_stack.push((ty, hir_ty));
}
(UnpackedKind::Lifetime(_), _) | (UnpackedKind::Type(_), _) => {
// I *think* that HIR lowering should ensure this
// doesn't happen, even in erroneous
// programs. Else we should use delay-span-bug.
span_bug!(
hir_arg.span(),
"unmatched subst and hir arg: found {:?} vs {:?}",
kind,
hir_arg,
);
}
}
}
None
}
/// Find a closure upvar that contains `fr` and label it with a
/// fully elaborated type, returning something like `'1`. Result
/// looks like:
///
/// ```
/// | let x = Some(&22);
/// - fully elaborated type of `x` is `Option<&'1 u32>`
/// ```
fn give_name_if_anonymous_region_appears_in_upvars(
&self,
tcx: TyCtxt<'_, '_, 'tcx>,
mir: &Mir<'tcx>,
fr: RegionVid,
counter: &mut usize,
diag: &mut DiagnosticBuilder<'_>,
) -> Option<InternedString> {
let upvar_index = self.get_upvar_index_for_region(tcx, fr)?;
let (upvar_name, upvar_span) = self.get_upvar_name_and_span_for_region(tcx, mir,
upvar_index);
let region_name = self.synthesize_region_name(counter);
diag.span_label(
upvar_span,
format!("lifetime `{}` appears in the type of `{}`", region_name, upvar_name),
);
Some(region_name)
}
/// Check for arguments appearing in the (closure) return type. It
/// must be a closure since, in a free fn, such an argument would
/// have to either also appear in an argument (if using elision)
/// or be early bound (named, not in argument).
fn give_name_if_anonymous_region_appears_in_output(
&self,
tcx: TyCtxt<'_, '_, 'tcx>,
mir: &Mir<'tcx>,
fr: RegionVid,
counter: &mut usize,
diag: &mut DiagnosticBuilder<'_>,
) -> Option<InternedString> {
let return_ty = self.universal_regions.unnormalized_output_ty;
debug!(
"give_name_if_anonymous_region_appears_in_output: return_ty = {:?}",
return_ty
);
if !tcx.any_free_region_meets(&return_ty, |r| r.to_region_vid() == fr) {
return None;
}
let region_name = self.synthesize_region_name(counter);
diag.span_label(
mir.span,
format!("lifetime `{}` appears in return type", region_name),
);
Some(region_name)
}
/// Create a synthetic region named `'1`, incrementing the
/// counter.
fn synthesize_region_name(&self, counter: &mut usize) -> InternedString {
let c = *counter;
*counter += 1;
Name::intern(&format!("'{:?}", c)).as_interned_str()
}
}
|
{
debug!("give_region_a_name(fr={:?}, counter={})", fr, counter);
assert!(self.universal_regions.is_universal_region(fr));
self.give_name_from_error_region(infcx.tcx, mir_def_id, fr, counter, diag)
.or_else(|| {
self.give_name_if_anonymous_region_appears_in_arguments(
infcx, mir, mir_def_id, fr, counter, diag)
})
.or_else(|| {
self.give_name_if_anonymous_region_appears_in_upvars(
infcx.tcx, mir, fr, counter, diag)
})
.or_else(|| {
self.give_name_if_anonymous_region_appears_in_output(
infcx.tcx, mir, fr, counter, diag)
})
.unwrap_or_else(|| span_bug!(mir.span, "can't make a name for free region {:?}", fr))
}
|
fund.go
|
package model
import "github.com/yuleihua/trade/pkg/dbclient"
var fundName = "fund"
type Fund struct {
Id int64 `gorm:"column:id;type:bigint(20)" json:"id"`
Bid int64 `gorm:"column:bid;type:bigint(20);default:'0'" json:"bid"`
MoneyType string `gorm:"column:money_type;type:varchar(8);default:''" json:"money_type"` // currency type
Balance int64 `gorm:"column:balance;type:bigint(20);default:'0'" json:"balance"`
FreezeBalance int64 `gorm:"column:freeze_balance;type:bigint(20);default:'0'" json:"freeze_balance"`
LastBalance int64 `gorm:"column:last_balance;type:bigint(20);default:'0'" json:"last_balance"`
Created string `gorm:"column:created;type:timestamp;default:CURRENT_TIMESTAMP" json:"created"` // creation time
}
//get real primary key name
func (fund *Fund) GetKey() string {
return "id"
}
//get primary key in model
func (fund *Fund) GetKeyProperty() int64 {
return fund.Id
}
//set primary key
func (fund *Fund) SetKeyProperty(id int64) {
fund.Id = id
}
//get real table name
func (fund *Fund) TableName() string {
return "fund"
}
func GetFundFirst() (*Fund, error) {
var f Fund
err := dbclient.DB(fundName).Model(&Fund{}).First(&f).Error
if err != nil {
return nil, err
}
return &f, nil
}
func GetFundLast() (*Fund, error) {
var f Fund
err := dbclient.DB(fundName).Model(&Fund{}).Last(&f).Error
if err != nil {
return nil, err
}
return &f, nil
}
func GetFundOne() (*Fund, error) {
var f Fund
err := dbclient.DB(fundName).Model(&Fund{}).Take(&f).Error
if err != nil {
return nil, err
}
return &f, nil
}
func GetFundById(id int64) (*Fund, error) {
var f Fund
err := dbclient.DB(fundName).Model(&Fund{}).Where("id = ?", id).Find(&f).Error
if err != nil {
return nil, err
}
return &f, nil
}
func GetFundByBranchId(bid int64) (*Fund, error) {
var f Fund
err := dbclient.DB(fundName).Model(&Fund{}).Where("bid = ?", bid).Find(&f).Error
if err != nil {
return nil, err
}
return &f, nil
}
func GetFundAll() ([]*Fund, error) {
var fs []*Fund
err := dbclient.DB(fundName).Model(&Fund{}).Order("id desc").Find(&fs).Error
if err != nil {
return nil, err
}
return fs, nil
}
func GetFund(where string, args ...interface{}) ([]*Fund, error) {
var fs []*Fund
err := dbclient.DB(fundName).Model(&Fund{}).Find(&fs, where, args).Error
if err != nil {
return nil
|
il
}
func GetFundList(page, limit int64, where string, args ...interface{}) ([]*Fund, error) {
var fs []*Fund
err := dbclient.DB(fundName).Model(&Fund{}).Limit(limit).Offset((page-1)*limit).Find(&fs, where, args).Error
if err != nil {
return nil, err
}
return fs, nil
}
func (fund *Fund) Create() []error {
return dbclient.DB(fundName).Model(&Fund{}).Create(fund).GetErrors()
}
func (fund *Fund) Update(f Fund) []error {
return dbclient.DB(fundName).Model(&Fund{}).UpdateColumns(f).GetErrors()
}
func (fund *Fund) UpdateById(id int64) (int64, error) {
ravDatabase := dbclient.DB(fundName).Model(&Fund{}).Where("id=?", id).Update(fund)
return ravDatabase.RowsAffected, ravDatabase.Error
}
func (fund *Fund) Delete() {
dbclient.DB(fundName).Model(&Fund{}).Delete(fund)
}
func AddFundTX(t *dbclient.DBTransaction, f Fund) error {
return t.GetTx().Model(&Fund{}).Create(&f).Error
}
func UpdateFundTX(t *dbclient.DBTransaction, f Fund) error {
return t.GetTx().Model(&Fund{}).Where("id=?", f.Id).Update(f).Error
}
func UpdateFundTXDownBalance(t *dbclient.DBTransaction, fid, amt int64) error {
return t.GetTx().Raw("UPDATE fund SET balance = (balance-?) WHERE id = ?", amt, fid).Error
}
func UpdateFundTXUpBalance(t *dbclient.DBTransaction, fid, amt int64) error {
return t.GetTx().Raw("UPDATE fund SET balance = (balance+?) WHERE id = ?", amt, fid).Error
}
func UpdateFundTXDownFreezeBalance(t *dbclient.DBTransaction, fid, amt int64) error {
return t.GetTx().Raw("UPDATE fund SET freeze_balance = (freeze_balance-?) WHERE id = ?", amt, fid).Error
}
func UpdateFundTXDownFreezeBalanceAll(t *dbclient.DBTransaction, fid, amt int64) error {
return t.GetTx().Raw("UPDATE fund SET balance = (balance-?), freeze_balance = (freeze_balance+?) WHERE id = ?", amt, amt, fid).Error
}
func DeleteFundTX(t *dbclient.DBTransaction, f Fund) error {
return t.GetTx().Model(&Fund{}).Where("id=?", f.Id).Delete(nil).Error
}
|
, err
}
return fs, n
|
ex3.py
|
#!/usr/bin/env python
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Read from our rollups data and print every time minute-over-minute
transaction volume drops more than 50%.
"""
import argparse
from google.cloud import bigtable
def
|
(project_id, instance_id, table_id):
client = bigtable.Client(project=project_id)
instance = client.instance(instance_id)
table = instance.table(table_id)
partial_rows = table.read_rows(start_key="hourly")
partial_rows.consume_all()
for row_key, row in partial_rows.rows.items():
# TODO: Iterate and print a message if there is a > 50% drop minute over minute.
pass  # keeps the loop body valid; the TODO above is the exercise left for the reader
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('project_id', help='Your Cloud Platform project ID.')
parser.add_argument(
'instance_id', help='ID of the Cloud Bigtable instance to connect to.')
parser.add_argument(
'table',
help='Table from previous exercises')
args = parser.parse_args()
main(args.project_id, args.instance_id, args.table)
|
main
|
handle.rs
|
use crate::runtime::context;
use crate::time::driver::Inner;
use std::fmt;
use std::sync::{Arc, Weak};
/// Handle to time driver instance.
#[derive(Clone)]
pub(crate) struct Handle {
inner: Weak<Inner>,
}
impl Handle {
/// Creates a new timer `Handle` from a shared `Inner` timer state.
pub(crate) fn new(inner: Weak<Inner>) -> Self {
Handle { inner }
}
/// Tries to get a handle to the current timer.
///
/// # Panics
///
/// This function panics if there is no current timer set.
///
/// It can be triggered when `Builder::enable_time()` or
/// `Builder::enable_all()` are not included in the builder.
///
/// It can also panic whenever a timer is created outside of a Tokio
/// runtime. That is why `rt.block_on(delay_for(...))` will panic,
/// since the function is executed outside of the runtime.
/// Whereas `rt.block_on(async {delay_for(...).await})` doesn't
/// panic. This is because wrapping the call in an async block makes it
/// lazy, so it gets executed inside the runtime successfully without
/// panicking.
pub(crate) fn current() -> Self
|
/// Tries to return a strong ref to the inner
pub(crate) fn inner(&self) -> Option<Arc<Inner>> {
self.inner.upgrade()
}
}
impl fmt::Debug for Handle {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Handle")
}
}
|
{
context::time_handle()
.expect("there is no timer running, must be called from the context of Tokio runtime")
}
|
gatsby-config.js
|
module.exports = {
siteMetadata: {
title: `Gatsby Default Starter`,
description: `Kick off your next, great Gatsby project with this default starter. This barebones starter ships with the main Gatsby configuration files you might need.`,
author: `@emmzytech`,
},
pathPrefix: "/Construct-Site",
plugins: [
`gatsby-plugin-react-helmet`,
`gatsby-plugin-image`,
{
resolve: `gatsby-source-filesystem`,
options: {
name: `images`,
path: `${__dirname}/src/images`,
},
},
`gatsby-transformer-sharp`,
`gatsby-plugin-sharp`,
{
resolve: `gatsby-plugin-manifest`,
options: {
|
start_url: `/`,
background_color: `#663399`,
theme_color: `#663399`,
display: `minimal-ui`,
icon: `src/images/gatsby-icon.png`, // This path is relative to the root of the site.
},
},
`gatsby-plugin-gatsby-cloud`,
// this (optional) plugin enables Progressive Web App + Offline functionality
// To learn more, visit: https://gatsby.dev/offline
// `gatsby-plugin-offline`,
],
}
|
name: `gatsby-starter-default`,
short_name: `starter`,
|
loss_utils.py
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from . import box_utils
from . import center_utils
try:
from itertools import ifilterfalse
except ImportError: # py3k
from itertools import filterfalse as ifilterfalse
class SigmoidFocalClassificationLoss(nn.Module):
"""
Sigmoid focal cross entropy loss.
"""
def __init__(self, gamma: float = 2.0, alpha: float = 0.25):
"""
Args:
gamma: Weighting parameter to balance loss for hard and easy examples.
alpha: Weighting parameter to balance loss for positive and negative examples.
"""
super(SigmoidFocalClassificationLoss, self).__init__()
self.alpha = alpha
self.gamma = gamma
@staticmethod
def sigmoid_cross_entropy_with_logits(input: torch.Tensor, target: torch.Tensor):
""" PyTorch Implementation for tf.nn.sigmoid_cross_entropy_with_logits:
max(x, 0) - x * z + log(1 + exp(-abs(x))) in
https://www.tensorflow.org/api_docs/python/tf/nn/sigmoid_cross_entropy_with_logits
Args:
input: (B, #anchors, #classes) float tensor.
Predicted logits for each class
target: (B, #anchors, #classes) float tensor.
One-hot encoded classification targets
Returns:
loss: (B, #anchors, #classes) float tensor.
Sigmoid cross entropy loss without reduction
"""
loss = torch.clamp(input, min=0) - input * target + \
torch.log1p(torch.exp(-torch.abs(input)))
return loss
def forward(self, input: torch.Tensor, target: torch.Tensor, weights: torch.Tensor):
"""
Args:
input: (B, #anchors, #classes) float tensor.
Predicted logits for each class
target: (B, #anchors, #classes) float tensor.
One-hot encoded classification targets
weights: (B, #anchors) float tensor.
Anchor-wise weights.
Returns:
weighted_loss: (B, #anchors, #classes) float tensor after weighting.
"""
pred_sigmoid = torch.sigmoid(input)
alpha_weight = target * self.alpha + (1 - target) * (1 - self.alpha)
pt = target * (1.0 - pred_sigmoid) + (1.0 - target) * pred_sigmoid
focal_weight = alpha_weight * torch.pow(pt, self.gamma)
bce_loss = self.sigmoid_cross_entropy_with_logits(input, target)
loss = focal_weight * bce_loss
if weights.shape.__len__() == 2 or \
(weights.shape.__len__() == 1 and target.shape.__len__() == 2):
weights = weights.unsqueeze(-1)
assert weights.shape.__len__() == loss.shape.__len__()
return loss * weights
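# Usage sketch for SigmoidFocalClassificationLoss above (illustrative only; shapes follow the
# (B, #anchors, #classes) convention from the docstrings, the sizes themselves are arbitrary):
#   focal = SigmoidFocalClassificationLoss(gamma=2.0, alpha=0.25)
#   cls_loss = focal(torch.randn(2, 4, 3), torch.randint(0, 2, (2, 4, 3)).float(), torch.ones(2, 4))
#   cls_loss has shape (2, 4, 3): unreduced, ready for anchor-wise normalization by the caller.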
class WeightedSmoothL1Loss(nn.Module):
"""
Code-wise Weighted Smooth L1 Loss modified based on fvcore.nn.smooth_l1_loss
https://github.com/facebookresearch/fvcore/blob/master/fvcore/nn/smooth_l1_loss.py
| 0.5 * x ** 2 / beta if abs(x) < beta
smoothl1(x) = |
| abs(x) - 0.5 * beta otherwise,
where x = input - target.
"""
def __init__(self, beta: float = 1.0 / 9.0, code_weights: list = None):
"""
Args:
beta: Scalar float.
L1 to L2 change point.
For beta values < 1e-5, L1 loss is computed.
code_weights: (#codes) float list if not None.
Code-wise weights.
"""
super(WeightedSmoothL1Loss, self).__init__()
self.beta = beta
if code_weights is not None:
self.code_weights = np.array(code_weights, dtype=np.float32)
self.code_weights = torch.from_numpy(self.code_weights).cuda()
@staticmethod
def smooth_l1_loss(diff, beta):
if beta < 1e-5:
loss = torch.abs(diff)
else:
n = torch.abs(diff)
loss = torch.where(n < beta, 0.5 * n ** 2 / beta, n - 0.5 * beta)
return loss
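# Worked example for the piecewise form above (illustrative, using beta = 1/9, the class default):
#   |diff| = 0.05 < beta  -> 0.5 * 0.05**2 / (1/9) ≈ 0.0113  (quadratic branch)
#   |diff| = 0.50 >= beta -> 0.50 - 0.5 * (1/9)    ≈ 0.4444  (linear branch)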
def forward(self, input: torch.Tensor, target: torch.Tensor, weights: torch.Tensor = None):
"""
Args:
input: (B, #anchors, #codes) float tensor.
Encoded predicted locations of objects.
target: (B, #anchors, #codes) float tensor.
Regression targets.
weights: (B, #anchors) float tensor if not None.
Returns:
loss: (B, #anchors) float tensor.
Weighted smooth l1 loss without reduction.
"""
target = torch.where(torch.isnan(target), input, target) # ignore nan targets
diff = input - target
# code-wise weighting
if self.code_weights is not None:
diff = diff * self.code_weights.view(1, 1, -1)
loss = self.smooth_l1_loss(diff, self.beta)
# anchor-wise weighting
if weights is not None:
assert weights.shape[0] == loss.shape[0] and weights.shape[1] == loss.shape[1]
loss = loss * weights.unsqueeze(-1)
return loss
class WeightedL1Loss(nn.Module):
def __init__(self, code_weights: list = None):
"""
Args:
code_weights: (#codes) float list if not None.
Code-wise weights.
"""
super(WeightedL1Loss, self).__init__()
if code_weights is not None:
self.code_weights = np.array(code_weights, dtype=np.float32)
self.code_weights = torch.from_numpy(self.code_weights).cuda()
def forward(self, input: torch.Tensor, target: torch.Tensor, weights: torch.Tensor = None):
"""
Args:
input: (B, #anchors, #codes) float tensor.
Encoded predicted locations of objects.
target: (B, #anchors, #codes) float tensor.
Regression targets.
weights: (B, #anchors) float tensor if not None.
Returns:
loss: (B, #anchors) float tensor.
Weighted L1 loss without reduction.
"""
target = torch.where(torch.isnan(target), input, target) # ignore nan targets
diff = input - target
# code-wise weighting
if self.code_weights is not None:
diff = diff * self.code_weights.view(1, 1, -1)
loss = torch.abs(diff)
# anchor-wise weighting
if weights is not None:
assert weights.shape[0] == loss.shape[0] and weights.shape[1] == loss.shape[1]
loss = loss * weights.unsqueeze(-1)
return loss
class WeightedCrossEntropyLoss(nn.Module):
"""
Transform the input to fit the format of the official PyTorch cross entropy loss
with anchor-wise weighting.
"""
def __init__(self):
super(WeightedCrossEntropyLoss, self).__init__()
def forward(self, input: torch.Tensor, target: torch.Tensor, weights: torch.Tensor):
"""
Args:
input: (B, #anchors, #classes) float tensor.
Predicted logits for each class.
target: (B, #anchors, #classes) float tensor.
One-hot classification targets.
weights: (B, #anchors) float tensor.
Anchor-wise weights.
Returns:
loss: (B, #anchors) float tensor.
Weighted cross entropy loss without reduction
"""
input = input.permute(0, 2, 1)
target = target.argmax(dim=-1)
loss = F.cross_entropy(input, target, reduction='none') * weights
return loss
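# Illustrative sketch (illustration only, not from the original loss module):
# minimal CPU usage of WeightedCrossEntropyLoss. One-hot targets are converted
# to class indices and the class axis is moved to dim 1, as F.cross_entropy
# expects, then the per-anchor loss is scaled by the anchor weights.
def _example_weighted_cross_entropy():
    import torch  # local import so the sketch stays self-contained
    ce = WeightedCrossEntropyLoss()
    logits = torch.randn(2, 3, 4)                         # (B, #anchors, #classes)
    one_hot = torch.eye(4)[torch.randint(0, 4, (2, 3))]   # (B, #anchors, #classes)
    weights = torch.ones(2, 3)                            # (B, #anchors)
    return ce(logits, one_hot, weights)                   # (B, #anchors)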
def get_corner_loss_lidar(pred_bbox3d: torch.Tensor, gt_bbox3d: torch.Tensor):
"""
Args:
pred_bbox3d: (N, 7) float Tensor.
gt_bbox3d: (N, 7) float Tensor.
Returns:
corner_loss: (N) float Tensor.
"""
assert pred_bbox3d.shape[0] == gt_bbox3d.shape[0]
pred_box_corners = box_utils.boxes_to_corners_3d(pred_bbox3d)
gt_box_corners = box_utils.boxes_to_corners_3d(gt_bbox3d)
# The flip here is presumably meant to ignore the heading direction; is it actually better to include heading in this term, or would that make training unstable?
gt_bbox3d_flip = gt_bbox3d.clone()
gt_bbox3d_flip[:, 6] += np.pi
gt_box_corners_flip = box_utils.boxes_to_corners_3d(gt_bbox3d_flip)
# (N, 8)
corner_dist = torch.min(torch.norm(pred_box_corners - gt_box_corners, dim=2),
torch.norm(pred_box_corners - gt_box_corners_flip, dim=2))
# (N, 8)
corner_loss = WeightedSmoothL1Loss.smooth_l1_loss(corner_dist, beta=1.0)
return corner_loss.mean(dim=1)
def get_corner_loss_mse(pred_bbox3d: torch.Tensor, gt_bbox3d: torch.Tensor):
"""
Args:
pred_bbox3d: (N, 7) float Tensor.
gt_bbox3d: (N, 7) float Tensor.
Returns:
corner_loss: (1,) float scalar.
"""
assert pred_bbox3d.shape[0] == gt_bbox3d.shape[0]
# (N, 8, 3)
pred_box_corners = box_utils.boxes_to_corners_3d(pred_bbox3d)
gt_box_corners = box_utils.boxes_to_corners_3d(gt_bbox3d)
# print('==> pred_box_corners[0, :, :]')
# print(pred_box_corners[0,:,:])
# print('==> gt_box_corners[0, :, :]')
# print(gt_box_corners[0,:,:])
# print('==> pred_box_corners[10, :, :]')
# print(pred_box_corners[10,:,:])
# print('==> gt_box_corners[10, :, :]')
# print(gt_box_corners[10,:,:])
# print('==> pred_box_corners[100, :, :]')
# print(pred_box_corners[100,:,:])
# print('==> gt_box_corners[100, :, :]')
# print(gt_box_corners[100,:,:])
# F.mse_loss uses the default 'mean' reduction, so each term averages over all boxes and corners.
corner_loss_x = F.mse_loss(input=pred_box_corners[:,:,0], target=gt_box_corners[:,:,0]) # (N, 8) -> scalar
corner_loss_y = F.mse_loss(input=pred_box_corners[:,:,1], target=gt_box_corners[:,:,1]) # (N, 8) -> scalar
corner_loss_z = F.mse_loss(input=pred_box_corners[:,:,2], target=gt_box_corners[:,:,2]) # (N, 8) -> scalar
# sum the x, y and z terms
corner_loss = corner_loss_x + corner_loss_y + corner_loss_z
return corner_loss
def get_iouscore_loss_bce(iou_preds, iou_gts, iou_fg_thresh=0.75, iou_bg_thresh=0.25):
"""
Args:
iou_preds: (N,)
iou_gts: (N, )
Returns:
loss_iouscore:
"""
# prepare the labels
# now only for car class, 08132020
# iou_preds = iou_preds.view(-1)
# iou_gts = iou_gts.view(-1)
# print('==> iou_preds.size()')
# print(iou_preds.size())
# print(torch.sigmoid(iou_preds))
# print('==> iou_gts.size()')
# print(iou_gts.size())
# print(iou_gts)
# CLS_FG_THRESH: 0.75
# CLS_BG_THRESH: 0.25
# iou_bg_thresh = self.roi_sampler_cfg.CLS_BG_THRESH
# iou_fg_thresh = self.roi_sampler_cfg.CLS_FG_THRESH
# iou_bg_thresh = 0.25
# iou_fg_thresh = 0.75
fg_mask = iou_gts > iou_fg_thresh
bg_mask = iou_gts < iou_bg_thresh
interval_mask = (fg_mask == 0) & (bg_mask == 0)
iou_cls_labels = (fg_mask > 0).float()
iou_cls_labels[interval_mask] = \
(iou_gts[interval_mask] - iou_bg_thresh) / (iou_fg_thresh - iou_bg_thresh)
# print('==> iou_cls_labels')
# print(iou_cls_labels.size())
# print(iou_cls_labels[:50])
# The BCE below is computed over the full range of IoU labels, but the final sum only counts entries whose label is >= 0.
# The condition iou_cls_labels >= 0 selects the candidate boxes with a valid soft label.
loss_ioucls = F.binary_cross_entropy(torch.sigmoid(iou_preds), iou_cls_labels.float(), reduction='none')
cls_valid_mask = (iou_cls_labels >= 0).float()
loss_iouscore = (loss_ioucls * cls_valid_mask).sum() / torch.clamp(cls_valid_mask.sum(), min=1.0)
return loss_iouscore
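# Illustrative sketch (illustration only, not from the original loss module):
# how the soft IoU classification labels above are built. IoUs above the fg
# threshold get label 1, below the bg threshold get label 0, and IoUs in
# between are mapped linearly into (0, 1). Thresholds mirror the defaults.
def _example_iou_soft_labels():
    import torch  # local import so the sketch stays self-contained
    iou_gts = torch.tensor([0.1, 0.3, 0.5, 0.7, 0.9])
    fg, bg = 0.75, 0.25
    labels = (iou_gts > fg).float()
    interval = (iou_gts <= fg) & (iou_gts >= bg)
    labels[interval] = (iou_gts[interval] - bg) / (fg - bg)
    # approximately [0.0, 0.1, 0.5, 0.9, 1.0]
    return labels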
def get_rot_binres_loss(pred_reg, reg_label, num_head_bin, get_ry_fine=False):
"""
Bin-based 3D bounding box regression loss. See https://arxiv.org/abs/1812.04244 for more details.
:param pred_reg: (N, C)
:param reg_label: (N, 1), ry
:param num_head_bin: constant
:param get_ry_fine: False
:return:
"""
# print('==> pred_reg.size()')
# print(pred_reg.size()) # should be (N, 24)
reg_loss_dict = {}
# angle loss
start_offset = 0
ry_bin_l, ry_bin_r = start_offset, start_offset + num_head_bin
ry_res_l, ry_res_r = ry_bin_r, ry_bin_r + num_head_bin
start_offset = ry_res_r
ry_label = reg_label.squeeze(dim=-1)
# print('==> reg_label[] in encode')
# print(reg_label.size()) # should be (N, C)
# print(reg_label[100:150])
# print('==> ry_label[] in encode')
# print(ry_label.size()) # should be (N,)
# print(ry_label[100:150])
if get_ry_fine:
assert False, "one-stage should not get_ry_fine."
# divide pi/2 into several bins
angle_per_class = (np.pi / 2) / num_head_bin
ry_label = ry_label % (2 * np.pi) # 0 ~ 2pi
opposite_flag = (ry_label > np.pi * 0.5) & (ry_label < np.pi * 1.5)
ry_label[opposite_flag] = (ry_label[opposite_flag] + np.pi) % (2 * np.pi) # (0 ~ pi/2, 3pi/2 ~ 2pi)
shift_angle = (ry_label + np.pi * 0.5) % (2 * np.pi) # (0 ~ pi)
shift_angle = torch.clamp(shift_angle - np.pi * 0.25, min=1e-3, max=np.pi * 0.5 - 1e-3) # (0, pi/2)
# bin center is (5, 10, 15, ..., 85)
ry_bin_label = (shift_angle / angle_per_class).floor().long()
ry_res_label = shift_angle - (ry_bin_label.float() * angle_per_class + angle_per_class / 2)
ry_res_norm_label = ry_res_label / (angle_per_class / 2)
else:
# divide 2pi into several bins
angle_per_class = (2 * np.pi) / num_head_bin
heading_angle = ry_label % (2 * np.pi) # 0 ~ 2pi
# print('==> heading_angle[] in encode')
# print(heading_angle.size())
# print(heading_angle[100:150])
shift_angle = (heading_angle + angle_per_class / 2) % (2 * np.pi)
ry_bin_label = (shift_angle / angle_per_class).floor().long()
ry_res_label = shift_angle - (ry_bin_label.float() * angle_per_class + angle_per_class / 2)
ry_res_norm_label = ry_res_label / (angle_per_class / 2)
# print('==> ry_bin_label in encode')
# print(ry_bin_label.size())
# print(ry_bin_label[100:150])
ry_bin_onehot = torch.cuda.FloatTensor(ry_bin_label.size(0), num_head_bin).zero_()
ry_bin_onehot.scatter_(1, ry_bin_label.view(-1, 1).long(), 1)
loss_ry_bin = F.cross_entropy(pred_reg[:, ry_bin_l:ry_bin_r], ry_bin_label)
loss_ry_res = F.smooth_l1_loss((pred_reg[:, ry_res_l: ry_res_r] * ry_bin_onehot).sum(dim=1), ry_res_norm_label)
reg_loss_dict['loss_ry_bin'] = loss_ry_bin.item()
reg_loss_dict['loss_ry_res'] = loss_ry_res.item()
angle_loss = loss_ry_bin + loss_ry_res
# Total regression loss
reg_loss_dict['loss_angle'] = angle_loss
return angle_loss, reg_loss_dict
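# Illustrative sketch (illustration only, not from the original loss module):
# the bin + residual heading encoding that get_rot_binres_loss assumes for its
# targets (get_ry_fine=False branch). 2*pi is split into num_head_bin bins; the
# half-bin-shifted angle picks a bin label, and the normalized residual stores
# the offset from that bin's center. Decoding the pair recovers the heading.
def _example_rot_bin_res_encoding(num_head_bin=12, heading=1.3):
    import numpy as np  # local import so the sketch stays self-contained
    angle_per_class = (2 * np.pi) / num_head_bin
    shift_angle = (heading % (2 * np.pi) + angle_per_class / 2) % (2 * np.pi)
    bin_label = int(shift_angle // angle_per_class)
    res = shift_angle - (bin_label * angle_per_class + angle_per_class / 2)
    res_norm = res / (angle_per_class / 2)
    # decode: undo the half-bin shift; heading == bin_label * angle_per_class + residual (mod 2*pi)
    decoded = (bin_label * angle_per_class + res_norm * (angle_per_class / 2)) % (2 * np.pi)
    assert abs(decoded - (heading % (2 * np.pi))) < 1e-6
    return bin_label, res_norm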
class CenterNetFocalLoss(nn.Module):
'''nn.Module wrapper for focal loss'''
def __init__(self, gamma=4, alpha=2):
super(CenterNetFocalLoss, self).__init__()
# self.neg_loss = _neg_loss
self.gamma = gamma
self.alpha = alpha
def _sigmoid(self, x):
# y = torch.clamp(x.sigmoid_(), min=1e-4, max=1 - 1e-4)
# do not use the in-place version (x.sigmoid_()) here!
y = torch.clamp(torch.sigmoid(x), min=1e-4, max=1 - 1e-4)
# clamping with bounds that are too loose (e.g. 1e-12) can make the loss NaN.
# y = torch.clamp(x.sigmoid_(), min=1e-12, max=1 - 1e-12)
return y
def _neg_loss(self, pred, gt):
''' Modified focal loss. Exactly the same as CornerNet.
Runs faster and costs a little bit more memory
Arguments:
pred: (batch x c x h x w); expected to be clamped to (0, 1) already (see _sigmoid).
gt: (batch x c x h x w)
'''
pos_inds = gt.eq(1).float()
neg_inds = gt.lt(1).float()
# neg_weights = torch.pow(1 - gt, 4)
neg_weights = torch.pow(1 - gt, self.gamma)
loss = 0
# pos_loss = torch.log(pred) * torch.pow(1 - pred, 2) * pos_inds
# neg_loss = torch.log(1 - pred) * torch.pow(pred, 2) * neg_weights * neg_inds
pos_loss = torch.log(pred) * torch.pow(1 - pred, self.alpha) * pos_inds
neg_loss = torch.log(1 - pred) * torch.pow(pred, self.alpha) * neg_weights * neg_inds
num_pos = pos_inds.float().sum()
pos_loss = pos_loss.sum()
neg_loss = neg_loss.sum()
if num_pos == 0:
loss = loss - neg_loss
else:
loss = loss - (pos_loss + neg_loss) / num_pos
return loss
def forward(self, out, target):
out_norm = self._sigmoid(out)
return self._neg_loss(out_norm, target)
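# Illustrative sketch (illustration only, not from the original loss module):
# CenterNetFocalLoss on a tiny 1x1x2x2 heatmap. One cell is a positive
# (gt == 1); the remaining cells are negatives down-weighted by
# (1 - gt) ** gamma. Raw logits are passed in; the module clamps their sigmoid
# internally and normalizes by the number of positives.
def _example_centernet_focal_loss():
    import torch  # local import so the sketch stays self-contained
    focal = CenterNetFocalLoss(gamma=4, alpha=2)
    gt = torch.tensor([[[[1.0, 0.8], [0.1, 0.0]]]])          # (batch, c, h, w)
    logits = torch.tensor([[[[4.0, -1.0], [-3.0, -5.0]]]])   # same shape
    return focal(logits, gt)  # scalar loss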
class CenterNetResLoss(nn.Module):
def __init__(self, cfg):
super(CenterNetResLoss, self).__init__()
self.res_func_type = cfg['res_func']
def forward(self, output, mask, ind, target):
"""
Args:
output: torch.Size([B, C, 152, 152])
mask: torch.Size([B, max_objs])
ind: torch.Size([B, max_objs])
target: torch.Size([B, max_objs, C])
Returns:
reduced and weighted loss term.
"""
pred = center_utils._transpose_and_gather_feat(output, ind) # (B, max_objs, C)
# print('==> (ind != 0).float().sum(): ', (ind != 0).float().sum() )
# print('==> mask.sum(): ', mask.sum() )
if mask.sum():
# 1. flatten.
pred_flat = pred.view(-1, pred.shape[-1]) #(B*max_objs, C)
target_flat = target.view(-1, target.shape[-1]) #(B*max_objs, C)
mask_flat = mask.view(-1).bool() #(B*max_objs)
# 2. valid select
pred_valid = pred_flat[mask_flat] #(num_valid, C)
target_valid = target_flat[mask_flat] #(num_valid, C)
# 3. un-reduced loss term
if self.res_func_type == 'smooth-l1':
loss = F.smooth_l1_loss(pred_valid, target_valid, reduction='none')
elif self.res_func_type == 'l1':
loss = F.l1_loss(pred_valid, target_valid, reduction='none')
elif self.res_func_type == 'balanced_l1':
loss = get_balanced_l1_loss(pred_valid, target_valid)
else:
raise NotImplementedError
# mean for num_obj_dims, sum for channel_dims
# (num_valid, C) -> (C) -> ()
loss = loss.mean(dim=0).sum()
else:
loss = 0.
return loss
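# Illustrative sketch (illustration only, not from the original loss module):
# steps 1-3 of CenterNetResLoss reproduced on predictions that are assumed to
# be already gathered at the object indices (what
# center_utils._transpose_and_gather_feat would return): flatten, keep the
# slots marked valid by `mask`, then mean over objects and sum over channels.
def _example_masked_res_reduction():
    import torch
    import torch.nn.functional as F  # local imports so the sketch stays self-contained
    pred = torch.randn(2, 3, 4)    # (B, max_objs, C), already gathered
    target = torch.randn(2, 3, 4)  # (B, max_objs, C)
    mask = torch.tensor([[1, 1, 0], [1, 0, 0]])  # (B, max_objs)
    valid = mask.view(-1).bool()
    pred_valid = pred.view(-1, 4)[valid]      # (num_valid, C)
    target_valid = target.view(-1, 4)[valid]  # (num_valid, C)
    loss = F.smooth_l1_loss(pred_valid, target_valid, reduction='none')
    return loss.mean(dim=0).sum()  # mean over valid objects, sum over channels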
class CenterNetRotBinResLoss(nn.Module):
def __init__(self, cfg):
super(CenterNetRotBinResLoss, self).__init__()
self.num_head_bin = cfg['num_bins']
def forward(self, output, mask, ind, target):
"""
Args:
output: torch.Size([B, C, 152, 152])
mask: torch.Size([B, max_objs])
ind: torch.Size([B, max_objs])
target: torch.Size([B, max_objs, C])
Returns:
reduced and weighted loss term.
"""
pred = center_utils._transpose_and_gather_feat(output, ind) # torch.Size([1, 500, 2])
if mask.sum():
# 1. flatten
pred_flat = pred.view(-1, pred.shape[-1]) # (B*max_objs, C)
target_flat = target.view(-1, target.shape[-1]) # (B*max_objs, 1)
mask_flat = mask.view(-1).bool() # (B*max_objs)
# 2. valid select
pred_valid = pred_flat[mask_flat] # (num_valid, C)
target_valid = target_flat[mask_flat] # (num_valid, 1)
# 3. return the reduced rot loss term.
loss, _ = get_rot_binres_loss(pred_valid, target_valid, num_head_bin=self.num_head_bin)
else:
loss = 0.
# print('==> loss in rot')
# print(loss)
return loss
def lovasz_softmax(probas, labels, classes='present', per_image=False, ignore=None):
"""
Multi-class Lovasz-Softmax loss
NOTE probas should be applied with softmax.
probas: [B, C, H, W] Variable, class probabilities at each prediction (between 0 and 1).
Interpreted as binary (sigmoid) output with outputs of size [B, H, W].
labels: [B, H, W] Tensor, ground truth labels (between 0 and C - 1)
classes: 'all' for all, 'present' for classes present in labels, or a list of classes to average.
per_image: compute the loss per image instead of per batch
ignore: void class labels
"""
# print('==> lovasz_softmax, classes: ', classes)
# print('==> lovasz_softmax, per_image: ', per_image)
# print('==> lovasz_softmax, ignore: ', ignore)
if per_image:
loss = mean(lovasz_softmax_flat(*flatten_probas(prob.unsqueeze(0), lab.unsqueeze(0), ignore), classes=classes)
for prob, lab in zip(probas, labels))
else:
loss = lovasz_softmax_flat(*flatten_probas(probas, labels, ignore), classes=classes)
return loss
def lovasz_softmax_flat(probas, labels, classes='present'):
"""
Multi-class Lovasz-Softmax loss
probas: [P, C] Variable, class probabilities at each prediction (between 0 and 1)
labels: [P] Tensor, ground truth labels (between 0 and C - 1)
classes: 'all' for all, 'present' for classes present in labels, or a list of classes to average.
"""
if probas.numel() == 0:
# only void pixels, the gradients should be 0
return probas * 0.
C = probas.size(1)
losses = []
class_to_sum = list(range(C)) if classes in ['all', 'present'] else classes
for c in class_to_sum:
fg = (labels == c).float() # foreground for class c
if (classes == 'present' and fg.sum() == 0):
continue
if C == 1:
if len(classes) > 1:
raise ValueError('Sigmoid output possible only with 1 class')
class_pred = probas[:, 0]
else:
class_pred = probas[:, c]
errors = (Variable(fg) - class_pred).abs()
errors_sorted, perm = torch.sort(errors, 0, descending=True)
perm = perm.data
fg_sorted = fg[perm]
losses.append(torch.dot(errors_sorted, Variable(lovasz_grad(fg_sorted))))
return mean(losses)
def lovasz_grad(gt_sorted):
"""
Computes gradient of the Lovasz extension w.r.t sorted errors
See Alg. 1 in paper
"""
p = len(gt_sorted)
gts = gt_sorted.sum()
intersection = gts - gt_sorted.float().cumsum(0)
union = gts + (1 - gt_sorted).float().cumsum(0)
jaccard = 1. - intersection / union
if p > 1: # cover 1-pixel case
jaccard[1:p] = jaccard[1:p] - jaccard[0:-1]
return jaccard
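# Illustrative sketch (illustration only, not from the original loss module):
# lovasz_grad on a toy vector. gt_sorted holds ground-truth membership of one
# class, sorted by decreasing prediction error; the returned increments weight
# each sorted error in lovasz_softmax_flat, and their cumulative sums equal
# 1 - IoU of each prefix.
def _example_lovasz_grad():
    import torch  # local import so the sketch stays self-contained
    gt_sorted = torch.tensor([1., 0., 1., 0.])
    grad = lovasz_grad(gt_sorted)
    # approximately [0.5, 0.1667, 0.3333, 0.0]
    return grad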
def flatten_probas(probas, labels, ignore=None):
"""
Flattens predictions in the batch
"""
if probas.dim() == 2:
# do nothing, 3D segmentation for sparse tensor
pass
elif probas.dim() == 3:
|
nmean compatible with generators.
"""
l = iter(l)
if ignore_nan:
l = ifilterfalse(isnan, l)
try:
n = 1
acc = next(l)
except StopIteration:
if empty == 'raise':
raise ValueError('Empty mean')
return empty
for n, v in enumerate(l, 2):
acc += v
if n == 1:
return acc
return acc / n
|
# assumes output of a sigmoid layer
B, H, W = probas.size()
probas = probas.view(B, 1, H, W)
probas = probas.permute(0, 2, 3, 1).contiguous().view(-1, C) # B * H * W, C = P, C
elif probas.dim() == 5:
# 3D segmentation for dense tensor
B, C, L, H, W = probas.size()
probas = probas.contiguous().view(B, C, L, H*W)
probas = probas.permute(0, 2, 3, 1).contiguous().view(-1, C) # B * H * W, C = P, C
labels = labels.view(-1)
if ignore is not None:
valid = (labels != ignore)
# vprobas = probas[valid.nonzero().squeeze()]
# for newer pytorch
vprobas = probas[torch.nonzero(valid, as_tuple=False).squeeze()]
vlabels = labels[valid]
return vprobas, vlabels
else:
return probas, labels
# --------------------------- HELPER FUNCTIONS ---------------------------
def isnan(x):
return x != x
def mean(l, ignore_nan=False, empty=0):
"""
na
|
boxed.py
|
from typing import (Iterable,
Tuple,
Type)
from .hints import (Box,
Contour,
Point,
Polygon,
Scalar,
Segment)
def from_contour(contour: Contour, box_cls: Type[Box]) -> Box:
return from_points(contour.vertices, box_cls)
def from_contours(contours: Iterable[Contour], box_cls: Type[Box]) -> Box:
iterator = iter(contours)
contour = next(iterator)
min_x, max_x, min_y, max_y = _from_points(contour.vertices)
for contour in iterator:
(contour_min_x, contour_max_x, contour_min_y,
contour_max_y) = _from_points(contour.vertices)
if max_x < contour_max_x:
max_x = contour_max_x
if contour_min_x < min_x:
min_x = contour_min_x
if max_y < contour_max_y:
max_y = contour_max_y
if contour_min_y < min_y:
min_y = contour_min_y
return box_cls(min_x, max_x, min_y, max_y)
def from_points(points: Iterable[Point], box_cls: Type[Box]) -> Box:
return box_cls(*_from_points(points))
def from_polygon(polygon: Polygon, box_cls: Type[Box]) -> Box:
return from_points(polygon.border.vertices, box_cls)
def from_polygons(polygons: Iterable[Polygon], box_cls: Type[Box]) -> Box:
return from_contours((polygon.border for polygon in polygons), box_cls)
def from_segment(segment: Segment, box_cls: Type[Box]) -> Box:
return box_cls(*_from_segment(segment))
def from_segments(segments: Iterable[Segment], box_cls: Type[Box]) -> Box:
iterator = iter(segments)
segment = next(iterator)
min_x, max_x, min_y, max_y = _from_segment(segment)
for segment in iterator:
(segment_min_x, segment_max_x, segment_min_y,
segment_max_y) = _from_segment(segment)
if max_x < segment_max_x:
max_x = segment_max_x
if segment_min_x < min_x:
min_x = segment_min_x
if max_y < segment_max_y:
|
return box_cls(min_x, max_x, min_y, max_y)
def _from_points(points: Iterable[Point]
) -> Tuple[Scalar, Scalar, Scalar, Scalar]:
iterator = iter(points)
point = next(iterator)
max_x = min_x = point.x
max_y = min_y = point.y
for point in iterator:
if max_x < point.x:
max_x = point.x
elif point.x < min_x:
min_x = point.x
if max_y < point.y:
max_y = point.y
elif point.y < min_y:
min_y = point.y
return min_x, max_x, min_y, max_y
def _from_segment(segment: Segment) -> Tuple[Scalar, Scalar, Scalar, Scalar]:
start, end = segment.start, segment.end
max_x, min_x = (end.x, start.x) if start.x < end.x else (start.x, end.x)
max_y, min_y = (end.y, start.y) if start.y < end.y else (start.y, end.y)
return min_x, max_x, min_y, max_y
|
max_y = segment_max_y
if segment_min_y < min_y:
min_y = segment_min_y
|
test_matrix.py
|
"""
test_matrix
===========
Tests for the scoring matrices of the `malign` package.
"""
# TODO: add test for identity matrix
# TODO: add test for initialization only from sparse subdomain
# TODO: add test providing domains
# TODO: add, in general, tests where there is disagreement between scores/subm/domain
# TODO: replace .num_domains with len(.domains) -- or maybe just __len__?
# Import Python libraries
import math
import pytest
# Import the library itself
import malign
# Vectors for tests
PAIRWISE_TEST_VECTORS = {
("-", "-"): 0.0,
("-", "X"): -3.0,
("-", "Y"): -9.0,
("a", "-"): -3.0,
("a", "X"): 0.0,
("a", "Y"): 8.0,
("b", "-"): -5.0,
("b", "X"): 4.0,
("b", "Y"): 4.0,
("c", "-"): 2.0,
("c", "X"): -1.0,
("c", "Y"): 7.0,
}
MULTIWISE_TEST_VECTORS = {
("-", "-", "-"): 0.0,
("-", "-", "i"): -4.0,
("-", "-", "j"): -8.0,
("-", "X", "-"): -5.0,
("-", "X", "i"): -3.0,
("-", "X", "j"): -5.0,
("-", "Y", "-"): -5.0,
("-", "Y", "i"): -9.0,
("-", "Y", "j"): -6.0,
("a", "-", "-"): 0.0,
("a", "-", "i"): -3.0,
("a", "-", "j"): 3.0,
("a", "X", "-"): 0.0,
("a", "X", "i"): 0.0,
("a", "X", "j"): 8.0,
("a", "Y", "-"): 8.0,
("a", "Y", "i"): 8.0,
("a", "Y", "j"): 8.0,
("b", "-", "-"): 0.0,
("b", "-", "i"): -5.0,
("b", "-", "j"): -6.0,
("b", "X", "-"): 4.0,
("b", "X", "i"): 4.0,
("b", "X", "j"): 5.0,
("b", "Y", "-"): 4.0,
("b", "Y", "i"): 4.0,
("b", "Y", "j"): 4.0,
("c", "-", "-"): 0.0,
("c", "-", "i"): 2.0,
("c", "-", "j"): -5.0,
("c", "X", "-"): -1.0,
("c", "X", "i"): -1.0,
("c", "X", "j"): 6.0,
("c", "Y", "-"): -7.0,
("c", "Y", "i"): 7.0,
("c", "Y", "j"): 7.0,
}
PAIRWISE_TEST_SPARSE_VECTOR_01 = {
("-", "X"): -3.0,
("a", "-"): -3.0,
("a", "X"): 0.0,
("a", "Y"): 8.0,
("b", "-"): -5.0,
("b", "Y"): 4.0,
("c", "X"): -1.0,
("c", "Y"): 7.0,
}
PAIRWISE_TEST_SPARSE_VECTOR_02 = {
("a", "-"): -4.0,
("a", "i"): 2.0,
("a", "j"): 2.0,
("b", "i"): -5.0,
("b", "j"): 9.0,
("c", "-"): -7.0,
("c", "j"): 4.0,
}
MULTIWISE_TEST_VECTORS_SPARSE = {
("-", "-", "-"): 0.0,
("-", "-", "i"): -4.0,
("-", "-", "j"): -8.0,
("-", "X", "i"): -3.0,
("-", "X", "j"): -5.0,
("-", "Y", "-"): -5.0,
("-", "Y", "i"): -9.0,
("-", "Y", "j"): -6.0,
("a", "-", "-"): 0.0,
("a", "-", "i"): -3.0,
("a", "-", "j"): 3.0,
("a", "X", "-"): 0.0,
("a", "X", "i"): 0.0,
("a", "Y", "-"): 8.0,
("a", "Y", "i"): 8.0,
("a", "Y", "j"): 8.0,
("b", "-", "-"): 0.0,
("b", "X", "-"): 4.0,
("b", "X", "i"): 4.0,
("b", "X", "j"): 5.0,
("b", "Y", "-"): 4.0,
("b", "Y", "i"): 4.0,
("b", "Y", "j"): 4.0,
("c", "-", "-"): 0.0,
("c", "-", "i"): 2.0,
("c", "-", "j"): -5.0,
("c", "X", "i"): -1.0,
("c", "X", "j"): 6.0,
("c", "Y", "-"): -7.0,
("c", "Y", "i"): 7.0,
}
def test_pairwise_from_full_vectors():
"""
Test pairwise matrices built from complete vectors.
"""
# Build matrix
matrix = malign.ScoringMatrix(PAIRWISE_TEST_VECTORS)
# Assertions
assert matrix.num_domains == 2
assert matrix.gap == "-"
assert len(matrix.scores) == 12
assert matrix["-", "-"] == 0.0
assert matrix["a", "Y"] == 8.0
assert len(matrix.domains) == 2
assert tuple(matrix.domains[1]) == ("-", "X", "Y")
def test_pairwise_from_full_vectors_with_domains():
"""
Test pairwise matrices built from complete vectors with domains.
"""
# Build matrix with "correct" domains
matrix_a = malign.ScoringMatrix(
PAIRWISE_TEST_VECTORS, domains=[["-", "a", "b", "c"], ["-", "X", "Y"]]
)
# Build matrix with "expanded" domains
matrix_b = malign.ScoringMatrix(
PAIRWISE_TEST_VECTORS,
domains=[["-", "a", "b", "c", "d"], ["-", "X", "Y", "Z"]],
)
# Build matrix with "insufficient" domains
with pytest.raises(ValueError):
malign.ScoringMatrix(
PAIRWISE_TEST_VECTORS, domains=[["-", "a", "b"], ["-", "Y", "Z"]]
)
# Assertions
assert tuple(matrix_a.domains[1]) == ("-", "X", "Y")
assert tuple(matrix_b.domains[1]) == ("-", "X", "Y", "Z")
def test_multiwise_from_full_vectors():
"""
Test multiwise matrices built from complete vectors.
"""
# Build matrix
matrix = malign.ScoringMatrix(MULTIWISE_TEST_VECTORS)
# Assertions
assert matrix.num_domains == 3
assert matrix.gap == "-"
assert len(matrix.scores) == 69
assert len(matrix.domains) == 3
assert tuple(matrix.domains[2]) == ("-", "i", "j")
assert matrix["-", "-", "-"] == 0.0
assert matrix["a", "Y", "j"] == pytest.approx(8.0)
# TODO: add test with "default" (currently "mean")
@pytest.mark.parametrize(
"method,num_domains,gap,size,tests",
[
[
"mean",
3,
"-",
69,
[
[("a", "Y", "j"), 8.0, 1e-03],
[("a", "X", "j"), 0.366666, 1e-03],
[("c", "X", "-"), 0.366666, 1e-03],
[("b", "-", "i"), 0.366666, 1e-03],
[("b", "-", "j"), 0.366666, 1e-03],
[("c", "Y", "j"), 0.366666, 1e-03],
[("-", "X", "-"), 0.366666, 1e-03],
],
],
[
"median",
3,
"-",
69,
[
[("a", "Y", "j"), 8.0, 1e-03],
[("a", "X", "j"), 0.0, 1e-03],
[("c", "X", "-"), 0.0, 1e-03],
[("b", "-", "i"), 0.0, 1e-03],
[("b", "-", "j"), 0.0, 1e-03],
[("c", "Y", "j"), 0.0, 1e-03],
[("-", "X", "-"), 0.0, 1e-03],
],
],
[
"decision_tree",
3,
"-",
69,
[
[("a", "Y", "j"), 8.0, 1e-03],
[("a", "X", "j"), 0.0, 1e-03],
[("c", "X", "-"), 0.0, 1e-03],
[("b", "-", "i"), -4.0, 1e-03],
[("b", "-", "j"), -8.0, 1e-03],
[("c", "Y", "j"), -6.0, 1e-03],
[("-", "X", "-"), 0.0, 1e-03],
],
],
[
"extra_trees",
3,
"-",
69,
[
[("a", "Y", "j"), 8.0, 1e-03],
[("a", "X", "j"), 0.0, 1e-03],
[("c", "X", "-"), -7.0, 1e-03],
[("b", "-", "i"), -3.0, 1e-03],
[("b", "-", "j"), 3.0, 1e-03],
[("c", "Y", "j"), 7.0, 1e-03],
[("-", "X", "-"), -4.5, 1e-03],
],
],
[
"k_neighbors",
3,
"-",
69,
[
[("a", "Y", "j"), 8.0, 1e-03],
[("a", "X", "j"), 1.86666, 1e-03],
[("c", "X", "-"), 0.93333, 1e-03],
[("b", "-", "i"), 1.46666, 1e-03],
[("b", "-", "j"), 1.2, 1e-03],
[("c", "Y", "j"), 1.6, 1e-03],
[("-", "X", "-"), 0.06666, 1e-03],
],
],
[
"bayesian_ridge",
3,
"-",
69,
[
[("a", "Y", "j"), 8.0, 1e-03],
[("a", "X", "j"), 2.83974, 1e-03],
[("c", "X", "-"), 0.48655, 1e-03],
[("b", "-", "i"), 1.36074, 1e-03],
[("b", "-", "j"), 1.45179, 1e-03],
[("c", "Y", "j"), 1.43649, 1e-03],
[("-", "X", "-"), -3.45639, 1e-03],
],
],
],
)
def test_multiwise_from_sparse_vectors(method, num_domains, gap, size, tests):
"""
Test multiwise matrices built from sparse vectors.
"""
matrix = malign.ScoringMatrix(MULTIWISE_TEST_VECTORS_SPARSE, impute_method=method)
assert matrix.num_domains == num_domains
assert matrix.gap == gap
assert len(matrix.scores) == size
assert ["-", "i", "j"] in matrix.domains
assert matrix["-", "-", "-"] == 0.0
for key, expected, rel in tests:
assert matrix[key] == pytest.approx(expected, rel=rel)
def test_multiwise_from_subvectors():
"""
Test multiwise matrices built from sub vectors.
"""
# Build sub matrices, and then the main matrix
scores_01 = {
(key[0], key[1], None): value
for key, value in PAIRWISE_TEST_SPARSE_VECTOR_01.items()
}
scores_02 = {
(key[0], None, key[1]): value
for key, value in PAIRWISE_TEST_SPARSE_VECTOR_02.items()
}
scores = {**scores_01, **scores_02}
matrix = malign.ScoringMatrix(scores)
# Assertions
assert matrix.num_domains == 3
assert matrix.gap == "-"
assert len(matrix.scores) == 69
assert len(matrix.domains) == 3
assert matrix["-", "-", "-"] == 0.0
assert math.isclose(matrix["a", "Y", "j"], 0.5, rel_tol=1e-05)
assert math.isclose(matrix["a", "X", "j"], 0.5, rel_tol=1e-05)
assert math.isclose(matrix["c", "X", "-"], 0.5, rel_tol=1e-05)
assert math.isclose(matrix["b", "-", "i"], 0.5, rel_tol=1e-05)
assert math.isclose(matrix["b", "-", "j"], 0.5, rel_tol=1e-05)
assert math.isclose(matrix["c", "Y", "j"], 0.5, rel_tol=1e-05)
assert math.isclose(matrix["-", "X", "-"], 0.5, rel_tol=1e-05)
def test_subdomain_query():
"""
Test querying of subdomains.
"""
# Build matrices with the various filling methods
matrix = malign.ScoringMatrix(MULTIWISE_TEST_VECTORS)
assert math.isclose(matrix[None, "X", "i"], 0.25, rel_tol=1e-05)
assert math.isclose(matrix["c", None, "i"], 0.25, rel_tol=1e-05)
assert math.isclose(matrix["c", "X", None], 0.25, rel_tol=1e-05)
def test_load_save():
"""
Test load and saving matrices
"""
# Build matrices with the various filling methods
matrix = malign.ScoringMatrix(MULTIWISE_TEST_VECTORS)
# Build a temporary file name and save
# TODO: does not work on windows...
|
# handler = tempfile.NamedTemporaryFile()
# matrix.save(handler.name)
#
# # Load and check
# matrix2 = malign.ScoringMatrix(handler.name)
#
# # Assertions
# assert matrix.scores == matrix2.scores
# assert tuple(matrix.domains) == tuple(matrix2.domains)
def test_copy():
"""
Test method for matrix copy.
"""
# Build reference matrix
ref_matrix = malign.ScoringMatrix(MULTIWISE_TEST_VECTORS)
# Get copy
cpy_matrix = ref_matrix.copy()
# Perform manual comparison
assert ref_matrix.scores == cpy_matrix.scores
assert ref_matrix.domains == cpy_matrix.domains
# Assert they are different
assert id(ref_matrix) != id(cpy_matrix)
def test_set_item():
"""
Test matrix __setitem__.
"""
# Build reference matrix
matrix = malign.ScoringMatrix(MULTIWISE_TEST_VECTORS)
# Various sets and tests
matrix["a", "X", "i"] = -111
matrix[None, "X", "i"] = -222
with pytest.raises(ValueError):
matrix["<", "X", "i"] = -333
assert matrix["a", "X", "i"] == -111
assert matrix[None, "X", "i"] == -222
def test_tabulate():
"""
Test matrix tabulation.
"""
# Build reference matrix
matrix_a = malign.ScoringMatrix(PAIRWISE_TEST_VECTORS)
matrix_b = malign.ScoringMatrix(MULTIWISE_TEST_VECTORS)
# NOTE: currently only building it, to get coverage
assert len(matrix_a.tabulate()) > 0
assert len(matrix_b.tabulate()) > 0
def test_missing_key():
"""
Test lookups of keys that are missing from a pairwise matrix.
"""
# Build matrix
matrix = malign.ScoringMatrix(PAIRWISE_TEST_VECTORS)
assert matrix["a", "1"] == -3.0
assert matrix["A", "X"] == -3.0
assert matrix["A", "1"] == -9.0
| |
test_bulk_transfers.py
|
import copy
import json
import logging
import pytest
import burn_lock_functions
import test_utilities
from integration_env_credentials import sifchain_cli_credentials_for_test
from pytest_utilities import generate_minimal_test_account
from test_utilities import EthereumToSifchainTransferRequest, SifchaincliCredentials
def create_new_sifaddr():
new_account_key = test_utilities.get_shell_output("uuidgen")
credentials = sifchain_cli_credentials_for_test(new_account_key)
new_addr = burn_lock_functions.create_new_sifaddr(credentials=credentials, keyname=new_account_key)
return new_addr["address"]
def create_new_sifaddr_and_key():
new_account_key = test_utilities.get_shell_output("uuidgen")
credentials = sifchain_cli_credentials_for_test(new_account_key)
new_addr = burn_lock_functions.create_new_sifaddr(credentials=credentials, keyname=new_account_key)
return new_addr["address"], new_addr["name"]
@pytest.mark.skip(reason="run manually")
def
|
(
basic_transfer_request: EthereumToSifchainTransferRequest,
smart_contracts_dir,
source_ethereum_address,
bridgebank_address,
bridgetoken_address,
ethereum_network,
):
n_transfers = int(test_utilities.get_optional_env_var("NTRANSFERS", 2))
ganache_delay = test_utilities.get_optional_env_var("GANACHE_DELAY", 1)
# test_utilities.get_shell_output(f"{integration_dir}/ganache_start.sh {ganache_delay}")
amount = "{:d}".format(5 * test_utilities.highest_gas_cost)
new_addresses_and_keys = list(map(lambda x: create_new_sifaddr_and_key(), range(n_transfers)))
logging.info(f"aandk: {new_addresses_and_keys}")
new_addresses = list(map(lambda a: a[0], new_addresses_and_keys))
logging.debug(f"new_addresses: {new_addresses}")
new_eth_addrs = test_utilities.create_ethereum_addresses(smart_contracts_dir, basic_transfer_request.ethereum_network, len(new_addresses))
logging.info(f"new eth addrs: {new_eth_addrs}")
request: EthereumToSifchainTransferRequest = copy.deepcopy(basic_transfer_request)
requests = list(map(lambda addr: {
"amount": amount,
"symbol": test_utilities.NULL_ADDRESS,
"sifchain_address": addr
}, new_addresses))
json_requests = json.dumps(requests)
test_utilities.run_yarn_command(
" ".join([
f"yarn --cwd {smart_contracts_dir}",
"integrationtest:sendBulkLockTx",
f"--amount {amount}",
f"--symbol eth",
f"--json_path {request.solidity_json_path}",
f"--sifchain_address {new_addresses[0]}",
f"--transactions \'{json_requests}\'",
f"--ethereum_address {source_ethereum_address}",
f"--bridgebank_address {bridgebank_address}",
f"--ethereum_network {ethereum_network}",
])
)
requests = list(map(lambda addr: {
"amount": amount,
"symbol": bridgetoken_address,
"sifchain_address": addr
}, new_addresses))
json_requests = json.dumps(requests)
yarn_result = test_utilities.run_yarn_command(
" ".join([
f"yarn --cwd {smart_contracts_dir}",
"integrationtest:sendBulkLockTx",
f"--amount {amount}",
"--lock_or_burn burn",
f"--symbol {bridgetoken_address}",
f"--json_path {request.solidity_json_path}",
f"--sifchain_address {new_addresses[0]}",
f"--transactions \'{json_requests}\'",
f"--ethereum_address {source_ethereum_address}",
f"--bridgebank_address {bridgebank_address}",
f"--ethereum_network {ethereum_network}",
])
)
logging.info(f"bulk result: {yarn_result}")
manual_advance = False
if manual_advance:
test_utilities.advance_n_ethereum_blocks(test_utilities.n_wait_blocks, smart_contracts_dir)
test_utilities.wait_for_ethereum_block_number(yarn_result["blockNumber"] + test_utilities.n_wait_blocks, basic_transfer_request)
for a in new_addresses:
test_utilities.wait_for_sif_account(a, basic_transfer_request.sifnoded_node, 90)
test_utilities.wait_for_sifchain_addr_balance(a, "ceth", amount, basic_transfer_request.sifnoded_node, 180)
test_utilities.wait_for_sifchain_addr_balance(a, "rowan", amount, basic_transfer_request.sifnoded_node, 180)
text_file = open("pfile.cmds", "w")
simple_credentials = SifchaincliCredentials(
keyring_passphrase=None,
keyring_backend="test",
from_key=None,
sifnoded_homedir=None
)
logging.info(f"all accounts are on sifchain and have the correct balance")
for sifaddr, ethaddr in zip(new_addresses_and_keys, new_eth_addrs):
r = copy.deepcopy(basic_transfer_request)
r.sifchain_address = sifaddr[0]
r.ethereum_address = ethaddr["address"]
r.amount = 100
simple_credentials.from_key = sifaddr[1]
c = test_utilities.send_from_sifchain_to_ethereum_cmd(r, simple_credentials)
text_file.write(f"{c}\n")
text_file.close()
# test_utilities.get_shell_output("cat pfile.cmds | parallel --trim lr -v {}")
test_utilities.get_shell_output("bash -x pfile.cmds")
for sifaddr, ethaddr in zip(new_addresses_and_keys, new_eth_addrs):
r = copy.deepcopy(basic_transfer_request)
r.ethereum_address = ethaddr["address"]
r.amount = 100
test_utilities.wait_for_eth_balance(r, 100, 300)
|
test_bulk_transfers
|
function_cache.py
|
#!/usr/bin/env python
# cardinal_pythonlib/django/function_cache.py
"""
===============================================================================
Original code copyright (C) 2009-2020 Rudolf Cardinal ([email protected]).
This file is part of cardinal_pythonlib.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================================================
**Cache the results of function calls using Django.**
Based on https://github.com/rchrd2/django-cache-decorator
but fixed for Python 3 / Django 1.10.
"""
import hashlib
from typing import Any, Callable, Dict, Tuple
# noinspection PyUnresolvedReferences
from django.core.cache import cache # default cache
from cardinal_pythonlib.logs import get_brace_style_log_with_null_handler
from cardinal_pythonlib.json.serialize import json_encode
log = get_brace_style_log_with_null_handler(__name__)
FunctionType = Callable[..., Any]
ArgsType = Tuple[Any, ...]
KwargsType = Dict[str, Any]
def get_call_signature(fn: FunctionType,
args: ArgsType,
kwargs: KwargsType,
debug_cache: bool = False) -> str:
"""
Takes a function and its args/kwargs, and produces a string description
of the function call (the call signature) suitable for use indirectly as a
cache key. The string is a JSON representation. See ``make_cache_key`` for
a more suitable actual cache key.
"""
# Note that the function won't have the __self__ argument (as in
# fn.__self__), at this point, even if it's a member function.
try:
call_sig = json_encode((fn.__qualname__, args, kwargs))
except TypeError:
log.critical(
"\nTo decorate using @django_cache_function without specifying "
"cache_key, the decorated function's owning class and its "
"parameters must be JSON-serializable (see jsonfunc.py, "
"django_cache_fn.py).\n")
raise
if debug_cache:
log.debug("Making call signature {!r}", call_sig)
return call_sig
def make_cache_key(call_signature: str,
debug_cache: bool = False) -> str:
"""
Takes a call signature (as produced by get_call_signature) and derives a
string suitable for use as a cache key: the MD5 hash of the JSON-encoded
call signature.
The logic behind these decisions is as follows:
- We have a bunch of components of arbitrary type, and we need to get
a unique string out.
- We shouldn't use ``str()``, because that is often poorly specified; e.g.
is ``'a.b.c'`` a ``TableId``, or is it a ``ColumnId`` with no ``'db'``
field?
- We could use ``repr()``: sometimes that gives us helpful things that
could in principle be passed to ``eval()``, in which case ``repr()`` would
be fine, but sometimes it doesn't, and gives unhelpful things like
``'<__main__.Thing object at 0x7ff3093ebda0>'``.
- However, if something encodes to JSON, that representation should
be reversible and thus contain the right sort of information.
- Note also that bound methods will come with a ``self`` argument, for
which the address may be very relevant...
- Let's go with ``repr()``. Users of the cache decorator should not pass
objects whose ``repr()`` includes the memory address of the object unless
they want those objects to be treated as distinct.
- Ah, no. The cache itself will pickle and unpickle things, and this
will change memory addresses of objects. So we can't store a reference
to an object using ``repr()`` and using ``cache.add()``/``pickle()`` and
hope they'll come out the same.
- Use the JSON after all.
- And do it in ``get_call_signature()``, not here.
- That means that any class we wish to decorate WITHOUT specifying a
cache key manually must support JSON.
"""
key = hashlib.md5(call_signature.encode("utf-8")).hexdigest()
if debug_cache:
log.debug("Making cache key {} from call_signature {!r}",
key, call_signature)
return key
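# Illustrative sketch (not part of the original module): computing a cache key
# by hand for a hypothetical call signature. json.dumps stands in here for the
# json_encode helper used by get_call_signature; the md5 step is exactly
# make_cache_key above.
def _example_manual_cache_key() -> str:
    import json  # local import; the real signature builder uses cardinal_pythonlib's json_encode
    call_sig = json.dumps(["mymodule.add", [2, 3], {}])  # hypothetical qualname/args/kwargs
    return make_cache_key(call_sig)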
def django_cache_function(timeout: int = 5 * 60,
cache_key: str = '',
debug_cache: bool = False):
"""
Decorator to add caching to a function in Django.
Uses the Django default cache.
Args:
timeout: timeout in seconds; use None for "never expire", as 0 means
"do not cache".
cache_key: optional cache key to use (if falsy, we'll invent one)
debug_cache: show hits/misses?
"""
cache_key = cache_key or None
def decorator(fn):
def wrapper(*args, **kwargs):
# - NOTE that Django returns None from cache.get() for "not in
# cache", so can't cache a None value;
# https://docs.djangoproject.com/en/1.10/topics/cache/#basic-usage # noqa
# - We need to store a bit more than just the function result
# anyway, to detect hash collisions when the user doesn't specify
# the cache_key, so we may as well use that format even if the
# user does specify the cache_key, and then we can store a None
# result properly as well.
if cache_key:
# User specified a cache key. This is easy.
call_sig = ''
_cache_key = cache_key
check_stored_call_sig = False
else:
# User didn't specify a cache key, so we'll do one
# automatically. Since we do this via a hash, there is a small
# but non-zero chance of a hash collision.
call_sig = get_call_signature(fn, args, kwargs)
_cache_key = make_cache_key(call_sig)
check_stored_call_sig = True
if debug_cache:
log.critical("Checking cache for key: " + _cache_key)
cache_result_tuple = cache.get(_cache_key) # TALKS TO CACHE HERE
if cache_result_tuple is None:
if debug_cache:
log.debug("Cache miss")
else:
if debug_cache:
|
cached_call_sig, func_result = cache_result_tuple
if (not check_stored_call_sig) or cached_call_sig == call_sig:
return func_result
log.warning(
f"... Cache hit was due to hash collision; "
f"cached_call_sig {cached_call_sig!r} != "
f"call_sig {call_sig!r}")
# If we get here, either it wasn't in the cache, or something
# was in the cache that matched by cache_key but was actually a
# hash collision. Either way, we must do the real work.
func_result = fn(*args, **kwargs)
cache_result_tuple = (call_sig, func_result)
cache.set(key=_cache_key, value=cache_result_tuple,
timeout=timeout) # TALKS TO CACHE HERE
return func_result
return wrapper
return decorator
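# Illustrative usage sketch (not part of the original module). It assumes a
# Django project with a configured default cache backend; the decorated
# function's arguments must be JSON-serializable so a key can be derived
# automatically. `_example_expensive_lookup` is a hypothetical example, not an
# API of this library.
@django_cache_function(timeout=60)
def _example_expensive_lookup(x: int) -> int:
    # stands in for slow work (database query, remote call, ...); the first
    # call stores (call_signature, result) under an md5 key, and later calls
    # with the same argument return the cached value until the timeout expires.
    return x * x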
|
log.debug("Cache hit")
|
manga.go
|
package mangaeden
|
"html"
"io/ioutil"
"net/http"
"regexp"
)
type rawChapterInfo []interface{}
func (c rawChapterInfo) getNumber() float64 {
return c[0].(float64)
}
func (c rawChapterInfo) getTitle() string {
if c[2] == nil {
return ""
}
return c[2].(string)
}
func (c rawChapterInfo) getID() string {
if c[3] == nil {
return ""
}
return c[3].(string)
}
// rawMangaInfo contains the information regarding a manga
type rawMangaInfo struct {
Title string `json:"title"`
Description string `json:"description"`
Image string `json:"image"`
Chapters []rawChapterInfo `json:"chapters"`
}
type chapterInfo struct {
Number float64
Title string
ID string
}
func newChapterInfo(raw rawChapterInfo) chapterInfo {
return chapterInfo{
Number: raw.getNumber(),
Title: raw.getTitle(),
ID: raw.getID(),
}
}
type MangaInfo struct {
ID string
Title string
Description string
Image string
Chapters []chapterInfo
}
func newMangaInfo(raw rawMangaInfo, mangaID string) MangaInfo {
var chapters []chapterInfo
for _, c := range raw.Chapters {
chapters = append(chapters, newChapterInfo(c))
}
return MangaInfo{
ID: mangaID,
Title: raw.Title,
Description: html.UnescapeString(raw.Description), // unescape the special characters (like è)
Image: raw.Image,
Chapters: chapters,
}
}
// GetMangaID gets the page from a link like "/en/en-manga/soul-eater/" and extracts the manga's ID
func GetMangaID(mangaURL string) (string, error) {
pattern := regexp.MustCompile(`window\.manga_id2 = "([^"]*)"`)
resp, err := http.Get("https://www.mangaeden.com" + mangaURL)
if err != nil {
return "", err
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return "", err
}
return pattern.FindStringSubmatch(string(body))[1], nil
}
// GetMangaInfo downloads the manga info corresponding to the id and returns a MangaInfo struct
func GetMangaInfo(mangaID string) (MangaInfo, error) {
// manga info download
resp, err := http.Get("https://www.mangaeden.com/api/manga/" + mangaID)
if err != nil {
return MangaInfo{}, err
}
defer resp.Body.Close()
// download content reading
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return MangaInfo{}, err
}
// json content processing
var raw rawMangaInfo
err = json.Unmarshal(body, &raw)
if err != nil {
return MangaInfo{}, err
}
return newMangaInfo(raw, mangaID), nil
}
|
import (
"encoding/json"
|
alertsms.go
|
package twilio
import (
"bytes"
"strings"
"text/template"
"unicode"
"github.com/pkg/errors"
)
// 160 GSM characters (140 bytes) is the max for a single segment message.
// Multi-segment messages include a 6-byte header limiting to 153 GSM characters
// per segment.
//
// Non-GSM will use UCS-2 encoding, using 2-bytes per character. The max would
// then be 70 or 67 characters for single or multi-segmented messages, respectively.
const maxGSMLen = 160
type alertSMS struct {
ID int
Count int
Body string
Link string
Code int
}
var smsTmpl = template.Must(template.New("alertSMS").Parse(`
{{- if .ID}}Alert #{{.ID}}: {{.Body}}
{{- else if .Count}}Svc '{{.Body}}': {{.Count}} unacked alert{{if gt .Count 1}}s{{end}}
{{- end}}
{{- if .Link }}
{{.Link}}
{{- end}}
{{- if and .Count .ID }}
{{.Count}} other alert{{if gt .Count 1}}s have{{else}} has{{end}} been updated.
{{- end}}
{{- if .Code}}
Reply '{{.Code}}a{{if .Count}}a{{end}}' to ack{{if .Count}} all{{end}}, '{{.Code}}c{{if .Count}}c{{end}}' to close{{if .Count}} all{{end}}.
{{- end}}`,
))
const gsmAlphabet = "@∆ 0¡P¿p£!1AQaq$Φ\"2BRbr¥Γ#3CScsèΛ¤4DTdtéΩ%5EUeuùΠ&6FVfvìΨ'7GWgwòΣ(8HXhxÇΘ)9IYiy\n Ξ *:JZjzØ+;KÄkäøÆ,<LÖlö\ræ-=MÑmñÅß.>NÜnüåÉ/?O§oà"
var gsmChr = make(map[rune]bool, len(gsmAlphabet))
func init() {
for _, r := range gsmAlphabet {
gsmChr[r] = true
}
}
func mapGSM(r rune) rune {
if unicode.IsSpace
|
return ' '
}
if !unicode.IsPrint(r) {
return -1
}
if gsmChr[r] {
return r
}
// Map similar characters to keep as much meaning as possible.
switch r {
case '_', '|', '~':
return '-'
case '[', '{':
return '('
case ']', '}':
return ')'
case '»':
return '>'
case '`', '’', '‘':
return '\''
}
switch {
case unicode.Is(unicode.Dash, r):
return '-'
case unicode.Is(unicode.Quotation_Mark, r):
return '"'
}
// If no substitute, replace with '?'
return '?'
}
// hasTwoWaySMSSupport returns true if a number supports 2-way SMS messaging (replies).
func hasTwoWaySMSSupport(number string) bool {
// India numbers do not support SMS replies.
return !strings.HasPrefix(number, "+91")
}
// Render will render a single-segment SMS.
//
// Non-GSM characters will be replaced with '?' and Body will be
// truncated (if needed) until the output is <= 160 characters.
func (a alertSMS) Render() (string, error) {
a.Body = strings.Map(mapGSM, a.Body)
a.Body = strings.Replace(a.Body, " ", " ", -1)
a.Body = strings.TrimSpace(a.Body)
var buf bytes.Buffer
err := smsTmpl.Execute(&buf, a)
if err != nil {
return "", err
}
if buf.Len() > maxGSMLen {
newBodyLen := len(a.Body) - (buf.Len() - maxGSMLen)
if newBodyLen <= 0 {
return "", errors.New("message too long to include body")
}
a.Body = strings.TrimSpace(a.Body[:newBodyLen])
buf.Reset()
err = smsTmpl.Execute(&buf, a)
if err != nil {
return "", err
}
}
return buf.String(), nil
}
|
(r) {
|
main.js
|
// Module configuration
require.config({
paths: {
'jquery': 'https://cdn.bootcdn.net/ajax/libs/jquery/3.3.1/jquery.min',
'jcookie': 'https://cdn.bootcdn.net/ajax/libs/jquery-cookie/1.0/jquery.cookie.min',
'jlazyload': 'https://cdn.bootcdn.net/ajax/libs/jquery.lazyload/1.8.3/jquery.lazyload.min',
},
shim: {
'jcookie': {
deps: ['jquery'],
exports: 'jcookie'
},
'jlazyload': {
deps: ['jquery'],
exports: 'jlazyload'
}
|
})
require(['jquery', 'jcookie', 'jlazyload'], function() {
let pagemod = $('#currentpage').attr('data-page');
require([pagemod], function(page) {
page.init();
});
});
|
}
|
Modify.js
|
TC.control = TC.control || {};
if (!TC.Control) {
TC.syncLoadJS(TC.apiLocation + 'TC/Control');
}
TC.Consts.event.BEFOREFEATUREMODIFY = "beforefeaturemodify.tc";
TC.Consts.event.FEATUREMODIFY = "featuremodify.tc";
TC.Consts.event.FEATURESSELECT = "featuresselect.tc";
TC.Consts.event.FEATURESUNSELECT = "featuresunselect.tc";
TC.Consts.event.CHANGE = 'change';
TC.control.Modify = function () {
const self = this;
TC.Control.apply(self, arguments);
if (!TC.browserFeatures.inputTypeColor() && !window.CP) {
TC.loadCSS(TC.apiLocation + 'lib/color-picker/color-picker.min.css');
TC.syncLoadJS(TC.apiLocation + 'lib/color-picker/color-picker.min.js');
}
self.styles = TC.Util.extend(true, TC.Cfg.styles.selection, self.options.styles);
self.styles.text = self.styles.text || {
fontSize: self.styles.line.fontSize,
fontColor: self.styles.line.fontColor,
labelOutlineColor: self.styles.line.labelOutlineColor,
labelOutlineWidth: self.styles.line.labelOutlineWidth
};
self._classSelector = '.' + self.CLASS;
self.wrap = new TC.wrap.control.Modify(self);
self.snapping = (typeof self.options.snapping === 'boolean') ? self.options.snapping : true;
};
TC.inherit(TC.control.Modify, TC.Control);
(function () {
var ctlProto = TC.control.Modify.prototype;
ctlProto.CLASS = 'tc-ctl-mod';
ctlProto.template = {};
ctlProto.template[ctlProto.CLASS] = TC.apiLocation + "TC/templates/Modify.html";
ctlProto.template[ctlProto.CLASS + '-attr'] = TC.apiLocation + "TC/templates/ModifyAttributes.html";
const setFeatureSelectedState = function (ctl, features) {
ctl._deleteBtn.disabled = features.length === 0;
ctl._editAttrBtn.disabled = features.length !== 1;
ctl._joinBtn.disabled = features.length < 2;
ctl._splitBtn.disabled = features.filter(complexGeometryFilter).length === 0;
ctl.displayLabelText();
};
//const styleFunction = function (feature, mapStyles) {
// var result;
// switch (true) {
// case TC.feature.Polygon && feature instanceof TC.feature.Polygon:
// case TC.feature.MultiPolygon && feature instanceof TC.feature.MultiPolygon:
// result = TC.Util.extend({}, mapStyles.polygon);
// break;
// case TC.feature.Point && feature instanceof TC.feature.Point:
// case TC.feature.MultiPoint && feature instanceof TC.feature.MultiPoint:
// result = TC.Util.extend({}, mapStyles.point);
// break;
// default:
// result = TC.Util.extend({}, mapStyles.line);
// break;
// }
// const style = feature.getStyle();
// if (style.label) {
// result.label = style.label;
// result.fontSize = style.fontSize;
// result.fontColor = style.fontColor;
// result.labelOutlineColor = style.labelOutlineColor;
// result.labelOutlineWidth = style.labelOutlineWidth;
// }
// return result;
//};
//const setFeatureSelectedStyle = function (ctl, features) {
// const mapStyles = ctl.map.options.styles.selection;
// features.forEach(function (feature) {
// feature._originalStyle = TC.Util.extend({}, feature.getStyle());
// feature.setStyle(ctl.styleFunction(feature));
// });
//};
//const setFeatureUnselectedStyle = function (ctl, features) {
// features.forEach(function (feature) {
// if (feature._originalStyle) {
// const style = feature.getStyle();
// if (style.label) {
// const originalStyle = feature._originalStyle;
// originalStyle.label = style.label;
// originalStyle.fontSize = style.fontSize;
// originalStyle.fontColor = style.fontColor;
// originalStyle.labelOutlineColor = style.labelOutlineColor;
// originalStyle.labelOutlineWidth = style.labelOutlineWidth;
// }
// feature.setStyle(feature._originalStyle);
// feature._originalStyle = undefined;
// }
// })
//};
const complexGeometryFilter = function (elm) {
var result = false;
if ((TC.feature.MultiPolygon && elm instanceof TC.feature.MultiPolygon) ||
(TC.feature.MultiPolyline && elm instanceof TC.feature.MultiPolyline)) {
if (elm.geometry.length > 1) {
result = true;
}
}
return result;
};
ctlProto.register = function (map) {
const self = this;
const result = TC.Control.prototype.register.call(self, map);
if (self.options.layer) {
self.setLayer(self.options.layer);
}
map
.on(TC.Consts.event.FEATUREADD + ' ' + TC.Consts.event.FEATURESADD, function (e) {
Promise.all([self.getLayer(), self.renderPromise()]).then(function (objects) {
const layer = objects[0];
if (e.layer === layer) {
self.setSelectableState(true);
}
});
})
.on(TC.Consts.event.FEATUREREMOVE + ' ' + TC.Consts.event.FEATURESCLEAR, function (e) {
const layer = e.layer;
const feature = e.feature;
Promise.all([self.getLayer(), self.renderPromise()]).then(function (objects) {
if (layer === objects[0]) {
if (feature) {
self.unselectFeatures([feature]);
}
else {
self.unselectFeatures();
}
setFeatureSelectedState(self, self.getSelectedFeatures());
if (self.layer.features.length === 0) {
self.setSelectableState(false);
self.setTextMode(false);
}
}
});
})
.on(TC.Consts.event.LAYERUPDATE, function (e) {
const layer = e.layer;
Promise.all([self.getLayer(), self.renderPromise()]).then(function (objects) {
if (layer === objects[0]) {
setFeatureSelectedState(self, self.getSelectedFeatures());
}
});
});
self.on(TC.Consts.event.FEATURESSELECT + ' ' + TC.Consts.event.FEATURESUNSELECT, function () {
const selectedFeatures = self.getSelectedFeatures();
setFeatureSelectedState(self, selectedFeatures);
const unselectedFeatures = self.layer.features.filter(function (feature) {
return selectedFeatures.indexOf(feature) < 0;
});
unselectedFeatures.forEach(function (feature) {
feature.toggleSelectedStyle(false);
});
selectedFeatures.forEach(function (feature) {
feature.toggleSelectedStyle(true);
});
if (!self.getAttributeDisplayTarget().classList.contains(TC.Consts.classes.HIDDEN)) {
self.displayAttributes();
}
if (!selectedFeatures.length) {
self.closeAttributes();
}
});
return result;
};
ctlProto.render = function (callback) {
const self = this;
const renderCallback = function () {
self._selectBtn = self.div.querySelector('.' + self.CLASS + '-btn-select');
self._selectBtn.addEventListener(TC.Consts.event.CLICK, function (e) {
if (!e.target.disabled) {
if (self.isActive) {
self.deactivate();
}
else {
self.activate();
}
}
});
self._deleteBtn = self.div.querySelector('.' + self.CLASS + '-btn-delete');
self._deleteBtn.addEventListener(TC.Consts.event.CLICK, function () {
self.deleteSelectedFeatures();
});
self._textBtn = self.div.querySelector('.' + self.CLASS + '-btn-text');
self._textBtn.addEventListener(TC.Consts.event.CLICK, function () {
self.setTextMode(!self.textActive);
});
self._joinBtn = self.div.querySelector('.' + self.CLASS + '-btn-join');
self._splitBtn = self.div.querySelector('.' + self.CLASS + '-btn-split');
self._editAttrBtn = self.div.querySelector('.' + self.CLASS + '-btn-attr');
self._editAttrBtn.addEventListener(TC.Consts.event.CLICK, function () {
self.toggleAttributes();
});
self._textInput = self.div.querySelector('input.' + self.CLASS + '-txt');
self._textInput.addEventListener('input', function (e) {
self.labelFeatures(e.target.value);
});
self._styleSection = self.div.querySelector('.' + self.CLASS + '-style');
self._fontColorPicker = self.div.querySelector(self._classSelector + '-fnt-c');
self._fontColorPicker.addEventListener(TC.Consts.event.CHANGE, function (e) {
self.setFontColor(e.target.value);
});
self._fontSizeSelector = self.div.querySelector('.' + self.CLASS + '-fnt-s');
self._fontSizeSelector.addEventListener(TC.Consts.event.CHANGE, function (e) {
self.setFontSize(e.target.value);
});
self._attributesSection = self.div.querySelector('.' + self.CLASS + '-attr');
if (TC.Util.isFunction(callback)) {
callback();
}
};
const renderObject = {
fontSize: self.styles.text.fontSize,
fontColor: self.styles.text.fontColor,
labelOutlineColor: self.styles.text.labelOutlineColor,
labelOutlineWidth: self.styles.text.labelOutlineWidth
};
var promise;
if (TC.browserFeatures.inputTypeColor()) {
promise = self._set1stRenderPromise(self.renderData(renderObject, renderCallback));
}
else {
// The browser does not support input[type=color]; use the polyfill
promise = self._set1stRenderPromise(self.renderData(renderObject, function () {
const input = self.div.querySelector('input[type=color]');
input.style.backgroundColor = input.value;
input.style.color = 'transparent';
const picker = new CP(input, 'click', document.body);
input.onclick = function (e) {
e.preventDefault();
};
// Prevent the virtual keyboard from appearing on iOS
input.onfocus = function (e) {
this.blur();
};
input.onchange = function (e) {
this.style.backgroundColor = this.value;
};
self.map.loaded(function () {
picker.on("change", function (color) {
self.setFontColor('#' + color);
});
});
renderCallback();
}));
}
return promise;
};
ctlProto.activate = function () {
const self = this;
self._selectBtn.classList.add(TC.Consts.classes.ACTIVE);
TC.Control.prototype.activate.call(self);
self.wrap.activate(self.mode);
};
ctlProto.deactivate = function () {
const self = this;
TC.Control.prototype.deactivate.call(self);
if (self._selectBtn) {
setFeatureSelectedState(self, []);
}
if (self.wrap) {
self.wrap.deactivate();
}
//self.trigger(TC.Consts.event.DRAWCANCEL, { ctrl: self });
if (self._selectBtn) {
self._selectBtn.classList.remove(TC.Consts.classes.ACTIVE);
if (self.layer) {
self.layer.features.forEach(function (feature) {
feature.toggleSelectedStyle(false);
});
}
//setFeatureUnselectedStyle(self, self.getSelectedFeatures());
}
};
ctlProto.clear = function () {
const self = this;
if (self.layer) {
self.layer.clearFatures();
}
return self;
};
ctlProto.isExclusive = function () {
return true;
};
ctlProto.end = function () {
const self = this;
self.wrap.end();
return self;
};
ctlProto.setMode = function (mode, activate) {
const self = this;
if (mode)
self.mode = mode;
if (activate && mode) {
if (self.layer) {
self.layer.map.putLayerOnTop(self.layer);
}
self.activate();
}
else {
self.deactivate();
}
return self;
};
ctlProto.getLayer = function () {
var self = this;
// The control has been instantiated without an associated layer
if (self.options && typeof self.options.layer === 'boolean' && !self.options.layer) {
return Promise.resolve(null);
}
if (self.layer) {
return Promise.resolve(self.layer);
}
return self._layerPromise;
};
ctlProto.setLayer = function (layer) {
const self = this;
if (self.map) {
self.setSelectedFeatures([]);
self._layerPromise = new Promise(function (resolve, reject) {
if (typeof (layer) === "string") {
self.map.loaded(function () {
self.layer = self.map.getLayer(layer);
resolve(self.layer);
});
|
else {
self.layer = layer;
resolve(self.layer);
}
});
Promise.all([self._layerPromise, self.renderPromise()]).then(function (objs) {
const layer = objs[0];
self.setSelectableState(layer && layer.features.length > 0);
});
}
};
ctlProto.setSelectableState = function (active) {
const self = this;
self._selectBtn.disabled = !active;
self._textBtn.disabled = !active;
};
ctlProto.getSelectedFeatures = function () {
return this.wrap.getSelectedFeatures();
};
ctlProto.setSelectedFeatures = function (features) {
const self = this;
const result = self.wrap.setSelectedFeatures(features);
self.displayLabelText();
return result;
};
ctlProto.getActiveFeatures = function () {
const self = this;
const result = self.getSelectedFeatures();
if (!result.length && self.layer.features.length) {
result.push(self.layer.features[self.layer.features.length - 1]);
}
return result;
};
ctlProto.unselectFeatures = function (features) {
features = features || [];
this.wrap.unselectFeatures(features.map(function (feat) {
return feat.wrap.feature;
}));
return this;
};
ctlProto.deleteSelectedFeatures = function () {
const self = this;
const features = self.getSelectedFeatures();
self.wrap.unselectFeatures(features);
features.forEach(function (feature) {
self.layer.removeFeature(feature);
self.trigger(TC.Consts.event.FEATUREREMOVE, { feature: feature });
});
return self;
};
ctlProto.styleFunction = function (feature, resolution) {
const self = this;
var result;
const mapStyles = self.map.options.styles.selection;
switch (true) {
case TC.feature.Polygon && feature instanceof TC.feature.Polygon:
case TC.feature.MultiPolygon && feature instanceof TC.feature.MultiPolygon:
result = TC.Util.extend({}, mapStyles.polygon);
break;
case TC.feature.Point && feature instanceof TC.feature.Point:
case TC.feature.MultiPoint && feature instanceof TC.feature.MultiPoint:
result = TC.Util.extend({}, mapStyles.point);
break;
default:
result = TC.Util.extend({}, mapStyles.line);
break;
}
const style = feature.getStyle();
if (style.label) {
result.label = style.label;
result.fontSize = style.fontSize;
result.fontColor = style.fontColor;
result.labelOutlineColor = style.labelOutlineColor;
result.labelOutlineWidth = style.labelOutlineWidth;
}
return result;
};
ctlProto.setTextMode = function (active) {
const self = this;
self.textActive = active;
if (active) {
self._textBtn.classList.add(TC.Consts.classes.ACTIVE, active);
self._styleSection.classList.remove(TC.Consts.classes.HIDDEN);
}
else {
self._textBtn.classList.remove(TC.Consts.classes.ACTIVE, active);
self._styleSection.classList.add(TC.Consts.classes.HIDDEN);
}
self.displayLabelText();
return self;
};
ctlProto.setFontColorWatch = function (color, outlineColor) {
const self = this;
if (color === undefined) {
color = self.styles.text.fontColor;
}
color = TC.Util.colorArrayToString(color);
outlineColor = outlineColor || self.getLabelOutlineColor(color);
self.renderPromise().then(function () {
self._fontColorPicker.value = color;
self._textInput.style.color = color;
self._textInput.style.textShadow = '0 0 ' + self.styles.text.labelOutlineWidth + 'px ' + outlineColor;
if (!TC.browserFeatures.inputTypeColor()) {
self._fontColorPicker.style.backgroundColor = color;
self._fontColorPicker.blur();
}
});
return self;
};
ctlProto.setFontColor = function (color) {
const self = this;
self.styles.text.fontColor = color;
self.styles.text.labelOutlineColor = self.getLabelOutlineColor(color);
self.setFontColorWatch(color, self.styles.text.labelOutlineColor);
const features = self.getActiveFeatures();
features.forEach(function (feature) {
const style = feature.getStyle();
style.fontColor = color;
style.labelOutlineColor = self.styles.text.labelOutlineColor;
feature.setStyle(style);
});
return self;
};
ctlProto.setFontSizeWatch = function (size) {
const self = this;
if (size === undefined) {
size = self.styles.text.fontSize;
}
const sizeValue = parseInt(size);
if (!Number.isNaN(sizeValue)) {
self.renderPromise().then(function () {
self._fontSizeSelector.value = sizeValue;
self._textInput.style.fontSize = sizeValue + 'pt';
});
}
return self;
};
ctlProto.setFontSize = function (size) {
const self = this;
const sizeValue = parseInt(size);
if (!Number.isNaN(sizeValue)) {
self.styles.text.fontSize = sizeValue;
self.setFontSizeWatch(sizeValue);
const features = self.getActiveFeatures();
features.forEach(function (feature) {
const style = feature.getStyle();
style.fontSize = sizeValue;
feature.setStyle(style);
});
}
return self;
};
ctlProto.getLabelOutlineColor = function (fontColor) {
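// Returns a contrasting outline color: expands shorthand hex (#abc to #aabbcc), then picks white for dark font colors and black for light ones based on the average RGB value.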
if (fontColor) {
fontColor = TC.Util.colorArrayToString(fontColor);
const matchForShort = fontColor.match(/^#([0-9a-f])([0-9a-f])([0-9a-f])$/i);
if (matchForShort && matchForShort.length) {
fontColor = '#' + matchForShort[1] + matchForShort[1] + matchForShort[2] + matchForShort[2] + matchForShort[3] + matchForShort[3];
}
const matchForLong = fontColor.match(/^#([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})$/i);
if (matchForLong && matchForLong.length) {
const r = parseInt(matchForLong[1], 16);
const g = parseInt(matchForLong[2], 16);
const b = parseInt(matchForLong[3], 16);
return (r + g + b) / 3 < 128 ? '#ffffff' : '#000000';
}
}
return '#ffffff';
};
ctlProto.displayLabelText = function () {
const self = this;
const features = self.getSelectedFeatures();
var text;
var size;
var color;
if (self.isActive && features.length) {
const feature = features[features.length - 1];
const style = feature.getStyle();
text = style.label;
color = style.fontColor;
size = style.fontSize;
}
else {
text = '';
color = self.styles.text.fontColor;
size = self.styles.text.fontSize;
}
self.renderPromise().then(function () {
self
.setFontSizeWatch(size)
.setFontColorWatch(color)
._textInput.value = text || '';
});
return self;
};
ctlProto.labelFeatures = function (text) {
const self = this;
const features = self.getActiveFeatures();
if (features.length) {
const style = features[0].getStyle();
features.forEach(function (feature) {
const textStyle = TC.Util.extend({}, self.styles.text, style);
style.label = text;
style.labelOffset = textStyle.labelOffset;
style.fontColor = textStyle.fontColor;
style.fontSize = textStyle.fontSize;
style.labelOutlineColor = textStyle.labelOutlineColor;
style.labelOutlineWidth = textStyle.labelOutlineWidth;
feature.setStyle(style);
});
}
return self;
};
ctlProto.getAttributeDisplayTarget = function () {
return this._attributesSection;
};
ctlProto.displayAttributes = function () {
const self = this;
const selectedFeatures = self.getSelectedFeatures();
const feature = selectedFeatures[selectedFeatures.length - 1];
if (feature) {
self.getRenderedHtml(self.CLASS + '-attr', { data: feature.getData() }, function (html) {
const attributesSection = self.getAttributeDisplayTarget();
attributesSection.innerHTML = html;
attributesSection.classList.remove(TC.Consts.classes.HIDDEN);
self._editAttrBtn.classList.add(TC.Consts.classes.ACTIVE);
attributesSection.querySelector(`.${self.CLASS}-btn-attr-ok`).addEventListener(TC.Consts.event.CLICK, function (e) {
self._onAttrOK();
});
attributesSection.querySelector(`.${self.modifyControl.CLASS}-btn-attr-cancel`).addEventListener(TC.Consts.event.CLICK, function () {
self.closeAttributes();
});
});
}
};
ctlProto.closeAttributes = function () {
const self = this;
self._attributesSection.classList.add(TC.Consts.classes.HIDDEN);
self._editAttrBtn.classList.remove(TC.Consts.classes.ACTIVE);
};
ctlProto.toggleAttributes = function () {
const self = this;
if (self._editAttrBtn.classList.toggle(TC.Consts.classes.ACTIVE)) {
self.displayAttributes();
}
else {
self.closeAttributes();
}
};
ctlProto._onAttrOK = function () {
const self = this;
const feature = self.getSelectedFeatures()[0];
if (feature) {
const data = {};
self.getAttributeDisplayTarget().querySelectorAll('input').forEach(function (input) {
data[input.getAttribute('name')] = input.value;
});
feature.setData(data);
self.trigger(TC.Consts.event.FEATUREMODIFY, { feature: feature, layer: self.layer });
self.closeAttributes();
}
};
ctlProto.joinFeatures = function (features) {
const self = this;
if (self.geometryType === TC.Consts.geom.MULTIPOLYLINE ||
self.geometryType === TC.Consts.geom.MULTIPOLYGON ||
self.geometryType === TC.Consts.geom.MULTIPOINT) {
self._joinedFeatureAttributes = [];
if (features.length > 1) {
var geometries = features.map(function (elm) {
self._joinedFeatureAttributes[self._joinedFeatureAttributes.length] = elm.getData();
return elm.geometry;
});
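// Concatenate the coordinate arrays of all selected features into a single multi-geometry.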
var newGeometry = geometries.reduce(function (a, b) {
return a.concat(b);
});
var newFeature = new features[0].constructor(newGeometry);
for (var i = 0, len = features.length; i < len; i++) {
var feature = features[i];
self.layer.removeFeature(feature);
self.trigger(TC.Consts.event.FEATUREREMOVE, { feature: feature });
}
self.layer.addFeature(newFeature).then(function (feat) {
self.setSelectedFeatures([newFeature]);
self.trigger(TC.Consts.event.FEATUREADD, { feature: feat });
feat.showPopup(self.attributeEditor);
});
setFeatureSelectedState(self, [newFeature]);
}
}
};
})();
|
}
|
two-phase-nonrecv-autoref.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// revisions: lxl nll g2p
//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
//[g2p]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll -Z two-phase-beyond-autoref
// This is a test checking that when we limit two-phase borrows to
// method receivers, we do not let other kinds of auto-ref to leak
// through.
//
// The g2p revision illustrates the "undesirable" behavior you would
// otherwise observe without limiting the phasing to autoref on method
// receivers (namely, in many cases demonstrated below, the error
// would not arise).
// (If we revise the compiler or this test so that the g2p revision
// passes, turn the `rustc_attrs` feature back on and tag the `fn
// main` with `#[rustc_error]` so that this remains a valid
// compile-fail test.)
//
// #![feature(rustc_attrs)]
use std::ops::{Index, IndexMut};
use std::ops::{AddAssign, SubAssign, MulAssign, DivAssign, RemAssign};
use std::ops::{BitAndAssign, BitOrAssign, BitXorAssign, ShlAssign, ShrAssign};
// This is a case outlined by Niko that we want to ensure we reject
// (at least initially).
fn foo(x: &mut u32, y: u32) {
*x += y;
}
fn deref_coercion(x: &mut u32) {
foo(x, *x);
//[lxl]~^ ERROR cannot use `*x` because it was mutably borrowed [E0503]
//[nll]~^^ ERROR cannot use `*x` because it was mutably borrowed [E0503]
}
// While adding a flag to adjustments (indicating whether they
// should support two-phase borrows), here are the cases I
// encountered:
//
// - [x] Resolving overloaded_call_traits (call, call_mut, call_once)
// - [x] deref_coercion (shown above)
// - [x] coerce_unsized e.g. `&[T; n]`, `&mut [T; n] -> &[T]`,
// `&mut [T; n] -> &mut [T]`, `&Concrete -> &Trait`
// - [x] Method Call Receivers (the case we want to support!)
// - [x] ExprIndex and ExprUnary Deref; only need to handle coerce_index_op
// - [x] overloaded_binops
fn overloaded_call_traits() {
// Regarding overloaded call traits, note that there is no
// scenario where adding two-phase borrows should "fix" these
// cases, because either we will resolve both invocations to
// `call_mut` (in which case the inner call requires a mutable
// borrow which will conflict with the outer reservation), or we
// will resolve both to `call` (which will just work, regardless
// of two-phase borrow support), or we will resolve both to
// `call_once` (in which case the inner call requires moving the
// receiver, invalidating the outer call).
fn twice_ten_sm<F: FnMut(i32) -> i32>(f: &mut F) {
f(f(10));
//[lxl]~^ ERROR cannot borrow `*f` as mutable more than once at a time
//[lxl]~| ERROR cannot borrow `*f` as mutable more than once at a time
//[nll]~^^^ ERROR cannot borrow `*f` as mutable more than once at a time
//[nll]~| ERROR cannot borrow `*f` as mutable more than once at a time
//[g2p]~^^^^^ ERROR cannot borrow `*f` as mutable more than once at a time
}
fn twice_ten_si<F: Fn(i32) -> i32>(f: &mut F) {
f(f(10));
}
fn twice_ten_so<F: FnOnce(i32) -> i32>(f: Box<F>) {
f(f(10));
//[lxl]~^ ERROR use of moved value: `*f`
//[nll]~^^ ERROR use of moved value: `*f`
//[g2p]~^^^ ERROR use of moved value: `*f`
}
fn twice_ten_om(f: &mut FnMut(i32) -> i32) {
f(f(10));
//[lxl]~^ ERROR cannot borrow `*f` as mutable more than once at a time
//[lxl]~| ERROR cannot borrow `*f` as mutable more than once at a time
//[nll]~^^^ ERROR cannot borrow `*f` as mutable more than once at a time
//[nll]~| ERROR cannot borrow `*f` as mutable more than once at a time
//[g2p]~^^^^^ ERROR cannot borrow `*f` as mutable more than once at a time
}
fn twice_ten_oi(f: &mut Fn(i32) -> i32) {
f(f(10));
}
fn twice_ten_oo(f: Box<FnOnce(i32) -> i32>) {
f(f(10));
//[lxl]~^ ERROR cannot move a value of type
//[lxl]~^^ ERROR cannot move a value of type
//[lxl]~^^^ ERROR use of moved value: `*f`
//[nll]~^^^^ ERROR cannot move a value of type
//[nll]~^^^^^ ERROR cannot move a value of type
//[nll]~^^^^^^ ERROR cannot move a value of type
//[nll]~^^^^^^^ ERROR cannot move a value of type
//[nll]~^^^^^^^^ ERROR use of moved value: `*f`
//[g2p]~^^^^^^^^^ ERROR cannot move a value of type
//[g2p]~^^^^^^^^^^ ERROR cannot move a value of type
//[g2p]~^^^^^^^^^^^ ERROR cannot move a value of type
//[g2p]~^^^^^^^^^^^^ ERROR cannot move a value of type
//[g2p]~^^^^^^^^^^^^^ ERROR use of moved value: `*f`
}
twice_ten_sm(&mut |x| x + 1);
twice_ten_si(&mut |x| x + 1);
twice_ten_so(Box::new(|x| x + 1));
twice_ten_om(&mut |x| x + 1);
twice_ten_oi(&mut |x| x + 1);
twice_ten_oo(Box::new(|x| x + 1));
}
trait TwoMethods {
fn m(&mut self, x: i32) -> i32 { x + 1 }
fn i(&self, x: i32) -> i32 { x + 1 }
}
struct T;
impl TwoMethods for T { }
struct S;
impl S {
fn m(&mut self, x: i32) -> i32 { x + 1 }
fn i(&self, x: i32) -> i32 { x + 1 }
}
impl TwoMethods for [i32; 3] { }
fn double_access<X: Copy>(m: &mut [X], s: &[X]) {
m[0] = s[1];
}
fn coerce_unsized() {
let mut a = [1, 2, 3];
// This is not okay.
double_access(&mut a, &a);
//[lxl]~^ ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
//[nll]~^^ ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
//[g2p]~^^^ ERROR cannot borrow `a` as immutable because it is also borrowed as mutable [E0502]
// But this is okay.
a.m(a.i(10));
}
struct I(i32);
impl Index<i32> for I {
type Output = i32;
fn index(&self, _: i32) -> &i32 {
&self.0
}
}
impl IndexMut<i32> for I {
fn index_mut(&mut self, _: i32) -> &mut i32 {
&mut self.0
}
}
fn coerce_index_op() {
let mut i = I(10);
i[i[3]] = 4;
//[lxl]~^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
//[nll]~^^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
i[3] = i[4];
i[i[3]] = i[4];
//[lxl]~^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
//[nll]~^^ ERROR cannot borrow `i` as immutable because it is also borrowed as mutable [E0502]
}
struct A(i32);
macro_rules! trivial_binop {
($Trait:ident, $m:ident) => {
impl $Trait<i32> for A { fn $m(&mut self, rhs: i32) { self.0 = rhs; } }
}
}
trivial_binop!(AddAssign, add_assign);
trivial_binop!(SubAssign, sub_assign);
trivial_binop!(MulAssign, mul_assign);
trivial_binop!(DivAssign, div_assign);
trivial_binop!(RemAssign, rem_assign);
trivial_binop!(BitAndAssign, bitand_assign);
trivial_binop!(BitOrAssign, bitor_assign);
trivial_binop!(BitXorAssign, bitxor_assign);
trivial_binop!(ShlAssign, shl_assign);
trivial_binop!(ShrAssign, shr_assign);
fn overloaded_binops()
|
fn main() {
// As a reminder, this is the basic case we want to ensure we handle.
let mut v = vec![1, 2, 3];
v.push(v.len());
// (as a rule, pnkfelix does not like to write tests with dead code.)
deref_coercion(&mut 5);
overloaded_call_traits();
let mut s = S;
s.m(s.i(10));
let mut t = T;
t.m(t.i(10));
coerce_unsized();
coerce_index_op();
overloaded_binops();
}
|
{
let mut a = A(10);
a += a.0;
//[lxl]~^ ERROR cannot use `a.0` because it was mutably borrowed
//[nll]~^^ ERROR cannot use `a.0` because it was mutably borrowed
a -= a.0;
//[lxl]~^ ERROR cannot use `a.0` because it was mutably borrowed
//[nll]~^^ ERROR cannot use `a.0` because it was mutably borrowed
a *= a.0;
//[lxl]~^ ERROR cannot use `a.0` because it was mutably borrowed
//[nll]~^^ ERROR cannot use `a.0` because it was mutably borrowed
a /= a.0;
//[lxl]~^ ERROR cannot use `a.0` because it was mutably borrowed
//[nll]~^^ ERROR cannot use `a.0` because it was mutably borrowed
a &= a.0;
//[lxl]~^ ERROR cannot use `a.0` because it was mutably borrowed
//[nll]~^^ ERROR cannot use `a.0` because it was mutably borrowed
a |= a.0;
//[lxl]~^ ERROR cannot use `a.0` because it was mutably borrowed
//[nll]~^^ ERROR cannot use `a.0` because it was mutably borrowed
a ^= a.0;
//[lxl]~^ ERROR cannot use `a.0` because it was mutably borrowed
//[nll]~^^ ERROR cannot use `a.0` because it was mutably borrowed
a <<= a.0;
//[lxl]~^ ERROR cannot use `a.0` because it was mutably borrowed
//[nll]~^^ ERROR cannot use `a.0` because it was mutably borrowed
a >>= a.0;
//[lxl]~^ ERROR cannot use `a.0` because it was mutably borrowed
//[nll]~^^ ERROR cannot use `a.0` because it was mutably borrowed
}
|
vm_service.rs
|
use crate::vmm_handler::node::node_server::Node;
use crate::vmm_handler::node::{
Response as NodeResponse, Status as NodeStatus, VmConfig, VmId, VmList, VmResponse,
};
use crate::vmm_handler::VmmHandler;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use tonic::{Code, Request, Response, Status};
#[derive(Debug)]
pub struct VmService {
// TODO: Not sure about this at all
handlers: Arc<RwLock<HashMap<String, VmmHandler>>>,
}
impl VmService {
pub fn new() -> Self {
VmService {
handlers: Arc::new(RwLock::new(HashMap::new())),
}
}
}
#[tonic::async_trait]
impl Node for VmService {
async fn start_vm(&self, request: Request<VmConfig>) -> Result<Response<VmResponse>, Status> {
let config = request.into_inner();
tracing::info!(
"Starting VM {}, {}, {}",
&config.vm_id,
&config.memory,
&config.vcpus
);
let mut handlers = self.handlers.write().await;
let handler = handlers
.entry(config.vm_id.to_owned())
.or_insert(VmmHandler::new());
handler.configure_vm(&config).await;
tracing::info!("Configured VM...");
handler.start_vm().await;
tracing::info!("Started VM...");
let response = VmResponse {
status: NodeStatus::Success as i32,
config: Some(config),
};
// TODO: there is no reason to return the config now
Ok(Response::new(response))
}
async fn stop_vm(&self, request: Request<VmId>) -> Result<Response<NodeResponse>, Status> {
let vm_id = request.into_inner().vm_id;
tracing::info!("Stopping VM {}", vm_id);
tracing::info!("handlers available");
let mut handlers = self.handlers.write().await;
if let Some(handler) = handlers.get(&vm_id) {
tracing::info!("Fetched handler for vm {}", vm_id);
handler.stop_vm().await;
handlers.remove(&vm_id);
let response = NodeResponse {
status: NodeStatus::Success as i32,
};
Ok(Response::new(response))
} else {
return Err(Status::new(Code::FailedPrecondition, "vm not found"));
}
}
async fn
|
(&self, request: Request<()>) -> Result<Response<VmList>, Status> {
tracing::debug!("Got a request: {:?}", request);
let response = VmList {
vm_id: vec![String::from("123")],
};
Ok(Response::new(response))
}
async fn health_check(&self, request: Request<()>) -> Result<Response<NodeResponse>, Status> {
tracing::debug!("Got a request: {:?}", request);
let response = NodeResponse {
status: NodeStatus::Success as i32,
};
Ok(Response::new(response))
}
}
|
list_vms
|
derivation_record.py
|
from dataclasses import dataclass
from blspy import G1Element
from profit.types.blockchain_format.sized_bytes import bytes32
from profit.util.ints import uint32
from profit.wallet.util.wallet_types import WalletType
@dataclass(frozen=True)
class
|
:
"""
These are records representing a puzzle hash, which is generated from a
public key, derivation index, and wallet type. Stored in the puzzle_store.
"""
index: uint32
puzzle_hash: bytes32
pubkey: G1Element
wallet_type: WalletType
wallet_id: uint32
|
DerivationRecord
|
str.go
|
package mgutil
// StrSet holds a set of strings
type StrSet []string
// NewStrSet returns a new StrSet initialised with the strings in l
func
|
(l ...string) StrSet {
return StrSet{}.Add(l...)
}
// Add adds the strings in l to the set and returns the new set
func (s StrSet) Add(l ...string) StrSet {
res := make(StrSet, 0, len(s)+len(l))
for _, lst := range [][]string{[]string(s), l} {
for _, p := range lst {
if !res.Has(p) {
res = append(res, p)
}
}
}
return res
}
// Has returns true if p is in the set
func (s StrSet) Has(p string) bool {
for _, q := range s {
if p == q {
return true
}
}
return false
}
|
NewStrSet
|
profiler.go
|
package main
import (
"time"
"github.com/moxspec/moxspec/pci"
"github.com/moxspec/moxspec/smbios"
)
func
|
(defInterval time.Duration) []profile {
pcid := pci.NewDecoder()
err := pcid.Decode()
if err != nil {
log.Fatal(err)
}
spec := smbios.NewDecoder()
pcidevs := pci.NewDecoder()
type Decoder interface {
Decode() error
}
decoders := []Decoder{
spec,
pcidevs,
}
for _, d := range decoders {
err := d.Decode()
if err != nil {
log.Warn(err.Error())
}
}
// TODO: make interval configurable
var profs []profile
for _, p := range scanProcessor() {
profs = append(profs, profile{
interval: defInterval,
reporter: p,
})
}
for _, p := range scanMemory() {
profs = append(profs, profile{
interval: defInterval,
reporter: p,
})
}
for _, p := range scanStorage(pcidevs) {
profs = append(profs, profile{
interval: defInterval,
reporter: p,
})
}
for _, p := range scanNetwork(pcidevs) {
profs = append(profs, profile{
interval: defInterval,
reporter: p,
})
}
return profs
}
|
scanDevices
|
templates.rs
|
use simple_html_template::{TemplateCache, html_map, hash_map};
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use web_sys::HtmlElement;
use utils::settings::SETTINGS;
use std::fmt;
thread_local! {
pub static TEMPLATES: Templates = Templates::new();
}
macro_rules! template_path {
($e:tt) => {
concat!("../../../../../../../.template_output/", $e)
}
}
const SIDEBAR:&'static str = "sidebar";
const HEADER:&'static str = "header";
const FOOTER:&'static str = "footer";
const MAIN:&'static str = "main";
const SIDEBAR_LAYOUT:&'static str = "sidebar-layout";
const SIDEBAR_LAYOUT_ITEM:&'static str = "sidebar-layout-item";
const SIDEBAR_IMAGES:&'static str = "sidebar-images";
pub fn sidebar() -> HtmlElement {
TEMPLATES.with(|t| t.cache.render_elem_plain(SIDEBAR))
}
pub fn header(title:&str, subtitle:&str) -> HtmlElement {
//This is unsafe - because currently subtitle uses raw html, need to preserve it
TEMPLATES.with(|t| t.cache.render_elem(HEADER, &hash_map!(
"title" => title,
"subtitle" => subtitle
)).unwrap_throw())
}
pub fn footer() -> HtmlElement {
TEMPLATES.with(|t| t.cache.render_elem_plain(FOOTER))
}
pub fn main() -> HtmlElement {
TEMPLATES.with(|t| t.cache.render_elem_plain(MAIN))
}
pub fn sidebar_layout() -> HtmlElement {
TEMPLATES.with(|t| t.cache.render_elem_plain(SIDEBAR_LAYOUT))
}
pub fn sidebar_layout_item(label:&str, src: &str) -> HtmlElement {
TEMPLATES.with(|t| t.cache.render_elem(SIDEBAR_LAYOUT_ITEM, &html_map!{
"label" => label,
"src" => src,
}).unwrap_throw())
}
pub fn sidebar_images() -> HtmlElement {
TEMPLATES.with(|t| t.cache.render_elem_plain(SIDEBAR_IMAGES))
}
pub struct Templates {
pub cache: TemplateCache<'static>
}
impl fmt::Debug for Templates {
|
f.debug_list()
.entries(self.cache.templates.keys())
.finish()
}
}
impl Templates {
pub fn new() -> Self {
let cache = TemplateCache::new(&vec![
(SIDEBAR, include_str!(
template_path!("module/poster/edit/sidebar/sidebar.html")
)),
(HEADER, include_str!(
template_path!("module/poster/edit/header.html")
)),
(FOOTER, include_str!(
template_path!("module/poster/edit/footer.html")
)),
(MAIN, include_str!(
template_path!("module/poster/edit/main.html")
)),
(SIDEBAR_LAYOUT, include_str!(
template_path!("module/poster/edit/sidebar/layout.html")
)),
(SIDEBAR_LAYOUT_ITEM, include_str!(
template_path!("module/poster/edit/sidebar/layout-item.html")
)),
(SIDEBAR_IMAGES, include_str!(
template_path!("module/poster/edit/sidebar/images.html")
)),
]);
Self { cache }
}
}
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
app.component.ts
|
import { Component } from '@angular/core';
import { Router, ActivatedRoute, ROUTER_DIRECTIVES } from '@angular/router';
import { Config, NavbarComponent, ToolbarComponent } from './commonc/index';
import { UserService, WeixinService } from './service/index';
/**
* This class represents the main application component. Within the @Routes annotation is the configuration of the
* application's routes, configuring the paths for the lazy-loaded components (HomeComponent, AboutComponent).
*/
@Component({
moduleId: module.id,
selector: 'sd-app',
templateUrl: 'app.component.html',
directives: [ROUTER_DIRECTIVES, NavbarComponent, ToolbarComponent],
styleUrls: ['app.component.css']
})
export class AppComponent {
user: any = {
"qrcode": ""
};
errorMessage: any;
constructor(public router: Router, public aroute: ActivatedRoute, public userService: UserService, public weixinService: WeixinService) {
// console.log('Environment config', Config);
// Check user authorization
this.checkUserAuth();
}
// Check user authorization
checkUserAuth() {
this.userService.getUserinfo()
.subscribe(
res => {
if (!res.error) {
this.user = res.user;
} else {
var url = location.href.split('#')[0];
if (url.indexOf('?')<0) {
url = 'http://'+location.host+'/?action='+location.pathname.substr(1, 9999);
}
this.userService.auth(url);
return;
}
// Route navigation
this.routeNavigate();
},
error => {
this.errorMessage = <any>error;
}
);
}
// Route navigation
routeNavigate() {
this.aroute.queryParams.subscribe(
params => {
var action: string = "";
if ("action" in params) {
action = params['action'];
switch (action) {
case 'user':
this.router.navigate(['/user']);
break;
case 'user/ranking':
this.router.navigate(['/user/ranking']);
break;
case 'user/profit':
this.router.navigate(['/user/profit']);
break;
case 'userqrcode':
this.router.navigate(['/userqrcode']);
break;
default:
break;
}
}
// Register weixinJS
var url: string = "";
if (action.length) {
url = 'http://'+location.host+'/'+action;
} else {
url = location.href.split('#')[0];
}
if (url.indexOf('?')<0) {
this.weixinService.config(encodeURIComponent(url));
// Share settings
this.share();
}
}
);
}
// Share
share() {
var title = '细节生活';
var desc = '';
var link = this.user.qrcode;
var img = 'http://'+location.host+'/assets/images/qrcode-xjsh.jpg';
|
this.weixinService.wxshareQQ(title, desc, link, img);
this.weixinService.wxshareQZone(title, desc, link, img);
}
}
|
this.weixinService.wxshareTimeLine(title, link, img);
this.weixinService.wxshareAppMessage(title, desc, link, img);
|
test-file-results-table-terse.js
|
/**
* Copyright 2018 The WPT Dashboard Project. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
import '../node_modules/@polymer/polymer/lib/elements/dom-repeat.js';
import { html } from '../node_modules/@polymer/polymer/polymer-element.js';
import { AbstractTestFileResultsTable } from './abstract-test-file-results-table.js';
class
|
extends AbstractTestFileResultsTable {
static get template() {
return html`
<style>
td {
position: relative;
}
td.sub-test-name {
font-family: monospace;
background-color: white;
}
td.sub-test-name code {
box-sizing: border-box;
height: 100%;
left: 0;
overflow: hidden;
position: absolute;
text-overflow: ellipsis;
top: 0;
white-space: nowrap;
width: 100%;
}
td.sub-test-name code:hover {
z-index: 1;
text-overflow: initial;
background-color: inherit;
width: -moz-max-content;
width: max-content;
}
</style>
${super.template}
`;
}
static get is() {
return 'test-file-results-table-terse';
}
static get properties() {
return {
matchers: {
type: Array,
value: [
{
re: /^assert_equals:.* expected ("(\\"|[^"])*"|[^ ]*) but got ("(\\"|[^"])*"|[^ ]*)$/,
getMessage: match => `!EQ(${match[1]}, ${match[3]})`,
},
{
re: /^assert_approx_equals:.* expected ("(\\"|[^"])*"| [+][/][-] |[^:]*) but got ("(\\"|[^"])*"| [+][/][-] |[^:]*):.*$/,
getMessage: match => `!~EQ(${match[1]}, ${match[3]})`,
},
{
re: /^assert ("(\\"|[^"])*"|[^ ]*) == ("(\\"|[^"])*"|[^ ]*)$/,
getMessage: match => `!EQ(${match[1]}, ${match[3]})`,
},
{
re: /^assert_array_equals:.*$/,
getMessage: () => '!ARRAY_EQ(a, b)',
},
{
re: /^Uncaught [^ ]*Error:.*$/,
getMessage: () => 'UNCAUGHT_ERROR',
},
{
re: /^([^ ]*) is not ([a-zA-Z0-9 ]*)$/,
getMessage: match => `NOT_${match[2].toUpperCase().replace(/\s/g, '_')}(${match[1]})`,
},
{
re: /^promise_test: Unhandled rejection with value: (.*)$/,
getMessage: match => `PROMISE_REJECT(${match[1]})`,
},
{
re: /^assert_true: .*$/,
getMessage: () => '!TRUE',
},
{
re: /^assert_own_property: [^"]*"([^"]*)".*$/,
getMessage: match => `!OWN_PROPERTY(${match[1]})`,
},
{
re: /^assert_inherits: [^"]*"([^"]*)".*$/,
getMessage: match => `!INHERITS(${match[1]})`,
},
],
},
};
}
subtestMessage(result) {
let msg = super.subtestMessage(result);
if (msg) {
return msg;
}
// Terse table only: Display "ERROR" without message on harness error.
if (result.status === 'ERROR') {
return 'ERROR';
}
return this.parseFailureMessage(result.message);
}
parseFailureMessage(msg) {
let matchedMsg = '';
for (const matcher of this.matchers) {
const match = msg.match(matcher.re);
if (match !== null) {
matchedMsg = matcher.getMessage(match);
break;
}
}
return matchedMsg ? matchedMsg : 'FAIL';
}
}
window.customElements.define(
TestFileResultsTableTerse.is, TestFileResultsTableTerse);
export { TestFileResultsTableTerse };
|
TestFileResultsTableTerse
|
crtp.rs
|
/* automatically generated by rust-bindgen */
#![allow(dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals)]
#[repr(C)]
#[derive(Debug, Default, Copy, Clone)]
pub struct Base {
pub _address: u8,
}
#[repr(C)]
#[derive(Debug, Copy)]
pub struct Derived {
pub _address: u8,
}
#[test]
fn bindgen_test_layout_Derived() {
assert_eq!(::std::mem::size_of::<Derived>() , 1usize , concat ! (
"Size of: " , stringify ! ( Derived ) ));
assert_eq! (::std::mem::align_of::<Derived>() , 1usize , concat ! (
"Alignment of " , stringify ! ( Derived ) ));
}
impl Clone for Derived {
fn clone(&self) -> Self { *self }
}
impl Default for Derived {
fn default() -> Self { unsafe { ::std::mem::zeroed() } }
}
#[repr(C)]
#[derive(Debug, Default)]
pub struct BaseWithDestructor {
pub _address: u8,
}
#[repr(C)]
#[derive(Debug)]
pub struct DerivedFromBaseWithDestructor {
pub _address: u8,
}
#[test]
fn bindgen_test_layout_DerivedFromBaseWithDestructor() {
|
"Size of: " , stringify ! ( DerivedFromBaseWithDestructor ) ));
assert_eq! (::std::mem::align_of::<DerivedFromBaseWithDestructor>() ,
1usize , concat ! (
"Alignment of " , stringify ! ( DerivedFromBaseWithDestructor
) ));
}
impl Default for DerivedFromBaseWithDestructor {
fn default() -> Self { unsafe { ::std::mem::zeroed() } }
}
#[test]
fn __bindgen_test_layout_Base_instantiation() {
assert_eq!(::std::mem::size_of::<Base>() , 1usize , concat ! (
"Size of template specialization: " , stringify ! ( Base ) ));
assert_eq!(::std::mem::align_of::<Base>() , 1usize , concat ! (
"Alignment of template specialization: " , stringify ! ( Base )
));
}
#[test]
fn __bindgen_test_layout_BaseWithDestructor_instantiation() {
assert_eq!(::std::mem::size_of::<BaseWithDestructor>() , 1usize , concat !
(
"Size of template specialization: " , stringify ! (
BaseWithDestructor ) ));
assert_eq!(::std::mem::align_of::<BaseWithDestructor>() , 1usize , concat
! (
"Alignment of template specialization: " , stringify ! (
BaseWithDestructor ) ));
}
|
assert_eq!(::std::mem::size_of::<DerivedFromBaseWithDestructor>() , 1usize
, concat ! (
|
listing.go
|
package generator
import (
"bytes"
"fmt"
"html/template"
FP "path/filepath"
"strings"
"github.com/morningconsult/serrors"
)
// ListingData holds the data for the listing page.
type ListingData struct {
Title string
Date string
Short string
Link string
TimeToRead string
Tags []*Tag
}
// ListingGenerator Object
type ListingGenerator struct {
Config *ListingConfig
}
// ListingConfig holds the configuration for the listing page.
type ListingConfig struct {
Posts []*Post
PageTitle string
IsIndex bool
BaseConfig
}
func (pLC *ListingConfig) String() string {
return fmt.Sprintf("LstgCfg: %s; \n\t PgTtl<%s> IsIdx?<%t> Posts: %+v",
pLC.BaseConfig.String(), pLC.PageTitle, pLC.IsIndex, pLC.Posts)
}
// Generate starts the listing generation.
func (g *ListingGenerator) Generate() error {
shortTmplPath := FP.Join("template", "short.html")
archiveLinkTmplPath := FP.Join("template", "archiveLink.html")
posts := g.Config.Posts
// For the ALL POSTS listing AND for the ARCHIVES
// listing, this template is the MasterPageTemplate.
t := g.Config.Template
destDirPath := g.Config.Dest
targs := *new(IndexHtmlMasterPageTemplateVariableArguments)
targs.PageTitle = g.Config.PageTitle
targs.HtmlTitle = g.Config.PageTitle
shortTmplRaw, err := getTemplate(shortTmplPath)
if err != nil {
return err
}
var postBlox []string
for _, post := range posts {
meta := post.PropSet
link := fmt.Sprintf("/%s/", post.DirBase)
ld := ListingData{
Title: meta["title"],
Date: meta["date"],
Short: meta["short"],
Link: link,
Tags: createTags(meta["tags"]),
TimeToRead: calculateTimeToRead(post.CntAsHTML),
}
execdPostTmplOutput := bytes.Buffer{}
if err := shortTmplRaw.Execute(&execdPostTmplOutput, ld); err != nil {
return serrors.Errorf("error executing template %s: %w", shortTmplPath, err)
}
postBlox = append(postBlox, execdPostTmplOutput.String())
}
htmlBloxFragment := template.HTML(strings.Join(postBlox, "<br />"))
if g.Config.IsIndex {
archiveLinkTmplRaw, err := getTemplate(archiveLinkTmplPath)
if err != nil {
return err
}
execdArchiveLinkTmplOutput := bytes.Buffer{}
if err := archiveLinkTmplRaw.Execute(&execdArchiveLinkTmplOutput, nil); err != nil {
return serrors.Errorf("error executing template %s: %w", archiveLinkTmplPath, err)
}
htmlBloxFragment = template.HTML(fmt.Sprintf(
"%s%s", htmlBloxFragment, template.HTML(execdArchiveLinkTmplOutput.String())))
}
targs.HtmlContentFrag = htmlBloxFragment
// WriteIndexHTML(blogProps SU.PropSet, destDirPath, pageTitle,
// aMetaDesc string, htmlContentFrag template.HTML, t *template.Template)
if err := WriteIndexHTML(targs, g.Config.BlogProps, destDirPath, t); err != nil {
return err
}
return nil
}
func
|
(input string) string {
// an average human reads about 200 wpm
var secondsPerWord = 60.0 / 200.0
// multiply with the amount of words
words := secondsPerWord * float64(len(strings.Split(input, " ")))
// add 12 seconds for each image
images := 12.0 * strings.Count(input, "<img")
result := (words + float64(images)) / 60.0
if result < 1.0 {
result = 1.0
}
return fmt.Sprintf("%.0fm", result)
}
|
calculateTimeToRead
|
inline_local_variable.rs
|
use either::Either;
use hir::PathResolution;
use ide_db::{base_db::FileId, defs::Definition, search::FileReference};
use rustc_hash::FxHashMap;
use syntax::{
ast::{self, AstNode, AstToken, NameOwner},
TextRange,
};
use crate::{
assist_context::{AssistContext, Assists},
AssistId, AssistKind,
};
// Assist: inline_local_variable
//
// Inlines local variable.
//
// ```
// fn main() {
// let x$0 = 1 + 2;
// x * 4;
// }
// ```
// ->
// ```
// fn main() {
// (1 + 2) * 4;
// }
// ```
pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let InlineData { let_stmt, delete_let, replace_usages, target } =
inline_let(ctx).or_else(|| inline_usage(ctx))?;
let initializer_expr = let_stmt.initializer()?;
let delete_range = if delete_let {
if let Some(whitespace) = let_stmt
.syntax()
.next_sibling_or_token()
.and_then(|it| ast::Whitespace::cast(it.as_token()?.clone()))
{
Some(TextRange::new(
let_stmt.syntax().text_range().start(),
whitespace.syntax().text_range().end(),
))
} else {
Some(let_stmt.syntax().text_range())
}
} else {
None
};
let wrap_in_parens = replace_usages
.iter()
.map(|(&file_id, refs)| {
refs.iter()
.map(|&FileReference { range, .. }| {
let usage_node = ctx
.covering_node_for_range(range)
.ancestors()
.find_map(ast::PathExpr::cast)?;
let usage_parent_option =
usage_node.syntax().parent().and_then(ast::Expr::cast);
let usage_parent = match usage_parent_option {
Some(u) => u,
None => return Some(false),
};
let initializer = matches!(
initializer_expr,
ast::Expr::CallExpr(_)
| ast::Expr::IndexExpr(_)
| ast::Expr::MethodCallExpr(_)
| ast::Expr::FieldExpr(_)
| ast::Expr::TryExpr(_)
| ast::Expr::RefExpr(_)
| ast::Expr::Literal(_)
| ast::Expr::TupleExpr(_)
| ast::Expr::ArrayExpr(_)
| ast::Expr::ParenExpr(_)
| ast::Expr::PathExpr(_)
| ast::Expr::BlockExpr(_)
| ast::Expr::EffectExpr(_),
);
let parent = matches!(
usage_parent,
ast::Expr::CallExpr(_)
| ast::Expr::TupleExpr(_)
| ast::Expr::ArrayExpr(_)
| ast::Expr::ParenExpr(_)
| ast::Expr::ForExpr(_)
| ast::Expr::WhileExpr(_)
| ast::Expr::BreakExpr(_)
| ast::Expr::ReturnExpr(_)
| ast::Expr::MatchExpr(_)
);
Some(!(initializer || parent))
})
.collect::<Option<_>>()
.map(|b| (file_id, b))
})
.collect::<Option<FxHashMap<_, Vec<_>>>>()?;
let init_str = initializer_expr.syntax().text().to_string();
let init_in_paren = format!("({})", &init_str);
let target = match target {
ast::NameOrNameRef::Name(it) => it.syntax().text_range(),
ast::NameOrNameRef::NameRef(it) => it.syntax().text_range(),
};
acc.add(
AssistId("inline_local_variable", AssistKind::RefactorInline),
"Inline variable",
target,
move |builder| {
if let Some(range) = delete_range {
builder.delete(range);
}
for (file_id, references) in replace_usages {
for (&should_wrap, reference) in wrap_in_parens[&file_id].iter().zip(references) {
let replacement =
if should_wrap { init_in_paren.clone() } else { init_str.clone() };
match reference.name.as_name_ref() {
Some(name_ref)
if ast::RecordExprField::for_field_name(name_ref).is_some() =>
{
cov_mark::hit!(inline_field_shorthand);
builder.insert(reference.range.end(), format!(": {}", replacement));
}
_ => builder.replace(reference.range, replacement),
}
}
}
},
)
}
struct InlineData {
let_stmt: ast::LetStmt,
delete_let: bool,
target: ast::NameOrNameRef,
replace_usages: FxHashMap<FileId, Vec<FileReference>>,
}
fn inline_let(ctx: &AssistContext) -> Option<InlineData> {
let let_stmt = ctx.find_node_at_offset::<ast::LetStmt>()?;
let bind_pat = match let_stmt.pat()? {
ast::Pat::IdentPat(pat) => pat,
_ => return None,
};
if bind_pat.mut_token().is_some() {
cov_mark::hit!(test_not_inline_mut_variable);
return None;
}
if !bind_pat.syntax().text_range().contains_inclusive(ctx.offset()) {
cov_mark::hit!(not_applicable_outside_of_bind_pat);
return None;
}
let def = ctx.sema.to_def(&bind_pat)?;
let def = Definition::Local(def);
let usages = def.usages(&ctx.sema).all();
if usages.is_empty() {
cov_mark::hit!(test_not_applicable_if_variable_unused);
return None;
};
Some(InlineData {
let_stmt,
delete_let: true,
target: ast::NameOrNameRef::Name(bind_pat.name()?),
replace_usages: usages.references,
})
}
fn inline_usage(ctx: &AssistContext) -> Option<InlineData> {
let path_expr = ctx.find_node_at_offset::<ast::PathExpr>()?;
let path = path_expr.path()?;
let name = match path.as_single_segment()?.kind()? {
ast::PathSegmentKind::Name(name) => name,
_ => return None,
};
let local = match ctx.sema.resolve_path(&path)? {
PathResolution::Local(local) => local,
_ => return None,
};
if local.is_mut(ctx.sema.db) {
cov_mark::hit!(test_not_inline_mut_variable_use);
return None;
}
let bind_pat = match local.source(ctx.db()).value {
Either::Left(ident) => ident,
_ => return None,
};
let let_stmt = ast::LetStmt::cast(bind_pat.syntax().parent()?)?;
let def = Definition::Local(local);
let mut usages = def.usages(&ctx.sema).all();
let delete_let = usages.references.values().map(|v| v.len()).sum::<usize>() == 1;
for references in usages.references.values_mut() {
references.retain(|reference| reference.name.as_name_ref() == Some(&name));
}
Some(InlineData {
let_stmt,
delete_let,
target: ast::NameOrNameRef::NameRef(name),
replace_usages: usages.references,
})
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
use super::*;
#[test]
fn test_inline_let_bind_literal_expr() {
check_assist(
inline_local_variable,
r"
fn bar(a: usize) {}
fn foo() {
let a$0 = 1;
a + 1;
if a > 10 {
}
while a > 10 {
}
let b = a * 10;
bar(a);
}",
r"
fn bar(a: usize) {}
fn foo() {
1 + 1;
if 1 > 10 {
}
while 1 > 10 {
}
let b = 1 * 10;
bar(1);
}",
);
}
#[test]
fn test_inline_let_bind_bin_expr() {
check_assist(
inline_local_variable,
r"
fn bar(a: usize) {}
fn foo() {
let a$0 = 1 + 1;
a + 1;
if a > 10 {
}
while a > 10 {
}
let b = a * 10;
bar(a);
}",
r"
fn bar(a: usize) {}
fn foo() {
(1 + 1) + 1;
if (1 + 1) > 10 {
}
while (1 + 1) > 10 {
}
let b = (1 + 1) * 10;
bar(1 + 1);
}",
);
}
#[test]
fn test_inline_let_bind_function_call_expr() {
check_assist(
inline_local_variable,
r"
fn bar(a: usize) {}
fn foo() {
let a$0 = bar(1);
a + 1;
if a > 10 {
}
while a > 10 {
}
let b = a * 10;
bar(a);
}",
r"
fn bar(a: usize) {}
fn foo() {
bar(1) + 1;
if bar(1) > 10 {
}
while bar(1) > 10 {
}
let b = bar(1) * 10;
bar(bar(1));
}",
);
}
#[test]
fn test_inline_let_bind_cast_expr() {
check_assist(
inline_local_variable,
r"
fn bar(a: usize): usize { a }
fn foo() {
let a$0 = bar(1) as u64;
a + 1;
if a > 10 {
}
while a > 10 {
}
let b = a * 10;
bar(a);
}",
r"
fn bar(a: usize): usize { a }
fn foo() {
(bar(1) as u64) + 1;
if (bar(1) as u64) > 10 {
}
while (bar(1) as u64) > 10 {
}
let b = (bar(1) as u64) * 10;
bar(bar(1) as u64);
}",
);
}
#[test]
fn test_inline_let_bind_block_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let a$0 = { 10 + 1 };
a + 1;
if a > 10 {
}
while a > 10 {
}
let b = a * 10;
bar(a);
}",
r"
fn foo() {
{ 10 + 1 } + 1;
if { 10 + 1 } > 10 {
}
while { 10 + 1 } > 10 {
}
let b = { 10 + 1 } * 10;
bar({ 10 + 1 });
}",
);
}
#[test]
fn test_inline_let_bind_paren_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let a$0 = ( 10 + 1 );
a + 1;
if a > 10 {
}
while a > 10 {
}
let b = a * 10;
bar(a);
}",
r"
fn foo() {
( 10 + 1 ) + 1;
if ( 10 + 1 ) > 10 {
}
while ( 10 + 1 ) > 10 {
}
let b = ( 10 + 1 ) * 10;
bar(( 10 + 1 ));
}",
);
}
#[test]
fn test_not_inline_mut_variable() {
cov_mark::check!(test_not_inline_mut_variable);
check_assist_not_applicable(
inline_local_variable,
r"
fn foo() {
let mut a$0 = 1 + 1;
a + 1;
}",
);
}
#[test]
fn test_not_inline_mut_variable_use() {
cov_mark::check!(test_not_inline_mut_variable_use);
check_assist_not_applicable(
inline_local_variable,
r"
fn foo() {
let mut a = 1 + 1;
a$0 + 1;
}",
);
}
#[test]
fn test_call_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let a$0 = bar(10 + 1);
let b = a * 10;
let c = a as usize;
}",
r"
fn foo() {
let b = bar(10 + 1) * 10;
let c = bar(10 + 1) as usize;
}",
);
}
#[test]
fn test_index_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let x = vec![1, 2, 3];
let a$0 = x[0];
let b = a * 10;
let c = a as usize;
}",
r"
fn foo() {
let x = vec![1, 2, 3];
let b = x[0] * 10;
let c = x[0] as usize;
}",
);
}
#[test]
fn test_method_call_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let bar = vec![1];
let a$0 = bar.len();
let b = a * 10;
let c = a as usize;
}",
r"
fn foo() {
let bar = vec![1];
let b = bar.len() * 10;
let c = bar.len() as usize;
}",
);
}
#[test]
fn test_field_expr() {
check_assist(
inline_local_variable,
r"
struct Bar {
foo: usize
}
fn foo() {
let bar = Bar { foo: 1 };
let a$0 = bar.foo;
let b = a * 10;
let c = a as usize;
}",
r"
struct Bar {
foo: usize
}
fn foo() {
let bar = Bar { foo: 1 };
let b = bar.foo * 10;
let c = bar.foo as usize;
}",
);
}
#[test]
fn test_try_expr() {
check_assist(
inline_local_variable,
r"
fn foo() -> Option<usize> {
let bar = Some(1);
let a$0 = bar?;
let b = a * 10;
let c = a as usize;
None
}",
r"
fn foo() -> Option<usize> {
let bar = Some(1);
let b = bar? * 10;
let c = bar? as usize;
None
}",
);
}
#[test]
fn test_ref_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let bar = 10;
let a$0 = &bar;
let b = a * 10;
}",
r"
fn foo() {
let bar = 10;
let b = &bar * 10;
}",
);
}
#[test]
fn test_tuple_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let a$0 = (10, 20);
let b = a[0];
}",
r"
fn foo() {
let b = (10, 20)[0];
}",
);
}
#[test]
fn test_array_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let a$0 = [1, 2, 3];
let b = a.len();
}",
r"
fn foo() {
let b = [1, 2, 3].len();
}",
);
}
#[test]
fn test_paren()
|
#[test]
fn test_path_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let d = 10;
let a$0 = d;
let b = a * 10;
let c = a as usize;
}",
r"
fn foo() {
let d = 10;
let b = d * 10;
let c = d as usize;
}",
);
}
#[test]
fn test_block_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let a$0 = { 10 };
let b = a * 10;
let c = a as usize;
}",
r"
fn foo() {
let b = { 10 } * 10;
let c = { 10 } as usize;
}",
);
}
#[test]
fn test_used_in_different_expr1() {
check_assist(
inline_local_variable,
r"
fn foo() {
let a$0 = 10 + 20;
let b = a * 10;
let c = (a, 20);
let d = [a, 10];
let e = (a);
}",
r"
fn foo() {
let b = (10 + 20) * 10;
let c = (10 + 20, 20);
let d = [10 + 20, 10];
let e = (10 + 20);
}",
);
}
#[test]
fn test_used_in_for_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let a$0 = vec![10, 20];
for i in a {}
}",
r"
fn foo() {
for i in vec![10, 20] {}
}",
);
}
#[test]
fn test_used_in_while_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let a$0 = 1 > 0;
while a {}
}",
r"
fn foo() {
while 1 > 0 {}
}",
);
}
#[test]
fn test_used_in_break_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let a$0 = 1 + 1;
loop {
break a;
}
}",
r"
fn foo() {
loop {
break 1 + 1;
}
}",
);
}
#[test]
fn test_used_in_return_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let a$0 = 1 > 0;
return a;
}",
r"
fn foo() {
return 1 > 0;
}",
);
}
#[test]
fn test_used_in_match_expr() {
check_assist(
inline_local_variable,
r"
fn foo() {
let a$0 = 1 > 0;
match a {}
}",
r"
fn foo() {
match 1 > 0 {}
}",
);
}
#[test]
fn inline_field_shorthand() {
cov_mark::check!(inline_field_shorthand);
check_assist(
inline_local_variable,
r"
struct S { foo: i32}
fn main() {
let $0foo = 92;
S { foo }
}
",
r"
struct S { foo: i32}
fn main() {
S { foo: 92 }
}
",
);
}
#[test]
fn test_not_applicable_if_variable_unused() {
cov_mark::check!(test_not_applicable_if_variable_unused);
check_assist_not_applicable(
inline_local_variable,
r"
fn foo() {
let $0a = 0;
}
",
)
}
#[test]
fn not_applicable_outside_of_bind_pat() {
cov_mark::check!(not_applicable_outside_of_bind_pat);
check_assist_not_applicable(
inline_local_variable,
r"
fn main() {
let x = $01 + 2;
x * 4;
}
",
)
}
#[test]
fn works_on_local_usage() {
check_assist(
inline_local_variable,
r#"
fn f() {
let xyz = 0;
xyz$0;
}
"#,
r#"
fn f() {
0;
}
"#,
);
}
#[test]
fn does_not_remove_let_when_multiple_usages() {
check_assist(
inline_local_variable,
r#"
fn f() {
let xyz = 0;
xyz$0;
xyz;
}
"#,
r#"
fn f() {
let xyz = 0;
0;
xyz;
}
"#,
);
}
#[test]
fn not_applicable_with_non_ident_pattern() {
check_assist_not_applicable(
inline_local_variable,
r#"
fn main() {
let (x, y) = (0, 1);
x$0;
}
"#,
);
}
#[test]
fn not_applicable_on_local_usage_in_macro() {
check_assist_not_applicable(
inline_local_variable,
r#"
macro_rules! m {
($i:ident) => { $i }
}
fn f() {
let xyz = 0;
m!(xyz$0); // replacing it would break the macro
}
"#,
);
check_assist_not_applicable(
inline_local_variable,
r#"
macro_rules! m {
($i:ident) => { $i }
}
fn f() {
let xyz$0 = 0;
m!(xyz); // replacing it would break the macro
}
"#,
);
}
}
|
{
check_assist(
inline_local_variable,
r"
fn foo() {
let a$0 = (10 + 20);
let b = a * 10;
let c = a as usize;
}",
r"
fn foo() {
let b = (10 + 20) * 10;
let c = (10 + 20) as usize;
}",
);
}
|
main.rs
|
use std::fs;
#[derive(Copy,Clone,PartialEq)]
enum Action {North(i64),South(i64),East(i64),West(i64),Left(i64),Right(i64),Forward(i64)}
impl Action {
fn parse(s: &str) -> Option<Action> {
if s.len() < 2 {return None;}
match &s[1..].parse::<i64>() {
Err(_) => None,
Ok(num) => {
match &s[0..1] {
"N" => Some(Action::North(*num)),
"S" => Some(Action::South(*num)),
"E" => Some(Action::East(*num)),
"W" => Some(Action::West(*num)),
"L" => Some(Action::Left(*num)),
"R" => Some(Action::Right(*num)),
"F" => Some(Action::Forward(*num)),
_ => None
}
}
}
}
}
#[derive(Copy,Clone)]
struct Ship {
x: i64,
y: i64,
d: i64,
wx: i64,
wy: i64
}
impl Ship {
fn default() -> Ship {
Ship{x: 0, y: 0, d: 0, wx: 10, wy: 1}
|
match action {
Action::North(n) => Ship {y: self.y + n, ..*self},
Action::South(n) => Ship {y: self.y - n, ..*self},
Action::East(n) => Ship {x: self.x + n, ..*self},
Action::West(n) => Ship {x: self.x - n, ..*self},
Action::Left(n) => Ship {d: (self.d + n).rem_euclid(360), ..*self},
Action::Right(n) => Ship {d: (self.d - n).rem_euclid(360), ..*self},
Action::Forward(n) => match self.d {
0 => Ship {x: self.x + n, ..*self},
90 => Ship {y: self.y - n, ..*self},
180 => Ship {x: self.x - n, ..*self},
270 => Ship {y: self.y + n, ..*self},
_ => panic!("Illegal direction")
},
}
}
fn act_right_method(&self, action: Action) -> Ship {
match action {
Action::North(n) => Ship {wy: self.wy + n, ..*self},
Action::South(n) => Ship {wy: self.wy - n, ..*self},
Action::East(n) => Ship {wx: self.wx + n, ..*self},
Action::West(n) => Ship {wx: self.wx - n, ..*self},
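// Rotating the waypoint 90 degrees left maps (wx, wy) to (-wy, wx); right rotations are the inverse.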
Action::Left(n) => match n {
0 => *self,
90 => Ship {wx: -self.wy, wy: self.wx, ..*self},
180 => Ship {wx: -self.wx, wy: -self.wy, ..*self},
270 => Ship {wx: self.wy, wy: -self.wx, ..*self},
_ => panic!("Illegal direction")
},
Action::Right(n) => match n {
0 => *self,
90 => Ship {wx: self.wy, wy: -self.wx, ..*self},
180 => Ship {wx: -self.wx, wy: -self.wy, ..*self},
270 => Ship {wx: -self.wy, wy: self.wx, ..*self},
_ => panic!("Illegal direction")
},
Action::Forward(n) => Ship {x: self.x + self.wx * n, y: self.y + self.wy * n, ..*self},
}
}
}
fn main() {
let file = fs::read_to_string("input.txt").expect("File does not exist");
let mut actions: Vec<Action> = Vec::new();
for line in file.lines() {
actions.push(Action::parse(line).unwrap());
}
let mut ship = Ship::default();
for &action in &actions {
ship = ship.act_wrong_method(action);
}
println!("The first action interpretation puts the ship at ({},{}), bearing {}.", ship.x, ship.y, ship.d);
println!("The ship's Manhattan distance from the origin is {}.", ship.x.abs() + ship.y.abs());
ship = Ship::default();
for &action in &actions {
ship = ship.act_right_method(action);
}
println!("The second action interpretation puts the ship at ({},{}), bearing {}.", ship.x, ship.y, ship.d);
println!("The ship's Manhattan distance from the origin is {}.", ship.x.abs() + ship.y.abs());
}
|
}
fn act_wrong_method(&self, action: Action) -> Ship {
|
fileActions.ts
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import 'vs/css!./media/fileactions';
import * as nls from 'vs/nls';
import * as types from 'vs/base/common/types';
import { isWindows, isLinux } from 'vs/base/common/platform';
import * as extpath from 'vs/base/common/extpath';
import { extname, basename } from 'vs/base/common/path';
import * as resources from 'vs/base/common/resources';
import { URI } from 'vs/base/common/uri';
import { toErrorMessage } from 'vs/base/common/errorMessage';
import * as strings from 'vs/base/common/strings';
import { Action } from 'vs/base/common/actions';
import { dispose, IDisposable } from 'vs/base/common/lifecycle';
import { VIEWLET_ID, IExplorerService } from 'vs/workbench/contrib/files/common/files';
import { ITextFileService } from 'vs/workbench/services/textfile/common/textfiles';
import { IFileService, AutoSaveConfiguration } from 'vs/platform/files/common/files';
import { toResource, SideBySideEditor } from 'vs/workbench/common/editor';
import { ExplorerViewlet } from 'vs/workbench/contrib/files/browser/explorerViewlet';
import { IUntitledEditorService } from 'vs/workbench/services/untitled/common/untitledEditorService';
import { IQuickOpenService } from 'vs/platform/quickOpen/common/quickOpen';
import { IViewletService } from 'vs/workbench/services/viewlet/browser/viewlet';
import { IInstantiationService, ServicesAccessor } from 'vs/platform/instantiation/common/instantiation';
import { ITextModel } from 'vs/editor/common/model';
import { IWindowService } from 'vs/platform/windows/common/windows';
import { REVEAL_IN_EXPLORER_COMMAND_ID, SAVE_ALL_COMMAND_ID, SAVE_ALL_LABEL, SAVE_ALL_IN_GROUP_COMMAND_ID } from 'vs/workbench/contrib/files/browser/fileCommands';
import { ITextModelService, ITextModelContentProvider } from 'vs/editor/common/services/resolverService';
import { IConfigurationService, ConfigurationTarget } from 'vs/platform/configuration/common/configuration';
import { IClipboardService } from 'vs/platform/clipboard/common/clipboardService';
import { IModeService } from 'vs/editor/common/services/modeService';
import { IModelService } from 'vs/editor/common/services/modelService';
import { ICommandService, CommandsRegistry } from 'vs/platform/commands/common/commands';
import { IListService, ListWidget } from 'vs/platform/list/browser/listService';
import { RawContextKey } from 'vs/platform/contextkey/common/contextkey';
import { Schemas } from 'vs/base/common/network';
import { IDialogService, IConfirmationResult, getConfirmMessage } from 'vs/platform/dialogs/common/dialogs';
import { INotificationService, Severity } from 'vs/platform/notification/common/notification';
import { IEditorService } from 'vs/workbench/services/editor/common/editorService';
import { Constants } from 'vs/editor/common/core/uint';
import { CLOSE_EDITORS_AND_GROUP_COMMAND_ID } from 'vs/workbench/browser/parts/editor/editorCommands';
import { coalesce } from 'vs/base/common/arrays';
import { AsyncDataTree } from 'vs/base/browser/ui/tree/asyncDataTree';
import { ExplorerItem, NewExplorerItem } from 'vs/workbench/contrib/files/common/explorerModel';
import { onUnexpectedError } from 'vs/base/common/errors';
export const NEW_FILE_COMMAND_ID = 'explorer.newFile';
export const NEW_FILE_LABEL = nls.localize('newFile', "New File");
export const NEW_FOLDER_COMMAND_ID = 'explorer.newFolder';
export const NEW_FOLDER_LABEL = nls.localize('newFolder', "New Folder");
export const TRIGGER_RENAME_LABEL = nls.localize('rename', "Rename");
export const MOVE_FILE_TO_TRASH_LABEL = nls.localize('delete', "Delete");
export const COPY_FILE_LABEL = nls.localize('copyFile', "Copy");
export const PASTE_FILE_LABEL = nls.localize('pasteFile', "Paste");
export const FileCopiedContext = new RawContextKey<boolean>('fileCopied', false);
const CONFIRM_DELETE_SETTING_KEY = 'explorer.confirmDelete';
function onError(notificationService: INotificationService, error: any): void {
if (typeof error.message === 'string') {
error = error.message;
}
notificationService.error(toErrorMessage(error, false));
}
function refreshIfSeparator(value: string, explorerService: IExplorerService): void {
if (value && ((value.indexOf('/') >= 0) || (value.indexOf('\\') >= 0))) {
// New input contains separator, multiple resources will get created workaround for #68204
explorerService.refresh();
}
}
/* New File */
export class NewFileAction extends Action {
static readonly ID = 'workbench.files.action.createFileFromExplorer';
static readonly LABEL = nls.localize('createNewFile', "New File");
constructor(
@IExplorerService explorerService: IExplorerService,
@ICommandService private commandService: ICommandService
) {
super('explorer.newFile', NEW_FILE_LABEL);
this.class = 'explorer-action new-file';
this._register(explorerService.onDidChangeEditable(e => {
const elementIsBeingEdited = explorerService.isEditable(e);
this.enabled = !elementIsBeingEdited;
}));
}
run(): Promise<any> {
return this.commandService.executeCommand(NEW_FILE_COMMAND_ID);
}
}
/* New Folder */
export class NewFolderAction extends Action {
static readonly ID = 'workbench.files.action.createFolderFromExplorer';
static readonly LABEL = nls.localize('createNewFolder', "New Folder");
constructor(
@IExplorerService explorerService: IExplorerService,
@ICommandService private commandService: ICommandService
) {
super('explorer.newFolder', NEW_FOLDER_LABEL);
this.class = 'explorer-action new-folder';
this._register(explorerService.onDidChangeEditable(e => {
const elementIsBeingEdited = explorerService.isEditable(e);
this.enabled = !elementIsBeingEdited;
}));
}
run(): Promise<any> {
return this.commandService.executeCommand(NEW_FOLDER_COMMAND_ID);
}
}
/* Create new file from anywhere: Open untitled */
export class GlobalNewUntitledFileAction extends Action {
public static readonly ID = 'workbench.action.files.newUntitledFile';
public static readonly LABEL = nls.localize('newUntitledFile', "New Untitled File");
constructor(
id: string,
label: string,
@IEditorService private readonly editorService: IEditorService
) {
super(id, label);
}
public run(): Promise<any> {
return this.editorService.openEditor({ options: { pinned: true } }); // untitled are always pinned
}
}
function deleteFiles(textFileService: ITextFileService, dialogService: IDialogService, configurationService: IConfigurationService, fileService: IFileService, elements: ExplorerItem[], useTrash: boolean, skipConfirm = false): Promise<void> {
let primaryButton: string;
if (useTrash) {
primaryButton = isWindows ? nls.localize('deleteButtonLabelRecycleBin', "&&Move to Recycle Bin") : nls.localize({ key: 'deleteButtonLabelTrash', comment: ['&& denotes a mnemonic'] }, "&&Move to Trash");
} else {
primaryButton = nls.localize({ key: 'deleteButtonLabel', comment: ['&& denotes a mnemonic'] }, "&&Delete");
}
const distinctElements = resources.distinctParents(elements, e => e.resource);
// Handle dirty
let confirmDirtyPromise: Promise<boolean> = Promise.resolve(true);
const dirty = textFileService.getDirty().filter(d => distinctElements.some(e => resources.isEqualOrParent(d, e.resource, !isLinux /* ignorecase */)));
if (dirty.length) {
let message: string;
if (distinctElements.length > 1) {
message = nls.localize('dirtyMessageFilesDelete', "You are deleting files with unsaved changes. Do you want to continue?");
} else if (distinctElements[0].isDirectory) {
if (dirty.length === 1) {
message = nls.localize('dirtyMessageFolderOneDelete', "You are deleting a folder with unsaved changes in 1 file. Do you want to continue?");
} else {
message = nls.localize('dirtyMessageFolderDelete', "You are deleting a folder with unsaved changes in {0} files. Do you want to continue?", dirty.length);
}
} else {
message = nls.localize('dirtyMessageFileDelete', "You are deleting a file with unsaved changes. Do you want to continue?");
}
confirmDirtyPromise = dialogService.confirm({
message,
type: 'warning',
detail: nls.localize('dirtyWarning', "Your changes will be lost if you don't save them."),
primaryButton
}).then(res => {
if (!res.confirmed) {
return false;
}
skipConfirm = true; // since we already asked for confirmation
return textFileService.revertAll(dirty).then(() => true);
});
}
// Check if file is dirty in editor and save it to avoid data loss
return confirmDirtyPromise.then(confirmed => {
if (!confirmed) {
return undefined;
}
let confirmDeletePromise: Promise<IConfirmationResult>;
// Check if we need to ask for confirmation at all
if (skipConfirm || (useTrash && configurationService.getValue<boolean>(CONFIRM_DELETE_SETTING_KEY) === false)) {
confirmDeletePromise = Promise.resolve({ confirmed: true });
}
// Confirm for moving to trash
else if (useTrash) {
const message = getMoveToTrashMessage(distinctElements);
confirmDeletePromise = dialogService.confirm({
message,
detail: isWindows ? nls.localize('undoBin', "You can restore from the Recycle Bin.") : nls.localize('undoTrash', "You can restore from the Trash."),
primaryButton,
checkbox: {
label: nls.localize('doNotAskAgain', "Do not ask me again")
},
type: 'question'
});
}
// Confirm for deleting permanently
else {
const message = getDeleteMessage(distinctElements);
confirmDeletePromise = dialogService.confirm({
message,
detail: nls.localize('irreversible', "This action is irreversible!"),
primaryButton,
type: 'warning'
});
}
return confirmDeletePromise.then(confirmation => {
// Check for confirmation checkbox
let updateConfirmSettingsPromise: Promise<void> = Promise.resolve(undefined);
if (confirmation.confirmed && confirmation.checkboxChecked === true) {
updateConfirmSettingsPromise = configurationService.updateValue(CONFIRM_DELETE_SETTING_KEY, false, ConfigurationTarget.USER);
}
return updateConfirmSettingsPromise.then(() => {
// Check for confirmation
if (!confirmation.confirmed) {
return Promise.resolve(undefined);
}
// Perform the delete and handle failures with a retry or permanent-delete prompt
const servicePromise = Promise.all(distinctElements.map(e => fileService.del(e.resource, { useTrash, recursive: true })))
.then(undefined, (error: any) => {
// Handle error to delete file(s) from a modal confirmation dialog
let errorMessage: string;
let detailMessage: string | undefined;
let primaryButton: string;
if (useTrash) {
errorMessage = isWindows ? nls.localize('binFailed', "Failed to delete using the Recycle Bin. Do you want to permanently delete instead?") : nls.localize('trashFailed', "Failed to delete using the Trash. Do you want to permanently delete instead?");
detailMessage = nls.localize('irreversible', "This action is irreversible!");
primaryButton = nls.localize({ key: 'deletePermanentlyButtonLabel', comment: ['&& denotes a mnemonic'] }, "&&Delete Permanently");
} else {
errorMessage = toErrorMessage(error, false);
primaryButton = nls.localize({ key: 'retryButtonLabel', comment: ['&& denotes a mnemonic'] }, "&&Retry");
}
return dialogService.confirm({
message: errorMessage,
detail: detailMessage,
type: 'warning',
primaryButton
}).then(res => {
if (res.confirmed) {
if (useTrash) {
useTrash = false; // Delete Permanently
}
skipConfirm = true;
return deleteFiles(textFileService, dialogService, configurationService, fileService, elements, useTrash, skipConfirm);
}
return Promise.resolve();
});
});
return servicePromise;
});
});
});
}
function getMoveToTrashMessage(distinctElements: ExplorerItem[]): string {
if (containsBothDirectoryAndFile(distinctElements)) {
return getConfirmMessage(nls.localize('confirmMoveTrashMessageFilesAndDirectories', "Are you sure you want to delete the following {0} files/directories and their contents?", distinctElements.length), distinctElements.map(e => e.resource));
}
if (distinctElements.length > 1) {
if (distinctElements[0].isDirectory) {
return getConfirmMessage(nls.localize('confirmMoveTrashMessageMultipleDirectories', "Are you sure you want to delete the following {0} directories and their contents?", distinctElements.length), distinctElements.map(e => e.resource));
}
return getConfirmMessage(nls.localize('confirmMoveTrashMessageMultiple', "Are you sure you want to delete the following {0} files?", distinctElements.length), distinctElements.map(e => e.resource));
}
if (distinctElements[0].isDirectory) {
return nls.localize('confirmMoveTrashMessageFolder', "Are you sure you want to delete '{0}' and its contents?", distinctElements[0].name);
}
return nls.localize('confirmMoveTrashMessageFile', "Are you sure you want to delete '{0}'?", distinctElements[0].name);
}
function getDeleteMessage(distinctElements: ExplorerItem[]): string {
if (containsBothDirectoryAndFile(distinctElements)) {
return getConfirmMessage(nls.localize('confirmDeleteMessageFilesAndDirectories', "Are you sure you want to permanently delete the following {0} files/directories and their contents?", distinctElements.length), distinctElements.map(e => e.resource));
}
if (distinctElements.length > 1) {
if (distinctElements[0].isDirectory) {
return getConfirmMessage(nls.localize('confirmDeleteMessageMultipleDirectories', "Are you sure you want to permanently delete the following {0} directories and their contents?", distinctElements.length), distinctElements.map(e => e.resource));
}
return getConfirmMessage(nls.localize('confirmDeleteMessageMultiple', "Are you sure you want to permanently delete the following {0} files?", distinctElements.length), distinctElements.map(e => e.resource));
}
if (distinctElements[0].isDirectory) {
return nls.localize('confirmDeleteMessageFolder', "Are you sure you want to permanently delete '{0}' and its contents?", distinctElements[0].name);
}
return nls.localize('confirmDeleteMessageFile', "Are you sure you want to permanently delete '{0}'?", distinctElements[0].name);
}
function containsBothDirectoryAndFile(distinctElements: ExplorerItem[]): boolean {
const directories = distinctElements.filter(element => element.isDirectory);
const files = distinctElements.filter(element => !element.isDirectory);
return directories.length > 0 && files.length > 0;
}
export function findValidPasteFileTarget(targetFolder: ExplorerItem, fileToPaste: { resource: URI, isDirectory?: boolean, allowOverwrite: boolean }): URI {
let name = resources.basenameOrAuthority(fileToPaste.resource);
let candidate = resources.joinPath(targetFolder.resource, name);
while (!fileToPaste.allowOverwrite) {
if (!targetFolder.root.find(candidate)) {
break;
}
name = incrementFileName(name, !!fileToPaste.isDirectory);
candidate = resources.joinPath(targetFolder.resource, name);
}
return candidate;
}
export function incrementFileName(name: string, isFolder: boolean): string {
let namePrefix = name;
let extSuffix = '';
if (!isFolder) {
extSuffix = extname(name);
namePrefix = basename(name, extSuffix);
}
// name copy 5(.txt) => name copy 6(.txt)
// name copy(.txt) => name copy 2(.txt)
const suffixRegex = /^(.+ copy)( \d+)?$/;
if (suffixRegex.test(namePrefix)) {
return namePrefix.replace(suffixRegex, (match, g1?, g2?) => {
let number = (g2 ? parseInt(g2) : 1);
return number === 0
? `${g1}`
: (number < Constants.MAX_SAFE_SMALL_INTEGER
? `${g1} ${number + 1}`
: `${g1}${g2} copy`);
}) + extSuffix;
}
// name(.txt) => name copy(.txt)
return `${namePrefix} copy${extSuffix}`;
}
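// Illustrative examples (not part of the original source):
// incrementFileName('notes.txt', false) => 'notes copy.txt'
// incrementFileName('notes copy.txt', false) => 'notes copy 2.txt'
// incrementFileName('photos', true) => 'photos copy'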
// Global Compare with
export class GlobalCompareResourcesAction extends Action {
public static readonly ID = 'workbench.files.action.compareFileWith';
public static readonly LABEL = nls.localize('globalCompareFile', "Compare Active File With...");
constructor(
id: string,
label: string,
@IQuickOpenService private readonly quickOpenService: IQuickOpenService,
@IEditorService private readonly editorService: IEditorService,
@INotificationService private readonly notificationService: INotificationService,
) {
super(id, label);
}
public run(): Promise<any> {
const activeInput = this.editorService.activeEditor;
const activeResource = activeInput ? activeInput.getResource() : undefined;
if (activeResource) {
// Compare with next editor that opens
const toDispose = this.editorService.overrideOpenEditor(editor => {
// Only once!
toDispose.dispose();
// Open editor as diff
const resource = editor.getResource();
if (resource) {
return {
override: this.editorService.openEditor({
leftResource: activeResource,
rightResource: resource
}).then(() => null)
};
}
return undefined;
});
// Bring up quick open
this.quickOpenService.show('', { autoFocus: { autoFocusSecondEntry: true } }).then(() => {
toDispose.dispose(); // make sure to unbind if quick open is closing
});
} else {
this.notificationService.info(nls.localize('openFileToCompare', "Open a file first to compare it with another file."));
}
return Promise.resolve(true);
}
}
export class ToggleAutoSaveAction extends Action {
public static readonly ID = 'workbench.action.toggleAutoSave';
public static readonly LABEL = nls.localize('toggleAutoSave', "Toggle Auto Save");
constructor(
id: string,
label: string,
@IConfigurationService private readonly configurationService: IConfigurationService
) {
super(id, label);
}
public run(): Promise<any> {
const setting = this.configurationService.inspect('files.autoSave');
let userAutoSaveConfig = setting.user;
if (types.isUndefinedOrNull(userAutoSaveConfig)) {
userAutoSaveConfig = setting.default; // use default if setting not defined
}
let newAutoSaveValue: string;
if ([AutoSaveConfiguration.AFTER_DELAY, AutoSaveConfiguration.ON_FOCUS_CHANGE, AutoSaveConfiguration.ON_WINDOW_CHANGE].some(s => s === userAutoSaveConfig)) {
newAutoSaveValue = AutoSaveConfiguration.OFF;
} else {
newAutoSaveValue = AutoSaveConfiguration.AFTER_DELAY;
}
return this.configurationService.updateValue('files.autoSave', newAutoSaveValue, ConfigurationTarget.USER);
}
}
export abstract class BaseSaveAllAction extends Action {
private lastIsDirty: boolean;
constructor(
id: string,
label: string,
@ITextFileService private readonly textFileService: ITextFileService,
@IUntitledEditorService private readonly untitledEditorService: IUntitledEditorService,
@ICommandService protected commandService: ICommandService,
@INotificationService private notificationService: INotificationService,
) {
super(id, label);
this.lastIsDirty = this.textFileService.isDirty();
this.enabled = this.lastIsDirty;
this.registerListeners();
}
protected abstract includeUntitled(): boolean;
protected abstract doRun(context: any): Promise<any>;
private registerListeners(): void {
// listen to files being changed locally
this._register(this.textFileService.models.onModelsDirty(e => this.updateEnablement(true)));
this._register(this.textFileService.models.onModelsSaved(e => this.updateEnablement(false)));
this._register(this.textFileService.models.onModelsReverted(e => this.updateEnablement(false)));
this._register(this.textFileService.models.onModelsSaveError(e => this.updateEnablement(true)));
if (this.includeUntitled()) {
this._register(this.untitledEditorService.onDidChangeDirty(resource => this.updateEnablement(this.untitledEditorService.isDirty(resource))));
}
}
private updateEnablement(isDirty: boolean): void {
if (this.lastIsDirty !== isDirty) {
this.enabled = this.textFileService.isDirty();
this.lastIsDirty = this.enabled;
}
}
public run(context?: any): Promise<boolean> {
return this.doRun(context).then(() => true, error => {
onError(this.notificationService, error);
return false;
});
}
}
export class SaveAllAction extends BaseSaveAllAction {
public static readonly ID = 'workbench.action.files.saveAll';
public static readonly LABEL = SAVE_ALL_LABEL;
public get class(): string {
return 'explorer-action save-all';
}
protected doRun(context: any): Promise<any> {
return this.commandService.executeCommand(SAVE_ALL_COMMAND_ID);
}
protected includeUntitled(): boolean {
return true;
}
}
export class SaveAllInGroupAction extends BaseSaveAllAction {
public static readonly ID = 'workbench.files.action.saveAllInGroup';
public static readonly LABEL = nls.localize('saveAllInGroup', "Save All in Group");
public get class(): string {
return 'explorer-action save-all';
}
protected doRun(context: any): Promise<any> {
return this.commandService.executeCommand(SAVE_ALL_IN_GROUP_COMMAND_ID, {}, context);
}
protected includeUntitled(): boolean {
return true;
}
}
export class CloseGroupAction extends Action {
public static readonly ID = 'workbench.files.action.closeGroup';
public static readonly LABEL = nls.localize('closeGroup', "Close Group");
constructor(id: string, label: string, @ICommandService private readonly commandService: ICommandService) {
super(id, label, 'action-close-all-files');
}
public run(context?: any): Promise<any> {
return this.commandService.executeCommand(CLOSE_EDITORS_AND_GROUP_COMMAND_ID, {}, context);
}
}
export class FocusFilesExplorer extends Action {
public static readonly ID = 'workbench.files.action.focusFilesExplorer';
public static readonly LABEL = nls.localize('focusFilesExplorer', "Focus on Files Explorer");
constructor(
id: string,
label: string,
@IViewletService private readonly viewletService: IViewletService
) {
super(id, label);
}
public run(): Promise<any> {
return this.viewletService.openViewlet(VIEWLET_ID, true);
}
}
export class ShowActiveFileInExplorer extends Action {
public static readonly ID = 'workbench.files.action.showActiveFileInExplorer';
public static readonly LABEL = nls.localize('showInExplorer', "Reveal Active File in Side Bar");
constructor(
id: string,
label: string,
@IEditorService private readonly editorService: IEditorService,
@INotificationService private readonly notificationService: INotificationService,
@ICommandService private readonly commandService: ICommandService
) {
super(id, label);
}
public run(): Promise<any> {
const resource = toResource(this.editorService.activeEditor, { supportSideBySide: SideBySideEditor.MASTER });
if (resource) {
this.commandService.executeCommand(REVEAL_IN_EXPLORER_COMMAND_ID, resource);
} else {
this.notificationService.info(nls.localize('openFileToShow', "Open a file first to show it in the explorer."));
}
return Promise.resolve(true);
}
}
export class CollapseExplorerView extends Action {
public static readonly ID = 'workbench.files.action.collapseExplorerFolders';
public static readonly LABEL = nls.localize('collapseExplorerFolders', "Collapse Folders in Explorer");
constructor(id: string,
label: string,
@IViewletService private readonly viewletService: IViewletService,
@IExplorerService readonly explorerService: IExplorerService
) {
super(id, label, 'explorer-action collapse-explorer');
this._register(explorerService.onDidChangeEditable(e => {
const elementIsBeingEdited = explorerService.isEditable(e);
this.enabled = !elementIsBeingEdited;
}));
}
run(): Promise<any> {
return this.viewletService.openViewlet(VIEWLET_ID).then((viewlet: ExplorerViewlet) => {
const explorerView = viewlet.getExplorerView();
if (explorerView) {
explorerView.collapseAll();
}
});
}
}
export class RefreshExplorerView extends Action {
public static readonly ID = 'workbench.files.action.refreshFilesExplorer';
public static readonly LABEL = nls.localize('refreshExplorer', "Refresh Explorer");
constructor(
id: string, label: string,
@IViewletService private readonly viewletService: IViewletService,
@IExplorerService private readonly explorerService: IExplorerService
) {
super(id, label, 'explorer-action refresh-explorer');
this._register(explorerService.onDidChangeEditable(e => {
const elementIsBeingEdited = explorerService.isEditable(e);
this.enabled = !elementIsBeingEdited;
}));
}
public run(): Promise<any> {
return this.viewletService.openViewlet(VIEWLET_ID).then(() =>
this.explorerService.refresh()
);
}
}
export class ShowOpenedFileInNewWindow extends Action {
public static readonly ID = 'workbench.action.files.showOpenedFileInNewWindow';
public static readonly LABEL = nls.localize('openFileInNewWindow', "Open Active File in New Window");
constructor(
id: string,
label: string,
@IEditorService private readonly editorService: IEditorService,
@IWindowService private readonly windowService: IWindowService,
@INotificationService private readonly notificationService: INotificationService,
@IFileService private readonly fileService: IFileService
) {
super(id, label);
}
public run(): Promise<any> {
const fileResource = toResource(this.editorService.activeEditor, { supportSideBySide: SideBySideEditor.MASTER });
if (fileResource) {
if (this.fileService.canHandleResource(fileResource)) {
this.windowService.openWindow([{ fileUri: fileResource }], { forceNewWindow: true });
} else {
this.notificationService.info(nls.localize('openFileToShowInNewWindow.unsupportedschema', "The active editor must contain an openable resource."));
}
} else {
this.notificationService.info(nls.localize('openFileToShowInNewWindow.nofile', "Open a file first to open it in a new window."));
}
return Promise.resolve(true);
}
}
export function validateFileName(item: ExplorerItem, name: string): string | null {
// Produce a well formed file name
name = getWellFormedFileName(name);
// Name not provided
if (!name || name.length === 0 || /^\s+$/.test(name)) {
return nls.localize('emptyFileNameError', "A file or folder name must be provided.");
}
// Relative paths only
if (name[0] === '/' || name[0] === '\\') {
return nls.localize('fileNameStartsWithSlashError', "A file or folder name cannot start with a slash.");
}
const names = coalesce(name.split(/[\\/]/));
const parent = item.parent;
if (name !== item.name) {
// Do not allow overwriting an existing file
|
}
// Invalid File name
if (names.some((folderName) => !extpath.isValidBasename(folderName))) {
return nls.localize('invalidFileNameError', "The name **{0}** is not valid as a file or folder name. Please choose a different name.", trimLongName(name));
}
return null;
}
function trimLongName(name: string): string {
if (name && name.length > 255) {
return `${name.substr(0, 255)}...`;
}
return name;
}
export function getWellFormedFileName(filename: string): string {
if (!filename) {
return filename;
}
// Trim tabs
filename = strings.trim(filename, '\t');
// Remove trailing dots and slashes
filename = strings.rtrim(filename, '.');
filename = strings.rtrim(filename, '/');
filename = strings.rtrim(filename, '\\');
return filename;
}
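// Illustrative example (not part of the original source): a raw input such as
// '\tnotes.txt.' is normalized to 'notes.txt' before validation.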
export class CompareWithClipboardAction extends Action {
public static readonly ID = 'workbench.files.action.compareWithClipboard';
public static readonly LABEL = nls.localize('compareWithClipboard', "Compare Active File with Clipboard");
private static readonly SCHEME = 'clipboardCompare';
private registrationDisposal: IDisposable | undefined;
constructor(
id: string,
label: string,
@IEditorService private readonly editorService: IEditorService,
@IInstantiationService private readonly instantiationService: IInstantiationService,
@ITextModelService private readonly textModelService: ITextModelService,
@IFileService private readonly fileService: IFileService
) {
super(id, label);
this.enabled = true;
}
public run(): Promise<any> {
const resource = toResource(this.editorService.activeEditor, { supportSideBySide: SideBySideEditor.MASTER });
if (resource && (this.fileService.canHandleResource(resource) || resource.scheme === Schemas.untitled)) {
if (!this.registrationDisposal) {
const provider = this.instantiationService.createInstance(ClipboardContentProvider);
this.registrationDisposal = this.textModelService.registerTextModelContentProvider(CompareWithClipboardAction.SCHEME, provider);
}
const name = resources.basename(resource);
const editorLabel = nls.localize('clipboardComparisonLabel', "Clipboard ↔ {0}", name);
return this.editorService.openEditor({ leftResource: resource.with({ scheme: CompareWithClipboardAction.SCHEME }), rightResource: resource, label: editorLabel }).finally(() => {
dispose(this.registrationDisposal);
this.registrationDisposal = undefined;
});
}
return Promise.resolve(true);
}
public dispose(): void {
super.dispose();
dispose(this.registrationDisposal);
this.registrationDisposal = undefined;
}
}
class ClipboardContentProvider implements ITextModelContentProvider {
constructor(
@IClipboardService private readonly clipboardService: IClipboardService,
@IModeService private readonly modeService: IModeService,
@IModelService private readonly modelService: IModelService
) { }
provideTextContent(resource: URI): Promise<ITextModel> {
const model = this.modelService.createModel(this.clipboardService.readText(), this.modeService.createByFilepathOrFirstLine(resource), resource);
return Promise.resolve(model);
}
}
interface IExplorerContext {
stat?: ExplorerItem;
selection: ExplorerItem[];
}
function getContext(listWidget: ListWidget): IExplorerContext {
// These commands can only be triggered when explorer viewlet is visible so get it using the active viewlet
const tree = <AsyncDataTree<null, ExplorerItem>>listWidget;
const focus = tree.getFocus();
const stat = focus.length ? focus[0] : undefined;
const selection = tree.getSelection();
// Only respect the selection if user clicked inside it (focus belongs to it)
return { stat, selection: selection && typeof stat !== 'undefined' && selection.indexOf(stat) >= 0 ? selection : [] };
}
function onErrorWithRetry(notificationService: INotificationService, error: any, retry: () => Promise<any>): void {
notificationService.prompt(Severity.Error, toErrorMessage(error, false),
[{
label: nls.localize('retry', "Retry"),
run: () => retry()
}]
);
}
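// Editorial summary (not from the original source): openExplorerAndCreate reveals the Explorer,
// adds an editable placeholder entry under the focused (or root) folder, and on commit creates
// the file or folder and then opens or selects it; failures surface a Retry prompt.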
async function openExplorerAndCreate(accessor: ServicesAccessor, isFolder: boolean): Promise<void> {
const listService = accessor.get(IListService);
const explorerService = accessor.get(IExplorerService);
const fileService = accessor.get(IFileService);
const textFileService = accessor.get(ITextFileService);
const editorService = accessor.get(IEditorService);
const viewletService = accessor.get(IViewletService);
await viewletService.openViewlet(VIEWLET_ID, true);
const list = listService.lastFocusedList;
if (list) {
const { stat } = getContext(list);
let folder: ExplorerItem;
if (stat) {
folder = stat.isDirectory ? stat : stat.parent!;
} else {
folder = explorerService.roots[0];
}
if (folder.isReadonly) {
throw new Error('Parent folder is readonly.');
}
const newStat = new NewExplorerItem(folder, isFolder);
await folder.fetchChildren(fileService, explorerService);
folder.addChild(newStat);
const onSuccess = async (value: string) => {
const createPromise = isFolder ? fileService.createFolder(resources.joinPath(folder.resource, value)) : textFileService.create(resources.joinPath(folder.resource, value));
return createPromise.then(created => {
refreshIfSeparator(value, explorerService);
return isFolder ? explorerService.select(created.resource, true)
: editorService.openEditor({ resource: created.resource, options: { pinned: true } }).then(() => undefined);
}, (error) => {
onErrorWithRetry(accessor.get(INotificationService), error, () => onSuccess(value));
});
};
explorerService.setEditable(newStat, {
validationMessage: value => validateFileName(newStat, value),
onFinish: (value, success) => {
folder.removeChild(newStat);
explorerService.setEditable(newStat, null);
if (success) {
onSuccess(value);
} else {
explorerService.select(folder.resource).then(undefined, onUnexpectedError);
}
}
});
}
}
CommandsRegistry.registerCommand({
id: NEW_FILE_COMMAND_ID,
handler: (accessor) => {
openExplorerAndCreate(accessor, false).then(undefined, onUnexpectedError);
}
});
CommandsRegistry.registerCommand({
id: NEW_FOLDER_COMMAND_ID,
handler: (accessor) => {
openExplorerAndCreate(accessor, true).then(undefined, onUnexpectedError);
}
});
export const renameHandler = (accessor: ServicesAccessor) => {
const listService = accessor.get(IListService);
const explorerService = accessor.get(IExplorerService);
const textFileService = accessor.get(ITextFileService);
if (!listService.lastFocusedList) {
return;
}
const { stat } = getContext(listService.lastFocusedList);
if (!stat) {
return;
}
explorerService.setEditable(stat, {
validationMessage: value => validateFileName(stat, value),
onFinish: (value, success) => {
if (success) {
const parentResource = stat.parent!.resource;
const targetResource = resources.joinPath(parentResource, value);
if (stat.resource.toString() !== targetResource.toString()) {
textFileService.move(stat.resource, targetResource).then(() => refreshIfSeparator(value, explorerService), onUnexpectedError);
}
}
explorerService.setEditable(stat, null);
}
});
};
export const moveFileToTrashHandler = (accessor: ServicesAccessor) => {
const listService = accessor.get(IListService);
if (!listService.lastFocusedList) {
return Promise.resolve();
}
const explorerContext = getContext(listService.lastFocusedList);
const stats = explorerContext.selection.length > 1 ? explorerContext.selection : [explorerContext.stat!];
return deleteFiles(accessor.get(ITextFileService), accessor.get(IDialogService), accessor.get(IConfigurationService), accessor.get(IFileService), stats, true);
};
export const deleteFileHandler = (accessor: ServicesAccessor) => {
const listService = accessor.get(IListService);
if (!listService.lastFocusedList) {
return Promise.resolve();
}
const explorerContext = getContext(listService.lastFocusedList);
const stats = explorerContext.selection.length > 1 ? explorerContext.selection : [explorerContext.stat!];
return deleteFiles(accessor.get(ITextFileService), accessor.get(IDialogService), accessor.get(IConfigurationService), accessor.get(IFileService), stats, false);
};
let pasteShouldMove = false;
export const copyFileHandler = (accessor: ServicesAccessor) => {
const listService = accessor.get(IListService);
if (!listService.lastFocusedList) {
return;
}
const explorerContext = getContext(listService.lastFocusedList);
const explorerService = accessor.get(IExplorerService);
if (explorerContext.stat) {
const stats = explorerContext.selection.length > 1 ? explorerContext.selection : [explorerContext.stat];
explorerService.setToCopy(stats, false);
pasteShouldMove = false;
}
};
export const cutFileHandler = (accessor: ServicesAccessor) => {
const listService = accessor.get(IListService);
if (!listService.lastFocusedList) {
return;
}
const explorerContext = getContext(listService.lastFocusedList);
const explorerService = accessor.get(IExplorerService);
if (explorerContext.stat) {
const stats = explorerContext.selection.length > 1 ? explorerContext.selection : [explorerContext.stat];
explorerService.setToCopy(stats, true);
pasteShouldMove = true;
}
};
export const DOWNLOAD_COMMAND_ID = 'explorer.download';
const downloadFileHandler = (accessor: ServicesAccessor) => {
const listService = accessor.get(IListService);
if (!listService.lastFocusedList) {
return;
}
const explorerContext = getContext(listService.lastFocusedList);
const textFileService = accessor.get(ITextFileService);
if (explorerContext.stat) {
const stats = explorerContext.selection.length > 1 ? explorerContext.selection : [explorerContext.stat];
stats.forEach(async s => {
await textFileService.saveAs(s.resource, undefined, { availableFileSystems: [Schemas.file] });
});
}
};
CommandsRegistry.registerCommand({
id: DOWNLOAD_COMMAND_ID,
handler: downloadFileHandler
});
export const pasteFileHandler = (accessor: ServicesAccessor) => {
const listService = accessor.get(IListService);
const clipboardService = accessor.get(IClipboardService);
const explorerService = accessor.get(IExplorerService);
const fileService = accessor.get(IFileService);
const textFileService = accessor.get(ITextFileService);
const notificationService = accessor.get(INotificationService);
const editorService = accessor.get(IEditorService);
if (listService.lastFocusedList) {
const explorerContext = getContext(listService.lastFocusedList);
const toPaste = resources.distinctParents(clipboardService.readResources(), r => r);
const element = explorerContext.stat || explorerService.roots[0];
// Check if target is ancestor of pasted folder
Promise.all(toPaste.map(fileToPaste => {
if (element.resource.toString() !== fileToPaste.toString() && resources.isEqualOrParent(element.resource, fileToPaste, !isLinux /* ignorecase */)) {
throw new Error(nls.localize('fileIsAncestor', "File to paste is an ancestor of the destination folder"));
}
return fileService.resolve(fileToPaste).then(fileToPasteStat => {
// Find target
let target: ExplorerItem;
if (element.resource.toString() === fileToPaste.toString()) {
target = element.parent!;
} else {
target = element.isDirectory ? element : element.parent!;
}
const targetFile = findValidPasteFileTarget(target, { resource: fileToPaste, isDirectory: fileToPasteStat.isDirectory, allowOverwrite: pasteShouldMove });
// Move/Copy File
return pasteShouldMove ? textFileService.move(fileToPaste, targetFile) : fileService.copy(fileToPaste, targetFile);
}, error => {
onError(notificationService, new Error(nls.localize('fileDeleted', "File to paste was deleted or moved in the meantime")));
});
})).then((stat) => {
if (pasteShouldMove) {
// Cut is done. Make sure to clear cut state.
explorerService.setToCopy([], false);
}
if (stat.length === 1 && !stat[0].isDirectory) {
editorService.openEditor({ resource: stat[0].resource, options: { pinned: true, preserveFocus: true } }).then(undefined, onUnexpectedError);
}
});
}
};
|
const child = parent && parent.getChild(name);
if (child && child !== item) {
return nls.localize('fileNameExistsError', "A file or folder **{0}** already exists at this location. Please choose a different name.", name);
}
|
linux_musl.rs
|
pub(crate) fn strerror_imp(eno: i32) -> &'static str {
match eno {
0 => "No error information",
libc::EILSEQ => "Illegal byte sequence",
libc::EDOM => "Domain error",
libc::ERANGE => "Result not representable",
libc::ENOTTY => "Not a tty",
libc::EACCES => "Permission denied",
libc::EPERM => "Operation not permitted",
libc::ENOENT => "No such file or directory",
libc::ESRCH => "No such process",
libc::EEXIST => "File exists",
libc::EOVERFLOW => "Value too large for data type",
libc::ENOSPC => "No space left on device",
libc::ENOMEM => "Out of memory",
libc::EBUSY => "Resource busy",
libc::EINTR => "Interrupted system call",
libc::EAGAIN => "Resource temporarily unavailable",
libc::ESPIPE => "Invalid seek",
libc::EXDEV => "Cross-device link",
libc::EROFS => "Read-only file system",
libc::ENOTEMPTY => "Directory not empty",
libc::ECONNRESET => "Connection reset by peer",
libc::ETIMEDOUT => "Operation timed out",
libc::ECONNREFUSED => "Connection refused",
|
libc::EPIPE => "Broken pipe",
libc::EIO => "I/O error",
libc::ENXIO => "No such device or address",
libc::ENOTBLK => "Block device required",
libc::ENODEV => "No such device",
libc::ENOTDIR => "Not a directory",
libc::EISDIR => "Is a directory",
libc::ETXTBSY => "Text file busy",
libc::ENOEXEC => "Exec format error",
libc::EINVAL => "Invalid argument",
libc::E2BIG => "Argument list too long",
libc::ELOOP => "Symbolic link loop",
libc::ENAMETOOLONG => "Filename too long",
libc::ENFILE => "Too many open files in system",
libc::EMFILE => "No file descriptors available",
libc::EBADF => "Bad file descriptor",
libc::ECHILD => "No child process",
libc::EFAULT => "Bad address",
libc::EFBIG => "File too large",
libc::EMLINK => "Too many links",
libc::ENOLCK => "No locks available",
libc::EDEADLK => "Resource deadlock would occur",
libc::ENOTRECOVERABLE => "State not recoverable",
libc::EOWNERDEAD => "Previous owner died",
libc::ECANCELED => "Operation canceled",
libc::ENOSYS => "Function not implemented",
libc::ENOMSG => "No message of desired type",
libc::EIDRM => "Identifier removed",
libc::ENOSTR => "Device not a stream",
libc::ENODATA => "No data available",
libc::ETIME => "Device timeout",
libc::ENOSR => "Out of streams resources",
libc::ENOLINK => "Link has been severed",
libc::EPROTO => "Protocol error",
libc::EBADMSG => "Bad message",
libc::EBADFD => "File descriptor in bad state",
libc::ENOTSOCK => "Not a socket",
libc::EDESTADDRREQ => "Destination address required",
libc::EMSGSIZE => "Message too large",
libc::EPROTOTYPE => "Protocol wrong type for socket",
libc::ENOPROTOOPT => "Protocol not available",
libc::EPROTONOSUPPORT => "Protocol not supported",
libc::ESOCKTNOSUPPORT => "Socket type not supported",
libc::ENOTSUP => "Not supported",
libc::EPFNOSUPPORT => "Protocol family not supported",
libc::EAFNOSUPPORT => "Address family not supported by protocol",
libc::EADDRNOTAVAIL => "Address not available",
libc::ENETDOWN => "Network is down",
libc::ENETUNREACH => "Network unreachable",
libc::ENETRESET => "Connection reset by network",
libc::ECONNABORTED => "Connection aborted",
libc::ENOBUFS => "No buffer space available",
libc::EISCONN => "Socket is connected",
libc::ENOTCONN => "Socket not connected",
libc::ESHUTDOWN => "Cannot send after socket shutdown",
libc::EALREADY => "Operation already in progress",
libc::EINPROGRESS => "Operation in progress",
libc::ESTALE => "Stale file handle",
libc::EREMOTEIO => "Remote I/O error",
libc::EDQUOT => "Quota exceeded",
libc::ENOMEDIUM => "No medium found",
libc::EMEDIUMTYPE => "Wrong medium type",
libc::EMULTIHOP => "Multihop attempted",
_ => "No error information",
}
}
|
libc::EHOSTDOWN => "Host is down",
libc::EHOSTUNREACH => "Host is unreachable",
libc::EADDRINUSE => "Address in use",
|
chown_linux.go
|
package logrotate
import (
"os"
"syscall"
)
// osChown is a var so we can mock it out during tests.
var osChown = os.Chown
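// Illustrative example (not from the original source): a test could stub it out with
// osChown = func(name string, uid, gid int) error { return nil }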
func chown(name string, info os.FileInfo) error {
f, err := os.OpenFile(name, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, info.Mode())
if err != nil {
return err
}
_ = f.Close()
stat := info.Sys().(*syscall.Stat_t)
|
return osChown(name, int(stat.Uid), int(stat.Gid))
}
|
|
T_S_I__5.py
|
import sys
import DefaultTable
import array
from fontTools import ttLib
from fontTools.misc.textTools import safeEval
class table_T_S_I__5(DefaultTable.DefaultTable):
def decompile(self, data, ttFont):
numGlyphs = ttFont['maxp'].numGlyphs
assert len(data) == 2 * numGlyphs
a = array.array("H")
a.fromstring(data)
if sys.byteorder <> "big":
a.byteswap()
self.glyphGrouping = {}
for i in range(numGlyphs):
self.glyphGrouping[ttFont.getGlyphName(i)] = a[i]
def compile(self, ttFont):
glyphNames = ttFont.getGlyphOrder()
a = array.array("H")
for i in range(len(glyphNames)):
a.append(self.glyphGrouping[glyphNames[i]])
if sys.byteorder <> "big":
a.byteswap()
return a.tostring()
def toXML(self, writer, ttFont):
names = self.glyphGrouping.keys()
names.sort()
for glyphName in names:
writer.simpletag("glyphgroup", name=glyphName, value=self.glyphGrouping[glyphName])
writer.newline()
def fromXML(self, (name, attrs, content), ttFont):
if not hasattr(self, "glyphGrouping"):
self.glyphGrouping = {}
if name <> "glyphgroup":
|
self.glyphGrouping[attrs["name"]] = safeEval(attrs["value"])
|
return
|
test_commands.py
|
from django.core.management import call_command
from django.db.utils import OperationalError
from django.test import TestCase
class CommandTest(TestCase):
def test_wait_for_db_ready(self):
"""test waiting for db when db is available"""
with patch('django.db.utils.ConnectionHandler.__getitem__') as gi:
gi.return_value = True
call_command('wait_for_db')
self.assertEqual(gi.call_count, 1)
@patch('time.sleep', return_value=True)
def test_wait_for_db(self, ts):
"""test waiting for db"""
with patch('django.db.utils.ConnectionHandler.__getitem__') as gi:
gi.side_effect = [OperationalError] * 5 + [True]
call_command('wait_for_db')
self.assertEqual(gi.call_count, 6)
|
from unittest.mock import patch
|
|
pg_test.go
|
package pg
import (
"bytes"
"io"
"testing"
"github.com/k1LoW/tcpdp/dumper"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
)
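// Editorial note (not from the original source): the byte slices in these fixtures are assumed
// to be raw PostgreSQL v3 frontend protocol messages (StartupMessage, Query 'Q', Parse 'P',
// Bind 'B') captured for the parser tests below.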
var pgValueTests = []struct {
description string
in []byte
direction dumper.Direction
connMetadata dumper.ConnMetadata
expected []dumper.DumpValue
expectedQuery []dumper.DumpValue
}{
{
"Parse username/database from StartupMessage packet",
[]byte{
0x00, 0x00, 0x00, 0x64, 0x00, 0x03, 0x00, 0x00, 0x65, 0x78, 0x74, 0x72, 0x61, 0x5f, 0x66, 0x6c,
0x6f, 0x61, 0x74, 0x5f, 0x64, 0x69, 0x67, 0x69, 0x74, 0x73, 0x00, 0x32, 0x00, 0x75, 0x73, 0x65,
0x72, 0x00, 0x70, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73, 0x00, 0x64, 0x61, 0x74, 0x61, 0x62,
0x61, 0x73, 0x65, 0x00, 0x74, 0x65, 0x73, 0x74, 0x64, 0x62, 0x00, 0x63, 0x6c, 0x69, 0x65, 0x6e,
0x74, 0x5f, 0x65, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x00, 0x55, 0x54, 0x46, 0x38, 0x00,
0x64, 0x61, 0x74, 0x65, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x00, 0x49, 0x53, 0x4f, 0x2c, 0x20, 0x4d,
0x44, 0x59, 0x00, 0x00,
},
dumper.SrcToDst,
dumper.ConnMetadata{
DumpValues: []dumper.DumpValue{},
Internal: connMetadataInternal{
messageLength: uint32(0),
},
},
[]dumper.DumpValue{
dumper.DumpValue{
Key: "username",
Value: "postgres",
},
dumper.DumpValue{
Key: "database",
Value: "testdb",
},
},
[]dumper.DumpValue{},
},
{
"Parse query from MessageQuery packet",
[]byte{
0x51, 0x00, 0x00, 0x00, 0x19, 0x53, 0x45, 0x4c, 0x45, 0x43, 0x54, 0x20, 0x2a, 0x20, 0x46, 0x52,
0x4f, 0x4d, 0x20, 0x75, 0x73, 0x65, 0x72, 0x73, 0x3b, 0x00,
},
dumper.SrcToDst,
dumper.ConnMetadata{
DumpValues: []dumper.DumpValue{},
Internal: connMetadataInternal{
messageLength: uint32(0),
},
},
[]dumper.DumpValue{},
[]dumper.DumpValue{
dumper.DumpValue{
Key: "query",
Value: "SELECT * FROM users;",
},
dumper.DumpValue{
Key: "message_type",
Value: "Q",
},
},
},
{
"Parse query from MessageParse packet",
[]byte{
0x50, 0x00, 0x00, 0x00, 0x34, 0x00, 0x53, 0x45, 0x4c, 0x45, 0x43, 0x54, 0x20, 0x43, 0x4f, 0x4e,
|
0x04,
},
dumper.SrcToDst,
dumper.ConnMetadata{
DumpValues: []dumper.DumpValue{},
Internal: connMetadataInternal{
messageLength: uint32(0),
},
},
[]dumper.DumpValue{},
[]dumper.DumpValue{
dumper.DumpValue{
Key: "stmt_name",
Value: "",
},
dumper.DumpValue{
Key: "parse",
Value: "SELECT CONCAT($1::text, $2::text, $3::text);",
},
dumper.DumpValue{
Key: "message_type",
Value: "P",
},
},
},
{
"Parse query from MessageBind packet",
[]byte{
0x42, 0x00, 0x00, 0x00, 0x3f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x09, 0x30,
0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x39, 0x00, 0x00, 0x00, 0x1e, 0xe3, 0x81, 0x82, 0xe3,
0x81, 0x84, 0xe3, 0x81, 0x86, 0xe3, 0x81, 0x88, 0xe3, 0x81, 0x8a, 0xe3, 0x81, 0x8b, 0xe3, 0x81,
0x8d, 0xe3, 0x81, 0x8f, 0xe3, 0x81, 0x91, 0xe3, 0x81, 0x93, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x45, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00, 0x53, 0x00, 0x00, 0x00, 0x04,
},
dumper.SrcToDst,
dumper.ConnMetadata{
DumpValues: []dumper.DumpValue{},
Internal: connMetadataInternal{
messageLength: uint32(0),
},
},
[]dumper.DumpValue{},
[]dumper.DumpValue{
dumper.DumpValue{
Key: "portal_name",
Value: "",
},
dumper.DumpValue{
Key: "stmt_name",
Value: "",
},
dumper.DumpValue{
Key: "bind_values",
Value: []string{"012345679", "あいうえおかきくけこ", ""},
},
dumper.DumpValue{
Key: "message_type",
Value: "B",
},
},
},
{
"When direction = dumper.RemoteToClient do not parse query",
[]byte{
0x51, 0x00, 0x00, 0x00, 0x19, 0x53, 0x45, 0x4c, 0x45, 0x43, 0x54, 0x20, 0x2a, 0x20, 0x46, 0x52,
0x4f, 0x4d, 0x20, 0x75, 0x73, 0x65, 0x72, 0x73, 0x3b, 0x00,
},
dumper.RemoteToClient,
dumper.ConnMetadata{
DumpValues: []dumper.DumpValue{},
Internal: connMetadataInternal{
messageLength: uint32(0),
},
},
[]dumper.DumpValue{},
[]dumper.DumpValue{},
},
}
func TestPgReadHandshakeStartupMessage(t *testing.T) {
for _, tt := range pgValueTests {
t.Run(tt.description, func(t *testing.T) {
out := new(bytes.Buffer)
dumper := &Dumper{
logger: newTestLogger(out),
}
in := tt.in
direction := tt.direction
connMetadata := &tt.connMetadata
actual, err := dumper.readHandshake(in, direction, connMetadata)
if err != nil {
t.Errorf("%v", err)
}
expected := tt.expected
if len(actual) != len(expected) {
t.Errorf("actual %v\nwant %v", actual, expected)
}
if len(actual) == 2 {
if actual[0] != expected[0] {
t.Errorf("actual %v\nwant %v", actual, expected)
}
if actual[1] != expected[1] {
t.Errorf("actual %v\nwant %v", actual, expected)
}
}
})
}
}
func TestPgRead(t *testing.T) {
for _, tt := range pgValueTests {
t.Run(tt.description, func(t *testing.T) {
out := new(bytes.Buffer)
dumper := &Dumper{
logger: newTestLogger(out),
}
in := tt.in
direction := tt.direction
connMetadata := &tt.connMetadata
actual, err := dumper.Read(in, direction, connMetadata)
if err != nil {
t.Errorf("%v", err)
}
expected := tt.expectedQuery
if len(actual) != len(expected) {
t.Errorf("actual %v\nwant %v", actual, expected)
}
if len(actual) == 2 {
if actual[0] != expected[0] {
t.Errorf("actual %#v\nwant %#v", actual[0], expected[0])
}
if actual[1] != expected[1] {
t.Errorf("actual %#v\nwant %#v", actual[1], expected[1])
}
}
})
}
}
var readBytesTests = []struct {
in []byte
len int
expected []byte
}{
{
[]byte{0x12, 0x34, 0x56, 0x78},
2,
[]byte{0x12, 0x34},
},
{
[]byte{0x12, 0x34, 0x56, 0x78},
0,
[]byte{},
},
}
func TestReadBytes(t *testing.T) {
for _, tt := range readBytesTests {
buff := bytes.NewBuffer(tt.in)
actual := readBytes(buff, tt.len)
if !bytes.Equal(actual, tt.expected) {
t.Errorf("actual %#v\nwant %#v", actual, tt.expected)
}
}
}
// newTestLogger returns a zap.Logger for use in tests
func newTestLogger(out io.Writer) *zap.Logger {
encoderConfig := zapcore.EncoderConfig{
TimeKey: "ts",
LevelKey: "level",
NameKey: "logger",
CallerKey: "caller",
MessageKey: "msg",
StacktraceKey: "stacktrace",
EncodeLevel: zapcore.LowercaseLevelEncoder,
EncodeTime: zapcore.ISO8601TimeEncoder,
EncodeDuration: zapcore.StringDurationEncoder,
EncodeCaller: zapcore.ShortCallerEncoder,
}
logger := zap.New(zapcore.NewCore(
zapcore.NewJSONEncoder(encoderConfig),
zapcore.AddSync(out),
zapcore.DebugLevel,
))
return logger
}
|
0x43, 0x41, 0x54, 0x28, 0x24, 0x31, 0x3a, 0x3a, 0x74, 0x65, 0x78, 0x74, 0x2c, 0x20, 0x24, 0x32,
0x3a, 0x3a, 0x74, 0x65, 0x78, 0x74, 0x2c, 0x20, 0x24, 0x33, 0x3a, 0x3a, 0x74, 0x65, 0x78, 0x74,
0x29, 0x3b, 0x00, 0x00, 0x00, 0x44, 0x00, 0x00, 0x00, 0x06, 0x53, 0x00, 0x53, 0x00, 0x00, 0x00,
|
mod.rs
|
pub mod light;
pub mod phong_lighting;
pub mod point_light;
|
pub mod rectangle_light;
|
|
datastore_test.go
|
package datastore
import (
"testing"
"github.com/stretchr/testify/suite"
)
type DataStoreTestSuite struct {
suite.Suite
}
func (s *DataStoreTestSuite) TestInit() {
Init(testDB)
defer Exit()
s.Nil(dbm.Db.Ping())
}
func (s *DataStoreTestSuite) TestExit() {
Init(testDB)
s.Nil(dbm.Db.Ping())
Exit()
s.NotNil(dbm.Db.Ping())
}
func
|
(t *testing.T) {
suite.Run(t, new(DataStoreTestSuite))
}
|
TestDataStore
|
ab_testing_design_test.py
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for gps_building_blocks.analysis.exp_design.ab_testing_design."""
from absl.testing import absltest
import numpy as np
from gps_building_blocks.analysis.exp_design import ab_testing_design
BASELINE_CONVERSION_RATE_PERCENTAGE = 5
EXPECTED_UPLIFT_PERCENTAGE = 10
LABELS = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
PREDICTIONS = np.array([
0.7, 0.63, 0.4, 0.77, 0.45, 0.8, 0.41, 0.82, 0.7, 0.6, 0.5, 0.45, 0.74,
0.11, 0.21, 0.05, 0.67, 0.79, 0.60, 0.10
])
class ABTestingExperimentalDesignTest(absltest.TestCase):
def test_calc_chisquared_sample_size_returns_correct_values(self):
result_sample_size = ab_testing_design.calc_chisquared_sample_size(
baseline_conversion_rate_percentage=BASELINE_CONVERSION_RATE_PERCENTAGE,
expected_uplift_percentage=EXPECTED_UPLIFT_PERCENTAGE)
self.assertEqual(result_sample_size, 14913.0)
def test_calc_chisquared_sample_size_change_power_and_confidence(self):
result_sample_size = ab_testing_design.calc_chisquared_sample_size(
baseline_conversion_rate_percentage=BASELINE_CONVERSION_RATE_PERCENTAGE,
expected_uplift_percentage=EXPECTED_UPLIFT_PERCENTAGE,
power_percentage=90,
confidence_level_percentage=99)
self.assertEqual(result_sample_size, 28271.0)
def test_calc_chisquared_sample_sizes_for_bins_returns_correct_values(self):
results = ab_testing_design.calc_chisquared_sample_sizes_for_bins(
labels=LABELS, probability_predictions=PREDICTIONS, number_bins=3)
self.assertEqual(results.shape, (24, 7))
self.assertListEqual(
list(results.columns), [
'bin_number', 'bin_size', 'conv_rate_percentage',
'uplift_percentage', 'power_percentage',
'confidence_level_percentage', 'sample_size'
])
self.assertListEqual(
list(results['sample_size']), [
248.0, 314.0, 343.0, 421.0, 62.0, 79.0, 86.0, 106.0, 928.0, 1178.0,
1285.0, 1577.0, 232.0, 295.0, 322.0, 395.0, 1031.0, 1309.0, 1428.0,
1752.0, 258.0, 328.0, 357.0, 438.0
])
def test_resulted_bin_metrics_does_not_contain_nas(self):
results = ab_testing_design.calc_chisquared_sample_sizes_for_bins(
labels=LABELS, probability_predictions=PREDICTIONS, number_bins=3)
self.assertFalse(results.isna().values.any())
def
|
(
self):
results = ab_testing_design.calc_chisquared_sample_sizes_for_cumulative_bins(
labels=LABELS, probability_predictions=PREDICTIONS, number_bins=5)
self.assertEqual(results.shape, (40, 8))
self.assertListEqual(
list(results.columns), [
'cumulative_bin_number', 'bin_size', 'bin_size_percentage',
'conv_rate_percentage', 'uplift_percentage', 'power_percentage',
'confidence_level_percentage', 'sample_size'
])
self.assertListEqual(
list(results['sample_size']), [
207.0, 262.0, 286.0, 351.0, 52.0, 66.0, 72.0, 88.0, 371.0, 471.0,
514.0, 631.0, 93.0, 118.0, 129.0, 158.0, 442.0, 561.0, 612.0, 751.0,
111.0, 141.0, 153.0, 188.0, 371.0, 471.0, 514.0, 631.0, 93.0, 118.0,
129.0, 158.0, 619.0, 785.0, 857.0, 1051.0, 155.0, 197.0, 215.0,
263.0
])
if __name__ == '__main__':
absltest.main()
|
test_calc_chisquared_sample_sizes_for_cumulative_bins_returns_right_vals
|
nvutils.py
|
# -*- coding: utf-8 -*-
# Copyright 1999-2021 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import sys
import uuid
from collections import namedtuple
from ctypes import c_char, c_char_p, c_int, c_uint, c_ulonglong, byref,\
create_string_buffer, Structure, POINTER, CDLL
logger = logging.getLogger(__name__)
# Some constants taken from cuda.h
CUDA_SUCCESS = 0
CU_DEVICE_ATTRIBUTE_MULTIPROCESSOR_COUNT = 16
CU_DEVICE_ATTRIBUTE_MAX_THREADS_PER_MULTIPROCESSOR = 39
CU_DEVICE_ATTRIBUTE_CLOCK_RATE = 13
CU_DEVICE_ATTRIBUTE_PCI_BUS_ID = 33
CU_DEVICE_ATTRIBUTE_PCI_DEVICE_ID = 34
CU_DEVICE_ATTRIBUTE_MEMORY_CLOCK_RATE = 36
CU_NO_CUDA_CAPABLE_DEVICE_DETECTED = 100
# nvml constants
NVML_SUCCESS = 0
NVML_TEMPERATURE_GPU = 0
NVML_DRIVER_NOT_LOADED = 9
class _CUuuid_t(Structure):
_fields_ = [
('bytes', c_char * 16)
]
class _nvmlUtilization_t(Structure):
_fields_ = [
('gpu', c_uint),
('memory', c_uint),
]
class _struct_nvmlDevice_t(Structure):
pass # opaque handle
_nvmlDevice_t = POINTER(_struct_nvmlDevice_t)
class _nvmlBAR1Memory_t(Structure):
_fields_ = [
('total', c_ulonglong),
('free', c_ulonglong),
('used', c_ulonglong),
]
_is_windows: bool = sys.platform.startswith('win')
def _load_nv_library(*libnames):
for lib in libnames:
try:
return CDLL(lib)
except OSError:
continue
_cuda_lib = _nvml_lib = None
_cu_device_info = namedtuple('_cu_device_info', 'index uuid name multiprocessors cuda_cores threads')
_nvml_driver_info = namedtuple('_nvml_driver_info', 'driver_version cuda_version')
_nvml_device_status = namedtuple(
'_nvml_device_status', 'gpu_util mem_util temperature fb_total_mem fb_used_mem fb_free_mem')
_init_pid = None
_gpu_count = None
_driver_info = None
_device_infos = dict()
_no_device_warned = False
class NVError(Exception):
def __init__(self, msg, *args, errno=None):
self._errno = errno
super().__init__(msg or 'Unknown error', *args)
def __str__(self):
return f'({self._errno}) {super().__str__()}'
@property
def errno(self):
return self._errno
@property
def message(self):
return super().__str__()
class NVDeviceAPIError(NVError):
pass
class NVMLAPIError(NVError):
pass
def _cu_check_error(result):
if result != CUDA_SUCCESS:
_error_str = c_char_p()
_cuda_lib.cuGetErrorString(result, byref(_error_str))
raise NVDeviceAPIError(_error_str.value.decode(), errno=result)
_nvmlErrorString = None
def _nvml_check_error(result):
global _nvmlErrorString
if _nvmlErrorString is None:
_nvmlErrorString = _nvml_lib.nvmlErrorString
_nvmlErrorString.restype = c_char_p
if result != NVML_SUCCESS:
_error_str = _nvmlErrorString(result)
raise NVMLAPIError(_error_str.decode(), errno=result)
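# Editorial note (not from the original source): maps CUDA compute capability (major, minor)
# to CUDA cores per multiprocessor; unknown capabilities fall back to 192 in
# _cu_get_processor_cores below.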
_cu_process_var_to_cores = {
(1, 0): 8,
(1, 1): 8,
(1, 2): 8,
(1, 3): 8,
(2, 0): 32,
(2, 1): 48,
}
def _cu_get_processor_cores(major, minor):
return _cu_process_var_to_cores.get((major, minor), 192)
def _init_cp():
global _cuda_lib, _no_device_warned
if _init_pid == os.getpid():
return
_cuda_lib = _load_nv_library('libcuda.so', 'libcuda.dylib', 'cuda.dll', 'nvcuda.dll')
if _cuda_lib is None:
return
try:
_cu_check_error(_cuda_lib.cuInit(0))
except NVDeviceAPIError as ex:
if ex.errno == CU_NO_CUDA_CAPABLE_DEVICE_DETECTED:
_cuda_lib = None
if not _no_device_warned:
logger.warning('No CUDA device detected')
_no_device_warned = True
else:
logger.exception('Failed to initialize libcuda.')
return
def _init_nvml():
global _nvml_lib, _no_device_warned
if _init_pid == os.getpid():
return
nvml_paths = ['libnvidia-ml.so', 'libnvidia-ml.so.1', 'libnvidia-ml.dylib', 'nvml.dll']
if _is_windows:
nvml_paths.append(os.path.join(os.getenv("ProgramFiles", "C:/Program Files"),
"NVIDIA Corporation/NVSMI/nvml.dll"))
_nvml_lib = _load_nv_library(*nvml_paths)
if _nvml_lib is None:
return
try:
_nvml_check_error(_nvml_lib.nvmlInit_v2())
except NVMLAPIError as ex:
if ex.errno == NVML_DRIVER_NOT_LOADED:
_nvml_lib = None
if not _no_device_warned:
logger.warning('Failed to load libnvidia-ml: %s, no CUDA device will be enabled', ex.message)
_no_device_warned = True
else:
|
return
def _init():
global _init_pid
_init_cp()
_init_nvml()
if _nvml_lib is not None and _cuda_lib is not None:
_init_pid = os.getpid()
def get_device_count():
global _gpu_count
if _gpu_count is not None:
return _gpu_count
_init_nvml()
if _nvml_lib is None:
return None
if 'CUDA_VISIBLE_DEVICES' in os.environ:
devices = os.environ['CUDA_VISIBLE_DEVICES'].strip()
if not devices:
_gpu_count = 0
else:
_gpu_count = len(devices.split(','))
else:
n_gpus = c_uint()
_nvml_check_error(_nvml_lib.nvmlDeviceGetCount(byref(n_gpus)))
_gpu_count = n_gpus.value
return _gpu_count
def get_driver_info():
global _driver_info
_init_nvml()
if _nvml_lib is None:
return None
if _driver_info is not None:
return _driver_info
version_buf = create_string_buffer(100)
cuda_version = c_uint()
_nvml_check_error(_nvml_lib.nvmlSystemGetDriverVersion(version_buf, len(version_buf)))
_nvml_check_error(_nvml_lib.nvmlSystemGetCudaDriverVersion(byref(cuda_version)))
_driver_info = _nvml_driver_info(
driver_version=version_buf.value.decode(),
cuda_version='.'.join(str(v) for v in divmod(cuda_version.value, 1000))
)
return _driver_info
def get_device_info(dev_index):
try:
return _device_infos[dev_index]
except KeyError:
pass
_init()
if _init_pid is None:
return None
device = c_int()
name_buf = create_string_buffer(100)
uuid_t = _CUuuid_t()
cc_major = c_int()
cc_minor = c_int()
cores = c_int()
threads_per_core = c_int()
_cu_check_error(_cuda_lib.cuDeviceGet(byref(device), c_int(dev_index)))
_cu_check_error(_cuda_lib.cuDeviceGetName(name_buf, len(name_buf), device))
_cu_check_error(_cuda_lib.cuDeviceGetUuid(byref(uuid_t), device))
_cu_check_error(_cuda_lib.cuDeviceComputeCapability(
byref(cc_major), byref(cc_minor), device))
_cu_check_error(_cuda_lib.cuDeviceGetAttribute(
byref(cores), CU_DEVICE_ATTRIBUTE_MULTIPROCESSOR_COUNT, device))
_cu_check_error(_cuda_lib.cuDeviceGetAttribute(
byref(threads_per_core), CU_DEVICE_ATTRIBUTE_MAX_THREADS_PER_MULTIPROCESSOR, device))
if 'CUDA_VISIBLE_DEVICES' in os.environ:
real_dev_index = [int(s) for s in os.environ['CUDA_VISIBLE_DEVICES'].split(',')][dev_index]
else:
real_dev_index = dev_index
info = _device_infos[dev_index] = _cu_device_info(
index=real_dev_index,
uuid=uuid.UUID(bytes=uuid_t.bytes),
name=name_buf.value.decode(),
multiprocessors=cores.value,
cuda_cores=cores.value * _cu_get_processor_cores(cc_major.value, cc_minor.value),
threads=cores.value * threads_per_core.value,
)
return info
def get_device_status(dev_index):
_init()
if _init_pid is None:
return None
device = _nvmlDevice_t()
utils = _nvmlUtilization_t()
temperature = c_uint()
memory_info = _nvmlBAR1Memory_t()
dev_uuid = get_device_info(dev_index).uuid
uuid_str = ('GPU-' + str(dev_uuid)).encode()
_nvml_check_error(_nvml_lib.nvmlDeviceGetHandleByUUID(uuid_str, byref(device)))
_nvml_check_error(_nvml_lib.nvmlDeviceGetUtilizationRates(device, byref(utils)))
_nvml_check_error(_nvml_lib.nvmlDeviceGetTemperature(
device, NVML_TEMPERATURE_GPU, byref(temperature)))
_nvml_check_error(_nvml_lib.nvmlDeviceGetBAR1MemoryInfo(device, byref(memory_info)))
return _nvml_device_status(
gpu_util=utils.gpu,
mem_util=utils.memory,
temperature=temperature.value,
fb_total_mem=memory_info.total,
fb_free_mem=memory_info.free,
fb_used_mem=memory_info.used,
)
|
logger.exception('Failed to initialize libnvidia-ml.')
|
MultiServer.py
|
import argparse
import asyncio
import functools
import json
import logging
import re
import shlex
import urllib.request
import zlib
import ModuleUpdate
ModuleUpdate.update()
import websockets
import aioconsole
import Items
import Regions
from MultiClient import ReceivedItem, get_item_name_from_id, get_location_name_from_address
class Client:
def __init__(self, socket):
self.socket = socket
self.auth = False
self.name = None
self.team = None
self.slot = None
self.send_index = 0
class Context:
def __init__(self, host, port, password):
self.data_filename = None
self.save_filename = None
self.disable_save = False
self.player_names = {}
self.rom_names = {}
self.remote_items = set()
self.locations = {}
self.host = host
self.port = port
self.password = password
self.server = None
self.countdown_timer = 0
self.clients = []
self.received_items = {}
async def send_msgs(websocket, msgs):
if not websocket or not websocket.open or websocket.closed:
|
try:
await websocket.send(json.dumps(msgs))
except websockets.ConnectionClosed:
pass
def broadcast_all(ctx : Context, msgs):
for client in ctx.clients:
if client.auth:
asyncio.create_task(send_msgs(client.socket, msgs))
def broadcast_team(ctx : Context, team, msgs):
for client in ctx.clients:
if client.auth and client.team == team:
asyncio.create_task(send_msgs(client.socket, msgs))
def notify_all(ctx : Context, text):
logging.info("Notice (all): %s" % text)
broadcast_all(ctx, [['Print', text]])
def notify_team(ctx : Context, team : int, text : str):
logging.info("Notice (Team #%d): %s" % (team+1, text))
broadcast_team(ctx, team, [['Print', text]])
def notify_client(client : Client, text : str):
if not client.auth:
return
logging.info("Notice (Player %s in team %d): %s" % (client.name, client.team+1, text))
asyncio.create_task(send_msgs(client.socket, [['Print', text]]))
async def server(websocket, path, ctx : Context):
client = Client(websocket)
ctx.clients.append(client)
try:
await on_client_connected(ctx, client)
async for data in websocket:
for msg in json.loads(data):
if len(msg) == 1:
cmd = msg[0]
args = None
else:
cmd = msg[0]
args = msg[1]
await process_client_cmd(ctx, client, cmd, args)
except Exception as e:
if not isinstance(e, websockets.WebSocketException):
logging.exception(e)
finally:
await on_client_disconnected(ctx, client)
ctx.clients.remove(client)
async def on_client_connected(ctx : Context, client : Client):
await send_msgs(client.socket, [['RoomInfo', {
'password': ctx.password is not None,
'players': [(client.team, client.slot, client.name) for client in ctx.clients if client.auth]
}]])
async def on_client_disconnected(ctx : Context, client : Client):
if client.auth:
await on_client_left(ctx, client)
async def on_client_joined(ctx : Context, client : Client):
notify_all(ctx, "%s (Team #%d) has joined the game" % (client.name, client.team + 1))
async def on_client_left(ctx : Context, client : Client):
notify_all(ctx, "%s (Team #%d) has left the game" % (client.name, client.team + 1))
async def countdown(ctx : Context, timer):
notify_all(ctx, f'[Server]: Starting countdown of {timer}s')
if ctx.countdown_timer:
ctx.countdown_timer = timer
return
ctx.countdown_timer = timer
while ctx.countdown_timer > 0:
notify_all(ctx, f'[Server]: {ctx.countdown_timer}')
ctx.countdown_timer -= 1
await asyncio.sleep(1)
notify_all(ctx, f'[Server]: GO')
def get_connected_players_string(ctx : Context):
auth_clients = [c for c in ctx.clients if c.auth]
if not auth_clients:
return 'No player connected'
auth_clients.sort(key=lambda c: (c.team, c.slot))
current_team = 0
text = 'Team #1: '
for c in auth_clients:
if c.team != current_team:
text += f':: Team #{c.team + 1}: '
current_team = c.team
text += f'{c.name} '
return 'Connected players: ' + text[:-1]
def get_received_items(ctx : Context, team, player):
return ctx.received_items.setdefault((team, player), [])
def tuplize_received_items(items):
return [(item.item, item.location, item.player) for item in items]
def send_new_items(ctx : Context):
for client in ctx.clients:
if not client.auth:
continue
items = get_received_items(ctx, client.team, client.slot)
if len(items) > client.send_index:
asyncio.create_task(send_msgs(client.socket, [['ReceivedItems', (client.send_index, tuplize_received_items(items)[client.send_index:])]]))
client.send_index = len(items)
def forfeit_player(ctx : Context, team, slot):
all_locations = [values[0] for values in Regions.location_table.values() if type(values[0]) is int]
notify_all(ctx, "%s (Team #%d) has forfeited" % (ctx.player_names[(team, slot)], team + 1))
register_location_checks(ctx, team, slot, all_locations)
def register_location_checks(ctx : Context, team, slot, locations):
found_items = False
for location in locations:
if (location, slot) in ctx.locations:
target_item, target_player = ctx.locations[(location, slot)]
if target_player != slot or slot in ctx.remote_items:
found = False
recvd_items = get_received_items(ctx, team, target_player)
for recvd_item in recvd_items:
if recvd_item.location == location and recvd_item.player == slot:
found = True
break
if not found:
new_item = ReceivedItem(target_item, location, slot)
recvd_items.append(new_item)
if slot != target_player:
broadcast_team(ctx, team, [['ItemSent', (slot, location, target_player, target_item)]])
logging.info('(Team #%d) %s sent %s to %s (%s)' % (team+1, ctx.player_names[(team, slot)], get_item_name_from_id(target_item), ctx.player_names[(team, target_player)], get_location_name_from_address(location)))
found_items = True
send_new_items(ctx)
if found_items and not ctx.disable_save:
try:
with open(ctx.save_filename, "wb") as f:
jsonstr = json.dumps((list(ctx.rom_names.items()),
[(k, [i.__dict__ for i in v]) for k, v in ctx.received_items.items()]))
f.write(zlib.compress(jsonstr.encode("utf-8")))
except Exception as e:
logging.exception(e)
async def process_client_cmd(ctx : Context, client : Client, cmd, args):
if type(cmd) is not str:
await send_msgs(client.socket, [['InvalidCmd']])
return
if cmd == 'Connect':
if not args or type(args) is not dict or \
'password' not in args or type(args['password']) not in [str, type(None)] or \
'rom' not in args or type(args['rom']) is not list:
await send_msgs(client.socket, [['InvalidArguments', 'Connect']])
return
errors = set()
if ctx.password is not None and args['password'] != ctx.password:
errors.add('InvalidPassword')
if tuple(args['rom']) not in ctx.rom_names:
errors.add('InvalidRom')
else:
team, slot = ctx.rom_names[tuple(args['rom'])]
if any([c.slot == slot and c.team == team for c in ctx.clients if c.auth]):
errors.add('SlotAlreadyTaken')
else:
client.name = ctx.player_names[(team, slot)]
client.team = team
client.slot = slot
if errors:
await send_msgs(client.socket, [['ConnectionRefused', list(errors)]])
else:
client.auth = True
reply = [['Connected', [(client.team, client.slot), [(p, n) for (t, p), n in ctx.player_names.items() if t == client.team]]]]
items = get_received_items(ctx, client.team, client.slot)
if items:
reply.append(['ReceivedItems', (0, tuplize_received_items(items))])
client.send_index = len(items)
await send_msgs(client.socket, reply)
await on_client_joined(ctx, client)
if not client.auth:
return
if cmd == 'Sync':
items = get_received_items(ctx, client.team, client.slot)
if items:
client.send_index = len(items)
await send_msgs(client.socket, [['ReceivedItems', (0, tuplize_received_items(items))]])
if cmd == 'LocationChecks':
if type(args) is not list:
await send_msgs(client.socket, [['InvalidArguments', 'LocationChecks']])
return
register_location_checks(ctx, client.team, client.slot, args)
if cmd == 'LocationScouts':
if type(args) is not list:
await send_msgs(client.socket, [['InvalidArguments', 'LocationScouts']])
return
locs = []
for location in args:
            if type(location) is not int or not 0 < location <= len(Regions.location_table):
await send_msgs(client.socket, [['InvalidArguments', 'LocationScouts']])
return
loc_name = list(Regions.location_table.keys())[location - 1]
target_item, target_player = ctx.locations[(Regions.location_table[loc_name][0], client.slot)]
replacements = {'SmallKey': 0xA2, 'BigKey': 0x9D, 'Compass': 0x8D, 'Map': 0x7D}
item_type = [i[2] for i in Items.item_table.values() if type(i[3]) is int and i[3] == target_item]
if item_type:
target_item = replacements.get(item_type[0], target_item)
locs.append([loc_name, location, target_item, target_player])
logging.info(f"{client.name} in team {client.team+1} scouted {', '.join([l[0] for l in locs])}")
await send_msgs(client.socket, [['LocationInfo', [l[1:] for l in locs]]])
if cmd == 'Say':
if type(args) is not str or not args.isprintable():
await send_msgs(client.socket, [['InvalidArguments', 'Say']])
return
notify_all(ctx, client.name + ': ' + args)
if args.startswith('!players'):
notify_all(ctx, get_connected_players_string(ctx))
if args.startswith('!forfeit'):
forfeit_player(ctx, client.team, client.slot)
if args.startswith('!countdown'):
try:
timer = int(args.split()[1])
except (IndexError, ValueError):
timer = 10
asyncio.create_task(countdown(ctx, timer))
def set_password(ctx : Context, password):
ctx.password = password
logging.warning('Password set to ' + password if password is not None else 'Password disabled')
async def console(ctx : Context):
while True:
input = await aioconsole.ainput()
try:
command = shlex.split(input)
if not command:
continue
if command[0] == '/exit':
ctx.server.ws_server.close()
break
if command[0] == '/players':
logging.info(get_connected_players_string(ctx))
if command[0] == '/password':
set_password(ctx, command[1] if len(command) > 1 else None)
if command[0] == '/kick' and len(command) > 1:
team = int(command[2]) - 1 if len(command) > 2 and command[2].isdigit() else None
for client in ctx.clients:
if client.auth and client.name.lower() == command[1].lower() and (team is None or team == client.team):
if client.socket and not client.socket.closed:
await client.socket.close()
if command[0] == '/forfeitslot' and len(command) > 1 and command[1].isdigit():
if len(command) > 2 and command[2].isdigit():
team = int(command[1]) - 1
slot = int(command[2])
else:
team = 0
slot = int(command[1])
forfeit_player(ctx, team, slot)
if command[0] == '/forfeitplayer' and len(command) > 1:
seeked_player = command[1].lower()
for (team, slot), name in ctx.player_names.items():
if name.lower() == seeked_player:
forfeit_player(ctx, team, slot)
if command[0] == '/senditem' and len(command) > 2:
[(player, item)] = re.findall(r'\S* (\S*) (.*)', input)
if item in Items.item_table:
for client in ctx.clients:
if client.auth and client.name.lower() == player.lower():
new_item = ReceivedItem(Items.item_table[item][3], "cheat console", client.slot)
get_received_items(ctx, client.team, client.slot).append(new_item)
notify_all(ctx, 'Cheat console: sending "' + item + '" to ' + client.name)
send_new_items(ctx)
else:
logging.warning("Unknown item: " + item)
if command[0] == '/hint':
for (team,slot), name in ctx.player_names.items():
if len(command) == 1:
print("Use /hint {Playername} {itemname}\nFor example /hint Berserker Lamp")
elif name.lower() == command[1].lower():
item = " ".join(command[2:])
if item in Items.item_table:
seeked_item_id = Items.item_table[item][3]
for check, result in ctx.locations.items():
item_id, receiving_player = result
if receiving_player == slot and item_id == seeked_item_id:
location_id, finding_player = check
name_finder = ctx.player_names[team, finding_player]
hint = f"[Hint]: {name}'s {item} can be found at " \
f"{get_location_name_from_address(location_id)} in {name_finder}'s World"
notify_team(ctx, team, hint)
else:
logging.warning("Unknown item: " + item)
if command[0][0] != '/':
notify_all(ctx, '[Server]: ' + input)
except:
import traceback
traceback.print_exc()
async def main():
parser = argparse.ArgumentParser()
parser.add_argument('--host', default=None)
parser.add_argument('--port', default=38281, type=int)
parser.add_argument('--password', default=None)
parser.add_argument('--multidata', default=None)
parser.add_argument('--savefile', default=None)
parser.add_argument('--disable_save', default=False, action='store_true')
parser.add_argument('--loglevel', default='info', choices=['debug', 'info', 'warning', 'error', 'critical'])
args = parser.parse_args()
logging.basicConfig(format='[%(asctime)s] %(message)s', level=getattr(logging, args.loglevel.upper(), logging.INFO))
ctx = Context(args.host, args.port, args.password)
ctx.data_filename = args.multidata
try:
if not ctx.data_filename:
import tkinter
import tkinter.filedialog
root = tkinter.Tk()
root.withdraw()
ctx.data_filename = tkinter.filedialog.askopenfilename(filetypes=(("Multiworld data","*multidata"),))
with open(ctx.data_filename, 'rb') as f:
jsonobj = json.loads(zlib.decompress(f.read()).decode("utf-8"))
for team, names in enumerate(jsonobj['names']):
for player, name in enumerate(names, 1):
ctx.player_names[(team, player)] = name
ctx.rom_names = {tuple(rom): (team, slot) for slot, team, rom in jsonobj['roms']}
ctx.remote_items = set(jsonobj['remote_items'])
ctx.locations = {tuple(k): tuple(v) for k, v in jsonobj['locations']}
except Exception as e:
logging.error('Failed to read multiworld data (%s)' % e)
return
ip = urllib.request.urlopen('https://v4.ident.me').read().decode('utf8') if not ctx.host else ctx.host
logging.info('Hosting game at %s:%d (%s)' % (ip, ctx.port, 'No password' if not ctx.password else 'Password: %s' % ctx.password))
ctx.disable_save = args.disable_save
if not ctx.disable_save:
if not ctx.save_filename:
ctx.save_filename = (ctx.data_filename[:-9] if ctx.data_filename[-9:] == 'multidata' else (ctx.data_filename + '_')) + 'multisave'
try:
with open(ctx.save_filename, 'rb') as f:
jsonobj = json.loads(zlib.decompress(f.read()).decode("utf-8"))
rom_names = jsonobj[0]
received_items = {tuple(k): [ReceivedItem(**i) for i in v] for k, v in jsonobj[1]}
if not all([ctx.rom_names[tuple(rom)] == (team, slot) for rom, (team, slot) in rom_names]):
raise Exception('Save file mismatch, will start a new game')
ctx.received_items = received_items
logging.info('Loaded save file with %d received items for %d players' % (sum([len(p) for p in received_items.values()]), len(received_items)))
except FileNotFoundError:
logging.error('No save data found, starting a new game')
except Exception as e:
logging.info(e)
ctx.server = websockets.serve(functools.partial(server,ctx=ctx), ctx.host, ctx.port, ping_timeout=None, ping_interval=None)
await ctx.server
await console(ctx)
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
loop.run_until_complete(asyncio.gather(*asyncio.Task.all_tasks()))
loop.close()
|
return
|
prefixlog.go
|
package eventlog
import (
"strings"
"github.com/ipfs/fs-repo-migrations/ipfs-2-to-3/Godeps/_workspace/src/github.com/ipfs/go-ipfs/util"
)
// StandardLogger provides API compatibility with standard printf loggers
// eg. go-logging
type StandardLogger interface {
Critical(args ...interface{})
Criticalf(format string, args ...interface{})
Debug(args ...interface{})
Debugf(format string, args ...interface{})
Error(args ...interface{})
Errorf(format string, args ...interface{})
Fatal(args ...interface{})
Fatalf(format string, args ...interface{})
Info(args ...interface{})
Infof(format string, args ...interface{})
Notice(args ...interface{})
Noticef(format string, args ...interface{})
Panic(args ...interface{})
Panicf(format string, args ...interface{})
Warning(args ...interface{})
Warningf(format string, args ...interface{})
}
// PrefixLogger is a StandardLogger whose output is prepended with a
// configurable prefix format string and arguments.
type PrefixLogger interface {
StandardLogger
Format() string
Args() []interface{}
Prefix(fmt string, args ...interface{}) PrefixLogger
}
// Logger retrieves an event logger by name
func Logger(system string) PrefixLogger {
	// TODO: if we would like to adjust log levels at run-time, store this event
	// logger in a map (just like the util.Logger impl).
logger := util.Logger(system)
return Prefix(logger, "")
}
func Prefix(l StandardLogger, format string, args ...interface{}) PrefixLogger {
return &prefixLogger{logger: l, format: format, args: args}
}
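// Usage sketch (the logger name and prefix values below are illustrative only):
//
//	base := util.Logger("swarm")
//	plog := Prefix(base, "peer=%s", "QmExamplePeer")
//	plog.Infof("dialed in %s", "1.2s") // emitted as: peer=QmExamplePeer dialed in 1.2s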
type prefixLogger struct {
logger StandardLogger
format string
args []interface{}
}
func (pl *prefixLogger) Format() string {
return pl.format
}
func (pl *prefixLogger) Args() []interface{} {
return pl.args
}
func (pl *prefixLogger) Prefix(fmt string, args ...interface{}) PrefixLogger {
return Prefix(pl, fmt, args...)
}
func (pl *prefixLogger) prepend(fmt string, args []interface{}) (string, []interface{}) {
together := make([]interface{}, 0, len(pl.args)+len(args))
together = append(together, pl.args...)
together = append(together, args...)
if len(pl.format) > 0 {
fmt = pl.format + " " + fmt
}
return fmt, together
}
func valfmtn(count int) string {
s := strings.Repeat("%v ", count)
s = s[:len(s)-1] // remove last space
return s
}
type logFunc func(args ...interface{})
type logFuncf func(fmt string, args ...interface{})
func (pl *prefixLogger) logFunc(f logFuncf, args ...interface{}) {
// need to actually use the format version, with extra fmt strings appended
fmt := valfmtn(len(args))
pl.logFuncf(f, fmt, args...)
}
func (pl *prefixLogger) logFuncf(f logFuncf, format string, args ...interface{}) {
format, args = pl.prepend(format, args)
f(format, args...)
}
func (pl *prefixLogger) Critical(args ...interface{}) {
pl.logFunc(pl.logger.Criticalf, args...)
}
func (pl *prefixLogger) Debug(args ...interface{}) {
pl.logFunc(pl.logger.Debugf, args...)
}
func (pl *prefixLogger) Error(args ...interface{}) {
pl.logFunc(pl.logger.Errorf, args...)
}
func (pl *prefixLogger) Fatal(args ...interface{}) {
pl.logFunc(pl.logger.Fatalf, args...)
}
func (pl *prefixLogger) Info(args ...interface{}) {
pl.logFunc(pl.logger.Infof, args...)
}
func (pl *prefixLogger) Notice(args ...interface{}) {
pl.logFunc(pl.logger.Noticef, args...)
}
func (pl *prefixLogger) Panic(args ...interface{}) {
pl.logFunc(pl.logger.Panicf, args...)
}
|
pl.logFunc(pl.logger.Warningf, args...)
}
func (pl *prefixLogger) Criticalf(format string, args ...interface{}) {
pl.logFuncf(pl.logger.Criticalf, format, args...)
}
func (pl *prefixLogger) Debugf(format string, args ...interface{}) {
pl.logFuncf(pl.logger.Debugf, format, args...)
}
func (pl *prefixLogger) Errorf(format string, args ...interface{}) {
pl.logFuncf(pl.logger.Errorf, format, args...)
}
func (pl *prefixLogger) Fatalf(format string, args ...interface{}) {
pl.logFuncf(pl.logger.Fatalf, format, args...)
}
func (pl *prefixLogger) Infof(format string, args ...interface{}) {
pl.logFuncf(pl.logger.Infof, format, args...)
}
func (pl *prefixLogger) Noticef(format string, args ...interface{}) {
pl.logFuncf(pl.logger.Noticef, format, args...)
}
func (pl *prefixLogger) Panicf(format string, args ...interface{}) {
pl.logFuncf(pl.logger.Panicf, format, args...)
}
func (pl *prefixLogger) Warningf(format string, args ...interface{}) {
pl.logFuncf(pl.logger.Warningf, format, args...)
}
|
func (pl *prefixLogger) Warning(args ...interface{}) {
|
accessory_limit_vo.py
|
# coding: utf-8
import pprint
import re
import six
class AccessoryLimitVo:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'limit_count': 'str',
'limit_size': 'str',
'limit_file_type': 'str'
}
attribute_map = {
'limit_count': 'limit_count',
'limit_size': 'limit_size',
'limit_file_type': 'limit_file_type'
}
def __init__(self, limit_count=None, limit_size=None, limit_file_type=None):
"""AccessoryLimitVo - a model defined in huaweicloud sdk"""
self._limit_count = None
self._limit_size = None
self._limit_file_type = None
self.discriminator = None
if limit_count is not None:
self.limit_count = limit_count
if limit_size is not None:
self.limit_size = limit_size
if limit_file_type is not None:
self.limit_file_type = limit_file_type
@property
def limit_count(self):
"""Gets the limit_count of this AccessoryLimitVo.
        Limit on the number of files
:return: The limit_count of this AccessoryLimitVo.
:rtype: str
"""
return self._limit_count
@limit_count.setter
def limit_count(self, limit_count):
"""Sets the limit_count of this AccessoryLimitVo.
        Limit on the number of files
:param limit_count: The limit_count of this AccessoryLimitVo.
:type: str
"""
self._limit_count = limit_count
@property
def limit_size(self):
"""Gets the limit_size o
|
self, limit_size):
"""Sets the limit_size of this AccessoryLimitVo.
        Limit on file size, in MB
:param limit_size: The limit_size of this AccessoryLimitVo.
:type: str
"""
self._limit_size = limit_size
@property
def limit_file_type(self):
"""Gets the limit_file_type of this AccessoryLimitVo.
        Limit on allowed file types
:return: The limit_file_type of this AccessoryLimitVo.
:rtype: str
"""
return self._limit_file_type
@limit_file_type.setter
def limit_file_type(self, limit_file_type):
"""Sets the limit_file_type of this AccessoryLimitVo.
        Limit on allowed file types
:param limit_file_type: The limit_file_type of this AccessoryLimitVo.
:type: str
"""
self._limit_file_type = limit_file_type
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AccessoryLimitVo):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
f this AccessoryLimitVo.
        Limit on file size, in MB
:return: The limit_size of this AccessoryLimitVo.
:rtype: str
"""
return self._limit_size
@limit_size.setter
def limit_size(
|
bell.d.ts
|
import { IconDefinition, IconPrefix, IconName } from "@fortawesome/fontawesome-common-types";
export const definition: IconDefinition;
|
export const ukBell: IconDefinition;
export const prefix: IconPrefix;
export const iconName: IconName;
export const width: number;
export const height: number;
export const ligatures: string[];
export const unicode: string;
export const svgPathData: string;
| |
stats_bucket.go
|
package base
import (
"encoding/json"
"fmt"
"sync/atomic"
sgbucket "github.com/couchbase/sg-bucket"
)
// A wrapper around a Bucket that tracks bucket usage statistics as basic read/write counts. It doesn't break
// the stats down by operation type, which makes counts for bulk operations easier to identify.
type StatsBucket struct {
bucket Bucket
docsRead uint64
docsWritten uint64
bytesRead uint64
bytesWritten uint64
unknownSizeRead uint64
unknownSizeWritten uint64
}
type StatsBucketStats struct {
DocsRead uint64
DocsWritten uint64
BytesRead uint64
BytesWritten uint64
UnknownSizeRead uint64
UnknownSizeWritten uint64
}
func (sbs *StatsBucketStats) String() string {
return fmt.Sprintf("\nDocs Read: %12d \nDocs Written: %12d \nBytes Read: %12d \nBytes Written: %12d \nUnknown Size Reads: %12d \nUnknown Size Writes:%12d",
sbs.DocsRead, sbs.DocsWritten, sbs.BytesRead, sbs.BytesWritten, sbs.UnknownSizeRead, sbs.UnknownSizeWritten)
}
func (sbs *StatsBucketStats) PerIteration(iterationCount uint64) string {
return fmt.Sprintf("\nDocs Read: %12d \nDocs Written: %12d \nBytes Read: %12d \nBytes Written: %12d \nUnknown Size Reads: %12d \nUnknown Size Writes:%12d",
sbs.DocsRead/iterationCount,
sbs.DocsWritten/iterationCount,
sbs.BytesRead/iterationCount,
sbs.BytesWritten/iterationCount,
sbs.UnknownSizeRead/iterationCount,
sbs.UnknownSizeWritten/iterationCount)
}
func NewStatsBucket(bucket Bucket) *StatsBucket {
return &StatsBucket{
bucket: bucket,
}
}
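// Usage sketch ("underlying" stands for any existing Bucket implementation and
// is named here only for illustration):
//
//	sb := NewStatsBucket(underlying)
//	// ...route reads and writes through sb instead of the raw bucket...
//	fmt.Println(sb.GetStats().String())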
func (b *StatsBucket) docRead(count, bytesRead int) {
atomic.AddUint64(&b.docsRead, uint64(count))
if bytesRead == -1 {
atomic.AddUint64(&b.unknownSizeRead, uint64(1))
} else {
atomic.AddUint64(&b.bytesRead, uint64(bytesRead))
}
}
func (b *StatsBucket) docWrite(count, bytesWritten int) {
atomic.AddUint64(&b.docsWritten, uint64(count))
if bytesWritten == -1 {
atomic.AddUint64(&b.unknownSizeWritten, uint64(1))
} else {
atomic.AddUint64(&b.bytesWritten, uint64(bytesWritten))
}
}
func (b *StatsBucket) GetStats() StatsBucketStats {
return StatsBucketStats{
DocsRead: atomic.LoadUint64(&b.docsRead),
DocsWritten: atomic.LoadUint64(&b.docsWritten),
BytesRead: atomic.LoadUint64(&b.bytesRead),
BytesWritten: atomic.LoadUint64(&b.bytesWritten),
UnknownSizeRead: atomic.LoadUint64(&b.unknownSizeRead),
UnknownSizeWritten: atomic.LoadUint64(&b.unknownSizeWritten),
}
}
func (b *StatsBucket) GetName() string {
return b.bucket.GetName()
}
func (b *StatsBucket) Get(k string, rv interface{}) (uint64, error) {
cas, err := b.bucket.Get(k, rv)
if vBytes, ok := rv.([]byte); ok {
defer b.docRead(1, len(vBytes))
} else if marshalledJSON, marshalErr := json.Marshal(rv); marshalErr == nil {
defer b.docRead(1, len(marshalledJSON))
} else {
defer b.docRead(1, -1)
}
return cas, err
}
func (b *StatsBucket) GetRaw(k string) (v []byte, cas uint64, err error) {
v, cas, err = b.bucket.GetRaw(k)
b.docRead(1, len(v))
return v, cas, err
}
func (b *StatsBucket) GetAndTouchRaw(k string, exp uint32) (v []byte, cas uint64, err error) {
v, cas, err = b.bucket.GetAndTouchRaw(k, exp)
b.docRead(1, len(v))
return v, cas, err
}
func (b *StatsBucket) Touch(k string, exp uint32) (cas uint64, err error) {
return b.bucket.Touch(k, exp)
}
func (b *StatsBucket) GetBulkRaw(keys []string) (map[string][]byte, error) {
results, err := b.bucket.GetBulkRaw(keys)
for _, value := range results {
b.docRead(1, len(value))
}
return results, err
}
func (b *StatsBucket) Add(k string, exp uint32, v interface{}) (added bool, err error) {
if vBytes, ok := v.([]byte); ok {
defer b.docWrite(1, len(vBytes))
} else {
defer b.docWrite(1, -1)
}
return b.bucket.Add(k, exp, v)
}
func (b *StatsBucket) AddRaw(k string, exp uint32, v []byte) (added bool, err error) {
defer b.docWrite(1, len(v))
return b.bucket.AddRaw(k, exp, v)
}
func (b *StatsBucket) Append(k string, data []byte) error {
defer b.docWrite(1, len(data))
return b.bucket.Append(k, data)
}
func (b *StatsBucket) Set(k string, exp uint32, v interface{}) error {
if vBytes, ok := v.([]byte); ok {
defer b.docWrite(1, len(vBytes))
} else {
defer b.docWrite(1, -1)
}
return b.bucket.Set(k, exp, v)
}
func (b *StatsBucket) SetRaw(k string, exp uint32, v []byte) error {
defer b.docWrite(1, len(v))
return b.bucket.SetRaw(k, exp, v)
}
func (b *StatsBucket) Delete(k string) error {
return b.bucket.Delete(k)
}
func (b *StatsBucket) Remove(k string, cas uint64) (casOut uint64, err error) {
return b.bucket.Remove(k, cas)
}
func (b *StatsBucket) Write(k string, flags int, exp uint32, v interface{}, opt sgbucket.WriteOptions) error {
if vBytes, ok := v.([]byte); ok {
defer b.docWrite(1, len(vBytes))
} else {
defer b.docWrite(1, -1)
}
return b.bucket.Write(k, flags, exp, v, opt)
}
func (b *StatsBucket) WriteCas(k string, flags int, exp uint32, cas uint64, v interface{}, opt sgbucket.WriteOptions) (uint64, error) {
if vBytes, ok := v.([]byte); ok {
defer b.docWrite(1, len(vBytes))
} else {
defer b.docWrite(1, -1)
}
return b.bucket.WriteCas(k, flags, exp, cas, v, opt)
}
func (b *StatsBucket) Update(k string, exp uint32, callback sgbucket.UpdateFunc) (casOut uint64, err error) {
defer b.docWrite(1, -1)
return b.bucket.Update(k, exp, callback)
}
func (b *StatsBucket) WriteUpdate(k string, exp uint32, callback sgbucket.WriteUpdateFunc) (casOut uint64, err error) {
defer b.docWrite(1, -1)
return b.bucket.WriteUpdate(k, exp, callback)
}
func (b *StatsBucket) Incr(k string, amt, def uint64, exp uint32) (uint64, error) {
return b.bucket.Incr(k, amt, def, exp)
}
func (b *StatsBucket) WriteCasWithXattr(k string, xattr string, exp uint32, cas uint64, v interface{}, xv interface{}) (casOut uint64, err error) {
if vBytes, ok := v.([]byte); ok {
defer b.docWrite(1, len(vBytes))
} else {
defer b.docWrite(1, -1)
|
func (b *StatsBucket) WriteUpdateWithXattr(k string, xattr string, exp uint32, previous *sgbucket.BucketDocument, callback sgbucket.WriteUpdateWithXattrFunc) (casOut uint64, err error) {
defer b.docWrite(1, -1)
return b.bucket.WriteUpdateWithXattr(k, xattr, exp, previous, callback)
}
func (b *StatsBucket) GetWithXattr(k string, xattr string, rv interface{}, xv interface{}) (cas uint64, err error) {
cas, err = b.bucket.GetWithXattr(k, xattr, rv, xv)
if vBytes, ok := rv.([]byte); ok {
defer b.docRead(1, len(vBytes))
} else if marshalledJSON, marshalErr := json.Marshal(rv); marshalErr == nil {
defer b.docRead(1, len(marshalledJSON))
} else {
defer b.docRead(1, -1)
}
return cas, err
}
func (b *StatsBucket) DeleteWithXattr(k string, xattr string) error {
return b.bucket.DeleteWithXattr(k, xattr)
}
func (b *StatsBucket) GetXattr(k string, xattr string, xv interface{}) (cas uint64, err error) {
return b.bucket.GetXattr(k, xattr, xv)
}
func (b *StatsBucket) GetDDoc(docname string, value interface{}) error {
return b.bucket.GetDDoc(docname, value)
}
func (b *StatsBucket) PutDDoc(docname string, value interface{}) error {
return b.bucket.PutDDoc(docname, value)
}
func (b *StatsBucket) DeleteDDoc(docname string) error {
return b.bucket.DeleteDDoc(docname)
}
func (b *StatsBucket) View(ddoc, name string, params map[string]interface{}) (sgbucket.ViewResult, error) {
return b.bucket.View(ddoc, name, params)
}
func (b *StatsBucket) ViewCustom(ddoc, name string, params map[string]interface{}, vres interface{}) error {
return b.bucket.ViewCustom(ddoc, name, params, vres)
}
func (b *StatsBucket) ViewQuery(ddoc, name string, params map[string]interface{}) (sgbucket.QueryResultIterator, error) {
return b.bucket.ViewQuery(ddoc, name, params)
}
func (b *StatsBucket) SetBulk(entries []*sgbucket.BulkSetEntry) (err error) {
defer b.docWrite(len(entries), 0)
return b.bucket.SetBulk(entries)
}
func (b *StatsBucket) Refresh() error {
return b.bucket.Refresh()
}
func (b *StatsBucket) StartTapFeed(args sgbucket.FeedArguments) (sgbucket.MutationFeed, error) {
return b.bucket.StartTapFeed(args)
}
func (b *StatsBucket) StartDCPFeed(args sgbucket.FeedArguments, callback sgbucket.FeedEventCallbackFunc) error {
return b.bucket.StartDCPFeed(args, callback)
}
func (b *StatsBucket) Close() {
b.bucket.Close()
}
func (b *StatsBucket) Dump() {
b.bucket.Dump()
}
func (b *StatsBucket) VBHash(docID string) uint32 {
return b.bucket.VBHash(docID)
}
func (b *StatsBucket) GetMaxVbno() (uint16, error) {
return b.bucket.GetMaxVbno()
}
func (b *StatsBucket) CouchbaseServerVersion() (major uint64, minor uint64, micro string) {
return b.bucket.CouchbaseServerVersion()
}
func (b *StatsBucket) UUID() (string, error) {
return b.bucket.UUID()
}
func (b *StatsBucket) GetStatsVbSeqno(maxVbno uint16, useAbsHighSeqNo bool) (uuids map[uint16]uint64, highSeqnos map[uint16]uint64, seqErr error) {
	// Delegate to the wrapped bucket; calling b.GetStatsVbSeqno here would recurse forever.
	return b.bucket.GetStatsVbSeqno(maxVbno, useAbsHighSeqNo)
}
func (b *StatsBucket) IsSupported(feature sgbucket.BucketFeature) bool {
return b.bucket.IsSupported(feature)
}
|
}
return b.bucket.WriteCasWithXattr(k, xattr, exp, cas, v, xv)
}
|
imports_test.go
|
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package astutil
import (
"bytes"
"go/ast"
"go/format"
"go/parser"
"go/token"
"reflect"
"strconv"
"testing"
)
var fset = token.NewFileSet()
func parse(t *testing.T, name, in string) *ast.File {
file, err := parser.ParseFile(fset, name, in, parser.ParseComments)
if err != nil {
t.Fatalf("%s parse: %v", name, err)
}
return file
}
func print(t *testing.T, name string, f *ast.File) string {
var buf bytes.Buffer
if err := format.Node(&buf, fset, f); err != nil {
t.Fatalf("%s gofmt: %v", name, err)
}
return string(buf.Bytes())
}
type test struct {
name string
renamedPkg string
pkg string
in string
out string
broken bool // known broken
}
var addTests = []test{
{
name: "leave os alone",
pkg: "os",
in: `package main
import (
"os"
)
`,
out: `package main
import (
"os"
)
`,
},
{
name: "import.1",
pkg: "os",
in: `package main
`,
out: `package main
import "os"
`,
},
{
name: "import.2",
pkg: "os",
in: `package main
// Comment
import "C"
`,
out: `package main
// Comment
import "C"
import "os"
`,
},
{
name: "import.3",
pkg: "os",
in: `package main
// Comment
import "C"
import (
"io"
"utf8"
)
`,
out: `package main
// Comment
import "C"
import (
"io"
"os"
"utf8"
)
`,
},
{
name: "import.17",
pkg: "x/y/z",
in: `package main
// Comment
import "C"
import (
"a"
"b"
"x/w"
"d/f"
)
`,
out: `package main
// Comment
import "C"
import (
"a"
"b"
"x/w"
"x/y/z"
"d/f"
)
`,
},
{
name: "import into singular group",
pkg: "bytes",
in: `package main
import "os"
`,
out: `package main
import (
"bytes"
"os"
)
`,
},
{
name: "import into singular group with comment",
pkg: "bytes",
in: `package main
import /* why */ /* comment here? */ "os"
`,
out: `package main
import /* why */ /* comment here? */ (
"bytes"
"os"
)
`,
},
{
name: "import into group with leading comment",
pkg: "strings",
in: `package main
import (
// comment before bytes
"bytes"
"os"
)
`,
out: `package main
import (
// comment before bytes
"bytes"
"os"
"strings"
)
`,
},
{
name: "",
renamedPkg: "fmtpkg",
pkg: "fmt",
in: `package main
import "os"
`,
out: `package main
import (
fmtpkg "fmt"
"os"
)
`,
},
{
name: "struct comment",
pkg: "time",
in: `package main
// This is a comment before a struct.
type T struct {
t time.Time
}
`,
out: `package main
import "time"
// This is a comment before a struct.
type T struct {
t time.Time
}
`,
},
{
name: "issue 8729 import C",
pkg: "time",
in: `package main
import "C"
// comment
type T time.Time
`,
out: `package main
import "C"
import "time"
// comment
type T time.Time
`,
},
{
name: "issue 8729 empty import",
pkg: "time",
in: `package main
import ()
// comment
type T time.Time
`,
out: `package main
import "time"
// comment
type T time.Time
`,
},
{
name: "issue 8729 comment on package line",
pkg: "time",
in: `package main // comment
type T time.Time
`,
out: `package main // comment
import "time"
type T time.Time
`,
},
{
name: "issue 8729 comment after package",
pkg: "time",
in: `package main
// comment
type T time.Time
`,
out: `package main
import "time"
// comment
type T time.Time
`,
},
{
name: "issue 8729 comment before and on package line",
pkg: "time",
in: `// comment before
package main // comment on
type T time.Time
`,
out: `// comment before
package main // comment on
import "time"
type T time.Time
`,
},
// Issue 9961: Match prefixes using path segments rather than bytes
{
name: "issue 9961",
pkg: "regexp",
in: `package main
import (
"flag"
"testing"
"rsc.io/p"
)
`,
out: `package main
import (
"flag"
"regexp"
"testing"
"rsc.io/p"
)
`,
},
// Issue 10337: Preserve comment position
{
name: "issue 10337",
pkg: "fmt",
in: `package main
import (
"bytes" // a
"log" // c
)
`,
out: `package main
import (
"bytes" // a
"fmt"
"log" // c
)
`,
},
{
name: "issue 10337 new import at the start",
pkg: "bytes",
in: `package main
import (
"fmt" // b
"log" // c
)
`,
out: `package main
import (
"bytes"
"fmt" // b
"log" // c
)
`,
},
{
name: "issue 10337 new import at the end",
pkg: "log",
in: `package main
import (
"bytes" // a
"fmt" // b
)
`,
out: `package main
import (
"bytes" // a
"fmt" // b
"log"
)
`,
},
// Issue 14075: Merge import declarations
{
name: "issue 14075",
pkg: "bufio",
in: `package main
import "bytes"
import "fmt"
`,
out: `package main
import (
"bufio"
"bytes"
"fmt"
)
`,
},
{
name: "issue 14075 update position",
pkg: "bufio",
in: `package main
import "bytes"
import (
"fmt"
)
`,
out: `package main
import (
"bufio"
"bytes"
"fmt"
)
`,
},
{
name: `issue 14075 ignore import "C"`,
pkg: "bufio",
in: `package main
// Comment
import "C"
import "bytes"
import "fmt"
`,
out: `package main
// Comment
import "C"
import (
"bufio"
"bytes"
"fmt"
)
`,
},
{
name: `issue 14075 ignore adjacent import "C"`,
pkg: "bufio",
in: `package main
// Comment
import "C"
import "fmt"
`,
out: `package main
// Comment
import "C"
import (
"bufio"
"fmt"
)
`,
},
{
name: `issue 14075 ignore adjacent import "C" (without factored import)`,
pkg: "bufio",
in: `package main
// Comment
import "C"
import "fmt"
`,
out: `package main
// Comment
import "C"
import (
"bufio"
"fmt"
)
`,
},
{
name: `issue 14075 ignore single import "C"`,
pkg: "bufio",
in: `package main
// Comment
import "C"
`,
out: `package main
// Comment
import "C"
import "bufio"
`,
},
}
func TestAddImport(t *testing.T) {
for _, test := range addTests {
file := parse(t, test.name, test.in)
var before bytes.Buffer
ast.Fprint(&before, fset, file, nil)
AddNamedImport(fset, file, test.renamedPkg, test.pkg)
if got := print(t, test.name, file); got != test.out {
if test.broken {
t.Logf("%s is known broken:\ngot: %s\nwant: %s", test.name, got, test.out)
} else {
t.Errorf("%s:\ngot: %s\nwant: %s", test.name, got, test.out)
}
var after bytes.Buffer
ast.Fprint(&after, fset, file, nil)
t.Logf("AST before:\n%s\nAST after:\n%s\n", before.String(), after.String())
}
}
}
func TestDoubleAddImport(t *testing.T) {
file := parse(t, "doubleimport", "package main\n")
AddImport(fset, file, "os")
AddImport(fset, file, "bytes")
want := `package main
import (
"bytes"
"os"
)
`
if got := print(t, "doubleimport", file); got != want {
t.Errorf("got: %s\nwant: %s", got, want)
}
}
func
|
(t *testing.T) {
file := parse(t, "doublenamedimport", "package main\n")
AddNamedImport(fset, file, "o", "os")
AddNamedImport(fset, file, "i", "io")
want := `package main
import (
i "io"
o "os"
)
`
if got := print(t, "doublenamedimport", file); got != want {
t.Errorf("got: %s\nwant: %s", got, want)
}
}
// Part of issue 8729.
func TestDoubleAddImportWithDeclComment(t *testing.T) {
file := parse(t, "doubleimport", `package main
import (
)
// comment
type I int
`)
// The AddImport order here matters.
AddImport(fset, file, "golang.org/x/tools/go/ast/astutil")
AddImport(fset, file, "os")
want := `package main
import (
"golang.org/x/tools/go/ast/astutil"
"os"
)
// comment
type I int
`
if got := print(t, "doubleimport_with_decl_comment", file); got != want {
t.Errorf("got: %s\nwant: %s", got, want)
}
}
var deleteTests = []test{
{
name: "import.4",
pkg: "os",
in: `package main
import (
"os"
)
`,
out: `package main
`,
},
{
name: "import.5",
pkg: "os",
in: `package main
// Comment
import "C"
import "os"
`,
out: `package main
// Comment
import "C"
`,
},
{
name: "import.6",
pkg: "os",
in: `package main
// Comment
import "C"
import (
"io"
"os"
"utf8"
)
`,
out: `package main
// Comment
import "C"
import (
"io"
"utf8"
)
`,
},
{
name: "import.7",
pkg: "io",
in: `package main
import (
"io" // a
"os" // b
"utf8" // c
)
`,
out: `package main
import (
// a
"os" // b
"utf8" // c
)
`,
},
{
name: "import.8",
pkg: "os",
in: `package main
import (
"io" // a
"os" // b
"utf8" // c
)
`,
out: `package main
import (
"io" // a
// b
"utf8" // c
)
`,
},
{
name: "import.9",
pkg: "utf8",
in: `package main
import (
"io" // a
"os" // b
"utf8" // c
)
`,
out: `package main
import (
"io" // a
"os" // b
// c
)
`,
},
{
name: "import.10",
pkg: "io",
in: `package main
import (
"io"
"os"
"utf8"
)
`,
out: `package main
import (
"os"
"utf8"
)
`,
},
{
name: "import.11",
pkg: "os",
in: `package main
import (
"io"
"os"
"utf8"
)
`,
out: `package main
import (
"io"
"utf8"
)
`,
},
{
name: "import.12",
pkg: "utf8",
in: `package main
import (
"io"
"os"
"utf8"
)
`,
out: `package main
import (
"io"
"os"
)
`,
},
{
name: "handle.raw.quote.imports",
pkg: "os",
in: "package main\n\nimport `os`",
out: `package main
`,
},
{
name: "import.13",
pkg: "io",
in: `package main
import (
"fmt"
"io"
"os"
"utf8"
"go/format"
)
`,
out: `package main
import (
"fmt"
"os"
"utf8"
"go/format"
)
`,
},
{
name: "import.14",
pkg: "io",
in: `package main
import (
"fmt" // a
"io" // b
"os" // c
"utf8" // d
"go/format" // e
)
`,
out: `package main
import (
"fmt" // a
// b
"os" // c
"utf8" // d
"go/format" // e
)
`,
},
{
name: "import.15",
pkg: "double",
in: `package main
import (
"double"
"double"
)
`,
out: `package main
`,
},
{
name: "import.16",
pkg: "bubble",
in: `package main
import (
"toil"
"bubble"
"bubble"
"trouble"
)
`,
out: `package main
import (
"toil"
"trouble"
)
`,
},
{
name: "import.17",
pkg: "quad",
in: `package main
import (
"quad"
"quad"
)
import (
"quad"
"quad"
)
`,
out: `package main
`,
},
{
name: "import.18",
renamedPkg: "x",
pkg: "fmt",
in: `package main
import (
"fmt"
x "fmt"
)
`,
out: `package main
import "fmt"
`,
},
{
name: "import.18",
renamedPkg: "x",
pkg: "fmt",
in: `package main
import x "fmt"
import y "fmt"
`,
out: `package main
import y "fmt"
`,
},
}
func TestDeleteImport(t *testing.T) {
for _, test := range deleteTests {
file := parse(t, test.name, test.in)
DeleteNamedImport(fset, file, test.renamedPkg, test.pkg)
if got := print(t, test.name, file); got != test.out {
t.Errorf("%s:\ngot: %s\nwant: %s", test.name, got, test.out)
}
}
}
type rewriteTest struct {
name string
srcPkg string
dstPkg string
in string
out string
}
var rewriteTests = []rewriteTest{
{
name: "import.13",
srcPkg: "utf8",
dstPkg: "encoding/utf8",
in: `package main
import (
"io"
"os"
"utf8" // thanks ken
)
`,
out: `package main
import (
"encoding/utf8" // thanks ken
"io"
"os"
)
`,
},
{
name: "import.14",
srcPkg: "asn1",
dstPkg: "encoding/asn1",
in: `package main
import (
"asn1"
"crypto"
"crypto/rsa"
_ "crypto/sha1"
"crypto/x509"
"crypto/x509/pkix"
"time"
)
var x = 1
`,
out: `package main
import (
"crypto"
"crypto/rsa"
_ "crypto/sha1"
"crypto/x509"
"crypto/x509/pkix"
"encoding/asn1"
"time"
)
var x = 1
`,
},
{
name: "import.15",
srcPkg: "url",
dstPkg: "net/url",
in: `package main
import (
"bufio"
"net"
"path"
"url"
)
var x = 1 // comment on x, not on url
`,
out: `package main
import (
"bufio"
"net"
"net/url"
"path"
)
var x = 1 // comment on x, not on url
`,
},
{
name: "import.16",
srcPkg: "http",
dstPkg: "net/http",
in: `package main
import (
"flag"
"http"
"log"
"text/template"
)
var addr = flag.String("addr", ":1718", "http service address") // Q=17, R=18
`,
out: `package main
import (
"flag"
"log"
"net/http"
"text/template"
)
var addr = flag.String("addr", ":1718", "http service address") // Q=17, R=18
`,
},
}
func TestRewriteImport(t *testing.T) {
for _, test := range rewriteTests {
file := parse(t, test.name, test.in)
RewriteImport(fset, file, test.srcPkg, test.dstPkg)
if got := print(t, test.name, file); got != test.out {
t.Errorf("%s:\ngot: %s\nwant: %s", test.name, got, test.out)
}
}
}
var importsTests = []struct {
name string
in string
want [][]string
}{
{
name: "no packages",
in: `package foo
`,
want: nil,
},
{
name: "one group",
in: `package foo
import (
"fmt"
"testing"
)
`,
want: [][]string{{"fmt", "testing"}},
},
{
name: "four groups",
in: `package foo
import "C"
import (
"fmt"
"testing"
"appengine"
"myproject/mylib1"
"myproject/mylib2"
)
`,
want: [][]string{
{"C"},
{"fmt", "testing"},
{"appengine"},
{"myproject/mylib1", "myproject/mylib2"},
},
},
{
name: "multiple factored groups",
in: `package foo
import (
"fmt"
"testing"
"appengine"
)
import (
"reflect"
"bytes"
)
`,
want: [][]string{
{"fmt", "testing"},
{"appengine"},
{"reflect"},
{"bytes"},
},
},
}
func unquote(s string) string {
res, err := strconv.Unquote(s)
if err != nil {
return "could_not_unquote"
}
return res
}
func TestImports(t *testing.T) {
fset := token.NewFileSet()
for _, test := range importsTests {
f, err := parser.ParseFile(fset, "test.go", test.in, 0)
if err != nil {
t.Errorf("%s: %v", test.name, err)
continue
}
var got [][]string
for _, group := range Imports(fset, f) {
var b []string
for _, spec := range group {
b = append(b, unquote(spec.Path.Value))
}
got = append(got, b)
}
if !reflect.DeepEqual(got, test.want) {
t.Errorf("Imports(%s)=%v, want %v", test.name, got, test.want)
}
}
}
|
TestDoubleAddNamedImport
|
GetText.py
|
# The MIT License (MIT)
#
# Copyright (c) 2016 Adam Schubert
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import gettext
import os
import logging
logger = logging.getLogger(__name__)
class
|
(object):
"""
Handles language translations and Initializes global _() function
"""
def __init__(self, locale_code):
"""
Initialize GetText
:param locale_code selected locale
"""
try:
filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'locale', '{}.mo'.format(locale_code))
trans = gettext.GNUTranslations(open(filename, "rb"))
logger.debug('{} Loaded'.format(filename))
except IOError:
logger.debug('Failed to find locale {}'.format(locale_code))
trans = gettext.NullTranslations()
trans.install()
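        # Usage sketch ('cs_CZ' is an illustrative locale code; a matching
        # locale/cs_CZ.mo catalog is assumed to exist):
        #   GetText('cs_CZ')
        #   print(_('Some translatable text'))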
|
GetText
|
acpi_table.rs
|
#![allow(dead_code)]
use crate::get_acpi_table;
pub use acpi::{
interrupt::{InterruptModel, InterruptSourceOverride, IoApic, Polarity, TriggerMode},
Acpi,
};
use alloc::vec::Vec;
use lazy_static::*;
use spin::Mutex;
pub struct AcpiTable {
inner: Acpi,
}
lazy_static! {
static ref ACPI_TABLE: Mutex<Option<AcpiTable>> = Mutex::default();
}
impl AcpiTable {
fn initialize_check() {
#[cfg(target_arch = "x86_64")]
{
|
}
}
}
pub fn invalidate() {
*ACPI_TABLE.lock() = None;
}
pub fn get_ioapic() -> Vec<IoApic> {
Self::initialize_check();
let table = ACPI_TABLE.lock();
match &*table {
None => Vec::default(),
Some(table) => match table.inner.interrupt_model.as_ref().unwrap() {
InterruptModel::Apic(apic) => {
apic.io_apics.iter().map(|x| IoApic { ..*x }).collect()
}
_ => Vec::default(),
},
}
}
pub fn get_interrupt_source_overrides() -> Vec<InterruptSourceOverride> {
Self::initialize_check();
let table = ACPI_TABLE.lock();
match &*table {
None => Vec::default(),
Some(table) => match table.inner.interrupt_model.as_ref().unwrap() {
InterruptModel::Apic(apic) => apic
.interrupt_source_overrides
.iter()
.map(|x| InterruptSourceOverride {
polarity: Self::clone_polarity(&x.polarity),
trigger_mode: Self::clone_trigger_mode(&x.trigger_mode),
..*x
})
.collect(),
_ => Vec::default(),
},
}
}
fn clone_polarity(x: &Polarity) -> Polarity {
match x {
Polarity::SameAsBus => Polarity::SameAsBus,
Polarity::ActiveHigh => Polarity::ActiveHigh,
Polarity::ActiveLow => Polarity::ActiveLow,
}
}
fn clone_trigger_mode(x: &TriggerMode) -> TriggerMode {
match x {
TriggerMode::SameAsBus => TriggerMode::SameAsBus,
TriggerMode::Edge => TriggerMode::Edge,
TriggerMode::Level => TriggerMode::Level,
}
}
}
|
let mut table = ACPI_TABLE.lock();
if let None = *table {
*table = get_acpi_table().map(|x| AcpiTable { inner: x });
|
routing_spec.ts
|
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {$, browser, by, element, ExpectedConditions} from 'protractor';
import {verifyNoBrowserErrors} from '../../../e2e_util/e2e_util';
function
|
(selector: string) {
  // Waits for the element matching the given selector to be present on the DOM.
browser.wait(ExpectedConditions.presenceOf($(selector)), 20000);
}
describe('routing inbox-app', () => {
afterEach(verifyNoBrowserErrors);
describe('index view', () => {
const URL = '/';
it('should list out the current collection of items', () => {
browser.get(URL);
waitForElement('.inbox-item-record');
expect(element.all(by.css('.inbox-item-record')).count()).toEqual(200);
});
});
});
|
waitForElement
|
ExpandShortLinks.user.js
|
// ==UserScript==
// @name Expand Short Links
// @description Appends more characters to short link texts in posts and comments so they can be easily seen and clicked on
// @homepage https://github.com/samliew/SO-mod-userscripts
// @author @samliew
// @version 1.4
//
// @include https://*stackoverflow.com/*
// @include https://*serverfault.com/*
// @include https://*superuser.com/*
// @include https://*askubuntu.com/*
// @include https://*mathoverflow.net/*
// @include https://*.stackexchange.com/*
// ==/UserScript==
(function() {
'use strict';
function expandShortLinks() {
$('.js-post-body, .comment-copy').find('a').not('.post-tag').not('.shortlink').filter((i,el) => el.innerText.length > 0 && el.innerText.length <= 2 && el.children.length == 0).addClass('shortlink');
}
function appendStyles() {
const styles = `
<style>
a.shortlink {
font-weight: bold;
color: var(--red-500) !important;
}
a.shortlink:after {
content: '_link';
    color: var(--green-400);
font-style: italic;
font-weight: normal;
}
</style>
`;
$('body').append(styles);
}
// On page load
|
})();
|
appendStyles();
expandShortLinks();
$(document).ajaxComplete(expandShortLinks);
|
hook.py
|
import allure
from selene.core.exceptions import TimeoutException
from selene.support.shared import browser
def attach_snapshots_on_failure(error: TimeoutException) -> Exception:
"""
    An example of a selene hook_wait_failure hook that attaches snapshots to the failed test step.
    It actually might not be needed,
    because using the pytest_runtest_makereport hook
    you can achieve something similar
    by attaching screenshots to the test body itself,
    which is more handy during analysis of the test report.
    But if you need it, you can use it by adding it to your browser setup fixture::
import web_test
browser.config.hook_wait_failure = \
web_test.assist.selene.shared.hook.attach_snapshots_on_failure
    Otherwise, you can skip it ;)
"""
last_screenshot = browser.config.last_screenshot
if last_screenshot:
|
last_page_source = browser.config.last_page_source
if last_page_source:
allure.attach.file(source=last_page_source,
name='page source on failure',
attachment_type=allure.attachment_type.HTML)
return error
|
allure.attach.file(source=last_screenshot,
name='screenshot on failure',
attachment_type=allure.attachment_type.PNG)
|
clear.go
|
package cmd
import (
"errors"
"fmt"
"os"
"sptlrx/cookie"
"github.com/muesli/coral"
)
var clearCmd = &coral.Command{
Use: "clear",
Short: "Clear saved cookie",
RunE: func(cmd *coral.Command, args []string) error {
err := cookie.Clear()
if err == nil
|
else if errors.Is(err, os.ErrNotExist) {
fmt.Println("You haven't saved any cookies 🍪")
return nil
}
return err
},
}
|
{
fmt.Println("Cookie have been cleared.")
}
|
whitelist.rs
|
use crate::utils::wait_until;
use crate::{Net, Spec};
use ckb_app_config::CKBAppConfig;
use log::info;
use std::{collections::HashSet, thread::sleep, time::Duration};
pub struct WhitelistOnSessionLimit;
impl Spec for WhitelistOnSessionLimit {
crate::name!("whitelist_on_session_limit");
crate::setup!(num_nodes: 5, connect_all: false);
fn modify_ckb_config(&self) -> Box<dyn Fn(&mut CKBAppConfig) -> ()> {
// disable outbound peer service
Box::new(|config| {
config.network.connect_outbound_interval_secs = 0;
config.network.discovery_local_address = true;
config.network.max_peers = 2;
config.network.max_outbound_peers = 1;
})
}
fn run(&self, net: &mut Net) {
info!("Running whitelist on session limit");
// with no whitelist
let node4 = net.nodes.pop().unwrap();
let node3 = net.nodes.pop().unwrap();
let node2 = net.nodes.pop().unwrap();
let node1 = net.nodes.pop().unwrap();
let mut node0 = net.nodes.pop().unwrap();
let mut id_set = HashSet::new();
id_set.insert(node1.node_id());
id_set.insert(node4.node_id());
node0.connect(&node1);
// outbound session will be refused
node0.connect_uncheck(&node2);
node0.generate_blocks(1);
node3.connect(&node0);
// inbound session will be rotated by network partition
node4.connect_uncheck(&node0);
sleep(Duration::from_secs(5));
let rpc_client0 = node0.rpc_client();
|
let peers = rpc_client0.get_peers();
peers.len() == 2
&& peers
.into_iter()
.all(|node| id_set.contains(&node.node_id.as_str()))
});
if !is_connect_peer_num_eq_2 {
panic!("refuse to connect fail");
}
// restart node0, set node1 to node0's whitelist
let node1_listen = format!(
"/ip4/127.0.0.1/tcp/{}/p2p/{}",
node1.p2p_port(),
node1.node_id()
);
node0.stop();
node0.edit_config_file(
Box::new(|_| ()),
Box::new(move |config| {
config.network.whitelist_peers = vec![node1_listen.parse().unwrap()]
}),
);
node0.start();
// with whitelist
let mut id_set = HashSet::new();
id_set.insert(node1.node_id());
id_set.insert(node2.node_id());
id_set.insert(node3.node_id());
node0.connect(&node2);
node3.connect(&node0);
// whitelist will be connected on outbound reach limit
node0.connect(&node1);
let rpc_client0 = node0.rpc_client();
let is_connect_peer_num_eq_3 = wait_until(10, || {
let peers = rpc_client0.get_peers();
peers.len() == 3
&& peers
.into_iter()
.all(|node| id_set.contains(&node.node_id.as_str()))
});
if !is_connect_peer_num_eq_3 {
panic!("whitelist connect fail");
}
}
}
|
let is_connect_peer_num_eq_2 = wait_until(10, || {
|
__init__.py
|
import os
import torch
from torch.utils.cpp_extension import load
cwd = os.path.dirname(os.path.realpath(__file__))
cpu_path = os.path.join(cwd, 'cpu')
gpu_path = os.path.join(cwd, 'gpu')
cpu = load('sync_bn_cpu', [
os.path.join(cpu_path, 'operator.cpp'),
|
if torch.cuda.is_available():
gpu = load('sync_bn_gpu', [
os.path.join(gpu_path, 'operator.cpp'),
os.path.join(gpu_path, 'sync_bn_cuda.cu'),
], build_directory=gpu_path, verbose=False)
|
os.path.join(cpu_path, 'sync_bn.cpp'),
], build_directory=cpu_path, verbose=False)
|
forms.py
|
from django import forms
from .models import *
class ProfileForm(forms.ModelForm):
class Meta:
model=Profile
exclude=['username']
class
|
(forms.ModelForm):
class Meta:
model=BlogPost
exclude=['username','neighborhood','profpic']
class CommentForm(forms.ModelForm):
class Meta:
model = Comment
exclude=['username','post']
class BusinessForm(forms.ModelForm):
class Meta:
model = Business
exclude=['owner','neighborhood']
class notificationsForm(forms.ModelForm):
class Meta:
model = Update
exclude=['author','neighborhood','post_date']
|
BlogPostForm
|
bundle_test.go
|
package validation
import (
"io/ioutil"
"path/filepath"
"strings"
"testing"
"github.com/operator-framework/operator-registry/pkg/registry"
"github.com/stretchr/testify/require"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
k8syaml "k8s.io/apimachinery/pkg/util/yaml"
)
func
|
(t *testing.T) {
var table = []struct {
description string
directory string
hasError bool
errString string
}{
{
description: "registryv1 bundle/valid bundle",
directory: "./testdata/valid_bundle",
hasError: false,
},
{
description: "registryv1 bundle/invalid bundle",
directory: "./testdata/invalid_bundle",
hasError: true,
errString: "owned CRD etcdclusters.etcd.database.coreos.com/v1beta2 not found in bundle",
},
{
description: "registryv1 bundle/invalid bundle 2",
directory: "./testdata/invalid_bundle_2",
hasError: true,
errString: `CRD etcdclusters.etcd.database.coreos.com/v1beta2 is present in bundle "test" but not defined in CSV`,
},
}
for _, tt := range table {
unstObjs := []*unstructured.Unstructured{}
// Read all files in manifests directory
items, err := ioutil.ReadDir(tt.directory)
require.NoError(t, err, "Unable to read directory: %s", tt.description)
for _, item := range items {
fileWithPath := filepath.Join(tt.directory, item.Name())
data, err := ioutil.ReadFile(fileWithPath)
require.NoError(t, err, "Unable to read file: %s", fileWithPath)
dec := k8syaml.NewYAMLOrJSONDecoder(strings.NewReader(string(data)), 30)
k8sFile := &unstructured.Unstructured{}
err = dec.Decode(k8sFile)
require.NoError(t, err, "Unable to decode file: %s", fileWithPath)
unstObjs = append(unstObjs, k8sFile)
}
// Validate the bundle object
bundle := registry.NewBundle("test", "", nil, unstObjs...)
results := BundleValidator.Validate(bundle)
if len(results) > 0 {
require.Equal(t, tt.hasError, results[0].HasError(), "%s: %s", tt.description, results[0])
if results[0].HasError() {
require.Contains(t, results[0].Errors[0].Error(), tt.errString)
}
}
}
}
|
TestValidateBundle
|
router.js
|
router.get('/:section', require('./index'));
};
|
module.exports = function (router) {
|
|
test.js
|
"use-strict";
let Instagram = require('./instagram');
Instagram = new Instagram();
|
Instagram.csrfToken = csrf;
}).then(() => {
return Instagram.auth('login', 'password').then(sessionId => {
Instagram.sessionId = sessionId
console.log('sessionId', sessionId);
// return Instagram.getUserDataByUsername('username-for-get').then((t) => {
// return Instagram.getUserFollowers(t.graphql.user.id).then((t) => {
// console.log(t); // - instagram followers for user "username-for-get"
// })
// })
})
}).catch(console.error);
|
Instagram.getCsrfToken().then((csrf) => {
|
recipe-576780.py
|
#!/usr/bin/env python
'''This module exposes function timelimited and two
classes TimeLimited and TimeLimitExpired.
Function timelimited can be used to invoke any
callable object with a time limit.
Class TimeLimited wraps any callable object into a
time limited callable with an equivalent signature.
Beware: any critical resources like locks, memory or
files, etc. acquired or opened by the callable may
not be released or closed, respectively. Therefore,
time limiting such callables may cause deadlocks or
leaks or both.
No signals or timers are affected and any errors are
propagated as usual. Decorators and with statements
are avoided for backward compatibility.
Tested with Python 2.2.3, 2.3.7, 2.4.5, 2.5.2, 2.6.2
or 3.0.1 on CentOS 4.7, MacOS X 10.4.11 Tiger (Intel)
and 10.3.9 Panther (PPC), Solaris 10 and Windows XP.
Note, for Python 3.0 and beyond, replace ', e:' with
' as e:' in the 3 except lines marked #XXX below or
run the Python 2to3 translator on this file, see
<http://docs.python.org/dev/3.1/library/2to3.html>
The core of the function timelimited is copied from
<http://code.activestate.com/recipes/473878/>.
'''
__all__ = ('timelimited', 'TimeLimited', 'TimeLimitExpired')
__version__ = '4 2009-06-08'
from threading import Thread
# The #PYCHOK marks are intended for postprocessing
# by <http://code.activestate.com/recipes/546532/>
try: # UGLY! private method __stop
_Thread_stop = Thread._Thread__stop #PYCHOK false
except AttributeError: # _stop in Python 3.0
_Thread_stop = Thread._stop #PYCHOK expected
class TimeLimitExpired(Exception):
'''Exception raised when time limit expires.
'''
pass
def timelimited(timeout, function, *args, **kwds):
'''Invoke the given function with the positional and
keyword arguments under a time constraint.
The function result is returned if the function
finishes within the given time limit, otherwise
a TimeLimitExpired error is raised.
The timeout value is in seconds and has the same
resolution as the standard time.time function. A
timeout value of None invokes the given function
without imposing any time limit.
A TypeError is raised if function is not callable,
a ValueError is raised for negative timeout values
and any errors occurring inside the function are
passed along as-is.
'''
class _Timelimited(Thread):
_error_ = TimeLimitExpired # assume timeout
_result_ = None
def run(self):
try:
self._result_ = function(*args, **kwds)
self._error_ = None
except Exception, e: #XXX as for Python 3.0
self._error_ = e
def _stop(self):
# UGLY! force the thread to stop by (ab)using
# the private __stop or _stop method, but that
# seems to work better than these recipes
# <http://code.activestate.com/recipes/496960/>
# <http://sebulba.wikispaces.com/recipe+thread2>
if self.isAlive():
_Thread_stop(self)
if not hasattr(function, '__call__'):
raise TypeError('function not callable: %s' % repr(function))
if timeout is None: # shortcut
return function(*args, **kwds)
if timeout < 0:
raise ValueError('timeout invalid: %s' % repr(timeout))
t = _Timelimited()
t.start()
t.join(timeout)
if t._error_ is None:
return t._result_
if t._error_ is TimeLimitExpired:
t._stop()
raise TimeLimitExpired('timeout %r for %s' % (timeout, repr(function)))
else:
raise t._error_
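# Minimal usage sketch; 'some_slow_function' is a hypothetical callable used
# purely for illustration:
#   try:
#       r = timelimited(2.0, some_slow_function, arg)
#   except TimeLimitExpired:
#       r = None  # timed out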
class TimeLimited(object):
'''Create a time limited version of any callable.
For example, to limit function f to t seconds,
first create a time limited version of f.
from timelimited import *
f_t = TimeLimited(f, t)
Then, instead of invoking f(...), use f_t like
try:
r = f_t(...)
except TimeLimitExpired:
r = ... # timed out
'''
def __init__(self, function, timeout=None):
'''See function timelimited for a description
of the arguments.
'''
self._function = function
self._timeout = timeout
def __call__(self, *args, **kwds):
|
def __str__(self):
return '<%s of %r, timeout=%s>' % (repr(self)[1:-1], self._function, self._timeout)
def _timeout_get(self):
return self._timeout
def _timeout_set(self, timeout):
self._timeout = timeout
timeout = property(_timeout_get, _timeout_set, None,
'Property to get and set the timeout value')
if __name__ == '__main__':
import sys, time, threading #PYCHOK expected
_format = '%s test %%d/8 %%s in Python %s: %%s' % (
sys.argv[0], sys.version.split()[0])
_tests = 0
def passed(arg='OK'):
global _tests
_tests += 1
print(_format % (_tests, 'passed', arg))
def failed(fmt, *args):
global _tests
_tests += 1
if args:
t = fmt % args
else:
t = fmt
print(_format % (_tests, 'failed', t))
def check(timeout, sleep, result, arg='OK'):
if timeout > sleep:
x = None # time.sleep(0) result
elif isinstance(result, TimeLimitExpired):
x = result
else:
x = TimeLimitExpired
if result is x:
passed(arg)
else:
failed('expected %r, but got %r', x, result)
# check timelimited function
for t, s in ((2.0, 1),
(1.0, 20)): # note, 20!
try:
r = timelimited(t, time.sleep, s)
except Exception, e: #XXX as for Python 3.0
r = e
check(t, s, r, timelimited)
# check TimeLimited class and property
f = TimeLimited(time.sleep)
for t, s in ((2.0, 1),
(1.0, 20)): # note, 20!
f.timeout = t
try:
r = f(s)
except Exception, e: #XXX as for Python 3.0
r = e
check(t, s, r, f)
# check TypeError
try:
t = timelimited(0, None)
failed('no %r', TypeError)
except TypeError:
passed(TypeError)
except:
failed('expected %r', TypeError)
# check ValueError
try:
t = timelimited(-10, time.time)
failed('no %r', ValueError)
except ValueError:
passed(ValueError)
except:
failed('expected %r', ValueError)
# check error passing from thread
try:
r = timelimited(1, lambda x: 1/x, 0)
failed('no %r', ZeroDivisionError)
except ZeroDivisionError:
passed(ZeroDivisionError)
except:
failed('expected %r', ZeroDivisionError)
# check that all created threads stopped
for t in threading.enumerate():
if t.isAlive() and repr(t).startswith('<_Timelimited('):
failed('thread %r still alive', t)
break
else:
passed('all _Timelimited threads stopped')
|
'''See function timelimited for a description
of the behavior.
'''
return timelimited(self._timeout, self._function, *args, **kwds)
|
main.js
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var argument_1 = require("./argument");
exports.Argument = argument_1.Argument;
var jsonArgument_1 = require("./jsonArgument");
exports.JSONArgument = jsonArgument_1.JSONArgument;
const comment_1 = require("./comment");
exports.Comment = comment_1.Comment;
const parser_1 = require("./parser");
var flag_1 = require("./flag");
exports.Flag = flag_1.Flag;
const instruction_1 = require("./instruction");
exports.Instruction = instruction_1.Instruction;
var line_1 = require("./line");
exports.Line = line_1.Line;
const parserDirective_1 = require("./parserDirective");
exports.ParserDirective = parserDirective_1.ParserDirective;
var property_1 = require("./property");
|
var variable_1 = require("./variable");
exports.Variable = variable_1.Variable;
var add_1 = require("./instructions/add");
exports.Add = add_1.Add;
const arg_1 = require("./instructions/arg");
exports.Arg = arg_1.Arg;
const cmd_1 = require("./instructions/cmd");
exports.Cmd = cmd_1.Cmd;
const copy_1 = require("./instructions/copy");
exports.Copy = copy_1.Copy;
const entrypoint_1 = require("./instructions/entrypoint");
exports.Entrypoint = entrypoint_1.Entrypoint;
const env_1 = require("./instructions/env");
exports.Env = env_1.Env;
const from_1 = require("./instructions/from");
exports.From = from_1.From;
const healthcheck_1 = require("./instructions/healthcheck");
exports.Healthcheck = healthcheck_1.Healthcheck;
var jsonInstruction_1 = require("./jsonInstruction");
exports.JSONInstruction = jsonInstruction_1.JSONInstruction;
var label_1 = require("./instructions/label");
exports.Label = label_1.Label;
var modifiableInstruction_1 = require("./modifiableInstruction");
exports.ModifiableInstruction = modifiableInstruction_1.ModifiableInstruction;
var onbuild_1 = require("./instructions/onbuild");
exports.Onbuild = onbuild_1.Onbuild;
var propertyInstruction_1 = require("./propertyInstruction");
exports.PropertyInstruction = propertyInstruction_1.PropertyInstruction;
var shell_1 = require("./instructions/shell");
exports.Shell = shell_1.Shell;
var stopsignal_1 = require("./instructions/stopsignal");
exports.Stopsignal = stopsignal_1.Stopsignal;
var user_1 = require("./instructions/user");
exports.User = user_1.User;
var volume_1 = require("./instructions/volume");
exports.Volume = volume_1.Volume;
var workdir_1 = require("./instructions/workdir");
exports.Workdir = workdir_1.Workdir;
var Keyword;
(function (Keyword) {
Keyword["ADD"] = "ADD";
Keyword["ARG"] = "ARG";
Keyword["CMD"] = "CMD";
Keyword["COPY"] = "COPY";
Keyword["ENTRYPOINT"] = "ENTRYPOINT";
Keyword["ENV"] = "ENV";
Keyword["EXPOSE"] = "EXPOSE";
Keyword["FROM"] = "FROM";
Keyword["HEALTHCHECK"] = "HEALTHCHECK";
Keyword["LABEL"] = "LABEL";
Keyword["MAINTAINER"] = "MAINTAINER";
Keyword["ONBUILD"] = "ONBUILD";
Keyword["RUN"] = "RUN";
Keyword["SHELL"] = "SHELL";
Keyword["STOPSIGNAL"] = "STOPSIGNAL";
Keyword["USER"] = "USER";
Keyword["VOLUME"] = "VOLUME";
Keyword["WORKDIR"] = "WORKDIR";
})(Keyword = exports.Keyword || (exports.Keyword = {}));
var Directive;
(function (Directive) {
Directive["escape"] = "escape";
})(Directive = exports.Directive || (exports.Directive = {}));
exports.DefaultVariables = [
"FTP_PROXY", "ftp_proxy",
"HTTP_PROXY", "http_proxy",
"HTTPS_PROXY", "https_proxy",
"NO_PROXY", "no_proxy"
];
var DockerfileParser;
(function (DockerfileParser) {
function parse(content) {
let parser = new parser_1.Parser();
return parser.parse(content);
}
DockerfileParser.parse = parse;
})(DockerfileParser = exports.DockerfileParser || (exports.DockerfileParser = {}));
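// Illustrative usage sketch (added commentary; the Dockerfile text and the
// getInstructions()/getKeyword() calls are assumptions about the public API,
// not taken from this file):
//
//   const dockerfile = DockerfileParser.parse("FROM alpine\nRUN apk add --no-cache curl");
//   for (const instruction of dockerfile.getInstructions()) {
//       console.log(instruction.getKeyword());
//   }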
|
exports.Property = property_1.Property;
|
index.js
|
/** @license Material-UI v1.0.0-beta.41
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.withWidth = exports.Zoom = undefined;
exports.Collapse = exports.Fade = exports.Grow = exports.Slide = exports.Tooltip = exports.Toolbar = exports.TextField = exports.Typography = exports.Tab = exports.Tabs = exports.TableSortLabel = exports.TableRow = exports.TablePagination = exports.TableHead = exports.TableFooter = exports.TableCell = exports.TableBody = exports.Table = exports.Switch = exports.SwipeableDrawer = exports.SvgIcon = exports.colors = exports.jssPreset = exports.createMuiTheme = exports.withTheme = exports.withStyles = exports.MuiThemeProvider = exports.StepLabel = exports.StepContent = exports.StepIcon = exports.StepButton = exports.Step = exports.Stepper = exports.SnackbarContent = exports.Snackbar = exports.Select = exports.RadioGroup = exports.Radio = exports.LinearProgress = exports.CircularProgress = exports.Portal = exports.Popover = exports.Paper = exports.ModalManager = exports.Backdrop = exports.Modal = exports.MobileStepper = exports.MenuList = exports.MenuItem = exports.Menu = exports.ListSubheader = exports.ListItemText = exports.ListItemSecondaryAction = exports.ListItemIcon = exports.ListItemAvatar = exports.ListItem = exports.List = exports.GridListTileBar = exports.GridListTile = exports.GridList = exports.Grid = exports.InputAdornment = exports.InputLabel = exports.Input = exports.IconButton = exports.Icon = exports.Hidden = exports.FormControlLabel = exports.FormHelperText = exports.FormLabel = exports.FormGroup = exports.FormControl = exports.ExpansionPanelSummary = exports.ExpansionPanelDetails = exports.ExpansionPanelActions = exports.ExpansionPanel = exports.Drawer = exports.Divider = exports.withMobileDialog = exports.DialogTitle = exports.DialogContentText = exports.DialogContent = exports.DialogActions = exports.Dialog = exports.CssBaseline = exports.ClickAwayListener = exports.Chip = exports.Checkbox = exports.CardMedia = exports.CardHeader = exports.CardContent = exports.CardActions = exports.Card = exports.ButtonBase = exports.Button = exports.BottomNavigationAction = exports.BottomNavigation = exports.Badge = exports.Avatar = exports.AppBar = undefined;
var _AppBar = require('./AppBar');
Object.defineProperty(exports, 'AppBar', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_AppBar).default;
}
});
var _Avatar = require('./Avatar');
Object.defineProperty(exports, 'Avatar', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Avatar).default;
}
});
var _Badge = require('./Badge');
Object.defineProperty(exports, 'Badge', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Badge).default;
}
});
var _BottomNavigation = require('./BottomNavigation');
Object.defineProperty(exports, 'BottomNavigation', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_BottomNavigation).default;
}
});
Object.defineProperty(exports, 'BottomNavigationAction', {
enumerable: true,
get: function get() {
return _BottomNavigation.BottomNavigationAction;
}
});
var _Button = require('./Button');
Object.defineProperty(exports, 'Button', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Button).default;
}
});
var _ButtonBase = require('./ButtonBase');
Object.defineProperty(exports, 'ButtonBase', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_ButtonBase).default;
}
});
var _Card = require('./Card');
Object.defineProperty(exports, 'Card', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Card).default;
}
});
Object.defineProperty(exports, 'CardActions', {
enumerable: true,
get: function get() {
return _Card.CardActions;
}
});
Object.defineProperty(exports, 'CardContent', {
enumerable: true,
get: function get() {
return _Card.CardContent;
}
});
Object.defineProperty(exports, 'CardHeader', {
enumerable: true,
get: function get() {
return _Card.CardHeader;
}
});
Object.defineProperty(exports, 'CardMedia', {
enumerable: true,
get: function get() {
return _Card.CardMedia;
}
});
var _Checkbox = require('./Checkbox');
Object.defineProperty(exports, 'Checkbox', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Checkbox).default;
}
});
var _Chip = require('./Chip');
Object.defineProperty(exports, 'Chip', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Chip).default;
}
});
var _ClickAwayListener = require('./utils/ClickAwayListener');
Object.defineProperty(exports, 'ClickAwayListener', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_ClickAwayListener).default;
}
});
var _CssBaseline = require('./CssBaseline');
Object.defineProperty(exports, 'CssBaseline', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_CssBaseline).default;
}
});
var _Dialog = require('./Dialog');
Object.defineProperty(exports, 'Dialog', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Dialog).default;
}
});
Object.defineProperty(exports, 'DialogActions', {
enumerable: true,
get: function get() {
return _Dialog.DialogActions;
}
});
Object.defineProperty(exports, 'DialogContent', {
enumerable: true,
get: function get() {
return _Dialog.DialogContent;
}
});
Object.defineProperty(exports, 'DialogContentText', {
enumerable: true,
get: function get() {
return _Dialog.DialogContentText;
}
});
Object.defineProperty(exports, 'DialogTitle', {
enumerable: true,
get: function get() {
return _Dialog.DialogTitle;
}
});
Object.defineProperty(exports, 'withMobileDialog', {
enumerable: true,
get: function get() {
return _Dialog.withMobileDialog;
}
});
var _Divider = require('./Divider');
Object.defineProperty(exports, 'Divider', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Divider).default;
}
});
var _Drawer = require('./Drawer');
Object.defineProperty(exports, 'Drawer', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Drawer).default;
}
});
var _ExpansionPanel = require('./ExpansionPanel');
Object.defineProperty(exports, 'ExpansionPanel', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_ExpansionPanel).default;
}
});
Object.defineProperty(exports, 'ExpansionPanelActions', {
enumerable: true,
get: function get() {
return _ExpansionPanel.ExpansionPanelActions;
}
});
Object.defineProperty(exports, 'ExpansionPanelDetails', {
enumerable: true,
get: function get() {
return _ExpansionPanel.ExpansionPanelDetails;
}
});
Object.defineProperty(exports, 'ExpansionPanelSummary', {
enumerable: true,
get: function get() {
return _ExpansionPanel.ExpansionPanelSummary;
}
});
var _Form = require('./Form');
Object.defineProperty(exports, 'FormControl', {
enumerable: true,
get: function get() {
return _Form.FormControl;
}
});
Object.defineProperty(exports, 'FormGroup', {
enumerable: true,
get: function get() {
return _Form.FormGroup;
}
});
Object.defineProperty(exports, 'FormLabel', {
enumerable: true,
get: function get() {
return _Form.FormLabel;
}
});
Object.defineProperty(exports, 'FormHelperText', {
enumerable: true,
get: function get() {
return _Form.FormHelperText;
}
});
Object.defineProperty(exports, 'FormControlLabel', {
enumerable: true,
get: function get() {
return _Form.FormControlLabel;
}
});
var _Hidden = require('./Hidden');
Object.defineProperty(exports, 'Hidden', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Hidden).default;
}
});
var _Icon = require('./Icon');
Object.defineProperty(exports, 'Icon', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Icon).default;
}
});
var _IconButton = require('./IconButton');
Object.defineProperty(exports, 'IconButton', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_IconButton).default;
}
});
var _Input = require('./Input');
Object.defineProperty(exports, 'Input', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Input).default;
}
});
Object.defineProperty(exports, 'InputLabel', {
enumerable: true,
get: function get() {
return _Input.InputLabel;
}
});
Object.defineProperty(exports, 'InputAdornment', {
enumerable: true,
get: function get() {
return _Input.InputAdornment;
}
});
var _Grid = require('./Grid');
Object.defineProperty(exports, 'Grid', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Grid).default;
}
});
var _GridList = require('./GridList');
Object.defineProperty(exports, 'GridList', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_GridList).default;
}
});
Object.defineProperty(exports, 'GridListTile', {
enumerable: true,
get: function get() {
return _GridList.GridListTile;
}
});
Object.defineProperty(exports, 'GridListTileBar', {
enumerable: true,
get: function get() {
return _GridList.GridListTileBar;
}
});
var _List = require('./List');
Object.defineProperty(exports, 'List', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_List).default;
}
});
Object.defineProperty(exports, 'ListItem', {
enumerable: true,
get: function get() {
return _List.ListItem;
}
});
Object.defineProperty(exports, 'ListItemAvatar', {
enumerable: true,
get: function get() {
return _List.ListItemAvatar;
}
});
Object.defineProperty(exports, 'ListItemIcon', {
enumerable: true,
get: function get() {
return _List.ListItemIcon;
}
});
Object.defineProperty(exports, 'ListItemSecondaryAction', {
enumerable: true,
get: function get() {
return _List.ListItemSecondaryAction;
}
});
Object.defineProperty(exports, 'ListItemText', {
enumerable: true,
get: function get() {
return _List.ListItemText;
}
});
Object.defineProperty(exports, 'ListSubheader', {
enumerable: true,
get: function get() {
return _List.ListSubheader;
}
});
var _Menu = require('./Menu');
Object.defineProperty(exports, 'Menu', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Menu).default;
}
});
Object.defineProperty(exports, 'MenuItem', {
enumerable: true,
get: function get() {
return _Menu.MenuItem;
}
});
Object.defineProperty(exports, 'MenuList', {
enumerable: true,
get: function get() {
return _Menu.MenuList;
}
});
var _MobileStepper = require('./MobileStepper');
Object.defineProperty(exports, 'MobileStepper', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_MobileStepper).default;
}
});
var _Modal = require('./Modal');
Object.defineProperty(exports, 'Modal', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Modal).default;
}
});
Object.defineProperty(exports, 'Backdrop', {
enumerable: true,
get: function get() {
return _Modal.Backdrop;
}
});
Object.defineProperty(exports, 'ModalManager', {
enumerable: true,
get: function get() {
return _Modal.ModalManager;
}
});
var _Paper = require('./Paper');
Object.defineProperty(exports, 'Paper', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Paper).default;
}
});
var _Popover = require('./Popover');
Object.defineProperty(exports, 'Popover', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Popover).default;
}
});
var _Portal = require('./Portal');
Object.defineProperty(exports, 'Portal', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Portal).default;
}
});
var _Progress = require('./Progress');
Object.defineProperty(exports, 'CircularProgress', {
enumerable: true,
get: function get() {
return _Progress.CircularProgress;
}
});
Object.defineProperty(exports, 'LinearProgress', {
enumerable: true,
get: function get() {
return _Progress.LinearProgress;
}
});
var _Radio = require('./Radio');
Object.defineProperty(exports, 'Radio', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Radio).default;
}
});
Object.defineProperty(exports, 'RadioGroup', {
enumerable: true,
get: function get() {
return _Radio.RadioGroup;
}
});
var _Select = require('./Select');
Object.defineProperty(exports, 'Select', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Select).default;
}
});
var _Snackbar = require('./Snackbar');
Object.defineProperty(exports, 'Snackbar', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Snackbar).default;
}
});
Object.defineProperty(exports, 'SnackbarContent', {
enumerable: true,
get: function get() {
return _Snackbar.SnackbarContent;
}
});
var _Stepper = require('./Stepper');
Object.defineProperty(exports, 'Stepper', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Stepper).default;
}
});
Object.defineProperty(exports, 'Step', {
enumerable: true,
get: function get() {
return _Stepper.Step;
}
});
Object.defineProperty(exports, 'StepButton', {
enumerable: true,
get: function get() {
return _Stepper.StepButton;
}
});
Object.defineProperty(exports, 'StepIcon', {
enumerable: true,
get: function get() {
return _Stepper.StepIcon;
}
});
Object.defineProperty(exports, 'StepContent', {
enumerable: true,
get: function get() {
return _Stepper.StepContent;
}
});
Object.defineProperty(exports, 'StepLabel', {
enumerable: true,
get: function get() {
return _Stepper.StepLabel;
}
});
var _styles = require('./styles');
Object.defineProperty(exports, 'MuiThemeProvider', {
enumerable: true,
get: function get() {
return _styles.MuiThemeProvider;
}
});
Object.defineProperty(exports, 'withStyles', {
enumerable: true,
get: function get() {
return _styles.withStyles;
}
});
Object.defineProperty(exports, 'withTheme', {
enumerable: true,
get: function get() {
return _styles.withTheme;
}
});
Object.defineProperty(exports, 'createMuiTheme', {
enumerable: true,
get: function get() {
return _styles.createMuiTheme;
}
});
Object.defineProperty(exports, 'jssPreset', {
enumerable: true,
get: function get() {
return _styles.jssPreset;
}
});
var _SvgIcon = require('./SvgIcon');
Object.defineProperty(exports, 'SvgIcon', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_SvgIcon).default;
}
});
var _SwipeableDrawer = require('./SwipeableDrawer');
Object.defineProperty(exports, 'SwipeableDrawer', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_SwipeableDrawer).default;
}
});
var _Switch = require('./Switch');
Object.defineProperty(exports, 'Switch', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Switch).default;
}
});
var _Table = require('./Table');
Object.defineProperty(exports, 'Table', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Table).default;
}
});
Object.defineProperty(exports, 'TableBody', {
enumerable: true,
get: function get() {
return _Table.TableBody;
}
});
Object.defineProperty(exports, 'TableCell', {
enumerable: true,
get: function get() {
return _Table.TableCell;
}
});
Object.defineProperty(exports, 'TableFooter', {
enumerable: true,
get: function get() {
return _Table.TableFooter;
}
});
Object.defineProperty(exports, 'TableHead', {
enumerable: true,
get: function get() {
return _Table.TableHead;
}
});
Object.defineProperty(exports, 'TablePagination', {
enumerable: true,
get: function get() {
return _Table.TablePagination;
}
});
Object.defineProperty(exports, 'TableRow', {
enumerable: true,
get: function get() {
return _Table.TableRow;
}
});
Object.defineProperty(exports, 'TableSortLabel', {
enumerable: true,
get: function get() {
return _Table.TableSortLabel;
}
});
var _Tabs = require('./Tabs');
Object.defineProperty(exports, 'Tabs', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Tabs).default;
}
});
Object.defineProperty(exports, 'Tab', {
enumerable: true,
get: function get() {
return _Tabs.Tab;
}
});
var _Typography = require('./Typography');
Object.defineProperty(exports, 'Typography', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Typography).default;
}
});
var _TextField = require('./TextField');
Object.defineProperty(exports, 'TextField', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_TextField).default;
}
});
var _Toolbar = require('./Toolbar');
Object.defineProperty(exports, 'Toolbar', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Toolbar).default;
}
});
var _Tooltip = require('./Tooltip');
Object.defineProperty(exports, 'Tooltip', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_Tooltip).default;
}
});
var _transitions = require('./transitions');
Object.defineProperty(exports, 'Slide', {
enumerable: true,
get: function get() {
return _transitions.Slide;
}
});
Object.defineProperty(exports, 'Grow', {
enumerable: true,
get: function get() {
return _transitions.Grow;
}
});
Object.defineProperty(exports, 'Fade', {
enumerable: true,
get: function get() {
return _transitions.Fade;
}
});
Object.defineProperty(exports, 'Collapse', {
enumerable: true,
get: function get() {
return _transitions.Collapse;
}
});
Object.defineProperty(exports, 'Zoom', {
enumerable: true,
get: function get() {
return _transitions.Zoom;
}
});
var _withWidth = require('./utils/withWidth');
Object.defineProperty(exports, 'withWidth', {
enumerable: true,
get: function get() {
return _interopRequireDefault(_withWidth).default;
}
});
var _colors = require('./colors');
var colors = _interopRequireWildcard(_colors);
function
|
(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// eslint-disable-next-line import/first
exports.colors = colors;
|
_interopRequireWildcard
|
wasi.rs
|
use crate::utils::{parse_envvar, parse_mapdir};
use anyhow::Result;
use std::collections::BTreeSet;
use std::path::PathBuf;
use wasmer::{Instance, Module, RuntimeError, Val};
use wasmer_wasi::{get_wasi_versions, is_wasix_module, WasiError, WasiState, WasiVersion};
use structopt::StructOpt;
#[derive(Debug, StructOpt, Clone, Default)]
/// WASI Options
pub struct Wasi {
/// WASI pre-opened directory
#[structopt(
long = "dir",
name = "DIR",
multiple = true,
group = "wasi",
number_of_values = 1
)]
pre_opened_directories: Vec<PathBuf>,
/// Map a host directory to a different location for the Wasm module
#[structopt(
long = "mapdir",
name = "GUEST_DIR:HOST_DIR",
multiple = true,
parse(try_from_str = parse_mapdir),
number_of_values = 1,
)]
mapped_dirs: Vec<(String, PathBuf)>,
/// Pass custom environment variables
#[structopt(
long = "env",
name = "KEY=VALUE",
multiple = true,
parse(try_from_str = parse_envvar),
)]
env_vars: Vec<(String, String)>,
/// Enable experimental IO devices
#[cfg(feature = "experimental-io-devices")]
#[cfg_attr(
feature = "experimental-io-devices",
structopt(long = "enable-experimental-io-devices")
)]
enable_experimental_io_devices: bool,
/// Allow WASI modules to import multiple versions of WASI without a warning.
#[structopt(long = "allow-multiple-wasi-versions")]
pub allow_multiple_wasi_versions: bool,
/// Require WASI modules to only import 1 version of WASI.
#[structopt(long = "deny-multiple-wasi-versions")]
pub deny_multiple_wasi_versions: bool,
}
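// Illustrative invocation sketch (added commentary; assumes these StructOpt flags are
// surfaced through the `wasmer run` front-end, which is not shown in this file):
//
//   wasmer run app.wasm --dir=. --mapdir=/data:./data --env KEY=VALUE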
#[allow(dead_code)]
impl Wasi {
/// Gets the WASI version (if any) for the provided module
pub fn get_versions(module: &Module) -> Option<BTreeSet<WasiVersion>> {
// Get the wasi version in strict mode, so no other imports are
// allowed.
get_wasi_versions(module, true)
}
/// Checks if a given module has any WASI imports at all.
pub fn has_wasi_imports(module: &Module) -> bool {
// Get the wasi version in non-strict mode, so other (non-WASI)
// imports are also allowed
get_wasi_versions(module, false).is_some()
}
/// Helper function for instantiating a module with Wasi imports for the `Run` command.
pub fn instantiate(
&self,
module: &Module,
program_name: String,
args: Vec<String>,
) -> Result<Instance> {
let args = args.iter().cloned().map(|arg| arg.into_bytes());
let mut wasi_state_builder = WasiState::new(program_name);
wasi_state_builder
.args(args)
.envs(self.env_vars.clone())
.preopen_dirs(self.pre_opened_directories.clone())?
.map_dirs(self.mapped_dirs.clone())?;
#[cfg(feature = "experimental-io-devices")]
{
if self.enable_experimental_io_devices {
wasi_state_builder
.setup_fs(Box::new(wasmer_wasi_experimental_io_devices::initialize));
}
}
let mut wasi_env = wasi_state_builder.finalize()?;
wasi_env.state.fs.is_wasix.store(
is_wasix_module(module),
std::sync::atomic::Ordering::Release,
);
let import_object = wasi_env.import_object_for_all_wasi_versions(module)?;
let instance = Instance::new(module, &import_object)?;
Ok(instance)
}
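// Illustrative caller sketch (added commentary; `module`, the "_start" export and the
// surrounding error handling are assumptions, not part of this file):
//
//   let instance = wasi.instantiate(&module, "app".to_string(), vec![])?;
//   let start = instance.exports.get_function("_start")?;
//   wasi.handle_result(start.call(&[]))?;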
/// Helper function for handling the result of a Wasi _start function.
pub fn handle_result(&self, result: Result<Box<[Val]>, RuntimeError>) -> Result<()> {
match result {
Ok(_) => Ok(()),
Err(err) => {
let err: anyhow::Error = match err.downcast::<WasiError>() {
Ok(WasiError::Exit(exit_code)) => {
// We should exit with the provided exit code
std::process::exit(exit_code as _);
}
Ok(err) => err.into(),
Err(err) => err.into(),
};
Err(err)
}
}
}
pub fn for_binfmt_interpreter() -> Result<Self>
|
}
|
{
use std::env;
let dir = env::var_os("WASMER_BINFMT_MISC_PREOPEN")
.map(Into::into)
.unwrap_or_else(|| PathBuf::from("."));
Ok(Self {
deny_multiple_wasi_versions: true,
env_vars: env::vars().collect(),
pre_opened_directories: vec![dir],
..Self::default()
})
}
|
__init__.py
|
"""The Emporia Vue integration."""
import asyncio
from datetime import datetime, timedelta
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
import logging
from pyemvue import PyEmVue
from pyemvue.device import VueDeviceChannel
from pyemvue.enums import Scale
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from .const import DOMAIN, VUE_DATA, ENABLE_1M, ENABLE_1D, ENABLE_1MON
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_EMAIL): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(ENABLE_1M, default=True): cv.boolean,
vol.Optional(ENABLE_1D, default=True): cv.boolean,
vol.Optional(ENABLE_1MON, default=True): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
_LOGGER = logging.getLogger(__name__)
PLATFORMS = ["sensor", "switch"]
device_gids = []
device_information = {}
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Emporia Vue component."""
hass.data.setdefault(DOMAIN, {})
conf = config.get(DOMAIN)
if not conf:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_EMAIL: conf[CONF_EMAIL],
CONF_PASSWORD: conf[CONF_PASSWORD],
ENABLE_1M: conf[ENABLE_1M],
ENABLE_1D: conf[ENABLE_1D],
ENABLE_1MON: conf[ENABLE_1MON],
},
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Emporia Vue from a config entry."""
global device_gids
global device_information
device_gids = []
device_information = {}
entry_data = entry.data
email = entry_data[CONF_EMAIL]
password = entry_data[CONF_PASSWORD]
# _LOGGER.info(entry_data)
vue = PyEmVue()
loop = asyncio.get_event_loop()
try:
result = await loop.run_in_executor(None, vue.login, email, password)
if not result:
raise Exception("Could not authenticate with Emporia API")
except Exception:
_LOGGER.error("Could not authenticate with Emporia API")
return False
scales_1hr = []
try:
devices = await loop.run_in_executor(None, vue.get_devices)
total_channels = 0
for d in devices:
total_channels += len(d.channels)
_LOGGER.info(
"Found {0} Emporia devices with {1} total channels".format(
len(devices), total_channels
)
)
for device in devices:
if not device.device_gid in device_gids:
device_gids.append(device.device_gid)
await loop.run_in_executor(None, vue.populate_device_properties, device)
device_information[device.device_gid] = device
else:
device_information[device.device_gid].channels += device.channels
async def async_update_data_1min():
"""Fetch data from API endpoint at a 1 minute interval
This is the place to pre-process the data to lookup tables
so entities can quickly look up their data.
"""
return await update_sensors(vue, [Scale.MINUTE.value])
async def async_update_data_1hr():
"""Fetch data from API endpoint at a 1 hour interval
This is the place to pre-process the data to lookup tables
so entities can quickly look up their data.
"""
return await update_sensors(vue, scales_1hr)
if ENABLE_1D not in entry_data or entry_data[ENABLE_1D]:
scales_1hr.append(Scale.DAY.value)
if ENABLE_1MON not in entry_data or entry_data[ENABLE_1MON]:
scales_1hr.append(Scale.MONTH.value)
coordinator_1min = None
if ENABLE_1M not in entry_data or entry_data[ENABLE_1M]:
coordinator_1min = DataUpdateCoordinator(
hass,
_LOGGER,
# Name of the data. For logging purposes.
name="sensor",
update_method=async_update_data_1min,
# Polling interval. Will only be polled if there are subscribers.
update_interval=timedelta(minutes=1),
)
await coordinator_1min.async_config_entry_first_refresh()
_LOGGER.info(f"1min Update data: {coordinator_1min.data}")
        coordinator_1hr = None
        if scales_1hr:
coordinator_1hr = DataUpdateCoordinator(
hass,
_LOGGER,
# Name of the data. For logging purposes.
name="sensor",
update_method=async_update_data_1hr,
# Polling interval. Will only be polled if there are subscribers.
update_interval=timedelta(hours=1),
)
await coordinator_1hr.async_config_entry_first_refresh()
_LOGGER.info(f"1hr Update data: {coordinator_1hr.data}")
except Exception as err:
_LOGGER.warn(f"Exception while setting up Emporia Vue. Will retry. {err}")
raise ConfigEntryNotReady(
f"Exception while setting up Emporia Vue. Will retry. {err}"
)
hass.data[DOMAIN][entry.entry_id] = {
VUE_DATA: vue,
"coordinator_1min": coordinator_1min,
"coordinator_1hr": coordinator_1hr
}
try:
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
except Exception as err:
_LOGGER.warn(f"Error setting up platforms: {err}")
raise ConfigEntryNotReady(f"Error setting up platforms: {err}")
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
async def update_sensors(vue, scales):
try:
# Note: asyncio.TimeoutError and aiohttp.ClientError are already
# handled by the data update coordinator.
data = {}
loop = asyncio.get_event_loop()
for scale in scales:
now = datetime.utcnow()
channels = await loop.run_in_executor(
None, vue.get_devices_usage, device_gids, now, scale
)
if not channels:
_LOGGER.warn(
f"No channels found during update for scale {scale}. Retrying..."
)
channels = await loop.run_in_executor(
None, vue.get_devices_usage, device_gids, now, scale
)
if channels:
for channel in channels:
id = "{0}-{1}-{2}".format(
channel.device_gid, channel.channel_num, scale
)
usage = round(channel.usage, 3)
if scale == Scale.MINUTE.value:
usage = round(
60 * 1000 * channel.usage
) # convert from kwh to w rate
elif scale == Scale.SECOND.value:
usage = round(3600 * 1000 * channel.usage) # convert to rate
elif scale == Scale.MINUTES_15.value:
|
info = None
if channel.device_gid in device_information:
info = device_information[channel.device_gid]
if channel.channel_num in ["MainsFromGrid", "MainsToGrid"]:
found = False
channel_123 = None
for channel2 in info.channels:
if channel2.channel_num == channel.channel_num:
found = True
break
elif channel2.channel_num == "1,2,3":
channel_123 = channel2
if not found:
_LOGGER.info(
f"Adding channel for channel {channel.device_gid}-{channel.channel_num}"
)
info.channels.append(
VueDeviceChannel(
gid=channel.device_gid,
name=None,
channelNum=channel.channel_num,
channelMultiplier=channel_123.channel_multiplier,
channelTypeGid=channel_123.channel_type_gid,
)
)
data[id] = {
"device_gid": channel.device_gid,
"channel_num": channel.channel_num,
"usage": usage,
"scale": scale,
"info": info,
}
else:
raise UpdateFailed(f"No channels found during update for scale {scale}")
return data
except Exception as err:
_LOGGER.error(f"Error communicating with Emporia API: {err}")
raise UpdateFailed(f"Error communicating with Emporia API: {err}")
|
usage = round(
4 * 1000 * channel.usage
) # this might never be used but for safety, convert to rate
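# Added commentary on the scale conversions above (assumes channel.usage is reported
# in kWh accumulated over the polled interval):
#   1-minute scale:  W = kWh * 1000 W/kW * 60 intervals/hour -> usage * 60 * 1000
#   1-second scale:  W = kWh * 1000 * 3600                   -> usage * 3600 * 1000
#   15-minute scale: W = kWh * 1000 * 4                      -> usage * 4 * 1000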
|
app.d2aa8f2d2c6ee55b8886.js
|
'use strict';
var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; };
/******/(function (modules) {
// webpackBootstrap
/******/ // The module cache
/******/var installedModules = {};
/******/
/******/ // The require function
/******/function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/if (installedModules[moduleId]) {
/******/return installedModules[moduleId].exports;
/******/
}
/******/ // Create a new module (and put it into the cache)
/******/var module = installedModules[moduleId] = {
/******/i: moduleId,
/******/l: false,
/******/exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/module.l = true;
/******/
/******/ // Return the exports of the module
/******/return module.exports;
/******/
}
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/__webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/__webpack_require__.c = installedModules;
/******/
/******/ // define getter function for harmony exports
/******/__webpack_require__.d = function (exports, name, getter) {
/******/if (!__webpack_require__.o(exports, name)) {
/******/Object.defineProperty(exports, name, {
/******/configurable: false,
/******/enumerable: true,
/******/get: getter
/******/ });
/******/
}
/******/
};
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/__webpack_require__.n = function (module) {
/******/var getter = module && module.__esModule ?
/******/function getDefault() {
return module['default'];
} :
/******/function getModuleExports() {
return module;
};
/******/__webpack_require__.d(getter, 'a', getter);
/******/return getter;
/******/
};
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/__webpack_require__.o = function (object, property) {
return Object.prototype.hasOwnProperty.call(object, property);
};
/******/
/******/ // __webpack_public_path__
/******/__webpack_require__.p = "";
/******/
/******/ // Load entry module and return exports
/******/return __webpack_require__(__webpack_require__.s = 0);
/******/
})(
/************************************************************************/
/******/{
/***/"./node_modules/axios/index.js":
/***/function node_modulesAxiosIndexJs(module, exports, __webpack_require__) {
module.exports = __webpack_require__("./node_modules/axios/lib/axios.js");
/***/
},
/***/"./node_modules/axios/lib/adapters/xhr.js":
/***/function node_modulesAxiosLibAdaptersXhrJs(module, exports, __webpack_require__) {
"use strict";
var utils = __webpack_require__("./node_modules/axios/lib/utils.js");
var settle = __webpack_require__("./node_modules/axios/lib/core/settle.js");
var buildURL = __webpack_require__("./node_modules/axios/lib/helpers/buildURL.js");
var parseHeaders = __webpack_require__("./node_modules/axios/lib/helpers/parseHeaders.js");
var isURLSameOrigin = __webpack_require__("./node_modules/axios/lib/helpers/isURLSameOrigin.js");
var createError = __webpack_require__("./node_modules/axios/lib/core/createError.js");
var btoa = typeof window !== 'undefined' && window.btoa && window.btoa.bind(window) || __webpack_require__("./node_modules/axios/lib/helpers/btoa.js");
module.exports = function xhrAdapter(config) {
return new Promise(function dispatchXhrRequest(resolve, reject) {
var requestData = config.data;
var requestHeaders = config.headers;
if (utils.isFormData(requestData)) {
delete requestHeaders['Content-Type']; // Let the browser set it
}
var request = new XMLHttpRequest();
var loadEvent = 'onreadystatechange';
var xDomain = false;
// For IE 8/9 CORS support
// Only supports POST and GET calls and doesn't return the response headers.
// DON'T do this for testing b/c XMLHttpRequest is mocked, not XDomainRequest.
if ("development" !== 'test' && typeof window !== 'undefined' && window.XDomainRequest && !('withCredentials' in request) && !isURLSameOrigin(config.url)) {
request = new window.XDomainRequest();
loadEvent = 'onload';
xDomain = true;
request.onprogress = function handleProgress() {};
request.ontimeout = function handleTimeout() {};
}
// HTTP basic authentication
if (config.auth) {
var username = config.auth.username || '';
var password = config.auth.password || '';
requestHeaders.Authorization = 'Basic ' + btoa(username + ':' + password);
}
request.open(config.method.toUpperCase(), buildURL(config.url, config.params, config.paramsSerializer), true);
// Set the request timeout in MS
request.timeout = config.timeout;
// Listen for ready state
request[loadEvent] = function handleLoad() {
if (!request || request.readyState !== 4 && !xDomain) {
return;
}
// The request errored out and we didn't get a response, this will be
// handled by onerror instead
// With one exception: requests using the file: protocol, where most browsers
// will return a status of 0 even though the request was successful
if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) {
return;
}
// Prepare the response
var responseHeaders = 'getAllResponseHeaders' in request ? parseHeaders(request.getAllResponseHeaders()) : null;
var responseData = !config.responseType || config.responseType === 'text' ? request.responseText : request.response;
var response = {
data: responseData,
// IE sends 1223 instead of 204 (https://github.com/mzabriskie/axios/issues/201)
status: request.status === 1223 ? 204 : request.status,
statusText: request.status === 1223 ? 'No Content' : request.statusText,
headers: responseHeaders,
config: config,
request: request
};
settle(resolve, reject, response);
// Clean up request
request = null;
};
// Handle low level network errors
request.onerror = function handleError() {
// Real errors are hidden from us by the browser
// onerror should only fire if it's a network error
reject(createError('Network Error', config));
// Clean up request
request = null;
};
// Handle timeout
request.ontimeout = function handleTimeout() {
reject(createError('timeout of ' + config.timeout + 'ms exceeded', config, 'ECONNABORTED'));
// Clean up request
request = null;
};
// Add xsrf header
// This is only done if running in a standard browser environment.
// Specifically not if we're in a web worker, or react-native.
if (utils.isStandardBrowserEnv()) {
var cookies = __webpack_require__("./node_modules/axios/lib/helpers/cookies.js");
// Add xsrf header
var xsrfValue = (config.withCredentials || isURLSameOrigin(config.url)) && config.xsrfCookieName ? cookies.read(config.xsrfCookieName) : undefined;
if (xsrfValue) {
requestHeaders[config.xsrfHeaderName] = xsrfValue;
}
}
// Add headers to the request
if ('setRequestHeader' in request) {
utils.forEach(requestHeaders, function setRequestHeader(val, key) {
if (typeof requestData === 'undefined' && key.toLowerCase() === 'content-type') {
// Remove Content-Type if data is undefined
delete requestHeaders[key];
} else {
// Otherwise add header to the request
request.setRequestHeader(key, val);
}
});
}
// Add withCredentials to request if needed
if (config.withCredentials) {
request.withCredentials = true;
}
// Add responseType to request if needed
if (config.responseType) {
try {
request.responseType = config.responseType;
} catch (e) {
if (request.responseType !== 'json') {
throw e;
}
}
}
// Handle progress if needed
if (typeof config.onDownloadProgress === 'function') {
request.addEventListener('progress', config.onDownloadProgress);
}
// Not all browsers support upload events
if (typeof config.onUploadProgress === 'function' && request.upload) {
request.upload.addEventListener('progress', config.onUploadProgress);
}
if (config.cancelToken) {
// Handle cancellation
config.cancelToken.promise.then(function onCanceled(cancel) {
if (!request) {
return;
}
request.abort();
reject(cancel);
// Clean up request
request = null;
});
}
if (requestData === undefined) {
requestData = null;
}
// Send the request
request.send(requestData);
});
};
/***/
},
/***/"./node_modules/axios/lib/axios.js":
/***/function node_modulesAxiosLibAxiosJs(module, exports, __webpack_require__) {
"use strict";
var utils = __webpack_require__("./node_modules/axios/lib/utils.js");
var bind = __webpack_require__("./node_modules/axios/lib/helpers/bind.js");
var Axios = __webpack_require__("./node_modules/axios/lib/core/Axios.js");
var defaults = __webpack_require__("./node_modules/axios/lib/defaults.js");
/**
* Create an instance of Axios
*
* @param {Object} defaultConfig The default config for the instance
* @return {Axios} A new instance of Axios
*/
function createInstance(defaultConfig) {
var context = new Axios(defaultConfig);
var instance = bind(Axios.prototype.request, context);
// Copy axios.prototype to instance
utils.extend(instance, Axios.prototype, context);
// Copy context to instance
utils.extend(instance, context);
return instance;
}
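// Added commentary: the instance returned here is a callable bound to
// Axios.prototype.request with the Axios prototype and context merged in, so
// (for example, assuming the default export) both of these dispatch the same way:
//   axios({ url: '/users', method: 'get' });
//   axios.get('/users');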
// Create the default instance to be exported
var axios = createInstance(defaults);
// Expose Axios class to allow class inheritance
axios.Axios = Axios;
// Factory for creating new instances
axios.create = function create(instanceConfig) {
return createInstance(utils.merge(defaults, instanceConfig));
};
// Expose Cancel & CancelToken
axios.Cancel = __webpack_require__("./node_modules/axios/lib/cancel/Cancel.js");
axios.CancelToken = __webpack_require__("./node_modules/axios/lib/cancel/CancelToken.js");
axios.isCancel = __webpack_require__("./node_modules/axios/lib/cancel/isCancel.js");
// Expose all/spread
axios.all = function all(promises) {
return Promise.all(promises);
};
axios.spread = __webpack_require__("./node_modules/axios/lib/helpers/spread.js");
module.exports = axios;
// Allow use of default import syntax in TypeScript
module.exports.default = axios;
/***/
},
/***/"./node_modules/axios/lib/cancel/Cancel.js":
/***/function node_modulesAxiosLibCancelCancelJs(module, exports, __webpack_require__) {
"use strict";
/**
* A `Cancel` is an object that is thrown when an operation is canceled.
*
* @class
* @param {string=} message The message.
*/
function Cancel(message) {
this.message = message;
}
Cancel.prototype.toString = function toString() {
return 'Cancel' + (this.message ? ': ' + this.message : '');
};
Cancel.prototype.__CANCEL__ = true;
module.exports = Cancel;
/***/
},
/***/"./node_modules/axios/lib/cancel/CancelToken.js":
/***/function node_modulesAxiosLibCancelCancelTokenJs(module, exports, __webpack_require__) {
"use strict";
var Cancel = __webpack_require__("./node_modules/axios/lib/cancel/Cancel.js");
/**
* A `CancelToken` is an object that can be used to request cancellation of an operation.
*
* @class
* @param {Function} executor The executor function.
*/
function CancelToken(executor) {
if (typeof executor !== 'function') {
throw new TypeError('executor must be a function.');
}
var resolvePromise;
this.promise = new Promise(function promiseExecutor(resolve) {
resolvePromise = resolve;
});
var token = this;
executor(function cancel(message) {
if (token.reason) {
// Cancellation has already been requested
return;
}
token.reason = new Cancel(message);
resolvePromise(token.reason);
});
}
/**
* Throws a `Cancel` if cancellation has been requested.
*/
CancelToken.prototype.throwIfRequested = function throwIfRequested() {
if (this.reason) {
throw this.reason;
}
};
/**
* Returns an object that contains a new `CancelToken` and a function that, when called,
* cancels the `CancelToken`.
*/
CancelToken.source = function source() {
var cancel;
var token = new CancelToken(function executor(c) {
cancel = c;
});
return {
token: token,
cancel: cancel
};
};
module.exports = CancelToken;
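// Illustrative usage sketch (added commentary; the axios request call is an assumption
// about the public API re-exported from axios.js above):
//   var source = CancelToken.source();
//   axios.get('/users', { cancelToken: source.token });
//   source.cancel('Operation canceled by the user.');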
/***/
},
/***/"./node_modules/axios/lib/cancel/isCancel.js":
/***/function node_modulesAxiosLibCancelIsCancelJs(module, exports, __webpack_require__) {
"use strict";
module.exports = function isCancel(value) {
return !!(value && value.__CANCEL__);
};
/***/
},
/***/"./node_modules/axios/lib/core/Axios.js":
/***/function node_modulesAxiosLibCoreAxiosJs(module, exports, __webpack_require__) {
"use strict";
var defaults = __webpack_require__("./node_modules/axios/lib/defaults.js");
var utils = __webpack_require__("./node_modules/axios/lib/utils.js");
var InterceptorManager = __webpack_require__("./node_modules/axios/lib/core/InterceptorManager.js");
var dispatchRequest = __webpack_require__("./node_modules/axios/lib/core/dispatchRequest.js");
var isAbsoluteURL = __webpack_require__("./node_modules/axios/lib/helpers/isAbsoluteURL.js");
var combineURLs = __webpack_require__("./node_modules/axios/lib/helpers/combineURLs.js");
/**
* Create a new instance of Axios
*
* @param {Object} instanceConfig The default config for the instance
*/
function Axios(instanceConfig) {
this.defaults = instanceConfig;
this.interceptors = {
request: new InterceptorManager(),
response: new InterceptorManager()
};
}
/**
* Dispatch a request
*
* @param {Object} config The config specific for this request (merged with this.defaults)
*/
Axios.prototype.request = function request(config) {
/*eslint no-param-reassign:0*/
// Allow for axios('example/url'[, config]) a la fetch API
if (typeof config === 'string') {
config = utils.merge({
url: arguments[0]
}, arguments[1]);
}
config = utils.merge(defaults, this.defaults, { method: 'get' }, config);
// Support baseURL config
if (config.baseURL && !isAbsoluteURL(config.url)) {
config.url = combineURLs(config.baseURL, config.url);
}
// Hook up interceptors middleware
var chain = [dispatchRequest, undefined];
var promise = Promise.resolve(config);
this.interceptors.request.forEach(function unshiftRequestInterceptors(interceptor) {
chain.unshift(interceptor.fulfilled, interceptor.rejected);
});
this.interceptors.response.forEach(function pushResponseInterceptors(interceptor) {
chain.push(interceptor.fulfilled, interceptor.rejected);
});
while (chain.length) {
promise = promise.then(chain.shift(), chain.shift());
}
return promise;
};
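// Added commentary on the chain built above: request interceptors are unshifted, so they
// run in reverse registration order before dispatchRequest, while response interceptors
// are pushed and run in registration order afterwards. For example (hypothetical handler):
//   axios.interceptors.request.use(function (config) { config.headers['X-Trace'] = '1'; return config; });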
// Provide aliases for supported request methods
utils.forEach(['delete', 'get', 'head'], function forEachMethodNoData(method) {
/*eslint func-names:0*/
Axios.prototype[method] = function (url, config) {
return this.request(utils.merge(config || {}, {
method: method,
url: url
}));
};
});
utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) {
/*eslint func-names:0*/
Axios.prototype[method] = function (url, data, config) {
return this.request(utils.merge(config || {}, {
method: method,
url: url,
data: data
}));
};
});
module.exports = Axios;
/***/
},
/***/"./node_modules/axios/lib/core/InterceptorManager.js":
/***/function node_modulesAxiosLibCoreInterceptorManagerJs(module, exports, __webpack_require__) {
"use strict";
var utils = __webpack_require__("./node_modules/axios/lib/utils.js");
function InterceptorManager() {
this.handlers = [];
}
/**
* Add a new interceptor to the stack
*
* @param {Function} fulfilled The function to handle `then` for a `Promise`
* @param {Function} rejected The function to handle `reject` for a `Promise`
*
* @return {Number} An ID used to remove interceptor later
*/
InterceptorManager.prototype.use = function use(fulfilled, rejected) {
this.handlers.push({
fulfilled: fulfilled,
rejected: rejected
});
return this.handlers.length - 1;
};
/**
* Remove an interceptor from the stack
*
* @param {Number} id The ID that was returned by `use`
*/
InterceptorManager.prototype.eject = function eject(id) {
if (this.handlers[id]) {
this.handlers[id] = null;
}
};
/**
* Iterate over all the registered interceptors
*
* This method is particularly useful for skipping over any
* interceptors that may have become `null` by calling `eject`.
*
* @param {Function} fn The function to call for each interceptor
*/
InterceptorManager.prototype.forEach = function forEach(fn) {
utils.forEach(this.handlers, function forEachHandler(h) {
if (h !== null) {
fn(h);
}
});
};
module.exports = InterceptorManager;
/***/
},
/***/"./node_modules/axios/lib/core/createError.js":
/***/function node_modulesAxiosLibCoreCreateErrorJs(module, exports, __webpack_require__) {
"use strict";
var enhanceError = __webpack_require__("./node_modules/axios/lib/core/enhanceError.js");
/**
* Create an Error with the specified message, config, error code, and response.
*
* @param {string} message The error message.
* @param {Object} config The config.
* @param {string} [code] The error code (for example, 'ECONNABORTED').
* @param {Object} [response] The response.
* @returns {Error} The created error.
*/
module.exports = function createError(message, config, code, response) {
var error = new Error(message);
return enhanceError(error, config, code, response);
};
/***/
},
/***/"./node_modules/axios/lib/core/dispatchRequest.js":
/***/function node_modulesAxiosLibCoreDispatchRequestJs(module, exports, __webpack_require__) {
"use strict";
var utils = __webpack_require__("./node_modules/axios/lib/utils.js");
var transformData = __webpack_require__("./node_modules/axios/lib/core/transformData.js");
var isCancel = __webpack_require__("./node_modules/axios/lib/cancel/isCancel.js");
var defaults = __webpack_require__("./node_modules/axios/lib/defaults.js");
/**
* Throws a `Cancel` if cancellation has been requested.
|
config.cancelToken.throwIfRequested();
}
}
/**
* Dispatch a request to the server using the configured adapter.
*
* @param {object} config The config that is to be used for the request
* @returns {Promise} The Promise to be fulfilled
*/
module.exports = function dispatchRequest(config) {
throwIfCancellationRequested(config);
// Ensure headers exist
config.headers = config.headers || {};
// Transform request data
config.data = transformData(config.data, config.headers, config.transformRequest);
// Flatten headers
config.headers = utils.merge(config.headers.common || {}, config.headers[config.method] || {}, config.headers || {});
utils.forEach(['delete', 'get', 'head', 'post', 'put', 'patch', 'common'], function cleanHeaderConfig(method) {
delete config.headers[method];
});
var adapter = config.adapter || defaults.adapter;
return adapter(config).then(function onAdapterResolution(response) {
throwIfCancellationRequested(config);
// Transform response data
response.data = transformData(response.data, response.headers, config.transformResponse);
return response;
}, function onAdapterRejection(reason) {
if (!isCancel(reason)) {
throwIfCancellationRequested(config);
// Transform response data
if (reason && reason.response) {
reason.response.data = transformData(reason.response.data, reason.response.headers, config.transformResponse);
}
}
return Promise.reject(reason);
});
};
/***/
},
/***/"./node_modules/axios/lib/core/enhanceError.js":
/***/function node_modulesAxiosLibCoreEnhanceErrorJs(module, exports, __webpack_require__) {
"use strict";
/**
* Update an Error with the specified config, error code, and response.
*
* @param {Error} error The error to update.
* @param {Object} config The config.
* @param {string} [code] The error code (for example, 'ECONNABORTED').
* @param {Object} [response] The response.
* @returns {Error} The error.
*/
module.exports = function enhanceError(error, config, code, response) {
error.config = config;
if (code) {
error.code = code;
}
error.response = response;
return error;
};
/***/
},
/***/"./node_modules/axios/lib/core/settle.js":
/***/function node_modulesAxiosLibCoreSettleJs(module, exports, __webpack_require__) {
"use strict";
var createError = __webpack_require__("./node_modules/axios/lib/core/createError.js");
/**
* Resolve or reject a Promise based on response status.
*
* @param {Function} resolve A function that resolves the promise.
* @param {Function} reject A function that rejects the promise.
* @param {object} response The response.
*/
module.exports = function settle(resolve, reject, response) {
var validateStatus = response.config.validateStatus;
// Note: status is not exposed by XDomainRequest
if (!response.status || !validateStatus || validateStatus(response.status)) {
resolve(response);
} else {
reject(createError('Request failed with status code ' + response.status, response.config, null, response));
}
};
/***/
},
/***/"./node_modules/axios/lib/core/transformData.js":
/***/function node_modulesAxiosLibCoreTransformDataJs(module, exports, __webpack_require__) {
"use strict";
var utils = __webpack_require__("./node_modules/axios/lib/utils.js");
/**
* Transform the data for a request or a response
*
* @param {Object|String} data The data to be transformed
* @param {Array} headers The headers for the request or response
* @param {Array|Function} fns A single function or Array of functions
* @returns {*} The resulting transformed data
*/
module.exports = function transformData(data, headers, fns) {
/*eslint no-param-reassign:0*/
utils.forEach(fns, function transform(fn) {
data = fn(data, headers);
});
return data;
};
/***/
},
/***/"./node_modules/axios/lib/defaults.js":
/***/function node_modulesAxiosLibDefaultsJs(module, exports, __webpack_require__) {
"use strict";
/* WEBPACK VAR INJECTION */
(function (process) {
var utils = __webpack_require__("./node_modules/axios/lib/utils.js");
var normalizeHeaderName = __webpack_require__("./node_modules/axios/lib/helpers/normalizeHeaderName.js");
var PROTECTION_PREFIX = /^\)\]\}',?\n/;
var DEFAULT_CONTENT_TYPE = {
'Content-Type': 'application/x-www-form-urlencoded'
};
function setContentTypeIfUnset(headers, value) {
if (!utils.isUndefined(headers) && utils.isUndefined(headers['Content-Type'])) {
headers['Content-Type'] = value;
}
}
function getDefaultAdapter() {
var adapter;
if (typeof XMLHttpRequest !== 'undefined') {
// For browsers use XHR adapter
adapter = __webpack_require__("./node_modules/axios/lib/adapters/xhr.js");
} else if (typeof process !== 'undefined') {
// For node use HTTP adapter
adapter = __webpack_require__("./node_modules/axios/lib/adapters/xhr.js");
}
return adapter;
}
var defaults = {
adapter: getDefaultAdapter(),
transformRequest: [function transformRequest(data, headers) {
normalizeHeaderName(headers, 'Content-Type');
if (utils.isFormData(data) || utils.isArrayBuffer(data) || utils.isStream(data) || utils.isFile(data) || utils.isBlob(data)) {
return data;
}
if (utils.isArrayBufferView(data)) {
return data.buffer;
}
if (utils.isURLSearchParams(data)) {
setContentTypeIfUnset(headers, 'application/x-www-form-urlencoded;charset=utf-8');
return data.toString();
}
if (utils.isObject(data)) {
setContentTypeIfUnset(headers, 'application/json;charset=utf-8');
return JSON.stringify(data);
}
return data;
}],
transformResponse: [function transformResponse(data) {
/*eslint no-param-reassign:0*/
if (typeof data === 'string') {
data = data.replace(PROTECTION_PREFIX, '');
try {
data = JSON.parse(data);
} catch (e) {/* Ignore */}
}
return data;
}],
timeout: 0,
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
validateStatus: function validateStatus(status) {
return status >= 200 && status < 300;
}
};
defaults.headers = {
common: {
'Accept': 'application/json, text/plain, */*'
}
};
utils.forEach(['delete', 'get', 'head'], function forEachMethodNoData(method) {
defaults.headers[method] = {};
});
utils.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) {
defaults.headers[method] = utils.merge(DEFAULT_CONTENT_TYPE);
});
module.exports = defaults;
/* WEBPACK VAR INJECTION */
}).call(exports, __webpack_require__("./node_modules/process/browser.js"));
/***/
},
/***/"./node_modules/axios/lib/helpers/bind.js":
/***/function node_modulesAxiosLibHelpersBindJs(module, exports, __webpack_require__) {
"use strict";
module.exports = function bind(fn, thisArg) {
return function wrap() {
var args = new Array(arguments.length);
for (var i = 0; i < args.length; i++) {
args[i] = arguments[i];
}
return fn.apply(thisArg, args);
};
};
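// Usage sketch (illustrative only; `counter` is a made-up object): bind() returns
// a plain function that always calls `fn` with `thisArg` as its context.
//   var counter = { n: 0, inc: function (by) { this.n += by; return this.n; } };
//   var inc = bind(counter.inc, counter);
//   inc(2); // 2 (counter.n is now 2)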
/***/
},
/***/"./node_modules/axios/lib/helpers/btoa.js":
/***/function node_modulesAxiosLibHelpersBtoaJs(module, exports, __webpack_require__) {
"use strict";
// btoa polyfill for IE<10 courtesy https://github.com/davidchambers/Base64.js
var chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';
function E() {
this.message = 'String contains an invalid character';
}
E.prototype = new Error();
E.prototype.code = 5;
E.prototype.name = 'InvalidCharacterError';
function btoa(input) {
var str = String(input);
var output = '';
for (
// initialize result and counter
var block, charCode, idx = 0, map = chars;
// if the next str index does not exist:
// change the mapping table to "="
// check if idx has no fractional digits
str.charAt(idx | 0) || (map = '=', idx % 1);
// "8 - idx % 1 * 8" generates the sequence 2, 4, 6, 8
output += map.charAt(63 & block >> 8 - idx % 1 * 8)) {
charCode = str.charCodeAt(idx += 3 / 4);
if (charCode > 0xFF) {
throw new E();
}
block = block << 8 | charCode;
}
return output;
}
module.exports = btoa;
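// Usage sketch (illustrative only): the polyfill mirrors the native window.btoa,
// base64-encoding a binary string and throwing for code points above 0xFF.
//   btoa('abc'); // 'YWJj'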
/***/
},
/***/"./node_modules/axios/lib/helpers/buildURL.js":
/***/function node_modulesAxiosLibHelpersBuildURLJs(module, exports, __webpack_require__) {
"use strict";
var utils = __webpack_require__("./node_modules/axios/lib/utils.js");
function encode(val) {
return encodeURIComponent(val).replace(/%40/gi, '@').replace(/%3A/gi, ':').replace(/%24/g, '$').replace(/%2C/gi, ',').replace(/%20/g, '+').replace(/%5B/gi, '[').replace(/%5D/gi, ']');
}
/**
* Build a URL by appending params to the end
*
* @param {string} url The base of the url (e.g., http://www.google.com)
* @param {object} [params] The params to be appended
* @returns {string} The formatted url
*/
module.exports = function buildURL(url, params, paramsSerializer) {
/*eslint no-param-reassign:0*/
if (!params) {
return url;
}
var serializedParams;
if (paramsSerializer) {
serializedParams = paramsSerializer(params);
} else if (utils.isURLSearchParams(params)) {
serializedParams = params.toString();
} else {
var parts = [];
utils.forEach(params, function serialize(val, key) {
if (val === null || typeof val === 'undefined') {
return;
}
if (utils.isArray(val)) {
key = key + '[]';
}
if (!utils.isArray(val)) {
val = [val];
}
utils.forEach(val, function parseValue(v) {
if (utils.isDate(v)) {
v = v.toISOString();
} else if (utils.isObject(v)) {
v = JSON.stringify(v);
}
parts.push(encode(key) + '=' + encode(v));
});
});
serializedParams = parts.join('&');
}
if (serializedParams) {
url += (url.indexOf('?') === -1 ? '?' : '&') + serializedParams;
}
return url;
};
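// Usage sketch (illustrative only; the URL and params are hypothetical): arrays
// become repeated "key[]" entries and dates are serialized via toISOString().
//   buildURL('/api/recipes', { tags: ['fast', 'vegan'], page: 2 });
//   // -> '/api/recipes?tags[]=fast&tags[]=vegan&page=2'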
/***/
},
/***/"./node_modules/axios/lib/helpers/combineURLs.js":
/***/function node_modulesAxiosLibHelpersCombineURLsJs(module, exports, __webpack_require__) {
"use strict";
/**
* Creates a new URL by combining the specified URLs
*
* @param {string} baseURL The base URL
* @param {string} relativeURL The relative URL
* @returns {string} The combined URL
*/
module.exports = function combineURLs(baseURL, relativeURL) {
return baseURL.replace(/\/+$/, '') + '/' + relativeURL.replace(/^\/+/, '');
};
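// Usage sketch (illustrative only): extra slashes are trimmed so exactly one
// separator joins the two parts.
//   combineURLs('http://example.com/api/', '/recipes'); // 'http://example.com/api/recipes'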
/***/
},
/***/"./node_modules/axios/lib/helpers/cookies.js":
/***/function node_modulesAxiosLibHelpersCookiesJs(module, exports, __webpack_require__) {
"use strict";
var utils = __webpack_require__("./node_modules/axios/lib/utils.js");
module.exports = utils.isStandardBrowserEnv() ?
// Standard browser envs support document.cookie
function standardBrowserEnv() {
return {
write: function write(name, value, expires, path, domain, secure) {
var cookie = [];
cookie.push(name + '=' + encodeURIComponent(value));
if (utils.isNumber(expires)) {
cookie.push('expires=' + new Date(expires).toGMTString());
}
if (utils.isString(path)) {
cookie.push('path=' + path);
}
if (utils.isString(domain)) {
cookie.push('domain=' + domain);
}
if (secure === true) {
cookie.push('secure');
}
document.cookie = cookie.join('; ');
},
read: function read(name) {
var match = document.cookie.match(new RegExp('(^|;\\s*)(' + name + ')=([^;]*)'));
return match ? decodeURIComponent(match[3]) : null;
},
remove: function remove(name) {
this.write(name, '', Date.now() - 86400000);
}
};
}() :
// Non-standard browser envs (web workers, react-native) lack needed support.
function nonStandardBrowserEnv() {
return {
write: function write() {},
read: function read() {
return null;
},
remove: function remove() {}
};
}();
/***/
},
/***/"./node_modules/axios/lib/helpers/isAbsoluteURL.js":
/***/function node_modulesAxiosLibHelpersIsAbsoluteURLJs(module, exports, __webpack_require__) {
"use strict";
/**
* Determines whether the specified URL is absolute
*
* @param {string} url The URL to test
* @returns {boolean} True if the specified URL is absolute, otherwise false
*/
module.exports = function isAbsoluteURL(url) {
// A URL is considered absolute if it begins with "<scheme>://" or "//" (protocol-relative URL).
// RFC 3986 defines scheme name as a sequence of characters beginning with a letter and followed
// by any combination of letters, digits, plus, period, or hyphen.
return (/^([a-z][a-z\d\+\-\.]*:)?\/\//i.test(url)
);
};
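// Usage sketch (illustrative only):
//   isAbsoluteURL('https://example.com/a'); // true
//   isAbsoluteURL('//cdn.example.com/a');   // true (protocol-relative)
//   isAbsoluteURL('/a/b');                  // false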
/***/
},
/***/"./node_modules/axios/lib/helpers/isURLSameOrigin.js":
/***/function node_modulesAxiosLibHelpersIsURLSameOriginJs(module, exports, __webpack_require__) {
"use strict";
var utils = __webpack_require__("./node_modules/axios/lib/utils.js");
module.exports = utils.isStandardBrowserEnv() ?
// Standard browser envs have full support of the APIs needed to test
// whether the request URL is of the same origin as current location.
function standardBrowserEnv() {
var msie = /(msie|trident)/i.test(navigator.userAgent);
var urlParsingNode = document.createElement('a');
var originURL;
/**
 * Parse a URL to discover its components
*
* @param {String} url The URL to be parsed
* @returns {Object}
*/
function resolveURL(url) {
var href = url;
if (msie) {
// IE needs attribute set twice to normalize properties
urlParsingNode.setAttribute('href', href);
href = urlParsingNode.href;
}
urlParsingNode.setAttribute('href', href);
// urlParsingNode provides the UrlUtils interface - http://url.spec.whatwg.org/#urlutils
return {
href: urlParsingNode.href,
protocol: urlParsingNode.protocol ? urlParsingNode.protocol.replace(/:$/, '') : '',
host: urlParsingNode.host,
search: urlParsingNode.search ? urlParsingNode.search.replace(/^\?/, '') : '',
hash: urlParsingNode.hash ? urlParsingNode.hash.replace(/^#/, '') : '',
hostname: urlParsingNode.hostname,
port: urlParsingNode.port,
pathname: urlParsingNode.pathname.charAt(0) === '/' ? urlParsingNode.pathname : '/' + urlParsingNode.pathname
};
}
originURL = resolveURL(window.location.href);
/**
* Determine if a URL shares the same origin as the current location
*
* @param {String} requestURL The URL to test
* @returns {boolean} True if URL shares the same origin, otherwise false
*/
return function isURLSameOrigin(requestURL) {
var parsed = utils.isString(requestURL) ? resolveURL(requestURL) : requestURL;
return parsed.protocol === originURL.protocol && parsed.host === originURL.host;
};
}() :
// Non-standard browser envs (web workers, react-native) lack needed support.
function nonStandardBrowserEnv() {
return function isURLSameOrigin() {
return true;
};
}();
/***/
},
/***/"./node_modules/axios/lib/helpers/normalizeHeaderName.js":
/***/function node_modulesAxiosLibHelpersNormalizeHeaderNameJs(module, exports, __webpack_require__) {
"use strict";
var utils = __webpack_require__("./node_modules/axios/lib/utils.js");
module.exports = function normalizeHeaderName(headers, normalizedName) {
utils.forEach(headers, function processHeader(value, name) {
if (name !== normalizedName && name.toUpperCase() === normalizedName.toUpperCase()) {
headers[normalizedName] = value;
delete headers[name];
}
});
};
/***/
},
/***/"./node_modules/axios/lib/helpers/parseHeaders.js":
/***/function node_modulesAxiosLibHelpersParseHeadersJs(module, exports, __webpack_require__) {
"use strict";
var utils = __webpack_require__("./node_modules/axios/lib/utils.js");
/**
* Parse headers into an object
*
* ```
* Date: Wed, 27 Aug 2014 08:58:49 GMT
* Content-Type: application/json
* Connection: keep-alive
* Transfer-Encoding: chunked
* ```
*
* @param {String} headers Headers needing to be parsed
* @returns {Object} Headers parsed into an object
*/
module.exports = function parseHeaders(headers) {
var parsed = {};
var key;
var val;
var i;
if (!headers) {
return parsed;
}
utils.forEach(headers.split('\n'), function parser(line) {
i = line.indexOf(':');
key = utils.trim(line.substr(0, i)).toLowerCase();
val = utils.trim(line.substr(i + 1));
if (key) {
parsed[key] = parsed[key] ? parsed[key] + ', ' + val : val;
}
});
return parsed;
};
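// Usage sketch (illustrative only): header names are lower-cased and duplicate
// keys are joined with ', '.
//   parseHeaders('Content-Type: application/json\nX-Id: 1\nX-Id: 2');
//   // -> { 'content-type': 'application/json', 'x-id': '1, 2' }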
/***/
},
/***/"./node_modules/axios/lib/helpers/spread.js":
/***/function node_modulesAxiosLibHelpersSpreadJs(module, exports, __webpack_require__) {
"use strict";
/**
* Syntactic sugar for invoking a function and expanding an array for arguments.
*
* Common use case would be to use `Function.prototype.apply`.
*
* ```js
* function f(x, y, z) {}
* var args = [1, 2, 3];
* f.apply(null, args);
* ```
*
* With `spread` this example can be re-written.
*
* ```js
* spread(function(x, y, z) {})([1, 2, 3]);
* ```
*
* @param {Function} callback
* @returns {Function}
*/
module.exports = function spread(callback) {
return function wrap(arr) {
return callback.apply(null, arr);
};
};
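// Usage sketch (illustrative only): useful with APIs that resolve with an array
// of results (e.g. axios.all) when you want positional arguments back.
//   var sum = spread(function (a, b, c) { return a + b + c; });
//   sum([1, 2, 3]); // 6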
/***/
},
/***/"./node_modules/axios/lib/utils.js":
/***/function node_modulesAxiosLibUtilsJs(module, exports, __webpack_require__) {
"use strict";
var bind = __webpack_require__("./node_modules/axios/lib/helpers/bind.js");
/*global toString:true*/
// utils is a library of generic helper functions non-specific to axios
var toString = Object.prototype.toString;
/**
* Determine if a value is an Array
*
* @param {Object} val The value to test
* @returns {boolean} True if value is an Array, otherwise false
*/
function isArray(val) {
return toString.call(val) === '[object Array]';
}
/**
* Determine if a value is an ArrayBuffer
*
* @param {Object} val The value to test
* @returns {boolean} True if value is an ArrayBuffer, otherwise false
*/
function isArrayBuffer(val) {
return toString.call(val) === '[object ArrayBuffer]';
}
/**
* Determine if a value is a FormData
*
* @param {Object} val The value to test
* @returns {boolean} True if value is an FormData, otherwise false
*/
function isFormData(val) {
return typeof FormData !== 'undefined' && val instanceof FormData;
}
/**
* Determine if a value is a view on an ArrayBuffer
*
* @param {Object} val The value to test
* @returns {boolean} True if value is a view on an ArrayBuffer, otherwise false
*/
function isArrayBufferView(val) {
var result;
if (typeof ArrayBuffer !== 'undefined' && ArrayBuffer.isView) {
result = ArrayBuffer.isView(val);
} else {
result = val && val.buffer && val.buffer instanceof ArrayBuffer;
}
return result;
}
/**
* Determine if a value is a String
*
* @param {Object} val The value to test
* @returns {boolean} True if value is a String, otherwise false
*/
function isString(val) {
return typeof val === 'string';
}
/**
* Determine if a value is a Number
*
* @param {Object} val The value to test
* @returns {boolean} True if value is a Number, otherwise false
*/
function isNumber(val) {
return typeof val === 'number';
}
/**
* Determine if a value is undefined
*
* @param {Object} val The value to test
* @returns {boolean} True if the value is undefined, otherwise false
*/
function isUndefined(val) {
return typeof val === 'undefined';
}
/**
* Determine if a value is an Object
*
* @param {Object} val The value to test
* @returns {boolean} True if value is an Object, otherwise false
*/
function isObject(val) {
return val !== null && (typeof val === 'undefined' ? 'undefined' : _typeof(val)) === 'object';
}
/**
* Determine if a value is a Date
*
* @param {Object} val The value to test
* @returns {boolean} True if value is a Date, otherwise false
*/
function isDate(val) {
return toString.call(val) === '[object Date]';
}
/**
* Determine if a value is a File
*
* @param {Object} val The value to test
* @returns {boolean} True if value is a File, otherwise false
*/
function isFile(val) {
return toString.call(val) === '[object File]';
}
/**
* Determine if a value is a Blob
*
* @param {Object} val The value to test
* @returns {boolean} True if value is a Blob, otherwise false
*/
function isBlob(val) {
return toString.call(val) === '[object Blob]';
}
/**
* Determine if a value is a Function
*
* @param {Object} val The value to test
* @returns {boolean} True if value is a Function, otherwise false
*/
function isFunction(val) {
return toString.call(val) === '[object Function]';
}
/**
* Determine if a value is a Stream
*
* @param {Object} val The value to test
* @returns {boolean} True if value is a Stream, otherwise false
*/
function isStream(val) {
return isObject(val) && isFunction(val.pipe);
}
/**
* Determine if a value is a URLSearchParams object
*
* @param {Object} val The value to test
* @returns {boolean} True if value is a URLSearchParams object, otherwise false
*/
function isURLSearchParams(val) {
return typeof URLSearchParams !== 'undefined' && val instanceof URLSearchParams;
}
/**
* Trim excess whitespace off the beginning and end of a string
*
* @param {String} str The String to trim
* @returns {String} The String freed of excess whitespace
*/
function trim(str) {
return str.replace(/^\s*/, '').replace(/\s*$/, '');
}
/**
* Determine if we're running in a standard browser environment
*
* This allows axios to run in a web worker, and react-native.
* Both environments support XMLHttpRequest, but not fully standard globals.
*
* web workers:
* typeof window -> undefined
* typeof document -> undefined
*
* react-native:
* typeof document.createElement -> undefined
*/
function isStandardBrowserEnv() {
return typeof window !== 'undefined' && typeof document !== 'undefined' && typeof document.createElement === 'function';
}
/**
* Iterate over an Array or an Object invoking a function for each item.
*
* If `obj` is an Array callback will be called passing
* the value, index, and complete array for each item.
*
* If 'obj' is an Object callback will be called passing
* the value, key, and complete object for each property.
*
* @param {Object|Array} obj The object to iterate
* @param {Function} fn The callback to invoke for each item
*/
function forEach(obj, fn) {
// Don't bother if no value provided
if (obj === null || typeof obj === 'undefined') {
return;
}
// Force an array if not already something iterable
if ((typeof obj === 'undefined' ? 'undefined' : _typeof(obj)) !== 'object' && !isArray(obj)) {
/*eslint no-param-reassign:0*/
obj = [obj];
}
if (isArray(obj)) {
// Iterate over array values
for (var i = 0, l = obj.length; i < l; i++) {
fn.call(null, obj[i], i, obj);
}
} else {
// Iterate over object keys
for (var key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) {
fn.call(null, obj[key], key, obj);
}
}
}
}
/**
* Accepts varargs expecting each argument to be an object, then
* immutably merges the properties of each object and returns result.
*
* When multiple objects contain the same key the later object in
* the arguments list will take precedence.
*
* Example:
*
* ```js
* var result = merge({foo: 123}, {foo: 456});
* console.log(result.foo); // outputs 456
* ```
*
* @param {Object} obj1 Object to merge
* @returns {Object} Result of all merge properties
*/
function merge() /* obj1, obj2, obj3, ... */{
var result = {};
function assignValue(val, key) {
if (_typeof(result[key]) === 'object' && (typeof val === 'undefined' ? 'undefined' : _typeof(val)) === 'object') {
result[key] = merge(result[key], val);
} else {
result[key] = val;
}
}
for (var i = 0, l = arguments.length; i < l; i++) {
forEach(arguments[i], assignValue);
}
return result;
}
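// Usage sketch (illustrative only; the header objects are hypothetical): nested
// objects merge recursively, later arguments win, and the inputs are not mutated.
//   merge({ headers: { common: { Accept: '*/*' } } },
//         { headers: { common: { Accept: 'application/json' } }, timeout: 1000 });
//   // -> { headers: { common: { Accept: 'application/json' } }, timeout: 1000 }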
/**
* Extends object a by mutably adding to it the properties of object b.
*
* @param {Object} a The object to be extended
* @param {Object} b The object to copy properties from
* @param {Object} thisArg The object to bind function to
* @return {Object} The resulting value of object a
*/
function extend(a, b, thisArg) {
forEach(b, function assignValue(val, key) {
if (thisArg && typeof val === 'function') {
a[key] = bind(val, thisArg);
} else {
a[key] = val;
}
});
return a;
}
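// Usage sketch (illustrative only): unlike merge(), extend() mutates its first
// argument, and copied functions are re-bound to thisArg via bind().
//   var target = {};
//   extend(target, { greet: function () { return this.name; } }, { name: 'axios' });
//   target.greet(); // 'axios'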
module.exports = {
isArray: isArray,
isArrayBuffer: isArrayBuffer,
isFormData: isFormData,
isArrayBufferView: isArrayBufferView,
isString: isString,
isNumber: isNumber,
isObject: isObject,
isUndefined: isUndefined,
isDate: isDate,
isFile: isFile,
isBlob: isBlob,
isFunction: isFunction,
isStream: isStream,
isURLSearchParams: isURLSearchParams,
isStandardBrowserEnv: isStandardBrowserEnv,
forEach: forEach,
merge: merge,
extend: extend,
trim: trim
};
/***/
},
/***/"./node_modules/babel-loader/lib/index.js?{\"cacheDirectory\":true,\"presets\":[[\"env\",{\"modules\":false,\"targets\":{\"browsers\":[\"> 2%\"],\"uglify\":true}}]]}!./node_modules/vue-loader/lib/selector.js?type=script&index=0!./resources/assets/js/App.vue":
/***/function node_modulesBabelLoaderLibIndexJsCacheDirectoryTruePresetsEnvModulesFalseTargetsBrowsers2UglifyTrueNode_modulesVueLoaderLibSelectorJsTypeScriptIndex0ResourcesAssetsJsAppVue(module, __webpack_exports__, __webpack_require__) {
"use strict";
Object.defineProperty(__webpack_exports__, "__esModule", { value: true });
/* harmony import */var __WEBPACK_IMPORTED_MODULE_0__helpers_flash__ = __webpack_require__("./resources/assets/js/helpers/flash.js");
/* harmony import */var __WEBPACK_IMPORTED_MODULE_1__store_auth__ = __webpack_require__("./resources/assets/js/store/auth.js");
/* harmony import */var __WEBPACK_IMPORTED_MODULE_2__helpers_api__ = __webpack_require__("./resources/assets/js/helpers/api.js");
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
/* harmony default export */__webpack_exports__["default"] = {
created: function created() {
__WEBPACK_IMPORTED_MODULE_1__store_auth__["a" /* default */].initialize();
},
data: function data() {
return {
flash: __WEBPACK_IMPORTED_MODULE_0__helpers_flash__["a" /* default */].state,
auth: __WEBPACK_IMPORTED_MODULE_1__store_auth__["a" /* default */].state
};
},
computed: {
check: function check() {
if (this.auth.api_token && this.auth.user_id) {
return true;
}
return false;
}
},
methods: {
logout: function logout() {
var _this = this;
__WEBPACK_IMPORTED_MODULE_2__helpers_api__["a" /* post */]('/api/logout').then(function (res) {
if (res.data.logout) {
__WEBPACK_IMPORTED_MODULE_1__store_auth__["a" /* default */].remove();
__WEBPACK_IMPORTED_MODULE_0__helpers_flash__["a" /* default */].setSuccess('You have successfully logged out!');
_this.$router.push('/login');
}
}).catch();
}
}
};
/***/
},
/***/"./node_modules/babel-loader/lib/index.js?{\"cacheDirectory\":true,\"presets\":[[\"env\",{\"modules\":false,\"targets\":{\"browsers\":[\"> 2%\"],\"uglify\":true}}]]}!./node_modules/vue-loader/lib/selector.js?type=script&index=0!./resources/assets/js/views/Auth/Login.vue":
/***/function node_modulesBabelLoaderLibIndexJsCacheDirectoryTruePresetsEnvModulesFalseTargetsBrowsers2UglifyTrueNode_modulesVueLoaderLibSelectorJsTypeScriptIndex0ResourcesAssetsJsViewsAuthLoginVue(module, __webpack_exports__, __webpack_require__) {
"use strict";
Object.defineProperty(__webpack_exports__, "__esModule", { value: true });
/* harmony import */var __WEBPACK_IMPORTED_MODULE_0__helpers_flash__ = __webpack_require__("./resources/assets/js/helpers/flash.js");
/* harmony import */var __WEBPACK_IMPORTED_MODULE_1__helpers_api__ = __webpack_require__("./resources/assets/js/helpers/api.js");
/* harmony import */var __WEBPACK_IMPORTED_MODULE_2__store_auth__ = __webpack_require__("./resources/assets/js/store/auth.js");
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
/* harmony default export */__webpack_exports__["default"] = {
data: function data() {
return {
form: {
email: '',
password: ''
},
error: {},
isProcessing: false
};
},
methods: {
login: function login() {
var _this = this;
this.isProcessing = true;
this.error = {};
__WEBPACK_IMPORTED_MODULE_1__helpers_api__["a" /* post */]('api/login', this.form).then(function (res) {
if (res.data.authenticated) {
__WEBPACK_IMPORTED_MODULE_2__store_auth__["a" /* default */].set(res.data.api_token, res.data.user_id);
__WEBPACK_IMPORTED_MODULE_0__helpers_flash__["a" /* default */].setSuccess('You have successfully logged in!');
_this.$router.push('/');
}
_this.isProcessing = false;
}).catch(function (err) {
if (err.response.status === 422) {
_this.error = err.response.data;
}
_this.isProcessing = false;
});
}
}
};
/***/
},
/***/"./node_modules/babel-loader/lib/index.js?{\"cacheDirectory\":true,\"presets\":[[\"env\",{\"modules\":false,\"targets\":{\"browsers\":[\"> 2%\"],\"uglify\":true}}]]}!./node_modules/vue-loader/lib/selector.js?type=script&index=0!./resources/assets/js/views/Auth/Register.vue":
/***/function node_modulesBabelLoaderLibIndexJsCacheDirectoryTruePresetsEnvModulesFalseTargetsBrowsers2UglifyTrueNode_modulesVueLoaderLibSelectorJsTypeScriptIndex0ResourcesAssetsJsViewsAuthRegisterVue(module, __webpack_exports__, __webpack_require__) {
"use strict";
Object.defineProperty(__webpack_exports__, "__esModule", { value: true });
/* harmony import */var __WEBPACK_IMPORTED_MODULE_0__helpers_flash__ = __webpack_require__("./resources/assets/js/helpers/flash.js");
/* harmony import */var __WEBPACK_IMPORTED_MODULE_1__helpers_api__ = __webpack_require__("./resources/assets/js/helpers/api.js");
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
/* harmony default export */__webpack_exports__["default"] = {
data: function data() {
return {
form: {
name: '',
email: '',
password: '',
password_confirmation: ''
},
error: {},
isProcessing: false
};
},
methods: {
register: function register() {
var _this = this;
this.isProcessing = true;
this.error = {};
__WEBPACK_IMPORTED_MODULE_1__helpers_api__["a" /* post */]('api/register', this.form).then(function (res) {
if (res.data.registered) {
__WEBPACK_IMPORTED_MODULE_0__helpers_flash__["a" /* default */].setSuccess('You have successfully created an account!');
_this.$router.push('/login');
}
_this.isProcessing = false;
}).catch(function (err) {
if (err.response.status === 422) {
_this.error = err.response.data;
}
_this.isProcessing = false;
});
}
}
};
/***/
},
/***/"./node_modules/process/browser.js":
/***/function node_modulesProcessBrowserJs(module, exports) {
// shim for using process in browser
var process = module.exports = {};
// cached from whatever global is present so that test runners that stub it
// don't break things. But we need to wrap it in a try catch in case it is
// wrapped in strict mode code which doesn't define any globals. It's inside a
// function because try/catches deoptimize in certain engines.
var cachedSetTimeout;
var cachedClearTimeout;
function defaultSetTimout() {
throw new Error('setTimeout has not been defined');
}
function defaultClearTimeout() {
throw new Error('clearTimeout has not been defined');
}
(function () {
try {
if (typeof setTimeout === 'function') {
cachedSetTimeout = setTimeout;
} else {
cachedSetTimeout = defaultSetTimout;
}
} catch (e) {
cachedSetTimeout = defaultSetTimout;
}
try {
if (typeof clearTimeout === 'function') {
cachedClearTimeout = clearTimeout;
} else {
cachedClearTimeout = defaultClearTimeout;
}
} catch (e) {
cachedClearTimeout = defaultClearTimeout;
}
})();
function runTimeout(fun) {
if (cachedSetTimeout === setTimeout) {
// normal environments in sane situations
return setTimeout(fun, 0);
}
// if setTimeout wasn't available but was later defined
if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) {
cachedSetTimeout = setTimeout;
return setTimeout(fun, 0);
}
try {
// when somebody has screwed with setTimeout but there is no I.E. madness
return cachedSetTimeout(fun, 0);
} catch (e) {
try {
// When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
return cachedSetTimeout.call(null, fun, 0);
} catch (e) {
// same as above, but for a version of I.E. that must have the global object for 'this'; hopefully our context is correct, otherwise it will throw a global error
return cachedSetTimeout.call(this, fun, 0);
}
}
}
function runClearTimeout(marker) {
if (cachedClearTimeout === clearTimeout) {
// normal environments in sane situations
return clearTimeout(marker);
}
// if clearTimeout wasn't available but was later defined
if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) {
cachedClearTimeout = clearTimeout;
return clearTimeout(marker);
}
try {
// when somebody has screwed with clearTimeout but there is no I.E. madness
return cachedClearTimeout(marker);
} catch (e) {
try {
// When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
return cachedClearTimeout.call(null, marker);
} catch (e) {
// same as above, but for a version of I.E. that must have the global object for 'this'; hopefully our context is correct, otherwise it will throw a global error.
// Some versions of I.E. have different rules for clearTimeout vs setTimeout
return cachedClearTimeout.call(this, marker);
}
}
}
var queue = [];
var draining = false;
var currentQueue;
var queueIndex = -1;
function cleanUpNextTick() {
if (!draining || !currentQueue) {
return;
}
draining = false;
if (currentQueue.length) {
queue = currentQueue.concat(queue);
} else {
queueIndex = -1;
}
if (queue.length) {
drainQueue();
}
}
function drainQueue() {
if (draining) {
return;
}
var timeout = runTimeout(cleanUpNextTick);
draining = true;
var len = queue.length;
while (len) {
currentQueue = queue;
queue = [];
while (++queueIndex < len) {
if (currentQueue) {
currentQueue[queueIndex].run();
}
}
queueIndex = -1;
len = queue.length;
}
currentQueue = null;
draining = false;
runClearTimeout(timeout);
}
process.nextTick = function (fun) {
var args = new Array(arguments.length - 1);
if (arguments.length > 1) {
for (var i = 1; i < arguments.length; i++) {
args[i - 1] = arguments[i];
}
}
queue.push(new Item(fun, args));
if (queue.length === 1 && !draining) {
runTimeout(drainQueue);
}
};
// v8 likes predictable objects
function Item(fun, array) {
this.fun = fun;
this.array = array;
}
Item.prototype.run = function () {
this.fun.apply(null, this.array);
};
process.title = 'browser';
process.browser = true;
process.env = {};
process.argv = [];
process.version = ''; // empty string to avoid regexp issues
process.versions = {};
function noop() {}
process.on = noop;
process.addListener = noop;
process.once = noop;
process.off = noop;
process.removeListener = noop;
process.removeAllListeners = noop;
process.emit = noop;
process.prependListener = noop;
process.prependOnceListener = noop;
process.listeners = function (name) {
return [];
};
process.binding = function (name) {
throw new Error('process.binding is not supported');
};
process.cwd = function () {
return '/';
};
process.chdir = function (dir) {
throw new Error('process.chdir is not supported');
};
process.umask = function () {
return 0;
};
/***/
},
/***/"./node_modules/vue-loader/lib/component-normalizer.js":
/***/function node_modulesVueLoaderLibComponentNormalizerJs(module, exports) {
// this module is a runtime utility for cleaner component module output and will
// be included in the final webpack user bundle
module.exports = function normalizeComponent(rawScriptExports, compiledTemplate, scopeId, cssModules) {
var esModule;
var scriptExports = rawScriptExports = rawScriptExports || {};
// ES6 modules interop
var type = _typeof(rawScriptExports.default);
if (type === 'object' || type === 'function') {
esModule = rawScriptExports;
scriptExports = rawScriptExports.default;
}
// Vue.extend constructor export interop
var options = typeof scriptExports === 'function' ? scriptExports.options : scriptExports;
// render functions
if (compiledTemplate) {
options.render = compiledTemplate.render;
options.staticRenderFns = compiledTemplate.staticRenderFns;
}
// scopedId
if (scopeId) {
options._scopeId = scopeId;
}
// inject cssModules
if (cssModules) {
var computed = Object.create(options.computed || null);
Object.keys(cssModules).forEach(function (key) {
var module = cssModules[key];
computed[key] = function () {
return module;
};
});
options.computed = computed;
}
return {
esModule: esModule,
exports: scriptExports,
options: options
};
};
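// Usage sketch (illustrative only; the exports and template objects below are
// hypothetical): vue-loader passes the <script> exports and the compiled template
// so the render functions land on the component options.
//   var Component = normalizeComponent(
//     { default: { data: function () { return { msg: 'hi' }; } } },
//     { render: function (h) { return h('div'); }, staticRenderFns: [] },
//     null, null);
//   // Component.options now carries data(), render and staticRenderFns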
/***/
},
/***/"./node_modules/vue-loader/lib/template-compiler/index.js?{\"id\":\"data-v-1d037175\"}!./node_modules/vue-loader/lib/selector.js?type=template&index=0!./resources/assets/js/views/Auth/Register.vue":
/***/function node_modulesVueLoaderLibTemplateCompilerIndexJsIdDataV1d037175Node_modulesVueLoaderLibSelectorJsTypeTemplateIndex0ResourcesAssetsJsViewsAuthRegisterVue(module, exports, __webpack_require__) {
module.exports = { render: function render() {
var _vm = this;var _h = _vm.$createElement;var _c = _vm._self._c || _h;
return _c('form', {
staticClass: "form",
on: {
"submit": function submit($event) {
$event.preventDefault();
_vm.register($event);
}
}
}, [_c('h1', {
staticClass: "form__title"
}, [_vm._v("Create An Accout")]), _vm._v(" "), _c('div', {
staticClass: "form__group"
}, [_c('div', {
staticClass: "form__group"
}, [_c('label', [_vm._v("Name")]), _vm._v(" "), _c('input', {
directives: [{
name: "model",
rawName: "v-model",
value: _vm.form.name,
expression: "form.name"
}],
staticClass: "form__control",
attrs: {
"type": "text"
},
domProps: {
"value": _vm.form.name
},
on: {
"input": function input($event) {
if ($event.target.composing) {
return;
}
_vm.form.name = $event.target.value;
}
}
}), _vm._v(" "), _vm.error.name ? _c('small', {
staticClass: "error__control"
}, [_vm._v(_vm._s(_vm.error.name[0]))]) : _vm._e()]), _vm._v(" "), _c('div', [_c('label', [_vm._v("Email")]), _vm._v(" "), _c('input', {
directives: [{
name: "model",
rawName: "v-model",
value: _vm.form.email,
expression: "form.email"
}],
staticClass: "form__control",
attrs: {
"type": "text"
},
domProps: {
"value": _vm.form.email
},
on: {
"input": function input($event) {
if ($event.target.composing) {
return;
}
_vm.form.email = $event.target.value;
}
}
}), _vm._v(" "), _vm.error.email ? _c('small', {
staticClass: "error__control"
}, [_vm._v(_vm._s(_vm.error.email[0]))]) : _vm._e()]), _vm._v(" "), _c('div', {
staticClass: "form__group"
}, [_c('label', [_vm._v("Password")]), _vm._v(" "), _c('input', {
directives: [{
name: "model",
rawName: "v-model",
value: _vm.form.password,
expression: "form.password"
}],
staticClass: "form__control",
attrs: {
"type": "password"
},
domProps: {
"value": _vm.form.password
},
on: {
"input": function input($event) {
if ($event.target.composing) {
return;
}
_vm.form.password = $event.target.value;
}
}
}), _vm._v(" "), _vm.error.password ? _c('small', {
staticClass: "error__control"
}, [_vm._v(_vm._s(_vm.error.password[0]))]) : _vm._e()]), _vm._v(" "), _c('div', {
staticClass: "form__group"
}, [_c('label', [_vm._v("Confirm Password")]), _vm._v(" "), _c('input', {
directives: [{
name: "model",
rawName: "v-model",
value: _vm.form.password_confirmation,
expression: "form.password_confirmation"
}],
staticClass: "form__control",
attrs: {
"type": "password"
},
domProps: {
"value": _vm.form.password_confirmation
},
on: {
"input": function input($event) {
if ($event.target.composing) {
return;
}
_vm.form.password_confirmation = $event.target.value;
}
}
})]), _vm._v(" "), _c('div', {
staticClass: "form__group"
}, [_c('button', {
staticClass: "btn btn__primary",
attrs: {
"disabled": _vm.isProcessing
}
}, [_vm._v("\n\t\t\t\tRegister\n\t\t\t")])])])]);
}, staticRenderFns: [] };
module.exports.render._withStripped = true;
if (false) {
module.hot.accept();
if (module.hot.data) {
require("vue-hot-reload-api").rerender("data-v-1d037175", module.exports);
}
}
/***/
},
/***/"./node_modules/vue-loader/lib/template-compiler/index.js?{\"id\":\"data-v-290ad3a6\"}!./node_modules/vue-loader/lib/selector.js?type=template&index=0!./resources/assets/js/App.vue":
/***/function node_modulesVueLoaderLibTemplateCompilerIndexJsIdDataV290ad3a6Node_modulesVueLoaderLibSelectorJsTypeTemplateIndex0ResourcesAssetsJsAppVue(module, exports, __webpack_require__) {
module.exports = { render: function render() {
var _vm = this;var _h = _vm.$createElement;var _c = _vm._self._c || _h;
return _c('div', {
staticClass: "container"
}, [_c('div', {
staticClass: "navbar"
}, [_c('div', {
staticClass: "navbar__brand"
}, [_c('router-link', {
attrs: {
"to": "/"
}
}, [_vm._v("Recipe Box")])], 1), _vm._v(" "), _c('ul', {
staticClass: "navbar__list"
}, [_c('li', {
staticClass: "navbar__item"
}, [!_vm.check ? _c('router-link', {
attrs: {
"to": "/login"
}
}, [_vm._v("LOGIN")]) : _vm._e()], 1), _vm._v(" "), _c('li', {
staticClass: "navbar__item"
}, [!_vm.check ? _c('router-link', {
attrs: {
"to": "/register"
}
}, [_vm._v("REGISTER")]) : _vm._e()], 1), _vm._v(" "), _vm.check ? _c('li', {
staticClass: "navbar__item"
}, [_c('a', {
on: {
"click": function click($event) {
$event.stopPropagation();
_vm.logout($event);
}
}
}, [_vm._v("LOGOUT")])]) : _vm._e()])]), _vm._v(" "), _vm.flash.success ? _c('div', {
staticClass: "flash flash__success"
}, [_vm._v("\n " + _vm._s(_vm.flash.success) + "\n ")]) : _vm._e(), _vm._v(" "), _vm.flash.error ? _c('div', {
staticClass: "flash flash__error"
}, [_vm._v("\n " + _vm._s(_vm.flash.error) + "\n ")]) : _vm._e(), _vm._v(" "), _c('router-view')], 1);
}, staticRenderFns: [] };
module.exports.render._withStripped = true;
if (false) {
module.hot.accept();
if (module.hot.data) {
require("vue-hot-reload-api").rerender("data-v-290ad3a6", module.exports);
}
}
/***/
},
/***/"./node_modules/vue-loader/lib/template-compiler/index.js?{\"id\":\"data-v-fc9dd3f2\"}!./node_modules/vue-loader/lib/selector.js?type=template&index=0!./resources/assets/js/views/Auth/Login.vue":
/***/function node_modulesVueLoaderLibTemplateCompilerIndexJsIdDataVFc9dd3f2Node_modulesVueLoaderLibSelectorJsTypeTemplateIndex0ResourcesAssetsJsViewsAuthLoginVue(module, exports, __webpack_require__) {
module.exports = { render: function render() {
var _vm = this;var _h = _vm.$createElement;var _c = _vm._self._c || _h;
return _c('form', {
staticClass: "form",
on: {
"submit": function submit($event) {
$event.preventDefault();
_vm.login($event);
}
}
}, [_c('h1', {
staticClass: "form__title"
}, [_vm._v("Welcome Back")]), _vm._v(" "), _c('div', {
staticClass: "form__group"
}, [_c('div', [_c('label', [_vm._v("Email")]), _vm._v(" "), _c('input', {
directives: [{
name: "model",
rawName: "v-model",
value: _vm.form.email,
expression: "form.email"
}],
staticClass: "form__control",
attrs: {
"type": "text"
},
domProps: {
"value": _vm.form.email
},
on: {
"input": function input($event) {
if ($event.target.composing) {
return;
}
_vm.form.email = $event.target.value;
}
}
}), _vm._v(" "), _vm.error.email ? _c('small', {
staticClass: "error__control"
}, [_vm._v(_vm._s(_vm.error.email[0]))]) : _vm._e()]), _vm._v(" "), _c('div', {
staticClass: "form__group"
}, [_c('label', [_vm._v("Password")]), _vm._v(" "), _c('input', {
directives: [{
name: "model",
rawName: "v-model",
value: _vm.form.password,
expression: "form.password"
}],
staticClass: "form__control",
attrs: {
"type": "password"
},
domProps: {
"value": _vm.form.password
},
on: {
"input": function input($event) {
if ($event.target.composing) {
return;
}
_vm.form.password = $event.target.value;
}
}
}), _vm._v(" "), _vm.error.password ? _c('small', {
staticClass: "error__control"
}, [_vm._v(_vm._s(_vm.error.password[0]))]) : _vm._e()]), _vm._v(" "), _c('div', {
staticClass: "form__group"
}, [_c('button', {
staticClass: "btn btn__primary",
attrs: {
"disabled": _vm.isProcessing
}
}, [_vm._v("\n Submit\n ")])])])]);
}, staticRenderFns: [] };
module.exports.render._withStripped = true;
if (false) {
module.hot.accept();
if (module.hot.data) {
require("vue-hot-reload-api").rerender("data-v-fc9dd3f2", module.exports);
}
}
/***/
},
/***/"./node_modules/vue-router/dist/vue-router.esm.js":
/***/function node_modulesVueRouterDistVueRouterEsmJs(module, __webpack_exports__, __webpack_require__) {
"use strict";
/**
* vue-router v2.6.0
* (c) 2017 Evan You
* @license MIT
*/
/* */
function assert(condition, message) {
if (!condition) {
throw new Error("[vue-router] " + message);
}
}
function warn(condition, message) {
if ("development" !== 'production' && !condition) {
typeof console !== 'undefined' && console.warn("[vue-router] " + message);
}
}
var View = {
name: 'router-view',
functional: true,
props: {
name: {
type: String,
default: 'default'
}
},
render: function render(_, ref) {
var props = ref.props;
var children = ref.children;
var parent = ref.parent;
var data = ref.data;
data.routerView = true;
// directly use parent context's createElement() function
// so that components rendered by router-view can resolve named slots
var h = parent.$createElement;
var name = props.name;
var route = parent.$route;
var cache = parent._routerViewCache || (parent._routerViewCache = {});
// determine current view depth, also check to see if the tree
// has been toggled inactive but kept-alive.
var depth = 0;
var inactive = false;
while (parent && parent._routerRoot !== parent) {
if (parent.$vnode && parent.$vnode.data.routerView) {
depth++;
}
if (parent._inactive) {
inactive = true;
}
parent = parent.$parent;
}
data.routerViewDepth = depth;
// render previous view if the tree is inactive and kept-alive
if (inactive) {
return h(cache[name], data, children);
}
var matched = route.matched[depth];
// render empty node if no matched route
if (!matched) {
cache[name] = null;
return h();
}
var component = cache[name] = matched.components[name];
// attach instance registration hook
// this will be called in the instance's injected lifecycle hooks
data.registerRouteInstance = function (vm, val) {
// val could be undefined for unregistration
var current = matched.instances[name];
if (val && current !== vm || !val && current === vm) {
matched.instances[name] = val;
}
}
// also register instance in prepatch hook
// in case the same component instance is reused across different routes
;(data.hook || (data.hook = {})).prepatch = function (_, vnode) {
matched.instances[name] = vnode.componentInstance;
};
// resolve props
data.props = resolveProps(route, matched.props && matched.props[name]);
return h(component, data, children);
}
};
function resolveProps(route, config) {
switch (typeof config === 'undefined' ? 'undefined' : _typeof(config)) {
case 'undefined':
return;
case 'object':
return config;
case 'function':
return config(route);
case 'boolean':
return config ? route.params : undefined;
default:
if (true) {
warn(false, "props in \"" + route.path + "\" is a " + (typeof config === 'undefined' ? 'undefined' : _typeof(config)) + ", " + "expecting an object, function or boolean.");
}
}
}
/* */
var encodeReserveRE = /[!'()*]/g;
var encodeReserveReplacer = function encodeReserveReplacer(c) {
return '%' + c.charCodeAt(0).toString(16);
};
var commaRE = /%2C/g;
// fixed encodeURIComponent which is more conformant to RFC3986:
// - escapes [!'()*]
// - preserve commas
var encode = function encode(str) {
return encodeURIComponent(str).replace(encodeReserveRE, encodeReserveReplacer).replace(commaRE, ',');
};
var decode = decodeURIComponent;
function resolveQuery(query, extraQuery, _parseQuery) {
if (extraQuery === void 0) extraQuery = {};
var parse = _parseQuery || parseQuery;
var parsedQuery;
try {
parsedQuery = parse(query || '');
} catch (e) {
"development" !== 'production' && warn(false, e.message);
parsedQuery = {};
}
for (var key in extraQuery) {
var val = extraQuery[key];
parsedQuery[key] = Array.isArray(val) ? val.slice() : val;
}
return parsedQuery;
}
function parseQuery(query) {
var res = {};
query = query.trim().replace(/^(\?|#|&)/, '');
if (!query) {
return res;
}
query.split('&').forEach(function (param) {
var parts = param.replace(/\+/g, ' ').split('=');
var key = decode(parts.shift());
var val = parts.length > 0 ? decode(parts.join('=')) : null;
if (res[key] === undefined) {
res[key] = val;
} else if (Array.isArray(res[key])) {
res[key].push(val);
} else {
res[key] = [res[key], val];
}
});
return res;
}
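// Usage sketch (illustrative only): repeated keys collect into arrays and bare
// keys map to null.
//   parseQuery('?a=1&a=2&flag');
//   // -> { a: ['1', '2'], flag: null }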
function stringifyQuery(obj) {
var res = obj ? Object.keys(obj).map(function (key) {
var val = obj[key];
if (val === undefined) {
return '';
}
if (val === null) {
return encode(key);
}
if (Array.isArray(val)) {
var result = [];
val.forEach(function (val2) {
if (val2 === undefined) {
return;
}
if (val2 === null) {
result.push(encode(key));
} else {
result.push(encode(key) + '=' + encode(val2));
}
});
return result.join('&');
}
return encode(key) + '=' + encode(val);
}).filter(function (x) {
return x.length > 0;
}).join('&') : null;
return res ? "?" + res : '';
}
/* */
var trailingSlashRE = /\/?$/;
function createRoute(record, location, redirectedFrom, router) {
var stringifyQuery$$1 = router && router.options.stringifyQuery;
var route = {
name: location.name || record && record.name,
meta: record && record.meta || {},
path: location.path || '/',
hash: location.hash || '',
query: location.query || {},
params: location.params || {},
fullPath: getFullPath(location, stringifyQuery$$1),
matched: record ? formatMatch(record) : []
};
if (redirectedFrom) {
route.redirectedFrom = getFullPath(redirectedFrom, stringifyQuery$$1);
}
return Object.freeze(route);
}
// the starting route that represents the initial state
var START = createRoute(null, {
path: '/'
});
function formatMatch(record) {
var res = [];
while (record) {
res.unshift(record);
record = record.parent;
}
return res;
}
function getFullPath(ref, _stringifyQuery) {
var path = ref.path;
var query = ref.query;if (query === void 0) query = {};
var hash = ref.hash;if (hash === void 0) hash = '';
var stringify = _stringifyQuery || stringifyQuery;
return (path || '/') + stringify(query) + hash;
}
function isSameRoute(a, b) {
if (b === START) {
return a === b;
} else if (!b) {
return false;
} else if (a.path && b.path) {
return a.path.replace(trailingSlashRE, '') === b.path.replace(trailingSlashRE, '') && a.hash === b.hash && isObjectEqual(a.query, b.query);
} else if (a.name && b.name) {
return a.name === b.name && a.hash === b.hash && isObjectEqual(a.query, b.query) && isObjectEqual(a.params, b.params);
} else {
return false;
}
}
function isObjectEqual(a, b) {
if (a === void 0) a = {};
if (b === void 0) b = {};
var aKeys = Object.keys(a);
var bKeys = Object.keys(b);
if (aKeys.length !== bKeys.length) {
return false;
}
return aKeys.every(function (key) {
var aVal = a[key];
var bVal = b[key];
// check nested equality
if ((typeof aVal === 'undefined' ? 'undefined' : _typeof(aVal)) === 'object' && (typeof bVal === 'undefined' ? 'undefined' : _typeof(bVal)) === 'object') {
return isObjectEqual(aVal, bVal);
}
return String(aVal) === String(bVal);
});
}
function isIncludedRoute(current, target) {
return current.path.replace(trailingSlashRE, '/').indexOf(target.path.replace(trailingSlashRE, '/')) === 0 && (!target.hash || current.hash === target.hash) && queryIncludes(current.query, target.query);
}
function queryIncludes(current, target) {
for (var key in target) {
if (!(key in current)) {
return false;
}
}
return true;
}
/* */
// work around weird flow bug
var toTypes = [String, Object];
var eventTypes = [String, Array];
var Link = {
name: 'router-link',
props: {
to: {
type: toTypes,
required: true
},
tag: {
type: String,
default: 'a'
},
exact: Boolean,
append: Boolean,
replace: Boolean,
activeClass: String,
exactActiveClass: String,
event: {
type: eventTypes,
default: 'click'
}
},
render: function render(h) {
var this$1 = this;
var router = this.$router;
var current = this.$route;
var ref = router.resolve(this.to, current, this.append);
var location = ref.location;
var route = ref.route;
var href = ref.href;
var classes = {};
var globalActiveClass = router.options.linkActiveClass;
var globalExactActiveClass = router.options.linkExactActiveClass;
// Support global empty active class
var activeClassFallback = globalActiveClass == null ? 'router-link-active' : globalActiveClass;
var exactActiveClassFallback = globalExactActiveClass == null ? 'router-link-exact-active' : globalExactActiveClass;
var activeClass = this.activeClass == null ? activeClassFallback : this.activeClass;
var exactActiveClass = this.exactActiveClass == null ? exactActiveClassFallback : this.exactActiveClass;
var compareTarget = location.path ? createRoute(null, location, null, router) : route;
classes[exactActiveClass] = isSameRoute(current, compareTarget);
classes[activeClass] = this.exact ? classes[exactActiveClass] : isIncludedRoute(current, compareTarget);
var handler = function handler(e) {
if (guardEvent(e)) {
if (this$1.replace) {
router.replace(location);
} else {
router.push(location);
}
}
};
var on = { click: guardEvent };
if (Array.isArray(this.event)) {
this.event.forEach(function (e) {
on[e] = handler;
});
} else {
on[this.event] = handler;
}
var data = {
class: classes
};
if (this.tag === 'a') {
data.on = on;
data.attrs = { href: href };
} else {
// find the first <a> child and apply listener and href
var a = findAnchor(this.$slots.default);
if (a) {
// in case the <a> is a static node
a.isStatic = false;
var extend = _Vue.util.extend;
var aData = a.data = extend({}, a.data);
aData.on = on;
var aAttrs = a.data.attrs = extend({}, a.data.attrs);
aAttrs.href = href;
} else {
// doesn't have <a> child, apply listener to self
data.on = on;
}
}
return h(this.tag, data, this.$slots.default);
}
};
function guardEvent(e) {
// don't redirect with control keys
if (e.metaKey || e.altKey || e.ctrlKey || e.shiftKey) {
return;
}
// don't redirect when preventDefault called
if (e.defaultPrevented) {
return;
}
// don't redirect on right click
if (e.button !== undefined && e.button !== 0) {
return;
}
// don't redirect if `target="_blank"`
if (e.currentTarget && e.currentTarget.getAttribute) {
var target = e.currentTarget.getAttribute('target');
if (/\b_blank\b/i.test(target)) {
return;
}
}
// this may be a Weex event which doesn't have this method
if (e.preventDefault) {
e.preventDefault();
}
return true;
}
function findAnchor(children) {
if (children) {
var child;
for (var i = 0; i < children.length; i++) {
child = children[i];
if (child.tag === 'a') {
return child;
}
if (child.children && (child = findAnchor(child.children))) {
return child;
}
}
}
}
var _Vue;
function install(Vue) {
if (install.installed) {
return;
}
install.installed = true;
_Vue = Vue;
var isDef = function isDef(v) {
return v !== undefined;
};
var registerInstance = function registerInstance(vm, callVal) {
var i = vm.$options._parentVnode;
if (isDef(i) && isDef(i = i.data) && isDef(i = i.registerRouteInstance)) {
i(vm, callVal);
}
};
Vue.mixin({
beforeCreate: function beforeCreate() {
if (isDef(this.$options.router)) {
this._routerRoot = this;
this._router = this.$options.router;
this._router.init(this);
Vue.util.defineReactive(this, '_route', this._router.history.current);
} else {
this._routerRoot = this.$parent && this.$parent._routerRoot || this;
}
registerInstance(this, this);
},
destroyed: function destroyed() {
registerInstance(this);
}
});
Object.defineProperty(Vue.prototype, '$router', {
get: function get() {
return this._routerRoot._router;
}
});
Object.defineProperty(Vue.prototype, '$route', {
get: function get() {
return this._routerRoot._route;
}
});
Vue.component('router-view', View);
Vue.component('router-link', Link);
var strats = Vue.config.optionMergeStrategies;
// use the same hook merging strategy for route hooks
strats.beforeRouteEnter = strats.beforeRouteLeave = strats.created;
}
/* */
var inBrowser = typeof window !== 'undefined';
/* */
function resolvePath(relative, base, append) {
var firstChar = relative.charAt(0);
if (firstChar === '/') {
return relative;
}
if (firstChar === '?' || firstChar === '#') {
return base + relative;
}
var stack = base.split('/');
// remove trailing segment if:
// - not appending
// - appending to trailing slash (last segment is empty)
if (!append || !stack[stack.length - 1]) {
stack.pop();
}
// resolve relative path
var segments = relative.replace(/^\//, '').split('/');
for (var i = 0; i < segments.length; i++) {
var segment = segments[i];
if (segment === '..') {
stack.pop();
} else if (segment !== '.') {
stack.push(segment);
}
}
// ensure leading slash
if (stack[0] !== '') {
stack.unshift('');
}
return stack.join('/');
}
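// Usage sketch (illustrative only; the paths are hypothetical): relative segments
// resolve against the base, with '..' popping one segment.
//   resolvePath('../settings', '/users/42/profile'); // '/users/settings'
//   resolvePath('?page=2', '/users', false);         // '/users?page=2'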
function parsePath(path) {
var hash = '';
var query = '';
var hashIndex = path.indexOf('#');
if (hashIndex >= 0) {
hash = path.slice(hashIndex);
path = path.slice(0, hashIndex);
}
var queryIndex = path.indexOf('?');
if (queryIndex >= 0) {
query = path.slice(queryIndex + 1);
path = path.slice(0, queryIndex);
}
return {
path: path,
query: query,
hash: hash
};
}
function cleanPath(path) {
return path.replace(/\/\//g, '/');
}
var index$1 = Array.isArray || function (arr) {
return Object.prototype.toString.call(arr) == '[object Array]';
};
/**
* Expose `pathToRegexp`.
*/
var index = pathToRegexp;
var parse_1 = parse;
var compile_1 = compile;
var tokensToFunction_1 = tokensToFunction;
var tokensToRegExp_1 = tokensToRegExp;
/**
* The main path matching regexp utility.
*
* @type {RegExp}
*/
var PATH_REGEXP = new RegExp([
// Match escaped characters that would otherwise appear in future matches.
// This allows the user to escape special characters that won't transform.
'(\\\\.)',
// Match Express-style parameters and un-named parameters with a prefix
// and optional suffixes. Matches appear as:
//
// "/:test(\\d+)?" => ["/", "test", "\d+", undefined, "?", undefined]
// "/route(\\d+)" => [undefined, undefined, undefined, "\d+", undefined, undefined]
// "/*" => ["/", undefined, undefined, undefined, undefined, "*"]
'([\\/.])?(?:(?:\\:(\\w+)(?:\\(((?:\\\\.|[^\\\\()])+)\\))?|\\(((?:\\\\.|[^\\\\()])+)\\))([+*?])?|(\\*))'].join('|'), 'g');
/**
* Parse a string for the raw tokens.
*
* @param {string} str
* @param {Object=} options
* @return {!Array}
*/
function parse(str, options) {
var tokens = [];
var key = 0;
var index = 0;
var path = '';
var defaultDelimiter = options && options.delimiter || '/';
var res;
while ((res = PATH_REGEXP.exec(str)) != null) {
var m = res[0];
var escaped = res[1];
var offset = res.index;
path += str.slice(index, offset);
index = offset + m.length;
// Ignore already escaped sequences.
if (escaped) {
path += escaped[1];
continue;
}
var next = str[index];
var prefix = res[2];
var name = res[3];
var capture = res[4];
var group = res[5];
var modifier = res[6];
var asterisk = res[7];
// Push the current path onto the tokens.
if (path) {
tokens.push(path);
path = '';
}
var partial = prefix != null && next != null && next !== prefix;
var repeat = modifier === '+' || modifier === '*';
var optional = modifier === '?' || modifier === '*';
var delimiter = res[2] || defaultDelimiter;
var pattern = capture || group;
tokens.push({
name: name || key++,
prefix: prefix || '',
delimiter: delimiter,
optional: optional,
repeat: repeat,
partial: partial,
asterisk: !!asterisk,
pattern: pattern ? escapeGroup(pattern) : asterisk ? '.*' : '[^' + escapeString(delimiter) + ']+?'
});
}
// Match any characters still remaining.
if (index < str.length) {
path += str.substr(index);
}
// If the path exists, push it onto the end.
if (path) {
tokens.push(path);
}
return tokens;
}
/**
* Compile a string to a template function for the path.
*
* @param {string} str
* @param {Object=} options
* @return {!function(Object=, Object=)}
*/
function compile(str, options) {
return tokensToFunction(parse(str, options));
}
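// Usage sketch (illustrative only; the pattern is hypothetical): compile() turns
// a route pattern into a function that fills in params, which is how paths are
// built from named routes.
//   var toPath = compile('/users/:id');
//   toPath({ id: 42 }); // '/users/42'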
/**
* Prettier encoding of URI path segments.
*
* @param {string}
* @return {string}
*/
function encodeURIComponentPretty(str) {
return encodeURI(str).replace(/[\/?#]/g, function (c) {
return '%' + c.charCodeAt(0).toString(16).toUpperCase();
});
}
/**
* Encode the asterisk parameter. Similar to `pretty`, but allows slashes.
*
* @param {string}
* @return {string}
*/
function encodeAsterisk(str) {
return encodeURI(str).replace(/[?#]/g, function (c) {
return '%' + c.charCodeAt(0).toString(16).toUpperCase();
});
}
/**
* Expose a method for transforming tokens into the path function.
*/
function tokensToFunction(tokens) {
// Compile all the tokens into regexps.
var matches = new Array(tokens.length);
// Compile all the patterns before compilation.
for (var i = 0; i < tokens.length; i++) {
if (_typeof(tokens[i]) === 'object') {
matches[i] = new RegExp('^(?:' + tokens[i].pattern + ')$');
}
}
return function (obj, opts) {
var path = '';
var data = obj || {};
var options = opts || {};
var encode = options.pretty ? encodeURIComponentPretty : encodeURIComponent;
for (var i = 0; i < tokens.length; i++) {
var token = tokens[i];
if (typeof token === 'string') {
path += token;
continue;
}
var value = data[token.name];
var segment;
if (value == null) {
if (token.optional) {
// Prepend partial segment prefixes.
if (token.partial) {
path += token.prefix;
}
continue;
} else {
throw new TypeError('Expected "' + token.name + '" to be defined');
}
}
if (index$1(value)) {
if (!token.repeat) {
throw new TypeError('Expected "' + token.name + '" to not repeat, but received `' + JSON.stringify(value) + '`');
}
if (value.length === 0) {
if (token.optional) {
continue;
} else {
throw new TypeError('Expected "' + token.name + '" to not be empty');
}
}
for (var j = 0; j < value.length; j++) {
segment = encode(value[j]);
if (!matches[i].test(segment)) {
throw new TypeError('Expected all "' + token.name + '" to match "' + token.pattern + '", but received `' + JSON.stringify(segment) + '`');
}
path += (j === 0 ? token.prefix : token.delimiter) + segment;
}
continue;
}
segment = token.asterisk ? encodeAsterisk(value) : encode(value);
if (!matches[i].test(segment)) {
throw new TypeError('Expected "' + token.name + '" to match "' + token.pattern + '", but received "' + segment + '"');
}
path += token.prefix + segment;
}
return path;
};
}
/**
* Escape a regular expression string.
*
* @param {string} str
* @return {string}
*/
function escapeString(str) {
return str.replace(/([.+*?=^!:${}()[\]|\/\\])/g, '\\$1');
}
/**
* Escape the capturing group by escaping special characters and meaning.
*
* @param {string} group
* @return {string}
*/
function escapeGroup(group) {
return group.replace(/([=!:$\/()])/g, '\\$1');
}
/**
* Attach the keys as a property of the regexp.
*
* @param {!RegExp} re
* @param {Array} keys
* @return {!RegExp}
*/
function attachKeys(re, keys) {
re.keys = keys;
return re;
}
/**
* Get the flags for a regexp from the options.
*
* @param {Object} options
* @return {string}
*/
function flags(options) {
return options.sensitive ? '' : 'i';
}
/**
* Pull out keys from a regexp.
*
* @param {!RegExp} path
* @param {!Array} keys
* @return {!RegExp}
*/
function regexpToRegexp(path, keys) {
// Use a negative lookahead to match only capturing groups.
var groups = path.source.match(/\((?!\?)/g);
if (groups) {
for (var i = 0; i < groups.length; i++) {
keys.push({
name: i,
prefix: null,
delimiter: null,
optional: false,
repeat: false,
partial: false,
asterisk: false,
pattern: null
});
}
}
return attachKeys(path, keys);
}
/**
* Transform an array into a regexp.
*
* @param {!Array} path
* @param {Array} keys
* @param {!Object} options
* @return {!RegExp}
*/
function arrayToRegexp(path, keys, options) {
var parts = [];
for (var i = 0; i < path.length; i++) {
parts.push(pathToRegexp(path[i], keys, options).source);
}
var regexp = new RegExp('(?:' + parts.join('|') + ')', flags(options));
return attachKeys(regexp, keys);
}
/**
* Create a path regexp from string input.
*
* @param {string} path
* @param {!Array} keys
* @param {!Object} options
* @return {!RegExp}
*/
function stringToRegexp(path, keys, options) {
return tokensToRegExp(parse(path, options), keys, options);
}
/**
* Expose a function for taking tokens and returning a RegExp.
*
* @param {!Array} tokens
* @param {(Array|Object)=} keys
* @param {Object=} options
* @return {!RegExp}
*/
function tokensToRegExp(tokens, keys, options) {
if (!index$1(keys)) {
options = /** @type {!Object} */keys || options;
keys = [];
}
options = options || {};
var strict = options.strict;
var end = options.end !== false;
var route = '';
// Iterate over the tokens and create our regexp string.
for (var i = 0; i < tokens.length; i++) {
var token = tokens[i];
if (typeof token === 'string') {
route += escapeString(token);
} else {
var prefix = escapeString(token.prefix);
var capture = '(?:' + token.pattern + ')';
keys.push(token);
if (token.repeat) {
capture += '(?:' + prefix + capture + ')*';
}
if (token.optional) {
if (!token.partial) {
capture = '(?:' + prefix + '(' + capture + '))?';
} else {
capture = prefix + '(' + capture + ')?';
}
} else {
capture = prefix + '(' + capture + ')';
}
route += capture;
}
}
var delimiter = escapeString(options.delimiter || '/');
var endsWithDelimiter = route.slice(-delimiter.length) === delimiter;
// In non-strict mode we allow a slash at the end of match. If the path to
// match already ends with a slash, we remove it for consistency. The slash
// is valid at the end of a path match, not in the middle. This is important
// in non-ending mode, where "/test/" shouldn't match "/test//route".
if (!strict) {
route = (endsWithDelimiter ? route.slice(0, -delimiter.length) : route) + '(?:' + delimiter + '(?=$))?';
}
if (end) {
route += '$';
} else {
// In non-ending mode, we need the capturing groups to match as much as
// possible by using a positive lookahead to the end or next path segment.
route += strict && endsWithDelimiter ? '' : '(?=' + delimiter + '|$)';
}
return attachKeys(new RegExp('^' + route, flags(options)), keys);
}
/**
* Normalize the given path string, returning a regular expression.
*
* An empty array can be passed in for the keys, which will hold the
* placeholder key descriptions. For example, using `/user/:id`, `keys` will
* contain `[{ name: 'id', delimiter: '/', optional: false, repeat: false }]`.
*
* @param {(string|RegExp|Array)} path
* @param {(Array|Object)=} keys
* @param {Object=} options
* @return {!RegExp}
*/
function pathToRegexp(path, keys, options) {
if (!index$1(keys)) {
options = /** @type {!Object} */keys || options;
keys = [];
}
options = options || {};
if (path instanceof RegExp) {
return regexpToRegexp(path, /** @type {!Array} */keys);
}
if (index$1(path)) {
return arrayToRegexp( /** @type {!Array} */path, /** @type {!Array} */keys, options);
}
return stringToRegexp( /** @type {string} */path, /** @type {!Array} */keys, options);
}
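// Illustrative usage sketch (example only; never invoked): pathToRegexp with a keys
// array, as described in the doc comment above. The route string is an arbitrary example.
function examplePathToRegexp() {
  var keys = [];
  var re = pathToRegexp('/user/:id', keys);
  // keys[0].name === 'id'; the regexp captures the dynamic segment:
  return re.exec('/user/42'); // => ['/user/42', '42']
}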
index.parse = parse_1;
index.compile = compile_1;
index.tokensToFunction = tokensToFunction_1;
index.tokensToRegExp = tokensToRegExp_1;
/* */
var regexpCompileCache = Object.create(null);
function fillParams(path, params, routeMsg) {
try {
var filler = regexpCompileCache[path] || (regexpCompileCache[path] = index.compile(path));
return filler(params || {}, { pretty: true });
} catch (e) {
if (true) {
warn(false, "missing param for " + routeMsg + ": " + e.message);
}
return '';
}
}
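// Illustrative usage sketch (example only; never invoked): fillParams caches the compiled
// filler per path and pretty-encodes params. The path, params and message are arbitrary.
function exampleFillParams() {
  return fillParams('/user/:id', { id: 'a b' }, 'example route');
  // => '/user/a%20b' (the compiled filler is now cached in regexpCompileCache)
}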
/* */
function createRouteMap(routes, oldPathList, oldPathMap, oldNameMap) {
// the path list is used to control path matching priority
var pathList = oldPathList || [];
var pathMap = oldPathMap || Object.create(null);
var nameMap = oldNameMap || Object.create(null);
routes.forEach(function (route) {
addRouteRecord(pathList, pathMap, nameMap, route);
});
// ensure wildcard routes are always at the end
for (var i = 0, l = pathList.length; i < l; i++) {
if (pathList[i] === '*') {
pathList.push(pathList.splice(i, 1)[0]);
l--;
i--;
}
}
return {
pathList: pathList,
pathMap: pathMap,
nameMap: nameMap
};
}
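// Illustrative usage sketch (example only; never invoked): createRouteMap keeps wildcard
// routes last so they match with the lowest priority. The components are placeholder
// objects used purely for demonstration.
function exampleCreateRouteMap() {
  var Home = { render: function (h) { return h('div', 'home'); } };
  var NotFound = { render: function (h) { return h('div', 'not found'); } };
  var map = createRouteMap([
    { path: '*', component: NotFound },
    { path: '/', component: Home }
  ]);
  return map.pathList; // => ['', '*'] ('/' is normalized to '' and the wildcard is pushed last)
}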
function addRouteRecord(pathList, pathMap, nameMap, route, parent, matchAs) {
var path = route.path;
var name = route.name;
if (true) {
assert(path != null, "\"path\" is required in a route configuration.");
assert(typeof route.component !== 'string', "route config \"component\" for path: " + String(path || name) + " cannot be a " + "string id. Use an actual component instead.");
}
var normalizedPath = normalizePath(path, parent);
var pathToRegexpOptions = route.pathToRegexpOptions || {};
if (typeof route.caseSensitive === 'boolean') {
pathToRegexpOptions.sensitive = route.caseSensitive;
}
var record = {
path: normalizedPath,
regex: compileRouteRegex(normalizedPath, pathToRegexpOptions),
components: route.components || { default: route.component },
instances: {},
name: name,
parent: parent,
matchAs: matchAs,
redirect: route.redirect,
beforeEnter: route.beforeEnter,
meta: route.meta || {},
props: route.props == null ? {} : route.components ? route.props : { default: route.props }
};
if (route.children) {
// Warn if route is named, does not redirect and has a default child route.
// If users navigate to this route by name, the default child will
// not be rendered (GH Issue #629)
if (true) {
if (route.name && !route.redirect && route.children.some(function (child) {
return (/^\/?$/.test(child.path)
);
})) {
warn(false, "Named Route '" + route.name + "' has a default child route. " + "When navigating to this named route (:to=\"{name: '" + route.name + "'\"), " + "the default child route will not be rendered. Remove the name from " + "this route and use the name of the default child route for named " + "links instead.");
}
}
route.children.forEach(function (child) {
var childMatchAs = matchAs ? cleanPath(matchAs + "/" + child.path) : undefined;
addRouteRecord(pathList, pathMap, nameMap, child, record, childMatchAs);
});
}
if (route.alias !== undefined) {
var aliases = Array.isArray(route.alias) ? route.alias : [route.alias];
aliases.forEach(function (alias) {
var aliasRoute = {
path: alias,
children: route.children
};
addRouteRecord(pathList, pathMap, nameMap, aliasRoute, parent, record.path || '/' // matchAs
);
});
}
if (!pathMap[record.path]) {
pathList.push(record.path);
pathMap[record.path] = record;
}
if (name) {
if (!nameMap[name]) {
nameMap[name] = record;
} else if ("development" !== 'production' && !matchAs) {
warn(false, "Duplicate named routes definition: " + "{ name: \"" + name + "\", path: \"" + record.path + "\" }");
}
}
}
function compileRouteRegex(path, pathToRegexpOptions) {
var regex = index(path, [], pathToRegexpOptions);
if (true) {
var keys = {};
regex.keys.forEach(function (key) {
warn(!keys[key.name], "Duplicate param keys in route with path: \"" + path + "\"");
keys[key.name] = true;
});
}
return regex;
}
function normalizePath(path, parent) {
path = path.replace(/\/$/, '');
if (path[0] === '/') {
return path;
}
if (parent == null) {
return path;
}
return cleanPath(parent.path + "/" + path);
}
/* */
function normalizeLocation(raw, current, append, router) {
var next = typeof raw === 'string' ? { path: raw } : raw;
// named target
if (next.name || next._normalized) {
return next;
}
// relative params
if (!next.path && next.params && current) {
next = assign({}, next);
next._normalized = true;
var params = assign(assign({}, current.params), next.params);
if (current.name) {
next.name = current.name;
next.params = params;
} else if (current.matched.length) {
var rawPath = current.matched[current.matched.length - 1].path;
next.path = fillParams(rawPath, params, "path " + current.path);
} else if (true) {
warn(false, "relative params navigation requires a current route.");
}
return next;
}
var parsedPath = parsePath(next.path || '');
var basePath = current && current.path || '/';
var path = parsedPath.path ? resolvePath(parsedPath.path, basePath, append || next.append) : basePath;
var query = resolveQuery(parsedPath.query, next.query, router && router.options.parseQuery);
var hash = next.hash || parsedPath.hash;
if (hash && hash.charAt(0) !== '#') {
hash = "#" + hash;
}
return {
_normalized: true,
path: path,
query: query,
hash: hash
};
}
function assign(a, b) {
for (var key in b) {
a[key] = b[key];
}
return a;
}
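// Illustrative usage sketch (example only; never invoked): normalizeLocation resolving a
// relative string target against a current route. The current route object below is a
// stripped-down stand-in; real route objects carry more fields.
function exampleNormalizeLocation() {
  var current = { path: '/user/42', params: {}, matched: [] };
  return normalizeLocation('profile?tab=posts', current, true /* append */);
  // => roughly { _normalized: true, path: '/user/42/profile', query: { tab: 'posts' }, hash: '' }
}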
/* */
function createMatcher(routes, router) {
var ref = createRouteMap(routes);
var pathList = ref.pathList;
var pathMap = ref.pathMap;
var nameMap = ref.nameMap;
function addRoutes(routes) {
createRouteMap(routes, pathList, pathMap, nameMap);
}
function match(raw, currentRoute, redirectedFrom) {
var location = normalizeLocation(raw, currentRoute, false, router);
var name = location.name;
if (name) {
var record = nameMap[name];
if (true) {
warn(record, "Route with name '" + name + "' does not exist");
}
if (!record) {
return _createRoute(null, location);
}
var paramNames = record.regex.keys.filter(function (key) {
return !key.optional;
}).map(function (key) {
return key.name;
});
if (_typeof(location.params) !== 'object') {
location.params = {};
}
if (currentRoute && _typeof(currentRoute.params) === 'object') {
for (var key in currentRoute.params) {
if (!(key in location.params) && paramNames.indexOf(key) > -1) {
location.params[key] = currentRoute.params[key];
}
}
}
if (record) {
location.path = fillParams(record.path, location.params, "named route \"" + name + "\"");
return _createRoute(record, location, redirectedFrom);
}
} else if (location.path) {
location.params = {};
for (var i = 0; i < pathList.length; i++) {
var path = pathList[i];
var record$1 = pathMap[path];
if (matchRoute(record$1.regex, location.path, location.params)) {
return _createRoute(record$1, location, redirectedFrom);
}
}
}
// no match
return _createRoute(null, location);
}
function redirect(record, location) {
var originalRedirect = record.redirect;
var redirect = typeof originalRedirect === 'function' ? originalRedirect(createRoute(record, location, null, router)) : originalRedirect;
if (typeof redirect === 'string') {
redirect = { path: redirect };
}
if (!redirect || (typeof redirect === 'undefined' ? 'undefined' : _typeof(redirect)) !== 'object') {
if (true) {
warn(false, "invalid redirect option: " + JSON.stringify(redirect));
}
return _createRoute(null, location);
}
var re = redirect;
var name = re.name;
var path = re.path;
var query = location.query;
var hash = location.hash;
var params = location.params;
query = re.hasOwnProperty('query') ? re.query : query;
hash = re.hasOwnProperty('hash') ? re.hash : hash;
params = re.hasOwnProperty('params') ? re.params : params;
if (name) {
// resolve the named redirect
var targetRecord = nameMap[name];
if (true) {
assert(targetRecord, "redirect failed: named route \"" + name + "\" not found.");
}
return match({
_normalized: true,
name: name,
query: query,
hash: hash,
params: params
}, undefined, location);
} else if (path) {
// 1. resolve relative redirect
var rawPath = resolveRecordPath(path, record);
// 2. resolve params
var resolvedPath = fillParams(rawPath, params, "redirect route with path \"" + rawPath + "\"");
// 3. rematch with existing query and hash
return match({
_normalized: true,
path: resolvedPath,
query: query,
hash: hash
}, undefined, location);
} else {
if (true) {
warn(false, "invalid redirect option: " + JSON.stringify(redirect));
}
return _createRoute(null, location);
}
}
function alias(record, location, matchAs) {
var aliasedPath = fillParams(matchAs, location.params, "aliased route with path \"" + matchAs + "\"");
var aliasedMatch = match({
_normalized: true,
path: aliasedPath
});
if (aliasedMatch) {
var matched = aliasedMatch.matched;
var aliasedRecord = matched[matched.length - 1];
location.params = aliasedMatch.params;
return _createRoute(aliasedRecord, location);
}
return _createRoute(null, location);
}
function _createRoute(record, location, redirectedFrom) {
if (record && record.redirect) {
return redirect(record, redirectedFrom || location);
}
if (record && record.matchAs) {
return alias(record, location, record.matchAs);
}
return createRoute(record, location, redirectedFrom, router);
}
return {
match: match,
addRoutes: addRoutes
};
}
function matchRoute(regex, path, params) {
var m = path.match(regex);
if (!m) {
return false;
} else if (!params) {
return true;
}
for (var i = 1, len = m.length; i < len; ++i) {
var key = regex.keys[i - 1];
var val = typeof m[i] === 'string' ? decodeURIComponent(m[i]) : m[i];
if (key) {
params[key.name] = val;
}
}
return true;
}
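// Illustrative usage sketch (example only; never invoked): matchRoute fills the params
// object from the capture groups of a regexp produced by pathToRegexp.
function exampleMatchRoute() {
  var params = {};
  var matched = matchRoute(pathToRegexp('/user/:id', []), '/user/42', params);
  return matched && params.id; // => '42'
}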
function resolveRecordPath(path, record) {
return resolvePath(path, record.parent ? record.parent.path : '/', true);
}
/* */
var positionStore = Object.create(null);
function setupScroll() {
window.addEventListener('popstate', function (e) {
saveScrollPosition();
if (e.state && e.state.key) {
setStateKey(e.state.key);
}
});
}
function handleScroll(router, to, from, isPop) {
if (!router.app) {
return;
}
var behavior = router.options.scrollBehavior;
if (!behavior) {
return;
}
if (true) {
assert(typeof behavior === 'function', "scrollBehavior must be a function");
}
// wait until re-render finishes before scrolling
router.app.$nextTick(function () {
var position = getScrollPosition();
var shouldScroll = behavior(to, from, isPop ? position : null);
if (!shouldScroll) {
return;
}
var isObject = (typeof shouldScroll === 'undefined' ? 'undefined' : _typeof(shouldScroll)) === 'object';
if (isObject && typeof shouldScroll.selector === 'string') {
var el = document.querySelector(shouldScroll.selector);
if (el) {
var offset = shouldScroll.offset && _typeof(shouldScroll.offset) === 'object' ? shouldScroll.offset : {};
offset = normalizeOffset(offset);
position = getElementPosition(el, offset);
} else if (isValidPosition(shouldScroll)) {
position = normalizePosition(shouldScroll);
}
} else if (isObject && isValidPosition(shouldScroll)) {
position = normalizePosition(shouldScroll);
}
if (position) {
window.scrollTo(position.x, position.y);
}
});
}
function saveScrollPosition() {
var key = getStateKey();
if (key) {
positionStore[key] = {
x: window.pageXOffset,
y: window.pageYOffset
};
}
}
function getScrollPosition() {
var key = getStateKey();
if (key) {
return positionStore[key];
}
}
function getElementPosition(el, offset) {
var docEl = document.documentElement;
var docRect = docEl.getBoundingClientRect();
var elRect = el.getBoundingClientRect();
return {
x: elRect.left - docRect.left - offset.x,
y: elRect.top - docRect.top - offset.y
};
}
function isValidPosition(obj) {
return isNumber(obj.x) || isNumber(obj.y);
}
function normalizePosition(obj) {
return {
x: isNumber(obj.x) ? obj.x : window.pageXOffset,
y: isNumber(obj.y) ? obj.y : window.pageYOffset
};
}
function normalizeOffset(obj) {
return {
x: isNumber(obj.x) ? obj.x : 0,
y: isNumber(obj.y) ? obj.y : 0
};
}
function isNumber(v) {
return typeof v === 'number';
}
/* */
var supportsPushState = inBrowser && function () {
var ua = window.navigator.userAgent;
if ((ua.indexOf('Android 2.') !== -1 || ua.indexOf('Android 4.0') !== -1) && ua.indexOf('Mobile Safari') !== -1 && ua.indexOf('Chrome') === -1 && ua.indexOf('Windows Phone') === -1) {
return false;
}
return window.history && 'pushState' in window.history;
}();
// use the User Timing API (if present) for more accurate key precision
var Time = inBrowser && window.performance && window.performance.now ? window.performance : Date;
var _key = genKey();
function genKey() {
return Time.now().toFixed(3);
}
function getStateKey() {
return _key;
}
function setStateKey(key) {
_key = key;
}
function pushState(url, replace) {
saveScrollPosition();
// try...catch the pushState call to get around Safari
// DOM Exception 18 where it limits to 100 pushState calls
var history = window.history;
try {
if (replace) {
history.replaceState({ key: _key }, '', url);
} else {
_key = genKey();
history.pushState({ key: _key }, '', url);
}
} catch (e) {
window.location[replace ? 'replace' : 'assign'](url);
}
}
function replaceState(url) {
pushState(url, true);
}
/* */
function runQueue(queue, fn, cb) {
var step = function step(index) {
if (index >= queue.length) {
cb();
} else {
if (queue[index]) {
fn(queue[index], function () {
step(index + 1);
});
} else {
step(index + 1);
}
}
};
step(0);
}
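// Illustrative usage sketch (example only; never invoked): runQueue calls the iterator
// for each queued "guard" and only advances once the guard invokes next().
function exampleRunQueue() {
  var order = [];
  runQueue(['a', 'b'], function (item, next) {
    order.push(item); // a guard does its work, then yields control
    next();
  }, function () {
    order.push('done');
  });
  return order; // => ['a', 'b', 'done']
}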
/* */
var History = function History(router, base) {
this.router = router;
this.base = normalizeBase(base);
// start with a route object that stands for "nowhere"
this.current = START;
this.pending = null;
this.ready = false;
this.readyCbs = [];
this.readyErrorCbs = [];
this.errorCbs = [];
};
History.prototype.listen = function listen(cb) {
this.cb = cb;
};
History.prototype.onReady = function onReady(cb, errorCb) {
if (this.ready) {
cb();
} else {
this.readyCbs.push(cb);
if (errorCb) {
this.readyErrorCbs.push(errorCb);
}
}
};
History.prototype.onError = function onError(errorCb) {
this.errorCbs.push(errorCb);
};
History.prototype.transitionTo = function transitionTo(location, onComplete, onAbort) {
var this$1 = this;
var route = this.router.match(location, this.current);
this.confirmTransition(route, function () {
this$1.updateRoute(route);
onComplete && onComplete(route);
this$1.ensureURL();
// fire ready cbs once
if (!this$1.ready) {
this$1.ready = true;
this$1.readyCbs.forEach(function (cb) {
cb(route);
});
}
}, function (err) {
if (onAbort) {
onAbort(err);
}
if (err && !this$1.ready) {
this$1.ready = true;
this$1.readyErrorCbs.forEach(function (cb) {
cb(err);
});
}
});
};
History.prototype.confirmTransition = function confirmTransition(route, onComplete, onAbort) {
var this$1 = this;
var current = this.current;
var abort = function abort(err) {
if (isError(err)) {
if (this$1.errorCbs.length) {
this$1.errorCbs.forEach(function (cb) {
cb(err);
});
} else {
warn(false, 'uncaught error during route navigation:');
console.error(err);
}
}
onAbort && onAbort(err);
};
if (isSameRoute(route, current) &&
// in case the route map has been dynamically appended to
route.matched.length === current.matched.length) {
this.ensureURL();
return abort();
}
var ref = resolveQueue(this.current.matched, route.matched);
var updated = ref.updated;
var deactivated = ref.deactivated;
var activated = ref.activated;
var queue = [].concat(
// in-component leave guards
extractLeaveGuards(deactivated),
// global before hooks
this.router.beforeHooks,
// in-component update hooks
extractUpdateHooks(updated),
// in-config enter guards
activated.map(function (m) {
return m.beforeEnter;
}),
// async components
resolveAsyncComponents(activated));
this.pending = route;
var iterator = function iterator(hook, next) {
if (this$1.pending !== route) {
return abort();
}
try {
hook(route, current, function (to) {
if (to === false || isError(to)) {
// next(false) -> abort navigation, ensure current URL
this$1.ensureURL(true);
abort(to);
} else if (typeof to === 'string' || (typeof to === 'undefined' ? 'undefined' : _typeof(to)) === 'object' && (typeof to.path === 'string' || typeof to.name === 'string')) {
// next('/') or next({ path: '/' }) -> redirect
abort();
if ((typeof to === 'undefined' ? 'undefined' : _typeof(to)) === 'object' && to.replace) {
this$1.replace(to);
} else {
this$1.push(to);
}
} else {
// confirm transition and pass on the value
next(to);
}
});
} catch (e) {
abort(e);
}
};
runQueue(queue, iterator, function () {
var postEnterCbs = [];
var isValid = function isValid() {
return this$1.current === route;
};
// wait until async components are resolved before
// extracting in-component enter guards
var enterGuards = extractEnterGuards(activated, postEnterCbs, isValid);
var queue = enterGuards.concat(this$1.router.resolveHooks);
runQueue(queue, iterator, function () {
if (this$1.pending !== route) {
return abort();
}
this$1.pending = null;
onComplete(route);
if (this$1.router.app) {
this$1.router.app.$nextTick(function () {
postEnterCbs.forEach(function (cb) {
cb();
});
});
}
});
});
};
History.prototype.updateRoute = function updateRoute(route) {
var prev = this.current;
this.current = route;
this.cb && this.cb(route);
this.router.afterHooks.forEach(function (hook) {
hook && hook(route, prev);
});
};
function normalizeBase(base) {
if (!base) {
if (inBrowser) {
// respect <base> tag
var baseEl = document.querySelector('base');
base = baseEl && baseEl.getAttribute('href') || '/';
// strip full URL origin
base = base.replace(/^https?:\/\/[^\/]+/, '');
} else {
base = '/';
}
}
// make sure there's the starting slash
if (base.charAt(0) !== '/') {
base = '/' + base;
}
// remove trailing slash
return base.replace(/\/$/, '');
}
function resolveQueue(current, next) {
var i;
var max = Math.max(current.length, next.length);
for (i = 0; i < max; i++) {
if (current[i] !== next[i]) {
break;
}
}
return {
updated: next.slice(0, i),
activated: next.slice(i),
deactivated: current.slice(i)
};
}
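// Illustrative usage sketch (example only; never invoked): resolveQueue diffs the current
// and next matched record arrays at the first index where they diverge.
function exampleResolveQueue() {
  var shared = {}, leaving = {}, entering = {};
  return resolveQueue([shared, leaving], [shared, entering]);
  // => { updated: [shared], activated: [entering], deactivated: [leaving] }
}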
function extractGuards(records, name, bind, reverse) {
var guards = flatMapComponents(records, function (def, instance, match, key) {
var guard = extractGuard(def, name);
if (guard) {
return Array.isArray(guard) ? guard.map(function (guard) {
return bind(guard, instance, match, key);
}) : bind(guard, instance, match, key);
}
});
return flatten(reverse ? guards.reverse() : guards);
}
function extractGuard(def, key) {
if (typeof def !== 'function') {
// extend now so that global mixins are applied.
def = _Vue.extend(def);
}
return def.options[key];
}
function extractLeaveGuards(deactivated) {
return extractGuards(deactivated, 'beforeRouteLeave', bindGuard, true);
}
function extractUpdateHooks(updated) {
return extractGuards(updated, 'beforeRouteUpdate', bindGuard);
}
function bindGuard(guard, instance) {
if (instance) {
return function boundRouteGuard() {
return guard.apply(instance, arguments);
};
}
}
function extractEnterGuards(activated, cbs, isValid) {
return extractGuards(activated, 'beforeRouteEnter', function (guard, _, match, key) {
return bindEnterGuard(guard, match, key, cbs, isValid);
});
}
function bindEnterGuard(guard, match, key, cbs, isValid) {
return function routeEnterGuard(to, from, next) {
return guard(to, from, function (cb) {
next(cb);
if (typeof cb === 'function') {
cbs.push(function () {
// #750
// if a router-view is wrapped with an out-in transition,
// the instance may not have been registered at this time.
// we will need to poll for registration until current route
// is no longer valid.
poll(cb, match.instances, key, isValid);
});
}
});
};
}
function poll(cb, // somehow flow cannot infer this is a function
instances, key, isValid) {
if (instances[key]) {
cb(instances[key]);
} else if (isValid()) {
setTimeout(function () {
poll(cb, instances, key, isValid);
}, 16);
}
}
function resolveAsyncComponents(matched) {
return function (to, from, next) {
var hasAsync = false;
var pending = 0;
var error = null;
flatMapComponents(matched, function (def, _, match, key) {
// if it's a function and doesn't have cid attached,
// assume it's an async component resolve function.
// we are not using Vue's default async resolving mechanism because
// we want to halt the navigation until the incoming component has been
// resolved.
if (typeof def === 'function' && def.cid === undefined) {
hasAsync = true;
pending++;
var resolve = once(function (resolvedDef) {
// save resolved on async factory in case it's used elsewhere
def.resolved = typeof resolvedDef === 'function' ? resolvedDef : _Vue.extend(resolvedDef);
match.components[key] = resolvedDef;
pending--;
if (pending <= 0) {
next();
}
});
var reject = once(function (reason) {
var msg = "Failed to resolve async component " + key + ": " + reason;
"development" !== 'production' && warn(false, msg);
if (!error) {
error = isError(reason) ? reason : new Error(msg);
next(error);
}
});
var res;
try {
res = def(resolve, reject);
} catch (e) {
reject(e);
}
if (res) {
if (typeof res.then === 'function') {
res.then(resolve, reject);
} else {
// new syntax in Vue 2.3
var comp = res.component;
if (comp && typeof comp.then === 'function') {
comp.then(resolve, reject);
}
}
}
}
});
if (!hasAsync) {
next();
}
};
}
function flatMapComponents(matched, fn) {
return flatten(matched.map(function (m) {
return Object.keys(m.components).map(function (key) {
return fn(m.components[key], m.instances[key], m, key);
});
}));
}
function flatten(arr) {
return Array.prototype.concat.apply([], arr);
}
// in Webpack 2, require.ensure now also returns a Promise
// so the resolve/reject functions may get called an extra time
// if the user uses an arrow function shorthand that happens to
// return that Promise.
function once(fn) {
var called = false;
return function () {
var args = [],
len = arguments.length;
while (len--) {
args[len] = arguments[len];
}if (called) {
return;
}
called = true;
return fn.apply(this, args);
};
}
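// Illustrative usage sketch (example only; never invoked): once() guards resolve/reject
// against the double invocation described in the comment above.
function exampleOnce() {
  var calls = 0;
  var resolveOnce = once(function () { calls++; });
  resolveOnce();
  resolveOnce(); // ignored: the wrapper has already been called
  return calls; // => 1
}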
function isError(err) {
return Object.prototype.toString.call(err).indexOf('Error') > -1;
}
/* */
var HTML5History = function (History$$1) {
function HTML5History(router, base) {
var this$1 = this;
History$$1.call(this, router, base);
var expectScroll = router.options.scrollBehavior;
if (expectScroll) {
setupScroll();
}
window.addEventListener('popstate', function (e) {
var current = this$1.current;
this$1.transitionTo(getLocation(this$1.base), function (route) {
if (expectScroll) {
handleScroll(router, route, current, true);
}
});
});
}
if (History$$1) HTML5History.__proto__ = History$$1;
HTML5History.prototype = Object.create(History$$1 && History$$1.prototype);
HTML5History.prototype.constructor = HTML5History;
HTML5History.prototype.go = function go(n) {
window.history.go(n);
};
HTML5History.prototype.push = function push(location, onComplete, onAbort) {
var this$1 = this;
var ref = this;
var fromRoute = ref.current;
this.transitionTo(location, function (route) {
pushState(cleanPath(this$1.base + route.fullPath));
handleScroll(this$1.router, route, fromRoute, false);
onComplete && onComplete(route);
}, onAbort);
};
HTML5History.prototype.replace = function replace(location, onComplete, onAbort) {
var this$1 = this;
var ref = this;
var fromRoute = ref.current;
this.transitionTo(location, function (route) {
replaceState(cleanPath(this$1.base + route.fullPath));
handleScroll(this$1.router, route, fromRoute, false);
onComplete && onComplete(route);
}, onAbort);
};
HTML5History.prototype.ensureURL = function ensureURL(push) {
if (getLocation(this.base) !== this.current.fullPath) {
var current = cleanPath(this.base + this.current.fullPath);
push ? pushState(current) : replaceState(current);
}
};
HTML5History.prototype.getCurrentLocation = function getCurrentLocation() {
return getLocation(this.base);
};
return HTML5History;
}(History);
function getLocation(base) {
var path = window.location.pathname;
if (base && path.indexOf(base) === 0) {
path = path.slice(base.length);
}
return (path || '/') + window.location.search + window.location.hash;
}
/* */
var HashHistory = function (History$$1) {
function HashHistory(router, base, fallback) {
History$$1.call(this, router, base);
// check history fallback deeplinking
if (fallback && checkFallback(this.base)) {
return;
}
ensureSlash();
}
if (History$$1) HashHistory.__proto__ = History$$1;
HashHistory.prototype = Object.create(History$$1 && History$$1.prototype);
HashHistory.prototype.constructor = HashHistory;
// this is delayed until the app mounts
// to avoid the hashchange listener being fired too early
HashHistory.prototype.setupListeners = function setupListeners() {
var this$1 = this;
window.addEventListener('hashchange', function () {
if (!ensureSlash()) {
return;
}
this$1.transitionTo(getHash(), function (route) {
replaceHash(route.fullPath);
});
});
};
HashHistory.prototype.push = function push(location, onComplete, onAbort) {
this.transitionTo(location, function (route) {
pushHash(route.fullPath);
onComplete && onComplete(route);
}, onAbort);
};
HashHistory.prototype.replace = function replace(location, onComplete, onAbort) {
this.transitionTo(location, function (route) {
replaceHash(route.fullPath);
onComplete && onComplete(route);
}, onAbort);
};
HashHistory.prototype.go = function go(n) {
window.history.go(n);
};
HashHistory.prototype.ensureURL = function ensureURL(push) {
var current = this.current.fullPath;
if (getHash() !== current) {
push ? pushHash(current) : replaceHash(current);
}
};
HashHistory.prototype.getCurrentLocation = function getCurrentLocation() {
return getHash();
};
return HashHistory;
}(History);
function checkFallback(base) {
var location = getLocation(base);
if (!/^\/#/.test(location)) {
window.location.replace(cleanPath(base + '/#' + location));
return true;
}
}
function ensureSlash() {
var path = getHash();
if (path.charAt(0) === '/') {
return true;
}
replaceHash('/' + path);
return false;
}
function getHash() {
// We can't use window.location.hash here because it's not
// consistent across browsers - Firefox will pre-decode it!
var href = window.location.href;
var index = href.indexOf('#');
return index === -1 ? '' : href.slice(index + 1);
}
function pushHash(path) {
window.location.hash = path;
}
function replaceHash(path) {
var href = window.location.href;
var i = href.indexOf('#');
var base = i >= 0 ? href.slice(0, i) : href;
window.location.replace(base + "#" + path);
}
/* */
var AbstractHistory = function (History$$1) {
function AbstractHistory(router, base) {
History$$1.call(this, router, base);
this.stack = [];
this.index = -1;
}
if (History$$1) AbstractHistory.__proto__ = History$$1;
AbstractHistory.prototype = Object.create(History$$1 && History$$1.prototype);
AbstractHistory.prototype.constructor = AbstractHistory;
AbstractHistory.prototype.push = function push(location, onComplete, onAbort) {
var this$1 = this;
this.transitionTo(location, function (route) {
this$1.stack = this$1.stack.slice(0, this$1.index + 1).concat(route);
this$1.index++;
onComplete && onComplete(route);
}, onAbort);
};
AbstractHistory.prototype.replace = function replace(location, onComplete, onAbort) {
var this$1 = this;
this.transitionTo(location, function (route) {
this$1.stack = this$1.stack.slice(0, this$1.index).concat(route);
onComplete && onComplete(route);
}, onAbort);
};
AbstractHistory.prototype.go = function go(n) {
var this$1 = this;
var targetIndex = this.index + n;
if (targetIndex < 0 || targetIndex >= this.stack.length) {
return;
}
var route = this.stack[targetIndex];
this.confirmTransition(route, function () {
this$1.index = targetIndex;
this$1.updateRoute(route);
});
};
AbstractHistory.prototype.getCurrentLocation = function getCurrentLocation() {
var current = this.stack[this.stack.length - 1];
return current ? current.fullPath : '/';
};
AbstractHistory.prototype.ensureURL = function ensureURL() {
// noop
};
return AbstractHistory;
}(History);
/* */
var VueRouter = function VueRouter(options) {
if (options === void 0) options = {};
this.app = null;
this.apps = [];
this.options = options;
this.beforeHooks = [];
this.resolveHooks = [];
this.afterHooks = [];
this.matcher = createMatcher(options.routes || [], this);
var mode = options.mode || 'hash';
this.fallback = mode === 'history' && !supportsPushState && options.fallback !== false;
if (this.fallback) {
mode = 'hash';
}
if (!inBrowser) {
mode = 'abstract';
}
this.mode = mode;
switch (mode) {
case 'history':
this.history = new HTML5History(this, options.base);
break;
case 'hash':
this.history = new HashHistory(this, options.base, this.fallback);
break;
case 'abstract':
this.history = new AbstractHistory(this, options.base);
break;
default:
if (true) {
assert(false, "invalid mode: " + mode);
}
}
};
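// Illustrative usage sketch (example only; never invoked): constructing a router with the
// options handled by the constructor above. The component is a placeholder object; in an
// application this would follow `Vue.use(VueRouter)`.
function exampleCreateRouter() {
  var Home = { render: function (h) { return h('div', 'home'); } };
  return new VueRouter({
    mode: 'hash',
    routes: [{ path: '/', component: Home }]
  });
}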
var prototypeAccessors = { currentRoute: {} };
VueRouter.prototype.match = function match(raw, current, redirectedFrom) {
return this.matcher.match(raw, current, redirectedFrom);
};
prototypeAccessors.currentRoute.get = function () {
return this.history && this.history.current;
};
VueRouter.prototype.init = function init(app /* Vue component instance */) {
var this$1 = this;
"development" !== 'production' && assert(install.installed, "not installed. Make sure to call `Vue.use(VueRouter)` " + "before creating root instance.");
this.apps.push(app);
// main app already initialized.
if (this.app) {
return;
}
this.app = app;
var history = this.history;
if (history instanceof HTML5History) {
history.transitionTo(history.getCurrentLocation());
} else if (history instanceof HashHistory) {
var setupHashListener = function setupHashListener() {
history.setupListeners();
};
history.transitionTo(history.getCurrentLocation(), setupHashListener, setupHashListener);
}
history.listen(function (route) {
this$1.apps.forEach(function (app) {
app._route = route;
});
});
};
VueRouter.prototype.beforeEach = function beforeEach(fn) {
return registerHook(this.beforeHooks, fn);
};
VueRouter.prototype.beforeResolve = function beforeResolve(fn) {
return registerHook(this.resolveHooks, fn);
};
VueRouter.prototype.afterEach = function afterEach(fn) {
return registerHook(this.afterHooks, fn);
};
VueRouter.prototype.onReady = function onReady(cb, errorCb) {
this.history.onReady(cb, errorCb);
};
VueRouter.prototype.onError = function onError(errorCb) {
this.history.onError(errorCb);
};
VueRouter.prototype.push = function push(location, onComplete, onAbort) {
this.history.push(location, onComplete, onAbort);
};
VueRouter.prototype.replace = function replace(location, onComplete, onAbort) {
this.history.replace(location, onComplete, onAbort);
};
VueRouter.prototype.go = function go(n) {
this.history.go(n);
};
VueRouter.prototype.back = function back() {
this.go(-1);
};
VueRouter.prototype.forward = function forward() {
this.go(1);
};
VueRouter.prototype.getMatchedComponents = function getMatchedComponents(to) {
var route = to ? to.matched ? to : this.resolve(to).route : this.currentRoute;
if (!route) {
return [];
}
return [].concat.apply([], route.matched.map(function (m) {
return Object.keys(m.components).map(function (key) {
return m.components[key];
});
}));
};
VueRouter.prototype.resolve = function resolve(to, current, append) {
var location = normalizeLocation(to, current || this.history.current, append, this);
var route = this.match(location, current);
var fullPath = route.redirectedFrom || route.fullPath;
var base = this.history.base;
var href = createHref(base, fullPath, this.mode);
return {
location: location,
route: route,
href: href,
// for backwards compat
normalizedTo: location,
resolved: route
};
};
VueRouter.prototype.addRoutes = function addRoutes(routes) {
this.matcher.addRoutes(routes);
if (this.history.current !== START) {
this.history.transitionTo(this.history.getCurrentLocation());
}
};
Object.defineProperties(VueRouter.prototype, prototypeAccessors);
function registerHook(list, fn) {
list.push(fn);
return function () {
var i = list.indexOf(fn);
if (i > -1) {
list.splice(i, 1);
}
};
}
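// Illustrative usage sketch (example only; never invoked): registerHook backs
// beforeEach/beforeResolve/afterEach and returns an unregister function.
function exampleRegisterHook() {
  var hooks = [];
  var unregister = registerHook(hooks, function guard(to, from, next) { next(); });
  unregister();        // removes the guard again
  return hooks.length; // => 0
}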
function createHref(base, fullPath, mode) {
var path = mode === 'hash' ? '#' + fullPath : fullPath;
return base ? cleanPath(base + '/' + path) : path;
}
VueRouter.install = install;
VueRouter.version = '2.6.0';
if (inBrowser && window.Vue) {
window.Vue.use(VueRouter);
}
/* harmony default export */__webpack_exports__["a"] = VueRouter;
/***/
},
/***/"./node_modules/vue/dist/vue.common.js":
/***/function node_modulesVueDistVueCommonJs(module, exports, __webpack_require__) {
"use strict";
/* WEBPACK VAR INJECTION */
(function (global) {
/*!
* Vue.js v2.3.4
* (c) 2014-2017 Evan You
* Released under the MIT License.
*/
/* */
// these helpers produce better VM code in JS engines due to their
// explicitness and function inlining
function isUndef(v) {
return v === undefined || v === null;
}
function isDef(v) {
return v !== undefined && v !== null;
}
function isTrue(v) {
return v === true;
}
function isFalse(v) {
return v === false;
}
/**
* Check if value is primitive
*/
function isPrimitive(value) {
return typeof value === 'string' || typeof value === 'number';
}
/**
* Quick object check - this is primarily used to tell
* Objects from primitive values when we know the value
* is a JSON-compliant type.
*/
function isObject(obj) {
return obj !== null && (typeof obj === 'undefined' ? 'undefined' : _typeof(obj)) === 'object';
}
var _toString = Object.prototype.toString;
/**
* Strict object type check. Only returns true
* for plain JavaScript objects.
*/
function isPlainObject(obj) {
return _toString.call(obj) === '[object Object]';
}
function isRegExp(v) {
return _toString.call(v) === '[object RegExp]';
}
/**
* Convert a value to a string that is actually rendered.
*/
function toString(val) {
return val == null ? '' : (typeof val === 'undefined' ? 'undefined' : _typeof(val)) === 'object' ? JSON.stringify(val, null, 2) : String(val);
}
/**
 * Convert an input value to a number for persistence.
 * If the conversion fails, return the original string.
*/
function toNumber(val) {
var n = parseFloat(val);
return isNaN(n) ? val : n;
}
/**
* Make a map and return a function for checking if a key
* is in that map.
*/
function makeMap(str, expectsLowerCase) {
var map = Object.create(null);
var list = str.split(',');
for (var i = 0; i < list.length; i++) {
map[list[i]] = true;
}
return expectsLowerCase ? function (val) {
return map[val.toLowerCase()];
} : function (val) {
return map[val];
};
}
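// Illustrative usage sketch (example only; never invoked): a lookup function produced by
// makeMap, with case-insensitive matching enabled.
function exampleMakeMap() {
  var isFruit = makeMap('apple,banana', true);
  return isFruit('Apple'); // => true
}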
/**
* Check if a tag is a built-in tag.
*/
var isBuiltInTag = makeMap('slot,component', true);
/**
* Remove an item from an array
*/
function remove(arr, item) {
if (arr.length) {
var index = arr.indexOf(item);
if (index > -1) {
return arr.splice(index, 1);
}
}
}
/**
* Check whether the object has the property.
*/
var hasOwnProperty = Object.prototype.hasOwnProperty;
function hasOwn(obj, key) {
return hasOwnProperty.call(obj, key);
}
/**
* Create a cached version of a pure function.
*/
function cached(fn) {
var cache = Object.create(null);
return function cachedFn(str) {
var hit = cache[str];
return hit || (cache[str] = fn(str));
};
}
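// Illustrative usage sketch (example only; never invoked): cached() memoizes a pure string
// function, as camelize/capitalize/hyphenate below rely on.
function exampleCached() {
  var calls = 0;
  var upper = cached(function (str) { calls++; return str.toUpperCase(); });
  upper('vue');
  upper('vue'); // second call is served from the cache
  return calls; // => 1
}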
/**
* Camelize a hyphen-delimited string.
*/
var camelizeRE = /-(\w)/g;
var camelize = cached(function (str) {
return str.replace(camelizeRE, function (_, c) {
return c ? c.toUpperCase() : '';
});
});
/**
* Capitalize a string.
*/
var capitalize = cached(function (str) {
return str.charAt(0).toUpperCase() + str.slice(1);
});
/**
* Hyphenate a camelCase string.
*/
var hyphenateRE = /([^-])([A-Z])/g;
var hyphenate = cached(function (str) {
return str.replace(hyphenateRE, '$1-$2').replace(hyphenateRE, '$1-$2').toLowerCase();
});
/**
* Simple bind, faster than native
*/
function bind(fn, ctx) {
function boundFn(a) {
var l = arguments.length;
return l ? l > 1 ? fn.apply(ctx, arguments) : fn.call(ctx, a) : fn.call(ctx);
}
// record original fn length
boundFn._length = fn.length;
return boundFn;
}
/**
* Convert an Array-like object to a real Array.
*/
function toArray(list, start) {
start = start || 0;
var i = list.length - start;
var ret = new Array(i);
while (i--) {
ret[i] = list[i + start];
}
return ret;
}
/**
* Mix properties into target object.
*/
function extend(to, _from) {
for (var key in _from) {
to[key] = _from[key];
}
return to;
}
/**
* Merge an Array of Objects into a single Object.
*/
function toObject(arr) {
var res = {};
for (var i = 0; i < arr.length; i++) {
if (arr[i]) {
extend(res, arr[i]);
}
}
return res;
}
/**
* Perform no operation.
*/
function noop() {}
/**
* Always return false.
*/
var no = function no() {
return false;
};
/**
* Return same value
*/
var identity = function identity(_) {
return _;
};
/**
* Generate a static keys string from compiler modules.
*/
function genStaticKeys(modules) {
return modules.reduce(function (keys, m) {
return keys.concat(m.staticKeys || []);
}, []).join(',');
}
/**
* Check if two values are loosely equal - that is,
* if they are plain objects, do they have the same shape?
*/
function looseEqual(a, b) {
var isObjectA = isObject(a);
var isObjectB = isObject(b);
if (isObjectA && isObjectB) {
try {
return JSON.stringify(a) === JSON.stringify(b);
} catch (e) {
// possible circular reference
return a === b;
}
} else if (!isObjectA && !isObjectB) {
return String(a) === String(b);
} else {
return false;
}
}
function looseIndexOf(arr, val) {
for (var i = 0; i < arr.length; i++) {
if (looseEqual(arr[i], val)) {
return i;
}
}
return -1;
}
/**
* Ensure a function is called only once.
*/
function once(fn) {
var called = false;
return function () {
if (!called) {
called = true;
fn.apply(this, arguments);
}
};
}
var SSR_ATTR = 'data-server-rendered';
var ASSET_TYPES = ['component', 'directive', 'filter'];
var LIFECYCLE_HOOKS = ['beforeCreate', 'created', 'beforeMount', 'mounted', 'beforeUpdate', 'updated', 'beforeDestroy', 'destroyed', 'activated', 'deactivated'];
/* */
var config = {
/**
* Option merge strategies (used in core/util/options)
*/
optionMergeStrategies: Object.create(null),
/**
* Whether to suppress warnings.
*/
silent: false,
/**
* Show production mode tip message on boot?
*/
productionTip: "development" !== 'production',
/**
* Whether to enable devtools
*/
devtools: "development" !== 'production',
/**
* Whether to record perf
*/
performance: false,
/**
* Error handler for watcher errors
*/
errorHandler: null,
/**
* Ignore certain custom elements
*/
ignoredElements: [],
/**
* Custom user key aliases for v-on
*/
keyCodes: Object.create(null),
/**
* Check if a tag is reserved so that it cannot be registered as a
* component. This is platform-dependent and may be overwritten.
*/
isReservedTag: no,
/**
* Check if an attribute is reserved so that it cannot be used as a component
* prop. This is platform-dependent and may be overwritten.
*/
isReservedAttr: no,
/**
* Check if a tag is an unknown element.
* Platform-dependent.
*/
isUnknownElement: no,
/**
* Get the namespace of an element
*/
getTagNamespace: noop,
/**
* Parse the real tag name for the specific platform.
*/
parsePlatformTagName: identity,
/**
* Check if an attribute must be bound using property, e.g. value
* Platform-dependent.
*/
mustUseProp: no,
/**
* Exposed for legacy reasons
*/
_lifecycleHooks: LIFECYCLE_HOOKS
};
/* */
var emptyObject = Object.freeze({});
/**
* Check if a string starts with $ or _
*/
function isReserved(str) {
var c = (str + '').charCodeAt(0);
return c === 0x24 || c === 0x5F;
}
/**
* Define a property.
*/
function def(obj, key, val, enumerable) {
Object.defineProperty(obj, key, {
value: val,
enumerable: !!enumerable,
writable: true,
configurable: true
});
}
/**
* Parse simple path.
*/
var bailRE = /[^\w.$]/;
function parsePath(path) {
if (bailRE.test(path)) {
return;
}
var segments = path.split('.');
return function (obj) {
for (var i = 0; i < segments.length; i++) {
if (!obj) {
return;
}
obj = obj[segments[i]];
}
return obj;
};
}
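// Illustrative usage sketch (example only; never invoked): a getter produced by parsePath
// for a simple dot-delimited path; the object shape is an arbitrary example.
function exampleParsePath() {
  var getName = parsePath('user.profile.name');
  return getName && getName({ user: { profile: { name: 'Evan' } } }); // => 'Evan'
}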
/* */
var warn = noop;
var tip = noop;
var formatComponentName = null; // work around flow check
if (true) {
var hasConsole = typeof console !== 'undefined';
var classifyRE = /(?:^|[-_])(\w)/g;
var classify = function classify(str) {
return str.replace(classifyRE, function (c) {
return c.toUpperCase();
}).replace(/[-_]/g, '');
};
warn = function warn(msg, vm) {
if (hasConsole && !config.silent) {
console.error("[Vue warn]: " + msg + (vm ? generateComponentTrace(vm) : ''));
}
};
tip = function tip(msg, vm) {
if (hasConsole && !config.silent) {
console.warn("[Vue tip]: " + msg + (vm ? generateComponentTrace(vm) : ''));
}
};
formatComponentName = function formatComponentName(vm, includeFile) {
if (vm.$root === vm) {
return '<Root>';
}
var name = typeof vm === 'string' ? vm : typeof vm === 'function' && vm.options ? vm.options.name : vm._isVue ? vm.$options.name || vm.$options._componentTag : vm.name;
var file = vm._isVue && vm.$options.__file;
if (!name && file) {
var match = file.match(/([^/\\]+)\.vue$/);
name = match && match[1];
}
return (name ? "<" + classify(name) + ">" : "<Anonymous>") + (file && includeFile !== false ? " at " + file : '');
};
var repeat = function repeat(str, n) {
var res = '';
while (n) {
if (n % 2 === 1) {
res += str;
}
if (n > 1) {
str += str;
}
n >>= 1;
}
return res;
};
var generateComponentTrace = function generateComponentTrace(vm) {
if (vm._isVue && vm.$parent) {
var tree = [];
var currentRecursiveSequence = 0;
while (vm) {
if (tree.length > 0) {
var last = tree[tree.length - 1];
if (last.constructor === vm.constructor) {
currentRecursiveSequence++;
vm = vm.$parent;
continue;
} else if (currentRecursiveSequence > 0) {
tree[tree.length - 1] = [last, currentRecursiveSequence];
currentRecursiveSequence = 0;
}
}
tree.push(vm);
vm = vm.$parent;
}
return '\n\nfound in\n\n' + tree.map(function (vm, i) {
return "" + (i === 0 ? '---> ' : repeat(' ', 5 + i * 2)) + (Array.isArray(vm) ? formatComponentName(vm[0]) + "... (" + vm[1] + " recursive calls)" : formatComponentName(vm));
}).join('\n');
} else {
return "\n\n(found in " + formatComponentName(vm) + ")";
}
};
}
/* */
function handleError(err, vm, info) {
if (config.errorHandler) {
config.errorHandler.call(null, err, vm, info);
} else {
if (true) {
warn("Error in " + info + ": \"" + err.toString() + "\"", vm);
}
/* istanbul ignore else */
if (inBrowser && typeof console !== 'undefined') {
console.error(err);
} else {
throw err;
}
}
}
/* */
/* globals MutationObserver */
// can we use __proto__?
var hasProto = '__proto__' in {};
// Browser environment sniffing
var inBrowser = typeof window !== 'undefined';
var UA = inBrowser && window.navigator.userAgent.toLowerCase();
var isIE = UA && /msie|trident/.test(UA);
var isIE9 = UA && UA.indexOf('msie 9.0') > 0;
var isEdge = UA && UA.indexOf('edge/') > 0;
var isAndroid = UA && UA.indexOf('android') > 0;
var isIOS = UA && /iphone|ipad|ipod|ios/.test(UA);
var isChrome = UA && /chrome\/\d+/.test(UA) && !isEdge;
var supportsPassive = false;
if (inBrowser) {
try {
var opts = {};
Object.defineProperty(opts, 'passive', {
get: function get() {
/* istanbul ignore next */
supportsPassive = true;
}
}); // https://github.com/facebook/flow/issues/285
window.addEventListener('test-passive', null, opts);
} catch (e) {}
}
// this needs to be lazy-evaled because vue may be required before
// vue-server-renderer can set VUE_ENV
var _isServer;
var isServerRendering = function isServerRendering() {
if (_isServer === undefined) {
/* istanbul ignore if */
if (!inBrowser && typeof global !== 'undefined') {
// detect presence of vue-server-renderer and avoid
// Webpack shimming the process
_isServer = global['process'].env.VUE_ENV === 'server';
} else {
_isServer = false;
}
}
return _isServer;
};
// detect devtools
var devtools = inBrowser && window.__VUE_DEVTOOLS_GLOBAL_HOOK__;
/* istanbul ignore next */
function isNative(Ctor) {
return typeof Ctor === 'function' && /native code/.test(Ctor.toString());
}
var hasSymbol = typeof Symbol !== 'undefined' && isNative(Symbol) && typeof Reflect !== 'undefined' && isNative(Reflect.ownKeys);
/**
* Defer a task to execute it asynchronously.
*/
var nextTick = function () {
var callbacks = [];
var pending = false;
var timerFunc;
function nextTickHandler() {
pending = false;
var copies = callbacks.slice(0);
callbacks.length = 0;
for (var i = 0; i < copies.length; i++) {
copies[i]();
}
}
// the nextTick behavior leverages the microtask queue, which can be accessed
// via either native Promise.then or MutationObserver.
// MutationObserver has wider support, however it is seriously bugged in
// UIWebView in iOS >= 9.3.3 when triggered in touch event handlers. It
// completely stops working after triggering a few times... so, if native
// Promise is available, we will use it:
/* istanbul ignore if */
if (typeof Promise !== 'undefined' && isNative(Promise)) {
var p = Promise.resolve();
var logError = function logError(err) {
console.error(err);
};
timerFunc = function timerFunc() {
p.then(nextTickHandler).catch(logError);
// in problematic UIWebViews, Promise.then doesn't completely break, but
// it can get stuck in a weird state where callbacks are pushed into the
// microtask queue but the queue isn't being flushed, until the browser
// needs to do some other work, e.g. handle a timer. Therefore we can
// "force" the microtask queue to be flushed by adding an empty timer.
if (isIOS) {
setTimeout(noop);
}
};
} else if (typeof MutationObserver !== 'undefined' && (isNative(MutationObserver) ||
// PhantomJS and iOS 7.x
MutationObserver.toString() === '[object MutationObserverConstructor]')) {
// use MutationObserver where native Promise is not available,
// e.g. PhantomJS IE11, iOS7, Android 4.4
var counter = 1;
var observer = new MutationObserver(nextTickHandler);
var textNode = document.createTextNode(String(counter));
observer.observe(textNode, {
characterData: true
});
timerFunc = function timerFunc() {
counter = (counter + 1) % 2;
textNode.data = String(counter);
};
} else {
// fallback to setTimeout
/* istanbul ignore next */
timerFunc = function timerFunc() {
setTimeout(nextTickHandler, 0);
};
}
return function queueNextTick(cb, ctx) {
var _resolve;
callbacks.push(function () {
if (cb) {
try {
cb.call(ctx);
} catch (e) {
handleError(e, ctx, 'nextTick');
}
} else if (_resolve) {
_resolve(ctx);
}
});
if (!pending) {
pending = true;
timerFunc();
}
if (!cb && typeof Promise !== 'undefined') {
return new Promise(function (resolve, reject) {
_resolve = resolve;
});
}
};
}();
var _Set;
/* istanbul ignore if */
if (typeof Set !== 'undefined' && isNative(Set)) {
// use native Set when available.
_Set = Set;
} else {
// a non-standard Set polyfill that only works with primitive keys.
_Set = function () {
function Set() {
this.set = Object.create(null);
}
Set.prototype.has = function has(key) {
return this.set[key] === true;
};
Set.prototype.add = function add(key) {
this.set[key] = true;
};
Set.prototype.clear = function clear() {
this.set = Object.create(null);
};
return Set;
}();
}
/* */
var uid = 0;
/**
* A dep is an observable that can have multiple
* directives subscribing to it.
*/
var Dep = function Dep() {
this.id = uid++;
this.subs = [];
};
Dep.prototype.addSub = function addSub(sub) {
this.subs.push(sub);
};
Dep.prototype.removeSub = function removeSub(sub) {
remove(this.subs, sub);
};
Dep.prototype.depend = function depend() {
if (Dep.target) {
Dep.target.addDep(this);
}
};
Dep.prototype.notify = function notify() {
// stabilize the subscriber list first
var subs = this.subs.slice();
for (var i = 0, l = subs.length; i < l; i++) {
subs[i].update();
}
};
// the current target watcher being evaluated.
// this is globally unique because there could be only one
// watcher being evaluated at any time.
Dep.target = null;
var targetStack = [];
function pushTarget(_target) {
if (Dep.target) {
targetStack.push(Dep.target);
}
Dep.target = _target;
}
function popTarget() {
Dep.target = targetStack.pop();
}
/*
* not type checking this file because flow doesn't play well with
* dynamically accessing methods on Array prototype
*/
var arrayProto = Array.prototype;
var arrayMethods = Object.create(arrayProto);['push', 'pop', 'shift', 'unshift', 'splice', 'sort', 'reverse'].forEach(function (method) {
// cache original method
var original = arrayProto[method];
def(arrayMethods, method, function mutator() {
var arguments$1 = arguments;
// avoid leaking arguments:
// http://jsperf.com/closure-with-arguments
var i = arguments.length;
var args = new Array(i);
while (i--) {
args[i] = arguments$1[i];
}
var result = original.apply(this, args);
var ob = this.__ob__;
var inserted;
switch (method) {
case 'push':
inserted = args;
break;
case 'unshift':
inserted = args;
break;
case 'splice':
inserted = args.slice(2);
break;
}
if (inserted) {
ob.observeArray(inserted);
}
// notify change
ob.dep.notify();
return result;
});
});
/* */
var arrayKeys = Object.getOwnPropertyNames(arrayMethods);
/**
* By default, when a reactive property is set, the new value is
* also converted to become reactive. However when passing down props,
* we don't want to force conversion because the value may be a nested value
* under a frozen data structure. Converting it would defeat the optimization.
*/
var observerState = {
shouldConvert: true,
isSettingProps: false
};
/**
 * Observer class that is attached to each observed
 * object. Once attached, the observer converts the target
 * object's property keys into getter/setters that
 * collect dependencies and dispatch updates.
*/
var Observer = function Observer(value) {
this.value = value;
this.dep = new Dep();
this.vmCount = 0;
def(value, '__ob__', this);
if (Array.isArray(value)) {
var augment = hasProto ? protoAugment : copyAugment;
augment(value, arrayMethods, arrayKeys);
this.observeArray(value);
} else {
this.walk(value);
}
};
/**
 * Walk through all properties and convert them into
 * getter/setters. This method should only be called when
 * the value type is Object.
*/
Observer.prototype.walk = function walk(obj) {
var keys = Object.keys(obj);
for (var i = 0; i < keys.length; i++) {
defineReactive$$1(obj, keys[i], obj[keys[i]]);
}
};
/**
* Observe a list of Array items.
*/
Observer.prototype.observeArray = function observeArray(items) {
for (var i = 0, l = items.length; i < l; i++) {
observe(items[i]);
}
};
// helpers
/**
 * Augment a target Object or Array by intercepting
* the prototype chain using __proto__
*/
function protoAugment(target, src) {
/* eslint-disable no-proto */
target.__proto__ = src;
/* eslint-enable no-proto */
}
/**
 * Augment a target Object or Array by defining
* hidden properties.
*/
/* istanbul ignore next */
function copyAugment(target, src, keys) {
for (var i = 0, l = keys.length; i < l; i++) {
var key = keys[i];
def(target, key, src[key]);
}
}
/**
 * Attempt to create an observer instance for a value.
 * Returns the new observer if the value was successfully observed,
 * or the existing observer if the value already has one.
*/
function observe(value, asRootData) {
if (!isObject(value)) {
return;
}
var ob;
if (hasOwn(value, '__ob__') && value.__ob__ instanceof Observer) {
ob = value.__ob__;
} else if (observerState.shouldConvert && !isServerRendering() && (Array.isArray(value) || isPlainObject(value)) && Object.isExtensible(value) && !value._isVue) {
ob = new Observer(value);
}
if (asRootData && ob) {
ob.vmCount++;
}
return ob;
}
/**
* Define a reactive property on an Object.
*/
function defineReactive$$1(obj, key, val, customSetter) {
var dep = new Dep();
var property = Object.getOwnPropertyDescriptor(obj, key);
if (property && property.configurable === false) {
return;
}
// cater for pre-defined getter/setters
var getter = property && property.get;
var setter = property && property.set;
var childOb = observe(val);
Object.defineProperty(obj, key, {
enumerable: true,
configurable: true,
get: function reactiveGetter() {
var value = getter ? getter.call(obj) : val;
if (Dep.target) {
dep.depend();
if (childOb) {
childOb.dep.depend();
}
if (Array.isArray(value)) {
dependArray(value);
}
}
return value;
},
set: function reactiveSetter(newVal) {
var value = getter ? getter.call(obj) : val;
/* eslint-disable no-self-compare */
if (newVal === value || newVal !== newVal && value !== value) {
return;
}
/* eslint-enable no-self-compare */
if ("development" !== 'production' && customSetter) {
customSetter();
}
if (setter) {
setter.call(obj, newVal);
} else {
val = newVal;
}
childOb = observe(newVal);
dep.notify();
}
});
}
/**
* Set a property on an object. Adds the new property and
* triggers change notification if the property doesn't
* already exist.
*/
function set(target, key, val) {
if (Array.isArray(target) && typeof key === 'number') {
target.length = Math.max(target.length, key);
target.splice(key, 1, val);
return val;
}
if (hasOwn(target, key)) {
target[key] = val;
return val;
}
var ob = target.__ob__;
if (target._isVue || ob && ob.vmCount) {
"development" !== 'production' && warn('Avoid adding reactive properties to a Vue instance or its root $data ' + 'at runtime - declare it upfront in the data option.');
return val;
}
if (!ob) {
target[key] = val;
return val;
}
defineReactive$$1(ob.value, key, val);
ob.dep.notify();
return val;
}
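// Illustrative usage sketch (example only; never invoked): set() on an observed object
// versus an array index, matching the comment above. The state object is an arbitrary example.
function exampleSet() {
  var state = { list: ['a'] };
  observe(state);
  set(state.list, 1, 'b');     // array index: routed through splice so the change is observed
  set(state, 'label', 'demo'); // new key: defined reactively, then dep.notify()
  return state.label;          // => 'demo'
}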
/**
* Delete a property and trigger change if necessary.
*/
function del(target, key) {
if (Array.isArray(target) && typeof key === 'number') {
target.splice(key, 1);
return;
}
var ob = target.__ob__;
if (target._isVue || ob && ob.vmCount) {
"development" !== 'production' && warn('Avoid deleting properties on a Vue instance or its root $data ' + '- just set it to null.');
return;
}
if (!hasOwn(target, key)) {
return;
}
delete target[key];
if (!ob) {
return;
}
ob.dep.notify();
}
/**
* Collect dependencies on array elements when the array is touched, since
* we cannot intercept array element access like property getters.
*/
function dependArray(value) {
for (var e = void 0, i = 0, l = value.length; i < l; i++) {
e = value[i];
e && e.__ob__ && e.__ob__.dep.depend();
if (Array.isArray(e)) {
dependArray(e);
}
}
}
/* */
/**
* Option overwriting strategies are functions that handle
* how to merge a parent option value and a child option
* value into the final value.
*/
var strats = config.optionMergeStrategies;
/**
* Options with restrictions
*/
if (true) {
strats.el = strats.propsData = function (parent, child, vm, key) {
if (!vm) {
warn("option \"" + key + "\" can only be used during instance " + 'creation with the `new` keyword.');
}
return defaultStrat(parent, child);
};
}
/**
* Helper that recursively merges two data objects together.
*/
function mergeData(to, from) {
if (!from) {
return to;
}
var key, toVal, fromVal;
var keys = Object.keys(from);
for (var i = 0; i < keys.length; i++) {
key = keys[i];
toVal = to[key];
fromVal = from[key];
if (!hasOwn(to, key)) {
set(to, key, fromVal);
} else if (isPlainObject(toVal) && isPlainObject(fromVal)) {
mergeData(toVal, fromVal);
}
}
return to;
}
/**
* Data
*/
strats.data = function (parentVal, childVal, vm) {
if (!vm) {
// in a Vue.extend merge, both should be functions
if (!childVal) {
return parentVal;
}
if (typeof childVal !== 'function') {
"development" !== 'production' && warn('The "data" option should be a function ' + 'that returns a per-instance value in component ' + 'definitions.', vm);
return parentVal;
}
if (!parentVal) {
return childVal;
}
// when parentVal & childVal are both present,
// we need to return a function that returns the
// merged result of both functions... no need to
// check if parentVal is a function here because
// it has to be a function to pass previous merges.
return function mergedDataFn() {
return mergeData(childVal.call(this), parentVal.call(this));
};
} else if (parentVal || childVal) {
return function mergedInstanceDataFn() {
// instance merge
var instanceData = typeof childVal === 'function' ? childVal.call(vm) : childVal;
var defaultData = typeof parentVal === 'function' ? parentVal.call(vm) : undefined;
if (instanceData) {
return mergeData(instanceData, defaultData);
} else {
return defaultData;
}
};
}
};
/**
* Hooks and props are merged as arrays.
*/
function mergeHook(parentVal, childVal) {
return childVal
  ? parentVal
    ? parentVal.concat(childVal)
    : Array.isArray(childVal)
      ? childVal
      : [childVal]
  : parentVal;
}
LIFECYCLE_HOOKS.forEach(function (hook) {
strats[hook] = mergeHook;
});
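// Resulting strategy for every lifecycle hook (fnA/fnB are arbitrary placeholder functions):
//   strats.created(undefined, fnB)   // -> [fnB]
//   strats.created([fnA], fnB)       // -> [fnA, fnB]   (mixin/parent hooks run first)
//   strats.created([fnA], undefined) // -> [fnA]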
/**
* Assets
*
* When a vm is present (instance creation), we need to do
* a three-way merge between constructor options, instance
* options and parent options.
*/
function mergeAssets(parentVal, childVal) {
var res = Object.create(parentVal || null);
return childVal ? extend(res, childVal) : res;
}
ASSET_TYPES.forEach(function (type) {
strats[type + 's'] = mergeAssets;
});
/**
* Watchers.
*
* Watchers hashes should not overwrite one
* another, so we merge them as arrays.
*/
strats.watch = function (parentVal, childVal) {
/* istanbul ignore if */
if (!childVal) {
return Object.create(parentVal || null);
}
if (!parentVal) {
return childVal;
}
var ret = {};
extend(ret, parentVal);
for (var key in childVal) {
var parent = ret[key];
var child = childVal[key];
if (parent && !Array.isArray(parent)) {
parent = [parent];
}
ret[key] = parent ? parent.concat(child) : [child];
}
return ret;
};
/**
* Other object hashes.
*/
strats.props = strats.methods = strats.computed = function (parentVal, childVal) {
if (!childVal) {
return Object.create(parentVal || null);
}
if (!parentVal) {
return childVal;
}
var ret = Object.create(null);
extend(ret, parentVal);
extend(ret, childVal);
return ret;
};
/**
* Default strategy.
*/
var defaultStrat = function defaultStrat(parentVal, childVal) {
return childVal === undefined ? parentVal : childVal;
};
/**
* Validate component names
*/
function checkComponents(options) {
for (var key in options.components) {
var lower = key.toLowerCase();
if (isBuiltInTag(lower) || config.isReservedTag(lower)) {
warn('Do not use built-in or reserved HTML elements as component ' + 'id: ' + key);
}
}
}
/**
* Ensure all props option syntax are normalized into the
* Object-based format.
*/
function normalizeProps(options) {
var props = options.props;
if (!props) {
return;
}
var res = {};
var i, val, name;
if (Array.isArray(props)) {
i = props.length;
while (i--) {
val = props[i];
if (typeof val === 'string') {
name = camelize(val);
res[name] = { type: null };
} else if (true) {
warn('props must be strings when using array syntax.');
}
}
} else if (isPlainObject(props)) {
for (var key in props) {
val = props[key];
name = camelize(key);
res[name] = isPlainObject(val) ? val : { type: val };
}
}
options.props = res;
}
/**
* Normalize raw function directives into object format.
*/
function normalizeDirectives(options) {
var dirs = options.directives;
if (dirs) {
for (var key in dirs) {
var def = dirs[key];
if (typeof def === 'function') {
dirs[key] = { bind: def, update: def };
}
}
}
}
/**
* Merge two option objects into a new one.
* Core utility used in both instantiation and inheritance.
*/
function mergeOptions(parent, child, vm) {
if (true) {
checkComponents(child);
}
if (typeof child === 'function') {
child = child.options;
}
normalizeProps(child);
normalizeDirectives(child);
var extendsFrom = child.extends;
if (extendsFrom) {
parent = mergeOptions(parent, extendsFrom, vm);
}
if (child.mixins) {
for (var i = 0, l = child.mixins.length; i < l; i++) {
parent = mergeOptions(parent, child.mixins[i], vm);
}
}
var options = {};
var key;
for (key in parent) {
mergeField(key);
}
for (key in child) {
if (!hasOwn(parent, key)) {
mergeField(key);
}
}
function mergeField(key) {
var strat = strats[key] || defaultStrat;
options[key] = strat(parent[key], child[key], vm, key);
}
return options;
}
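// Rough sketch of mergeOptions (hookA/hookB and the method bodies are placeholders):
//   var merged = mergeOptions(
//     { created: [hookA], methods: { foo: function () {} } },  // resolved parent options
//     { created: hookB,   methods: { bar: function () {} } }   // raw component options
//   );
//   // merged.created -> [hookA, hookB]
//   // merged.methods -> has both foo and bar; on a key conflict the child wins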
/**
* Resolve an asset.
* This function is used because child instances need access
* to assets defined in its ancestor chain.
*/
function resolveAsset(options, type, id, warnMissing) {
/* istanbul ignore if */
if (typeof id !== 'string') {
return;
}
var assets = options[type];
// check local registration variations first
if (hasOwn(assets, id)) {
return assets[id];
}
var camelizedId = camelize(id);
if (hasOwn(assets, camelizedId)) {
return assets[camelizedId];
}
var PascalCaseId = capitalize(camelizedId);
if (hasOwn(assets, PascalCaseId)) {
return assets[PascalCaseId];
}
// fallback to prototype chain
var res = assets[id] || assets[camelizedId] || assets[PascalCaseId];
if ("development" !== 'production' && warnMissing && !res) {
warn('Failed to resolve ' + type.slice(0, -1) + ': ' + id, options);
}
return res;
}
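// Lookup order sketch for resolveAsset: exact id, then camelCase, then PascalCase, then the
// options prototype chain (globally registered assets). E.g. with components: { MyWidget: ... }
//   resolveAsset(options, 'components', 'my-widget')     // -> MyWidget (PascalCase fallback)
//   resolveAsset(options, 'components', 'missing', true) // warns "Failed to resolve component: missing"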
/* */
function validateProp(key, propOptions, propsData, vm) {
var prop = propOptions[key];
var absent = !hasOwn(propsData, key);
var value = propsData[key];
// handle boolean props
if (isType(Boolean, prop.type)) {
if (absent && !hasOwn(prop, 'default')) {
value = false;
} else if (!isType(String, prop.type) && (value === '' || value === hyphenate(key))) {
value = true;
}
}
// check default value
if (value === undefined) {
value = getPropDefaultValue(vm, prop, key);
// since the default value is a fresh copy,
// make sure to observe it.
var prevShouldConvert = observerState.shouldConvert;
observerState.shouldConvert = true;
observe(value);
observerState.shouldConvert = prevShouldConvert;
}
if (true) {
assertProp(prop, key, value, vm, absent);
}
return value;
}
/**
* Get the default value of a prop.
*/
function getPropDefaultValue(vm, prop, key) {
// no default, return undefined
if (!hasOwn(prop, 'default')) {
return undefined;
}
var def = prop.default;
// warn against non-factory defaults for Object & Array
if ("development" !== 'production' && isObject(def)) {
warn('Invalid default value for prop "' + key + '": ' + 'Props with type Object/Array must use a factory function ' + 'to return the default value.', vm);
}
// the raw prop value was also undefined from previous render,
// return previous default value to avoid unnecessary watcher trigger
if (vm && vm.$options.propsData && vm.$options.propsData[key] === undefined && vm._props[key] !== undefined) {
return vm._props[key];
}
// call factory function for non-Function types
// a value is Function if its prototype is function even across different execution context
return typeof def === 'function' && getType(prop.type) !== 'Function' ? def.call(vm) : def;
}
/**
* Assert whether a prop is valid.
*/
function assertProp(prop, name, value, vm, absent) {
if (prop.required && absent) {
warn('Missing required prop: "' + name + '"', vm);
return;
}
if (value == null && !prop.required) {
return;
}
var type = prop.type;
var valid = !type || type === true;
var expectedTypes = [];
if (type) {
if (!Array.isArray(type)) {
type = [type];
}
for (var i = 0; i < type.length && !valid; i++) {
var assertedType = assertType(value, type[i]);
expectedTypes.push(assertedType.expectedType || '');
valid = assertedType.valid;
}
}
if (!valid) {
warn('Invalid prop: type check failed for prop "' + name + '".' + ' Expected ' + expectedTypes.map(capitalize).join(', ') + ', got ' + Object.prototype.toString.call(value).slice(8, -1) + '.', vm);
return;
}
var validator = prop.validator;
if (validator) {
if (!validator(value)) {
warn('Invalid prop: custom validator check failed for prop "' + name + '".', vm);
}
}
}
var simpleCheckRE = /^(String|Number|Boolean|Function|Symbol)$/;
function assertType(value, type) {
var valid;
var expectedType = getType(type);
if (simpleCheckRE.test(expectedType)) {
valid = (typeof value === 'undefined' ? 'undefined' : _typeof(value)) === expectedType.toLowerCase();
} else if (expectedType === 'Object') {
valid = isPlainObject(value);
} else if (expectedType === 'Array') {
valid = Array.isArray(value);
} else {
valid = value instanceof type;
}
return {
valid: valid,
expectedType: expectedType
};
}
/**
* Use function string name to check built-in types,
* because a simple equality check will fail when running
* across different vms / iframes.
*/
function getType(fn) {
var match = fn && fn.toString().match(/^\s*function (\w+)/);
return match ? match[1] : '';
}
function isType(type, fn) {
if (!Array.isArray(fn)) {
return getType(fn) === getType(type);
}
for (var i = 0, len = fn.length; i < len; i++) {
if (getType(fn[i]) === getType(type)) {
return true;
}
}
/* istanbul ignore next */
return false;
}
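// These name-based checks keep prop type validation working across iframes/realms. Sketch:
//   getType(Number)                     // -> 'Number'
//   isType(Boolean, [String, Boolean])  // -> true
//   assertType('5', Number)             // -> { valid: false, expectedType: 'Number' }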
/* */
var mark;
var measure;
if (true) {
var perf = inBrowser && window.performance;
/* istanbul ignore if */
if (perf && perf.mark && perf.measure && perf.clearMarks && perf.clearMeasures) {
mark = function mark(tag) {
return perf.mark(tag);
};
measure = function measure(name, startTag, endTag) {
perf.measure(name, startTag, endTag);
perf.clearMarks(startTag);
perf.clearMarks(endTag);
perf.clearMeasures(name);
};
}
}
/* not type checking this file because flow doesn't play well with Proxy */
var initProxy;
if (true) {
var allowedGlobals = makeMap('Infinity,undefined,NaN,isFinite,isNaN,' + 'parseFloat,parseInt,decodeURI,decodeURIComponent,encodeURI,encodeURIComponent,' + 'Math,Number,Date,Array,Object,Boolean,String,RegExp,Map,Set,JSON,Intl,' + 'require' // for Webpack/Browserify
);
var warnNonPresent = function warnNonPresent(target, key) {
warn("Property or method \"" + key + "\" is not defined on the instance but " + "referenced during render. Make sure to declare reactive data " + "properties in the data option.", target);
};
var hasProxy = typeof Proxy !== 'undefined' && Proxy.toString().match(/native code/);
if (hasProxy) {
var isBuiltInModifier = makeMap('stop,prevent,self,ctrl,shift,alt,meta');
config.keyCodes = new Proxy(config.keyCodes, {
set: function set(target, key, value) {
if (isBuiltInModifier(key)) {
warn("Avoid overwriting built-in modifier in config.keyCodes: ." + key);
return false;
} else {
target[key] = value;
return true;
}
}
});
}
var hasHandler = {
has: function has(target, key) {
var has = key in target;
var isAllowed = allowedGlobals(key) || key.charAt(0) === '_';
if (!has && !isAllowed) {
warnNonPresent(target, key);
}
return has || !isAllowed;
}
};
var getHandler = {
get: function get(target, key) {
if (typeof key === 'string' && !(key in target)) {
warnNonPresent(target, key);
}
return target[key];
}
};
initProxy = function initProxy(vm) {
if (hasProxy) {
// determine which proxy handler to use
var options = vm.$options;
var handlers = options.render && options.render._withStripped ? getHandler : hasHandler;
vm._renderProxy = new Proxy(vm, handlers);
} else {
vm._renderProxy = vm;
}
};
}
/* */
var VNode = function VNode(tag, data, children, text, elm, context, componentOptions) {
this.tag = tag;
this.data = data;
this.children = children;
this.text = text;
this.elm = elm;
this.ns = undefined;
this.context = context;
this.functionalContext = undefined;
this.key = data && data.key;
this.componentOptions = componentOptions;
this.componentInstance = undefined;
this.parent = undefined;
this.raw = false;
this.isStatic = false;
this.isRootInsert = true;
this.isComment = false;
this.isCloned = false;
this.isOnce = false;
};
var prototypeAccessors = { child: {} };
// DEPRECATED: alias for componentInstance for backwards compat.
/* istanbul ignore next */
prototypeAccessors.child.get = function () {
return this.componentInstance;
};
Object.defineProperties(VNode.prototype, prototypeAccessors);
var createEmptyVNode = function createEmptyVNode() {
var node = new VNode();
node.text = '';
node.isComment = true;
return node;
};
function createTextVNode(val) {
return new VNode(undefined, undefined, undefined, String(val));
}
// optimized shallow clone
// used for static nodes and slot nodes because they may be reused across
// multiple renders, cloning them avoids errors when DOM manipulations rely
// on their elm reference.
function cloneVNode(vnode) {
var cloned = new VNode(vnode.tag, vnode.data, vnode.children, vnode.text, vnode.elm, vnode.context, vnode.componentOptions);
cloned.ns = vnode.ns;
cloned.isStatic = vnode.isStatic;
cloned.key = vnode.key;
cloned.isComment = vnode.isComment;
cloned.isCloned = true;
return cloned;
}
function cloneVNodes(vnodes) {
var len = vnodes.length;
var res = new Array(len);
for (var i = 0; i < len; i++) {
res[i] = cloneVNode(vnodes[i]);
}
return res;
}
/* */
var normalizeEvent = cached(function (name) {
var passive = name.charAt(0) === '&';
name = passive ? name.slice(1) : name;
var once$$1 = name.charAt(0) === '~'; // Prefixed last, checked first
name = once$$1 ? name.slice(1) : name;
var capture = name.charAt(0) === '!';
name = capture ? name.slice(1) : name;
return {
name: name,
once: once$$1,
capture: capture,
passive: passive
};
});
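// The template compiler encodes event modifiers as single-character name prefixes; this
// decodes them ('&' passive, '~' once, '!' capture). Sketch:
//   normalizeEvent('~!click')  // -> { name: 'click', once: true, capture: true, passive: false }
//   normalizeEvent('&scroll')  // -> { name: 'scroll', once: false, capture: false, passive: true }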
function createFnInvoker(fns) {
function invoker() {
var arguments$1 = arguments;
var fns = invoker.fns;
if (Array.isArray(fns)) {
for (var i = 0; i < fns.length; i++) {
fns[i].apply(null, arguments$1);
}
} else {
// return handler return value for single handlers
return fns.apply(null, arguments);
}
}
invoker.fns = fns;
return invoker;
}
function updateListeners(on, oldOn, add, remove$$1, vm) {
var name, cur, old, event;
for (name in on) {
cur = on[name];
old = oldOn[name];
event = normalizeEvent(name);
if (isUndef(cur)) {
"development" !== 'production' && warn("Invalid handler for event \"" + event.name + "\": got " + String(cur), vm);
} else if (isUndef(old)) {
if (isUndef(cur.fns)) {
cur = on[name] = createFnInvoker(cur);
}
add(event.name, cur, event.once, event.capture, event.passive);
} else if (cur !== old) {
old.fns = cur;
on[name] = old;
}
}
for (name in oldOn) {
if (isUndef(on[name])) {
event = normalizeEvent(name);
remove$$1(event.name, oldOn[name], event.capture);
}
}
}
/* */
function mergeVNodeHook(def, hookKey, hook) {
var invoker;
var oldHook = def[hookKey];
function wrappedHook() {
hook.apply(this, arguments);
// important: remove merged hook to ensure it's called only once
// and prevent memory leak
remove(invoker.fns, wrappedHook);
}
if (isUndef(oldHook)) {
// no existing hook
invoker = createFnInvoker([wrappedHook]);
} else {
/* istanbul ignore if */
if (isDef(oldHook.fns) && isTrue(oldHook.merged)) {
// already a merged invoker
invoker = oldHook;
invoker.fns.push(wrappedHook);
} else {
// existing plain hook
invoker = createFnInvoker([oldHook, wrappedHook]);
}
}
invoker.merged = true;
def[hookKey] = invoker;
}
/* */
function extractPropsFromVNodeData(data, Ctor, tag) {
// we are only extracting raw values here.
// validation and default values are handled in the child
// component itself.
var propOptions = Ctor.options.props;
if (isUndef(propOptions)) {
return;
}
var res = {};
var attrs = data.attrs;
var props = data.props;
if (isDef(attrs) || isDef(props)) {
for (var key in propOptions) {
var altKey = hyphenate(key);
if (true) {
var keyInLowerCase = key.toLowerCase();
if (key !== keyInLowerCase && attrs && hasOwn(attrs, keyInLowerCase)) {
tip("Prop \"" + keyInLowerCase + "\" is passed to component " + formatComponentName(tag || Ctor) + ", but the declared prop name is" + " \"" + key + "\". " + "Note that HTML attributes are case-insensitive and camelCased " + "props need to use their kebab-case equivalents when using in-DOM " + "templates. You should probably use \"" + altKey + "\" instead of \"" + key + "\".");
}
}
checkProp(res, props, key, altKey, true) || checkProp(res, attrs, key, altKey, false);
}
}
return res;
}
function checkProp(res, hash, key, altKey, preserve) {
if (isDef(hash)) {
if (hasOwn(hash, key)) {
res[key] = hash[key];
if (!preserve) {
delete hash[key];
}
return true;
} else if (hasOwn(hash, altKey)) {
res[key] = hash[altKey];
if (!preserve) {
delete hash[altKey];
}
return true;
}
}
return false;
}
/* */
// The template compiler attempts to minimize the need for normalization by
// statically analyzing the template at compile time.
//
// For plain HTML markup, normalization can be completely skipped because the
// generated render function is guaranteed to return Array<VNode>. There are
// two cases where extra normalization is needed:
// 1. When the children contains components - because a functional component
// may return an Array instead of a single root. In this case, just a simple
// normalization is needed - if any child is an Array, we flatten the whole
// thing with Array.prototype.concat. It is guaranteed to be only 1-level deep
// because functional components already normalize their own children.
function simpleNormalizeChildren(children) {
for (var i = 0; i < children.length; i++) {
if (Array.isArray(children[i])) {
return Array.prototype.concat.apply([], children);
}
}
return children;
}
// 2. When the children contains constructs that always generate nested Arrays,
// e.g. <template>, <slot>, v-for, or when the children is provided by user
// with hand-written render functions / JSX. In such cases a full normalization
// is needed to cater to all possible types of children values.
function normalizeChildren(children) {
return isPrimitive(children) ? [createTextVNode(children)] : Array.isArray(children) ? normalizeArrayChildren(children) : undefined;
}
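// Full normalization sketch (vnodeA stands for some non-text VNode):
//   normalizeChildren('hi')                 // -> [createTextVNode('hi')]
//   normalizeChildren([vnodeA, ['x', 'y']]) // -> [vnodeA, textVNode('xy')]  (nested arrays
//                                           //    flattened, adjacent text nodes merged)
//   normalizeChildren({})                   // -> undefined (neither primitive nor array)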
function isTextNode(node) {
return isDef(node) && isDef(node.text) && isFalse(node.isComment);
}
function normalizeArrayChildren(children, nestedIndex) {
var res = [];
var i, c, last;
for (i = 0; i < children.length; i++) {
c = children[i];
if (isUndef(c) || typeof c === 'boolean') {
continue;
}
last = res[res.length - 1];
// nested
if (Array.isArray(c)) {
res.push.apply(res, normalizeArrayChildren(c, (nestedIndex || '') + "_" + i));
} else if (isPrimitive(c)) {
if (isTextNode(last)) {
// merge adjacent text nodes
// this is necessary for SSR hydration because text nodes are
// essentially merged when rendered to HTML strings
last.text += String(c);
} else if (c !== '') {
// convert primitive to vnode
res.push(createTextVNode(c));
}
} else {
if (isTextNode(c) && isTextNode(last)) {
// merge adjacent text nodes
res[res.length - 1] = createTextVNode(last.text + c.text);
} else {
// default key for nested array children (likely generated by v-for)
if (isTrue(children._isVList) && isDef(c.tag) && isUndef(c.key) && isDef(nestedIndex)) {
c.key = "__vlist" + nestedIndex + "_" + i + "__";
}
res.push(c);
}
}
}
return res;
}
/* */
function ensureCtor(comp, base) {
return isObject(comp) ? base.extend(comp) : comp;
}
function resolveAsyncComponent(factory, baseCtor, context) {
if (isTrue(factory.error) && isDef(factory.errorComp)) {
return factory.errorComp;
}
if (isDef(factory.resolved)) {
return factory.resolved;
}
if (isTrue(factory.loading) && isDef(factory.loadingComp)) {
return factory.loadingComp;
}
if (isDef(factory.contexts)) {
// already pending
factory.contexts.push(context);
} else {
var contexts = factory.contexts = [context];
var sync = true;
var forceRender = function forceRender() {
for (var i = 0, l = contexts.length; i < l; i++) {
contexts[i].$forceUpdate();
}
};
var resolve = once(function (res) {
// cache resolved
factory.resolved = ensureCtor(res, baseCtor);
// invoke callbacks only if this is not a synchronous resolve
// (async resolves are shimmed as synchronous during SSR)
if (!sync) {
forceRender();
}
});
var reject = once(function (reason) {
"development" !== 'production' && warn("Failed to resolve async component: " + String(factory) + (reason ? "\nReason: " + reason : ''));
if (isDef(factory.errorComp)) {
factory.error = true;
forceRender();
}
});
var res = factory(resolve, reject);
if (isObject(res)) {
if (typeof res.then === 'function') {
// () => Promise
if (isUndef(factory.resolved)) {
res.then(resolve, reject);
}
} else if (isDef(res.component) && typeof res.component.then === 'function') {
res.component.then(resolve, reject);
if (isDef(res.error)) {
factory.errorComp = ensureCtor(res.error, baseCtor);
}
if (isDef(res.loading)) {
factory.loadingComp = ensureCtor(res.loading, baseCtor);
if (res.delay === 0) {
factory.loading = true;
} else {
setTimeout(function () {
if (isUndef(factory.resolved) && isUndef(factory.error)) {
factory.loading = true;
forceRender();
}
}, res.delay || 200);
}
}
if (isDef(res.timeout)) {
setTimeout(function () {
if (isUndef(factory.resolved)) {
reject(true ? "timeout (" + res.timeout + "ms)" : null);
}
}, res.timeout);
}
}
}
sync = false;
// return in case resolved synchronously
return factory.loading ? factory.loadingComp : factory.resolved;
}
}
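// Factory shapes this resolver understands (a sketch; LoadingComp/ErrorComp and the
// import() paths are placeholders):
//   function (resolve, reject) { /* call resolve(Comp) or reject(reason) later */ }
//   function () { return import('./Comp.vue'); }   // () => Promise
//   function () {
//     return { component: import('./Comp.vue'), loading: LoadingComp, error: ErrorComp,
//              delay: 200, timeout: 3000 };         // advanced object form handled above
//   }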
/* */
function getFirstComponentChild(children) {
if (Array.isArray(children)) {
for (var i = 0; i < children.length; i++) {
var c = children[i];
if (isDef(c) && isDef(c.componentOptions)) {
return c;
}
}
}
}
/* */
/* */
function initEvents(vm) {
vm._events = Object.create(null);
vm._hasHookEvent = false;
// init parent attached events
var listeners = vm.$options._parentListeners;
if (listeners) {
updateComponentListeners(vm, listeners);
}
}
var target;
function add(event, fn, once$$1) {
if (once$$1) {
target.$once(event, fn);
} else {
target.$on(event, fn);
}
}
function remove$1(event, fn) {
target.$off(event, fn);
}
function updateComponentListeners(vm, listeners, oldListeners) {
target = vm;
updateListeners(listeners, oldListeners || {}, add, remove$1, vm);
}
function eventsMixin(Vue) {
var hookRE = /^hook:/;
Vue.prototype.$on = function (event, fn) {
var this$1 = this;
var vm = this;
if (Array.isArray(event)) {
for (var i = 0, l = event.length; i < l; i++) {
this$1.$on(event[i], fn);
}
} else {
(vm._events[event] || (vm._events[event] = [])).push(fn);
// optimize hook:event cost by using a boolean flag marked at registration
// instead of a hash lookup
if (hookRE.test(event)) {
vm._hasHookEvent = true;
}
}
return vm;
};
Vue.prototype.$once = function (event, fn) {
var vm = this;
function on() {
vm.$off(event, on);
fn.apply(vm, arguments);
}
on.fn = fn;
vm.$on(event, on);
return vm;
};
Vue.prototype.$off = function (event, fn) {
var this$1 = this;
var vm = this;
// all
if (!arguments.length) {
vm._events = Object.create(null);
return vm;
}
// array of events
if (Array.isArray(event)) {
for (var i$1 = 0, l = event.length; i$1 < l; i$1++) {
this$1.$off(event[i$1], fn);
}
return vm;
}
// specific event
var cbs = vm._events[event];
if (!cbs) {
return vm;
}
if (arguments.length === 1) {
vm._events[event] = null;
return vm;
}
// specific handler
var cb;
var i = cbs.length;
while (i--) {
cb = cbs[i];
if (cb === fn || cb.fn === fn) {
cbs.splice(i, 1);
break;
}
}
return vm;
};
Vue.prototype.$emit = function (event) {
var vm = this;
if (true) {
var lowerCaseEvent = event.toLowerCase();
if (lowerCaseEvent !== event && vm._events[lowerCaseEvent]) {
tip("Event \"" + lowerCaseEvent + "\" is emitted in component " + formatComponentName(vm) + " but the handler is registered for \"" + event + "\". " + "Note that HTML attributes are case-insensitive and you cannot use " + "v-on to listen to camelCase events when using in-DOM templates. " + "You should probably use \"" + hyphenate(event) + "\" instead of \"" + event + "\".");
}
}
var cbs = vm._events[event];
if (cbs) {
cbs = cbs.length > 1 ? toArray(cbs) : cbs;
var args = toArray(arguments, 1);
for (var i = 0, l = cbs.length; i < l; i++) {
cbs[i].apply(vm, args);
}
}
return vm;
};
}
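// Resulting public event API in a nutshell (handler/payload are placeholders):
//   vm.$on('ping', handler);      // also accepts an array of event names
//   vm.$once('ping', handler);    // the wrapper unregisters itself after one call
//   vm.$emit('ping', payload);    // invokes every handler registered for 'ping'
//   vm.$off('ping', handler);     // $off('ping') drops all 'ping' handlers; $off() drops everything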
/* */
/**
* Runtime helper for resolving raw children VNodes into a slot object.
*/
function resolveSlots(children, context) {
var slots = {};
if (!children) {
return slots;
}
var defaultSlot = [];
for (var i = 0, l = children.length; i < l; i++) {
var child = children[i];
// named slots should only be respected if the vnode was rendered in the
// same context.
if ((child.context === context || child.functionalContext === context) && child.data && child.data.slot != null) {
var name = child.data.slot;
var slot = slots[name] || (slots[name] = []);
if (child.tag === 'template') {
slot.push.apply(slot, child.children);
} else {
slot.push(child);
}
} else {
defaultSlot.push(child);
}
}
// ignore whitespace
if (!defaultSlot.every(isWhitespace)) {
slots.default = defaultSlot;
}
return slots;
}
function isWhitespace(node) {
return node.isComment || node.text === ' ';
}
function resolveScopedSlots(fns /* see flow/vnode */, res) {
res = res || {};
for (var i = 0; i < fns.length; i++) {
if (Array.isArray(fns[i])) {
resolveScopedSlots(fns[i], res);
} else {
res[fns[i].key] = fns[i].fn;
}
}
return res;
}
/* */
var activeInstance = null;
function initLifecycle(vm) {
var options = vm.$options;
// locate first non-abstract parent
var parent = options.parent;
if (parent && !options.abstract) {
while (parent.$options.abstract && parent.$parent) {
parent = parent.$parent;
}
parent.$children.push(vm);
}
vm.$parent = parent;
vm.$root = parent ? parent.$root : vm;
vm.$children = [];
vm.$refs = {};
vm._watcher = null;
vm._inactive = null;
vm._directInactive = false;
vm._isMounted = false;
vm._isDestroyed = false;
vm._isBeingDestroyed = false;
}
function lifecycleMixin(Vue) {
Vue.prototype._update = function (vnode, hydrating) {
var vm = this;
if (vm._isMounted) {
callHook(vm, 'beforeUpdate');
}
var prevEl = vm.$el;
var prevVnode = vm._vnode;
var prevActiveInstance = activeInstance;
activeInstance = vm;
vm._vnode = vnode;
// Vue.prototype.__patch__ is injected in entry points
// based on the rendering backend used.
if (!prevVnode) {
// initial render
vm.$el = vm.__patch__(vm.$el, vnode, hydrating, false /* removeOnly */, vm.$options._parentElm, vm.$options._refElm);
} else {
// updates
vm.$el = vm.__patch__(prevVnode, vnode);
}
activeInstance = prevActiveInstance;
// update __vue__ reference
if (prevEl) {
prevEl.__vue__ = null;
}
if (vm.$el) {
vm.$el.__vue__ = vm;
}
// if parent is an HOC, update its $el as well
if (vm.$vnode && vm.$parent && vm.$vnode === vm.$parent._vnode) {
vm.$parent.$el = vm.$el;
}
// updated hook is called by the scheduler to ensure that children are
// updated in a parent's updated hook.
};
Vue.prototype.$forceUpdate = function () {
var vm = this;
if (vm._watcher) {
vm._watcher.update();
}
};
Vue.prototype.$destroy = function () {
var vm = this;
if (vm._isBeingDestroyed) {
return;
}
callHook(vm, 'beforeDestroy');
vm._isBeingDestroyed = true;
// remove self from parent
var parent = vm.$parent;
if (parent && !parent._isBeingDestroyed && !vm.$options.abstract) {
remove(parent.$children, vm);
}
// teardown watchers
if (vm._watcher) {
vm._watcher.teardown();
}
var i = vm._watchers.length;
while (i--) {
vm._watchers[i].teardown();
}
// remove reference from data ob
// frozen object may not have observer.
if (vm._data.__ob__) {
vm._data.__ob__.vmCount--;
}
// call the last hook...
vm._isDestroyed = true;
// invoke destroy hooks on current rendered tree
vm.__patch__(vm._vnode, null);
// fire destroyed hook
callHook(vm, 'destroyed');
// turn off all instance listeners.
vm.$off();
// remove __vue__ reference
if (vm.$el) {
vm.$el.__vue__ = null;
}
// remove reference to DOM nodes (prevents leak)
vm.$options._parentElm = vm.$options._refElm = null;
};
}
function mountComponent(vm, el, hydrating) {
vm.$el = el;
if (!vm.$options.render) {
vm.$options.render = createEmptyVNode;
if (true) {
/* istanbul ignore if */
if (vm.$options.template && vm.$options.template.charAt(0) !== '#' || vm.$options.el || el) {
warn('You are using the runtime-only build of Vue where the template ' + 'compiler is not available. Either pre-compile the templates into ' + 'render functions, or use the compiler-included build.', vm);
} else {
warn('Failed to mount component: template or render function not defined.', vm);
}
}
}
callHook(vm, 'beforeMount');
var updateComponent;
/* istanbul ignore if */
if ("development" !== 'production' && config.performance && mark) {
updateComponent = function updateComponent() {
var name = vm._name;
var id = vm._uid;
var startTag = "vue-perf-start:" + id;
var endTag = "vue-perf-end:" + id;
mark(startTag);
var vnode = vm._render();
mark(endTag);
measure(name + " render", startTag, endTag);
mark(startTag);
vm._update(vnode, hydrating);
mark(endTag);
measure(name + " patch", startTag, endTag);
};
} else {
updateComponent = function updateComponent() {
vm._update(vm._render(), hydrating);
};
}
vm._watcher = new Watcher(vm, updateComponent, noop);
hydrating = false;
// manually mounted instance, call mounted on self
// mounted is called for render-created child components in its inserted hook
if (vm.$vnode == null) {
vm._isMounted = true;
callHook(vm, 'mounted');
}
return vm;
}
function updateChildComponent(vm, propsData, listeners, parentVnode, renderChildren) {
// determine whether component has slot children
// we need to do this before overwriting $options._renderChildren
var hasChildren = !!(renderChildren || // has new static slots
vm.$options._renderChildren || // has old static slots
parentVnode.data.scopedSlots || // has new scoped slots
vm.$scopedSlots !== emptyObject // has old scoped slots
);
vm.$options._parentVnode = parentVnode;
vm.$vnode = parentVnode; // update vm's placeholder node without re-render
if (vm._vnode) {
// update child tree's parent
vm._vnode.parent = parentVnode;
}
vm.$options._renderChildren = renderChildren;
// update props
if (propsData && vm.$options.props) {
observerState.shouldConvert = false;
if (true) {
observerState.isSettingProps = true;
}
var props = vm._props;
var propKeys = vm.$options._propKeys || [];
for (var i = 0; i < propKeys.length; i++) {
var key = propKeys[i];
props[key] = validateProp(key, vm.$options.props, propsData, vm);
}
observerState.shouldConvert = true;
if (true) {
observerState.isSettingProps = false;
}
// keep a copy of raw propsData
vm.$options.propsData = propsData;
}
// update listeners
if (listeners) {
var oldListeners = vm.$options._parentListeners;
vm.$options._parentListeners = listeners;
updateComponentListeners(vm, listeners, oldListeners);
}
// resolve slots + force update if has children
if (hasChildren) {
vm.$slots = resolveSlots(renderChildren, parentVnode.context);
vm.$forceUpdate();
}
}
function isInInactiveTree(vm) {
while (vm && (vm = vm.$parent)) {
if (vm._inactive) {
return true;
}
}
return false;
}
function activateChildComponent(vm, direct) {
if (direct) {
vm._directInactive = false;
if (isInInactiveTree(vm)) {
return;
}
} else if (vm._directInactive) {
return;
}
if (vm._inactive || vm._inactive === null) {
vm._inactive = false;
for (var i = 0; i < vm.$children.length; i++) {
activateChildComponent(vm.$children[i]);
}
callHook(vm, 'activated');
}
}
function deactivateChildComponent(vm, direct) {
if (direct) {
vm._directInactive = true;
if (isInInactiveTree(vm)) {
return;
}
}
if (!vm._inactive) {
vm._inactive = true;
for (var i = 0; i < vm.$children.length; i++) {
deactivateChildComponent(vm.$children[i]);
}
callHook(vm, 'deactivated');
}
}
function callHook(vm, hook) {
var handlers = vm.$options[hook];
if (handlers) {
for (var i = 0, j = handlers.length; i < j; i++) {
try {
handlers[i].call(vm);
} catch (e) {
handleError(e, vm, hook + " hook");
}
}
}
if (vm._hasHookEvent) {
vm.$emit('hook:' + hook);
}
}
/* */
var MAX_UPDATE_COUNT = 100;
var queue = [];
var activatedChildren = [];
var has = {};
var circular = {};
var waiting = false;
var flushing = false;
var index = 0;
/**
* Reset the scheduler's state.
*/
function resetSchedulerState() {
index = queue.length = activatedChildren.length = 0;
has = {};
if (true) {
circular = {};
}
waiting = flushing = false;
}
/**
* Flush both queues and run the watchers.
*/
function flushSchedulerQueue() {
flushing = true;
var watcher, id;
// Sort queue before flush.
// This ensures that:
// 1. Components are updated from parent to child. (because parent is always
// created before the child)
// 2. A component's user watchers are run before its render watcher (because
// user watchers are created before the render watcher)
// 3. If a component is destroyed during a parent component's watcher run,
// its watchers can be skipped.
queue.sort(function (a, b) {
return a.id - b.id;
});
// do not cache length because more watchers might be pushed
// as we run existing watchers
for (index = 0; index < queue.length; index++) {
watcher = queue[index];
id = watcher.id;
has[id] = null;
watcher.run();
// in dev build, check and stop circular updates.
if ("development" !== 'production' && has[id] != null) {
circular[id] = (circular[id] || 0) + 1;
if (circular[id] > MAX_UPDATE_COUNT) {
warn('You may have an infinite update loop ' + (watcher.user ? "in watcher with expression \"" + watcher.expression + "\"" : "in a component render function."), watcher.vm);
break;
}
}
}
// keep copies of post queues before resetting state
var activatedQueue = activatedChildren.slice();
var updatedQueue = queue.slice();
resetSchedulerState();
// call component updated and activated hooks
callActivatedHooks(activatedQueue);
callUpdateHooks(updatedQueue);
// devtool hook
/* istanbul ignore if */
if (devtools && config.devtools) {
devtools.emit('flush');
}
}
function callUpdateHooks(queue) {
var i = queue.length;
while (i--) {
var watcher = queue[i];
var vm = watcher.vm;
if (vm._watcher === watcher && vm._isMounted) {
callHook(vm, 'updated');
}
}
}
/**
* Queue a kept-alive component that was activated during patch.
* The queue will be processed after the entire tree has been patched.
*/
function queueActivatedComponent(vm) {
// setting _inactive to false here so that a render function can
// rely on checking whether it's in an inactive tree (e.g. router-view)
vm._inactive = false;
activatedChildren.push(vm);
}
function callActivatedHooks(queue) {
for (var i = 0; i < queue.length; i++) {
queue[i]._inactive = true;
activateChildComponent(queue[i], true /* direct */);
}
}
/**
* Push a watcher into the watcher queue.
* Jobs with duplicate IDs will be skipped unless it's
* pushed when the queue is being flushed.
*/
function queueWatcher(watcher) {
var id = watcher.id;
if (has[id] == null) {
has[id] = true;
if (!flushing) {
queue.push(watcher);
} else {
// if already flushing, splice the watcher based on its id
// if already past its id, it will be run next immediately.
var i = queue.length - 1;
while (i > index && queue[i].id > watcher.id) {
i--;
}
queue.splice(i + 1, 0, watcher);
}
// queue the flush
if (!waiting) {
waiting = true;
nextTick(flushSchedulerQueue);
}
}
}
/* */
var uid$2 = 0;
/**
* A watcher parses an expression, collects dependencies,
* and fires callback when the expression value changes.
* This is used for both the $watch() api and directives.
*/
var Watcher = function Watcher(vm, expOrFn, cb, options) {
this.vm = vm;
vm._watchers.push(this);
// options
if (options) {
this.deep = !!options.deep;
this.user = !!options.user;
this.lazy = !!options.lazy;
this.sync = !!options.sync;
} else {
this.deep = this.user = this.lazy = this.sync = false;
}
this.cb = cb;
this.id = ++uid$2; // uid for batching
this.active = true;
this.dirty = this.lazy; // for lazy watchers
this.deps = [];
this.newDeps = [];
this.depIds = new _Set();
this.newDepIds = new _Set();
this.expression = true ? expOrFn.toString() : '';
// parse expression for getter
if (typeof expOrFn === 'function') {
this.getter = expOrFn;
} else {
this.getter = parsePath(expOrFn);
if (!this.getter) {
this.getter = function () {};
"development" !== 'production' && warn("Failed watching path: \"" + expOrFn + "\" " + 'Watcher only accepts simple dot-delimited paths. ' + 'For full control, use a function instead.', vm);
}
}
this.value = this.lazy ? undefined : this.get();
};
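// Typical instantiations (a sketch; cb and getter are placeholder functions):
//   new Watcher(vm, 'a.b.c', cb, { user: true, deep: true });  // string path -> parsePath getter
//   new Watcher(vm, updateComponent, noop);                    // render watcher (see mountComponent)
//   new Watcher(vm, getter, noop, { lazy: true });             // computed: skips the eager get()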
/**
* Evaluate the getter, and re-collect dependencies.
*/
Watcher.prototype.get = function get() {
pushTarget(this);
var value;
var vm = this.vm;
if (this.user) {
try {
value = this.getter.call(vm, vm);
} catch (e) {
handleError(e, vm, "getter for watcher \"" + this.expression + "\"");
}
} else {
value = this.getter.call(vm, vm);
}
// "touch" every property so they are all tracked as
// dependencies for deep watching
if (this.deep) {
traverse(value);
}
popTarget();
this.cleanupDeps();
return value;
};
/**
* Add a dependency to this directive.
*/
Watcher.prototype.addDep = function addDep(dep) {
var id = dep.id;
if (!this.newDepIds.has(id)) {
this.newDepIds.add(id);
this.newDeps.push(dep);
if (!this.depIds.has(id)) {
dep.addSub(this);
}
}
};
/**
* Clean up for dependency collection.
*/
Watcher.prototype.cleanupDeps = function cleanupDeps() {
var this$1 = this;
var i = this.deps.length;
while (i--) {
var dep = this$1.deps[i];
if (!this$1.newDepIds.has(dep.id)) {
dep.removeSub(this$1);
}
}
var tmp = this.depIds;
this.depIds = this.newDepIds;
this.newDepIds = tmp;
this.newDepIds.clear();
tmp = this.deps;
this.deps = this.newDeps;
this.newDeps = tmp;
this.newDeps.length = 0;
};
/**
* Subscriber interface.
* Will be called when a dependency changes.
*/
Watcher.prototype.update = function update() {
/* istanbul ignore else */
if (this.lazy) {
this.dirty = true;
} else if (this.sync) {
this.run();
} else {
queueWatcher(this);
}
};
/**
* Scheduler job interface.
* Will be called by the scheduler.
*/
Watcher.prototype.run = function run() {
if (this.active) {
var value = this.get();
if (value !== this.value ||
// Deep watchers and watchers on Object/Arrays should fire even
// when the value is the same, because the value may
// have mutated.
isObject(value) || this.deep) {
// set new value
var oldValue = this.value;
this.value = value;
if (this.user) {
try {
this.cb.call(this.vm, value, oldValue);
} catch (e) {
handleError(e, this.vm, "callback for watcher \"" + this.expression + "\"");
}
} else {
this.cb.call(this.vm, value, oldValue);
}
}
}
};
/**
* Evaluate the value of the watcher.
* This only gets called for lazy watchers.
*/
Watcher.prototype.evaluate = function evaluate() {
this.value = this.get();
this.dirty = false;
};
/**
* Depend on all deps collected by this watcher.
*/
Watcher.prototype.depend = function depend() {
var this$1 = this;
var i = this.deps.length;
while (i--) {
this$1.deps[i].depend();
}
};
/**
* Remove self from all dependencies' subscriber list.
*/
Watcher.prototype.teardown = function teardown() {
var this$1 = this;
if (this.active) {
// remove self from vm's watcher list
// this is a somewhat expensive operation so we skip it
// if the vm is being destroyed.
if (!this.vm._isBeingDestroyed) {
remove(this.vm._watchers, this);
}
var i = this.deps.length;
while (i--) {
this$1.deps[i].removeSub(this$1);
}
this.active = false;
}
};
/**
* Recursively traverse an object to evoke all converted
* getters, so that every nested property inside the object
* is collected as a "deep" dependency.
*/
var seenObjects = new _Set();
function traverse(val) {
seenObjects.clear();
_traverse(val, seenObjects);
}
function _traverse(val, seen) {
var i, keys;
var isA = Array.isArray(val);
if (!isA && !isObject(val) || !Object.isExtensible(val)) {
return;
}
if (val.__ob__) {
var depId = val.__ob__.dep.id;
if (seen.has(depId)) {
return;
}
seen.add(depId);
}
if (isA) {
i = val.length;
while (i--) {
_traverse(val[i], seen);
}
} else {
keys = Object.keys(val);
i = keys.length;
while (i--) {
_traverse(val[keys[i]], seen);
}
}
}
/* */
var sharedPropertyDefinition = {
enumerable: true,
configurable: true,
get: noop,
set: noop
};
function proxy(target, sourceKey, key) {
sharedPropertyDefinition.get = function proxyGetter() {
return this[sourceKey][key];
};
sharedPropertyDefinition.set = function proxySetter(val) {
this[sourceKey][key] = val;
};
Object.defineProperty(target, key, sharedPropertyDefinition);
}
function initState(vm) {
vm._watchers = [];
var opts = vm.$options;
if (opts.props) {
initProps(vm, opts.props);
}
if (opts.methods) {
initMethods(vm, opts.methods);
}
if (opts.data) {
initData(vm);
} else {
observe(vm._data = {}, true /* asRootData */);
}
if (opts.computed) {
initComputed(vm, opts.computed);
}
if (opts.watch) {
initWatch(vm, opts.watch);
}
}
var isReservedProp = {
key: 1,
ref: 1,
slot: 1
};
function initProps(vm, propsOptions) {
var propsData = vm.$options.propsData || {};
var props = vm._props = {};
// cache prop keys so that future props updates can iterate using Array
// instead of dynamic object key enumeration.
var keys = vm.$options._propKeys = [];
var isRoot = !vm.$parent;
// root instance props should be converted
observerState.shouldConvert = isRoot;
var loop = function loop(key) {
keys.push(key);
var value = validateProp(key, propsOptions, propsData, vm);
/* istanbul ignore else */
if (true) {
if (isReservedProp[key] || config.isReservedAttr(key)) {
warn("\"" + key + "\" is a reserved attribute and cannot be used as component prop.", vm);
}
defineReactive$$1(props, key, value, function () {
if (vm.$parent && !observerState.isSettingProps) {
warn("Avoid mutating a prop directly since the value will be " + "overwritten whenever the parent component re-renders. " + "Instead, use a data or computed property based on the prop's " + "value. Prop being mutated: \"" + key + "\"", vm);
}
});
} else {
defineReactive$$1(props, key, value);
}
// static props are already proxied on the component's prototype
// during Vue.extend(). We only need to proxy props defined at
// instantiation here.
if (!(key in vm)) {
proxy(vm, "_props", key);
}
};
for (var key in propsOptions) {
loop(key);
}
observerState.shouldConvert = true;
}
function initData(vm) {
var data = vm.$options.data;
data = vm._data = typeof data === 'function' ? getData(data, vm) : data || {};
if (!isPlainObject(data)) {
data = {};
"development" !== 'production' && warn('data functions should return an object:\n' + 'https://vuejs.org/v2/guide/components.html#data-Must-Be-a-Function', vm);
}
// proxy data on instance
var keys = Object.keys(data);
var props = vm.$options.props;
var i = keys.length;
while (i--) {
if (props && hasOwn(props, keys[i])) {
"development" !== 'production' && warn("The data property \"" + keys[i] + "\" is already declared as a prop. " + "Use prop default value instead.", vm);
} else if (!isReserved(keys[i])) {
proxy(vm, "_data", keys[i]);
}
}
// observe data
observe(data, true /* asRootData */);
}
function getData(data, vm) {
try {
return data.call(vm);
} catch (e) {
handleError(e, vm, "data()");
return {};
}
}
var computedWatcherOptions = { lazy: true };
function initComputed(vm, computed) {
var watchers = vm._computedWatchers = Object.create(null);
for (var key in computed) {
var userDef = computed[key];
var getter = typeof userDef === 'function' ? userDef : userDef.get;
if (true) {
if (getter === undefined) {
warn("No getter function has been defined for computed property \"" + key + "\".", vm);
getter = noop;
}
}
// create internal watcher for the computed property.
watchers[key] = new Watcher(vm, getter, noop, computedWatcherOptions);
// component-defined computed properties are already defined on the
// component prototype. We only need to define computed properties defined
// at instantiation here.
if (!(key in vm)) {
defineComputed(vm, key, userDef);
} else if (true) {
if (key in vm.$data) {
warn("The computed property \"" + key + "\" is already defined in data.", vm);
} else if (vm.$options.props && key in vm.$options.props) {
warn("The computed property \"" + key + "\" is already defined as a prop.", vm);
}
}
}
}
function defineComputed(target, key, userDef) {
if (typeof userDef === 'function') {
sharedPropertyDefinition.get = createComputedGetter(key);
sharedPropertyDefinition.set = noop;
} else {
sharedPropertyDefinition.get = userDef.get ? userDef.cache !== false ? createComputedGetter(key) : userDef.get : noop;
sharedPropertyDefinition.set = userDef.set ? userDef.set : noop;
}
Object.defineProperty(target, key, sharedPropertyDefinition);
}
function createComputedGetter(key) {
return function computedGetter() {
var watcher = this._computedWatchers && this._computedWatchers[key];
if (watcher) {
if (watcher.dirty) {
watcher.evaluate();
}
if (Dep.target) {
watcher.depend();
}
return watcher.value;
}
};
}
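// Computed definitions accepted above (a sketch; getFn/setFn are placeholders):
//   computed: {
//     full: function () { return this.first + ' ' + this.last; },  // getter only, cached via the lazy watcher
//     alias: { get: getFn, set: setFn },                           // getter + setter
//     eager: { get: getFn, cache: false }                          // cache: false reads the getter directly
//   }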
function initMethods(vm, methods) {
var props = vm.$options.props;
for (var key in methods) {
vm[key] = methods[key] == null ? noop : bind(methods[key], vm);
if (true) {
if (methods[key] == null) {
warn("method \"" + key + "\" has an undefined value in the component definition. " + "Did you reference the function correctly?", vm);
}
if (props && hasOwn(props, key)) {
warn("method \"" + key + "\" has already been defined as a prop.", vm);
}
}
}
}
function initWatch(vm, watch) {
for (var key in watch) {
var handler = watch[key];
if (Array.isArray(handler)) {
for (var i = 0; i < handler.length; i++) {
createWatcher(vm, key, handler[i]);
}
} else {
createWatcher(vm, key, handler);
}
}
}
function createWatcher(vm, key, handler) {
var options;
if (isPlainObject(handler)) {
options = handler;
handler = handler.handler;
}
if (typeof handler === 'string') {
handler = vm[handler];
}
vm.$watch(key, handler, options);
}
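// The watch option forms normalized here (a sketch; fn/fn1/fn2 are placeholders):
//   watch: {
//     a: function (val, oldVal) {},        // plain handler
//     b: 'someMethod',                     // string -> method looked up on the vm
//     c: { handler: fn, deep: true },      // object -> extra keys become $watch options
//     d: [fn1, fn2]                        // array -> one watcher per handler
//   }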
function stateMixin(Vue) {
// flow somehow has problems with directly declared definition object
// when using Object.defineProperty, so we have to procedurally build up
// the object here.
var dataDef = {};
dataDef.get = function () {
return this._data;
};
var propsDef = {};
propsDef.get = function () {
return this._props;
};
if (true) {
dataDef.set = function (newData) {
warn('Avoid replacing instance root $data. ' + 'Use nested data properties instead.', this);
};
propsDef.set = function () {
warn("$props is readonly.", this);
};
}
Object.defineProperty(Vue.prototype, '$data', dataDef);
Object.defineProperty(Vue.prototype, '$props', propsDef);
Vue.prototype.$set = set;
Vue.prototype.$delete = del;
Vue.prototype.$watch = function (expOrFn, cb, options) {
var vm = this;
options = options || {};
options.user = true;
var watcher = new Watcher(vm, expOrFn, cb, options);
if (options.immediate) {
cb.call(vm, watcher.value);
}
return function unwatchFn() {
watcher.teardown();
};
};
}
/* */
function initProvide(vm) {
var provide = vm.$options.provide;
if (provide) {
vm._provided = typeof provide === 'function' ? provide.call(vm) : provide;
}
}
function initInjections(vm) {
var result = resolveInject(vm.$options.inject, vm);
if (result) {
Object.keys(result).forEach(function (key) {
/* istanbul ignore else */
if (true) {
defineReactive$$1(vm, key, result[key], function () {
warn("Avoid mutating an injected value directly since the changes will be " + "overwritten whenever the provided component re-renders. " + "injection being mutated: \"" + key + "\"", vm);
});
} else {
defineReactive$$1(vm, key, result[key]);
}
});
}
}
function resolveInject(inject, vm) {
if (inject) {
// inject is :any because flow is not smart enough to figure out cached
// isArray here
var isArray = Array.isArray(inject);
var result = Object.create(null);
var keys = isArray ? inject : hasSymbol ? Reflect.ownKeys(inject) : Object.keys(inject);
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
var provideKey = isArray ? key : inject[key];
var source = vm;
while (source) {
if (source._provided && provideKey in source._provided) {
result[key] = source._provided[provideKey];
break;
}
source = source.$parent;
}
}
return result;
}
}
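// provide/inject resolution sketch: walk up the $parent chain until a _provided hit is found.
//   // ancestor:   provide: function () { return { theme: 'dark' }; }
//   // descendant: inject: ['theme']          -> this.theme === 'dark'
//   //             inject: { mode: 'theme' }  -> this.mode === 'dark' (local alias for a provided key)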
/* */
function createFunctionalComponent(Ctor, propsData, data, context, children) {
var props = {};
var propOptions = Ctor.options.props;
if (isDef(propOptions)) {
for (var key in propOptions) {
props[key] = validateProp(key, propOptions, propsData || {});
}
} else {
if (isDef(data.attrs)) {
mergeProps(props, data.attrs);
}
if (isDef(data.props)) {
mergeProps(props, data.props);
}
}
// ensure the createElement function in functional components
// gets a unique context - this is necessary for correct named slot check
var _context = Object.create(context);
var h = function h(a, b, c, d) {
return createElement(_context, a, b, c, d, true);
};
var vnode = Ctor.options.render.call(null, h, {
data: data,
props: props,
children: children,
parent: context,
listeners: data.on || {},
injections: resolveInject(Ctor.options.inject, context),
slots: function slots() {
return resolveSlots(children, context);
}
});
if (vnode instanceof VNode) {
vnode.functionalContext = context;
vnode.functionalOptions = Ctor.options;
if (data.slot) {
(vnode.data || (vnode.data = {})).slot = data.slot;
}
}
return vnode;
}
function mergeProps(to, from) {
for (var key in from) {
to[camelize(key)] = from[key];
}
}
/* */
// hooks to be invoked on component VNodes during patch
var componentVNodeHooks = {
init: function init(vnode, hydrating, parentElm, refElm) {
if (!vnode.componentInstance || vnode.componentInstance._isDestroyed) {
var child = vnode.componentInstance = createComponentInstanceForVnode(vnode, activeInstance, parentElm, refElm);
child.$mount(hydrating ? vnode.elm : undefined, hydrating);
} else if (vnode.data.keepAlive) {
// kept-alive components, treat as a patch
var mountedNode = vnode; // work around flow
componentVNodeHooks.prepatch(mountedNode, mountedNode);
}
},
prepatch: function prepatch(oldVnode, vnode) {
var options = vnode.componentOptions;
var child = vnode.componentInstance = oldVnode.componentInstance;
updateChildComponent(child, options.propsData, // updated props
options.listeners, // updated listeners
vnode, // new parent vnode
options.children // new children
);
},
insert: function insert(vnode) {
var context = vnode.context;
var componentInstance = vnode.componentInstance;
if (!componentInstance._isMounted) {
componentInstance._isMounted = true;
callHook(componentInstance, 'mounted');
}
if (vnode.data.keepAlive) {
if (context._isMounted) {
// vue-router#1212
// During updates, a kept-alive component's child components may
// change, so directly walking the tree here may call activated hooks
// on incorrect children. Instead we push them into a queue which will
// be processed after the whole patch process ended.
queueActivatedComponent(componentInstance);
} else {
activateChildComponent(componentInstance, true /* direct */);
}
}
},
destroy: function destroy(vnode) {
var componentInstance = vnode.componentInstance;
if (!componentInstance._isDestroyed) {
if (!vnode.data.keepAlive) {
componentInstance.$destroy();
} else {
deactivateChildComponent(componentInstance, true /* direct */);
}
}
}
};
var hooksToMerge = Object.keys(componentVNodeHooks);
function createComponent(Ctor, data, context, children, tag) {
if (isUndef(Ctor)) {
return;
}
var baseCtor = context.$options._base;
// plain options object: turn it into a constructor
if (isObject(Ctor)) {
Ctor = baseCtor.extend(Ctor);
}
// if at this stage it's not a constructor or an async component factory,
// reject.
if (typeof Ctor !== 'function') {
if (true) {
warn("Invalid Component definition: " + String(Ctor), context);
}
return;
}
// async component
if (isUndef(Ctor.cid)) {
Ctor = resolveAsyncComponent(Ctor, baseCtor, context);
if (Ctor === undefined) {
// return nothing if this is indeed an async component
// wait for the callback to trigger parent update.
return;
}
}
// resolve constructor options in case global mixins are applied after
// component constructor creation
resolveConstructorOptions(Ctor);
data = data || {};
// transform component v-model data into props & events
if (isDef(data.model)) {
transformModel(Ctor.options, data);
}
// extract props
var propsData = extractPropsFromVNodeData(data, Ctor, tag);
// functional component
if (isTrue(Ctor.options.functional)) {
return createFunctionalComponent(Ctor, propsData, data, context, children);
}
// extract listeners, since these needs to be treated as
// child component listeners instead of DOM listeners
var listeners = data.on;
// replace with listeners with .native modifier
data.on = data.nativeOn;
if (isTrue(Ctor.options.abstract)) {
// abstract components do not keep anything
// other than props & listeners
data = {};
}
// merge component management hooks onto the placeholder node
mergeHooks(data);
// return a placeholder vnode
var name = Ctor.options.name || tag;
var vnode = new VNode("vue-component-" + Ctor.cid + (name ? "-" + name : ''), data, undefined, undefined, undefined, context, { Ctor: Ctor, propsData: propsData, listeners: listeners, tag: tag, children: children });
return vnode;
}
function createComponentInstanceForVnode(vnode, // we know it's MountedComponentVNode but flow doesn't
parent, // activeInstance in lifecycle state
parentElm, refElm) {
var vnodeComponentOptions = vnode.componentOptions;
var options = {
_isComponent: true,
parent: parent,
propsData: vnodeComponentOptions.propsData,
_componentTag: vnodeComponentOptions.tag,
_parentVnode: vnode,
_parentListeners: vnodeComponentOptions.listeners,
_renderChildren: vnodeComponentOptions.children,
_parentElm: parentElm || null,
_refElm: refElm || null
};
// check inline-template render functions
var inlineTemplate = vnode.data.inlineTemplate;
if (isDef(inlineTemplate)) {
options.render = inlineTemplate.render;
options.staticRenderFns = inlineTemplate.staticRenderFns;
}
return new vnodeComponentOptions.Ctor(options);
}
function mergeHooks(data) {
if (!data.hook) {
data.hook = {};
}
for (var i = 0; i < hooksToMerge.length; i++) {
var key = hooksToMerge[i];
var fromParent = data.hook[key];
var ours = componentVNodeHooks[key];
data.hook[key] = fromParent ? mergeHook$1(ours, fromParent) : ours;
}
}
function mergeHook$1(one, two) {
return function (a, b, c, d) {
one(a, b, c, d);
two(a, b, c, d);
};
}
// transform component v-model info (value and callback) into
// prop and event handler respectively.
function transformModel(options, data) {
var prop = options.model && options.model.prop || 'value';
var event = options.model && options.model.event || 'input';
(data.props || (data.props = {}))[prop] = data.model.value;
var on = data.on || (data.on = {});
if (isDef(on[event])) {
on[event] = [data.model.callback].concat(on[event]);
} else {
on[event] = data.model.callback;
}
}
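// v-model on a component becomes a prop + event pair here (sketch):
//   <my-input v-model="msg"/>
//   // default:                                props: { value: msg },    on: { input: callback }
//   // with model: { prop: 'checked', event: 'change' }:
//   //                                         props: { checked: msg },  on: { change: callback }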
/* */
var SIMPLE_NORMALIZE = 1;
var ALWAYS_NORMALIZE = 2;
// wrapper function for providing a more flexible interface
// without getting yelled at by flow
function createElement(context, tag, data, children, normalizationType, alwaysNormalize) {
if (Array.isArray(data) || isPrimitive(data)) {
normalizationType = children;
children = data;
data = undefined;
}
if (isTrue(alwaysNormalize)) {
normalizationType = ALWAYS_NORMALIZE;
}
return _createElement(context, tag, data, children, normalizationType);
}
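// The argument shuffle lets render code omit the data object (h stands for the bound
// createElement passed to render functions; `child` is a placeholder VNode):
//   h('div', { attrs: { id: 'box' } }, [child])  // full form
//   h('div', [child])                            // children passed where data would go
//   h('div', 'plain text')                       // primitive children work too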
function _createElement(context, tag, data, children, normalizationType) {
if (isDef(data) && isDef(data.__ob__)) {
"development" !== 'production' && warn("Avoid using observed data object as vnode data: " + JSON.stringify(data) + "\n" + 'Always create fresh vnode data objects in each render!', context);
return createEmptyVNode();
}
if (!tag) {
// in case of component :is set to falsy value
return createEmptyVNode();
}
// support single function children as default scoped slot
if (Array.isArray(children) && typeof children[0] === 'function') {
data = data || {};
data.scopedSlots = { default: children[0] };
children.length = 0;
}
if (normalizationType === ALWAYS_NORMALIZE) {
children = normalizeChildren(children);
} else if (normalizationType === SIMPLE_NORMALIZE) {
children = simpleNormalizeChildren(children);
}
var vnode, ns;
if (typeof tag === 'string') {
var Ctor;
ns = config.getTagNamespace(tag);
if (config.isReservedTag(tag)) {
// platform built-in elements
vnode = new VNode(config.parsePlatformTagName(tag), data, children, undefined, undefined, context);
} else if (isDef(Ctor = resolveAsset(context.$options, 'components', tag))) {
// component
vnode = createComponent(Ctor, data, context, children, tag);
} else {
// unknown or unlisted namespaced elements
// check at runtime because it may get assigned a namespace when its
// parent normalizes children
vnode = new VNode(tag, data, children, undefined, undefined, context);
}
} else {
// direct component options / constructor
vnode = createComponent(tag, data, context, children);
}
if (isDef(vnode)) {
if (ns) {
applyNS(vnode, ns);
}
return vnode;
} else {
return createEmptyVNode();
}
}
function applyNS(vnode, ns) {
vnode.ns = ns;
if (vnode.tag === 'foreignObject') {
// use default namespace inside foreignObject
return;
}
if (isDef(vnode.children)) {
for (var i = 0, l = vnode.children.length; i < l; i++) {
var child = vnode.children[i];
if (isDef(child.tag) && isUndef(child.ns)) {
applyNS(child, ns);
}
}
}
}
/* */
/**
* Runtime helper for rendering v-for lists.
*/
function renderList(val, render) {
var ret, i, l, keys, key;
if (Array.isArray(val) || typeof val === 'string') {
ret = new Array(val.length);
for (i = 0, l = val.length; i < l; i++) {
ret[i] = render(val[i], i);
}
} else if (typeof val === 'number') {
ret = new Array(val);
for (i = 0; i < val; i++) {
ret[i] = render(i + 1, i);
}
} else if (isObject(val)) {
keys = Object.keys(val);
ret = new Array(keys.length);
for (i = 0, l = keys.length; i < l; i++) {
key = keys[i];
ret[i] = render(val[key], key, i);
}
}
if (isDef(ret)) {
ret._isVList = true;
}
return ret;
}
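// Illustrative sketch (editorial): renderList backs v-for and accepts an array,
// string, number or plain object; assuming `h` stands for a createElement
// function, e.g.
//   renderList([10, 20], function (v, i) { return h('li', v + '@' + i); })
//   renderList(3, function (n) { return h('li', n); })       // renders 1, 2, 3
//   renderList({ a: 1 }, function (v, key, i) { return h('li', key); })
// The result is tagged with _isVList for later child normalization.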
/* */
/**
* Runtime helper for rendering <slot>
*/
function renderSlot(name, fallback, props, bindObject) {
var scopedSlotFn = this.$scopedSlots[name];
if (scopedSlotFn) {
// scoped slot
props = props || {};
if (bindObject) {
extend(props, bindObject);
}
return scopedSlotFn(props) || fallback;
} else {
var slotNodes = this.$slots[name];
// warn duplicate slot usage
if (slotNodes && "development" !== 'production') {
slotNodes._rendered && warn("Duplicate presence of slot \"" + name + "\" found in the same render tree " + "- this will likely cause render errors.", this);
slotNodes._rendered = true;
}
return slotNodes || fallback;
}
}
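// Editorial note: this is the runtime behind <slot>. A scoped slot invokes the
// parent-provided function with the merged props (v-bind object included),
// while a regular slot returns the vnodes resolved into $slots; in both cases
// the in-template fallback content is used when nothing was passed in.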
/* */
/**
* Runtime helper for resolving filters
*/
function resolveFilter(id) {
return resolveAsset(this.$options, 'filters', id, true) || identity;
}
/* */
/**
* Runtime helper for checking keyCodes from config.
*/
function checkKeyCodes(eventKeyCode, key, builtInAlias) {
var keyCodes = config.keyCodes[key] || builtInAlias;
if (Array.isArray(keyCodes)) {
return keyCodes.indexOf(eventKeyCode) === -1;
} else {
return keyCodes !== eventKeyCode;
}
}
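// Illustrative sketch (editorial): compiled keydown handlers call this roughly as
//   if (_k($event.keyCode, 'enter', 13)) return;   // bail unless Enter
// i.e. it returns true when the event should be ignored. Custom aliases come
// from config.keyCodes, e.g. Vue.config.keyCodes.f1 = 112.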
/* */
/**
* Runtime helper for merging v-bind="object" into a VNode's data.
*/
function bindObjectProps(data, tag, value, asProp) {
if (value) {
if (!isObject(value)) {
"development" !== 'production' && warn('v-bind without argument expects an Object or Array value', this);
} else {
if (Array.isArray(value)) {
value = toObject(value);
}
var hash;
for (var key in value) {
if (key === 'class' || key === 'style') {
hash = data;
} else {
var type = data.attrs && data.attrs.type;
hash = asProp || config.mustUseProp(tag, type, key) ? data.domProps || (data.domProps = {}) : data.attrs || (data.attrs = {});
}
if (!(key in hash)) {
hash[key] = value[key];
}
}
}
}
return data;
}
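// Illustrative sketch (editorial): this backs v-bind="object". For
// <input v-bind="{ value: msg, id: 'x' }">, `value` is routed into
// data.domProps (per config.mustUseProp) while `id` lands in data.attrs; keys
// the template already binds explicitly are left untouched.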
/* */
/**
* Runtime helper for rendering static trees.
*/
function renderStatic(index, isInFor) {
var tree = this._staticTrees[index];
// if has already-rendered static tree and not inside v-for,
// we can reuse the same tree by doing a shallow clone.
if (tree && !isInFor) {
return Array.isArray(tree) ? cloneVNodes(tree) : cloneVNode(tree);
}
// otherwise, render a fresh tree.
tree = this._staticTrees[index] = this.$options.staticRenderFns[index].call(this._renderProxy);
markStatic(tree, "__static__" + index, false);
return tree;
}
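// Editorial note: _m(index) caches each rendered static subtree on the
// instance; outside v-for the cached vnodes are shallow-cloned and reused, and
// the isStatic/key flags set below let the patch algorithm skip re-diffing them.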
/**
* Runtime helper for v-once.
* Effectively it means marking the node as static with a unique key.
*/
function markOnce(tree, index, key) {
markStatic(tree, "__once__" + index + (key ? "_" + key : ""), true);
return tree;
}
function markStatic(tree, key, isOnce) {
if (Array.isArray(tree)) {
for (var i = 0; i < tree.length; i++) {
if (tree[i] && typeof tree[i] !== 'string') {
markStaticNode(tree[i], key + "_" + i, isOnce);
}
}
} else {
markStaticNode(tree, key, isOnce);
}
}
function markStaticNode(node, key, isOnce) {
node.isStatic = true;
node.key = key;
node.isOnce = isOnce;
}
/* */
function initRender(vm) {
vm._vnode = null; // the root of the child tree
vm._staticTrees = null;
var parentVnode = vm.$vnode = vm.$options._parentVnode; // the placeholder node in parent tree
var renderContext = parentVnode && parentVnode.context;
vm.$slots = resolveSlots(vm.$options._renderChildren, renderContext);
vm.$scopedSlots = emptyObject;
// bind the createElement fn to this instance
// so that we get proper render context inside it.
// args order: tag, data, children, normalizationType, alwaysNormalize
// internal version is used by render functions compiled from templates
vm._c = function (a, b, c, d) {
return createElement(vm, a, b, c, d, false);
};
// normalization is always applied for the public version, used in
// user-written render functions.
vm.$createElement = function (a, b, c, d) {
return createElement(vm, a, b, c, d, true);
};
}
function renderMixin(Vue) {
Vue.prototype.$nextTick = function (fn) {
return nextTick(fn, this);
};
Vue.prototype._render = function () {
var vm = this;
var ref = vm.$options;
var render = ref.render;
var staticRenderFns = ref.staticRenderFns;
var _parentVnode = ref._parentVnode;
if (vm._isMounted) {
// clone slot nodes on re-renders
for (var key in vm.$slots) {
vm.$slots[key] = cloneVNodes(vm.$slots[key]);
}
}
vm.$scopedSlots = _parentVnode && _parentVnode.data.scopedSlots || emptyObject;
if (staticRenderFns && !vm._staticTrees) {
vm._staticTrees = [];
}
// set parent vnode. this allows render functions to have access
// to the data on the placeholder node.
vm.$vnode = _parentVnode;
// render self
var vnode;
try {
vnode = render.call(vm._renderProxy, vm.$createElement);
} catch (e) {
handleError(e, vm, "render function");
// return error render result,
// or previous vnode to prevent render error causing blank component
/* istanbul ignore else */
if (true) {
vnode = vm.$options.renderError ? vm.$options.renderError.call(vm._renderProxy, vm.$createElement, e) : vm._vnode;
} else {
vnode = vm._vnode;
}
}
// return empty vnode in case the render function errored out
if (!(vnode instanceof VNode)) {
if ("development" !== 'production' && Array.isArray(vnode)) {
warn('Multiple root nodes returned from render function. Render function ' + 'should return a single root node.', vm);
}
vnode = createEmptyVNode();
}
// set parent
vnode.parent = _parentVnode;
return vnode;
};
// internal render helpers.
// these are exposed on the instance prototype to reduce generated render
// code size.
Vue.prototype._o = markOnce;
Vue.prototype._n = toNumber;
Vue.prototype._s = toString;
Vue.prototype._l = renderList;
Vue.prototype._t = renderSlot;
Vue.prototype._q = looseEqual;
Vue.prototype._i = looseIndexOf;
Vue.prototype._m = renderStatic;
Vue.prototype._f = resolveFilter;
Vue.prototype._k = checkKeyCodes;
Vue.prototype._b = bindObjectProps;
Vue.prototype._v = createTextVNode;
Vue.prototype._e = createEmptyVNode;
Vue.prototype._u = resolveScopedSlots;
}
/* */
var uid$1 = 0;
function initMixin(Vue) {
Vue.prototype._init = function (options) {
var vm = this;
// a uid
vm._uid = uid$1++;
var startTag, endTag;
/* istanbul ignore if */
if ("development" !== 'production' && config.performance && mark) {
startTag = "vue-perf-init:" + vm._uid;
endTag = "vue-perf-end:" + vm._uid;
mark(startTag);
}
// a flag to avoid this being observed
vm._isVue = true;
// merge options
if (options && options._isComponent) {
// optimize internal component instantiation
// since dynamic options merging is pretty slow, and none of the
// internal component options needs special treatment.
initInternalComponent(vm, options);
} else {
vm.$options = mergeOptions(resolveConstructorOptions(vm.constructor), options || {}, vm);
}
/* istanbul ignore else */
if (true) {
initProxy(vm);
} else {
vm._renderProxy = vm;
}
// expose real self
vm._self = vm;
initLifecycle(vm);
initEvents(vm);
initRender(vm);
callHook(vm, 'beforeCreate');
initInjections(vm); // resolve injections before data/props
initState(vm);
initProvide(vm); // resolve provide after data/props
callHook(vm, 'created');
/* istanbul ignore if */
if ("development" !== 'production' && config.performance && mark) {
vm._name = formatComponentName(vm, false);
mark(endTag);
measure(vm._name + " init", startTag, endTag);
}
if (vm.$options.el) {
vm.$mount(vm.$options.el);
}
};
}
function initInternalComponent(vm, options) {
var opts = vm.$options = Object.create(vm.constructor.options);
// doing this because it's faster than dynamic enumeration.
opts.parent = options.parent;
opts.propsData = options.propsData;
opts._parentVnode = options._parentVnode;
opts._parentListeners = options._parentListeners;
opts._renderChildren = options._renderChildren;
opts._componentTag = options._componentTag;
opts._parentElm = options._parentElm;
opts._refElm = options._refElm;
if (options.render) {
opts.render = options.render;
opts.staticRenderFns = options.staticRenderFns;
}
}
function resolveConstructorOptions(Ctor) {
var options = Ctor.options;
if (Ctor.super) {
var superOptions = resolveConstructorOptions(Ctor.super);
var cachedSuperOptions = Ctor.superOptions;
if (superOptions !== cachedSuperOptions) {
// super option changed,
// need to resolve new options.
Ctor.superOptions = superOptions;
// check if there are any late-modified/attached options (#4976)
var modifiedOptions = resolveModifiedOptions(Ctor);
// update base extend options
if (modifiedOptions) {
extend(Ctor.extendOptions, modifiedOptions);
}
options = Ctor.options = mergeOptions(superOptions, Ctor.extendOptions);
if (options.name) {
options.components[options.name] = Ctor;
}
}
}
return options;
}
function resolveModifiedOptions(Ctor) {
var modified;
var latest = Ctor.options;
var extended = Ctor.extendOptions;
var sealed = Ctor.sealedOptions;
for (var key in latest) {
if (latest[key] !== sealed[key]) {
if (!modified) {
modified = {};
}
modified[key] = dedupe(latest[key], extended[key], sealed[key]);
}
}
return modified;
}
function dedupe(latest, extended, sealed) {
// compare latest and sealed to ensure lifecycle hooks won't be duplicated
// between merges
if (Array.isArray(latest)) {
var res = [];
sealed = Array.isArray(sealed) ? sealed : [sealed];
extended = Array.isArray(extended) ? extended : [extended];
for (var i = 0; i < latest.length; i++) {
// push original options and not sealed options to exclude duplicated options
if (extended.indexOf(latest[i]) >= 0 || sealed.indexOf(latest[i]) < 0) {
res.push(latest[i]);
}
}
return res;
} else {
return latest;
}
}
function Vue$3(options) {
if ("development" !== 'production' && !(this instanceof Vue$3)) {
warn('Vue is a constructor and should be called with the `new` keyword');
}
this._init(options);
}
initMixin(Vue$3);
stateMixin(Vue$3);
eventsMixin(Vue$3);
lifecycleMixin(Vue$3);
renderMixin(Vue$3);
/* */
function initUse(Vue) {
Vue.use = function (plugin) {
/* istanbul ignore if */
if (plugin.installed) {
return this;
}
// additional parameters
var args = toArray(arguments, 1);
args.unshift(this);
if (typeof plugin.install === 'function') {
plugin.install.apply(plugin, args);
} else if (typeof plugin === 'function') {
plugin.apply(null, args);
}
plugin.installed = true;
return this;
};
}
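// Usage sketch (editorial; MyPlugin is a hypothetical name):
//   var MyPlugin = { install: function (Vue, options) { /* add mixins, directives... */ } };
//   Vue.use(MyPlugin, { someOption: true });   // calls install(Vue, { someOption: true })
// A second Vue.use(MyPlugin) is a no-op because of the `installed` flag.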
/* */
function initMixin$1(Vue) {
Vue.mixin = function (mixin) {
this.options = mergeOptions(this.options, mixin);
return this;
};
}
/* */
function initExtend(Vue) {
/**
* Each instance constructor, including Vue, has a unique
* cid. This enables us to create wrapped "child
* constructors" for prototypal inheritance and cache them.
*/
Vue.cid = 0;
var cid = 1;
/**
* Class inheritance
*/
Vue.extend = function (extendOptions) {
extendOptions = extendOptions || {};
var Super = this;
var SuperId = Super.cid;
var cachedCtors = extendOptions._Ctor || (extendOptions._Ctor = {});
if (cachedCtors[SuperId]) {
return cachedCtors[SuperId];
}
var name = extendOptions.name || Super.options.name;
if (true) {
if (!/^[a-zA-Z][\w-]*$/.test(name)) {
warn('Invalid component name: "' + name + '". Component names ' + 'can only contain alphanumeric characters and the hyphen, ' + 'and must start with a letter.');
}
}
var Sub = function VueComponent(options) {
this._init(options);
};
Sub.prototype = Object.create(Super.prototype);
Sub.prototype.constructor = Sub;
Sub.cid = cid++;
Sub.options = mergeOptions(Super.options, extendOptions);
Sub['super'] = Super;
// For props and computed properties, we define the proxy getters on
// the Vue instances at extension time, on the extended prototype. This
// avoids Object.defineProperty calls for each instance created.
if (Sub.options.props) {
initProps$1(Sub);
}
if (Sub.options.computed) {
initComputed$1(Sub);
}
// allow further extension/mixin/plugin usage
Sub.extend = Super.extend;
Sub.mixin = Super.mixin;
Sub.use = Super.use;
// create asset registers, so extended classes
// can have their private assets too.
ASSET_TYPES.forEach(function (type) {
Sub[type] = Super[type];
});
// enable recursive self-lookup
if (name) {
Sub.options.components[name] = Sub;
}
// keep a reference to the super options at extension time.
// later at instantiation we can check if Super's options have
// been updated.
Sub.superOptions = Super.options;
Sub.extendOptions = extendOptions;
Sub.sealedOptions = extend({}, Sub.options);
// cache constructor
cachedCtors[SuperId] = Sub;
return Sub;
};
}
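// Usage sketch (editorial; names are hypothetical):
//   var Profile = Vue.extend({ name: 'profile', data: function () { return { msg: 'hi' }; } });
//   var vm = new Profile();
// Sub shares Vue's prototype chain, is cached per super cid on
// extendOptions._Ctor, and seals a copy of its merged options so that later
// global mixins can be detected by resolveConstructorOptions.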
function initProps$1(Comp) {
var props = Comp.options.props;
for (var key in props) {
proxy(Comp.prototype, "_props", key);
}
}
function initComputed$1(Comp) {
var computed = Comp.options.computed;
for (var key in computed) {
defineComputed(Comp.prototype, key, computed[key]);
}
}
/* */
function initAssetRegisters(Vue) {
/**
* Create asset registration methods.
*/
ASSET_TYPES.forEach(function (type) {
Vue[type] = function (id, definition) {
if (!definition) {
return this.options[type + 's'][id];
} else {
/* istanbul ignore if */
if (true) {
if (type === 'component' && config.isReservedTag(id)) {
warn('Do not use built-in or reserved HTML elements as component ' + 'id: ' + id);
}
}
if (type === 'component' && isPlainObject(definition)) {
definition.name = definition.name || id;
definition = this.options._base.extend(definition);
}
if (type === 'directive' && typeof definition === 'function') {
definition = { bind: definition, update: definition };
}
this.options[type + 's'][id] = definition;
return definition;
}
};
});
}
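// Usage sketch (editorial): the three asset registers share this code path:
//   Vue.component('my-button', { template: '<button><slot/></button>' }); // object goes through _base.extend
//   Vue.directive('focus', function (el) { el.focus(); });                // fn becomes { bind: fn, update: fn }
//   Vue.filter('upper', function (v) { return String(v).toUpperCase(); });
// Calling a register with only an id returns the previously registered asset.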
/* */
var patternTypes = [String, RegExp];
function getComponentName(opts) {
return opts && (opts.Ctor.options.name || opts.tag);
}
function matches(pattern, name) {
if (typeof pattern === 'string') {
return pattern.split(',').indexOf(name) > -1;
} else if (isRegExp(pattern)) {
return pattern.test(name);
}
/* istanbul ignore next */
return false;
}
function pruneCache(cache, current, filter) {
for (var key in cache) {
var cachedNode = cache[key];
if (cachedNode) {
var name = getComponentName(cachedNode.componentOptions);
if (name && !filter(name)) {
if (cachedNode !== current) {
pruneCacheEntry(cachedNode);
}
cache[key] = null;
}
}
}
}
function pruneCacheEntry(vnode) {
if (vnode) {
vnode.componentInstance.$destroy();
}
}
var KeepAlive = {
name: 'keep-alive',
abstract: true,
props: {
include: patternTypes,
exclude: patternTypes
},
created: function created() {
this.cache = Object.create(null);
},
destroyed: function destroyed() {
var this$1 = this;
for (var key in this$1.cache) {
pruneCacheEntry(this$1.cache[key]);
}
},
watch: {
include: function include(val) {
pruneCache(this.cache, this._vnode, function (name) {
return matches(val, name);
});
},
exclude: function exclude(val) {
pruneCache(this.cache, this._vnode, function (name) {
return !matches(val, name);
});
}
},
render: function render() {
var vnode = getFirstComponentChild(this.$slots.default);
var componentOptions = vnode && vnode.componentOptions;
if (componentOptions) {
// check pattern
var name = getComponentName(componentOptions);
if (name && (this.include && !matches(this.include, name) || this.exclude && matches(this.exclude, name))) {
return vnode;
}
var key = vnode.key == null
// same constructor may get registered as different local components
// so cid alone is not enough (#3269)
? componentOptions.Ctor.cid + (componentOptions.tag ? "::" + componentOptions.tag : '') : vnode.key;
if (this.cache[key]) {
vnode.componentInstance = this.cache[key].componentInstance;
} else {
this.cache[key] = vnode;
}
vnode.data.keepAlive = true;
}
return vnode;
}
};
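// Editorial note: <keep-alive> caches matching component vnodes under
// `Ctor.cid::tag` (or the vnode key) and re-attaches the cached
// componentInstance on later renders instead of creating a fresh one; the
// include/exclude watchers prune entries whose component name no longer matches.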
var builtInComponents = {
KeepAlive: KeepAlive
};
/* */
function initGlobalAPI(Vue) {
// config
var configDef = {};
configDef.get = function () {
return config;
};
if (true) {
configDef.set = function () {
warn('Do not replace the Vue.config object, set individual fields instead.');
};
}
Object.defineProperty(Vue, 'config', configDef);
// exposed util methods.
// NOTE: these are not considered part of the public API - avoid relying on
// them unless you are aware of the risk.
Vue.util = {
warn: warn,
extend: extend,
mergeOptions: mergeOptions,
defineReactive: defineReactive$$1
};
Vue.set = set;
Vue.delete = del;
Vue.nextTick = nextTick;
Vue.options = Object.create(null);
ASSET_TYPES.forEach(function (type) {
Vue.options[type + 's'] = Object.create(null);
});
// this is used to identify the "base" constructor to extend all plain-object
// components with in Weex's multi-instance scenarios.
Vue.options._base = Vue;
extend(Vue.options.components, builtInComponents);
initUse(Vue);
initMixin$1(Vue);
initExtend(Vue);
initAssetRegisters(Vue);
}
initGlobalAPI(Vue$3);
Object.defineProperty(Vue$3.prototype, '$isServer', {
get: isServerRendering
});
Object.defineProperty(Vue$3.prototype, '$ssrContext', {
get: function get() {
/* istanbul ignore next */
return this.$vnode.ssrContext;
}
});
Vue$3.version = '2.3.4';
/* */
// these are reserved for web because they are directly compiled away
// during template compilation
var isReservedAttr = makeMap('style,class');
// attributes that should be using props for binding
var acceptValue = makeMap('input,textarea,option,select');
var mustUseProp = function mustUseProp(tag, type, attr) {
return attr === 'value' && acceptValue(tag) && type !== 'button' || attr === 'selected' && tag === 'option' || attr === 'checked' && tag === 'input' || attr === 'muted' && tag === 'video';
};
var isEnumeratedAttr = makeMap('contenteditable,draggable,spellcheck');
var isBooleanAttr = makeMap('allowfullscreen,async,autofocus,autoplay,checked,compact,controls,declare,' + 'default,defaultchecked,defaultmuted,defaultselected,defer,disabled,' + 'enabled,formnovalidate,hidden,indeterminate,inert,ismap,itemscope,loop,multiple,' + 'muted,nohref,noresize,noshade,novalidate,nowrap,open,pauseonexit,readonly,' + 'required,reversed,scoped,seamless,selected,sortable,translate,' + 'truespeed,typemustmatch,visible');
var xlinkNS = 'http://www.w3.org/1999/xlink';
var isXlink = function isXlink(name) {
return name.charAt(5) === ':' && name.slice(0, 5) === 'xlink';
};
var getXlinkProp = function getXlinkProp(name) {
return isXlink(name) ? name.slice(6, name.length) : '';
};
var isFalsyAttrValue = function isFalsyAttrValue(val) {
return val == null || val === false;
};
/* */
function genClassForVnode(vnode) {
var data = vnode.data;
var parentNode = vnode;
var childNode = vnode;
while (isDef(childNode.componentInstance)) {
childNode = childNode.componentInstance._vnode;
if (childNode.data) {
data = mergeClassData(childNode.data, data);
}
}
while (isDef(parentNode = parentNode.parent)) {
if (parentNode.data) {
data = mergeClassData(data, parentNode.data);
}
}
return genClassFromData(data);
}
function mergeClassData(child, parent) {
return {
staticClass: concat(child.staticClass, parent.staticClass),
class: isDef(child.class) ? [child.class, parent.class] : parent.class
};
}
function genClassFromData(data) {
var dynamicClass = data.class;
var staticClass = data.staticClass;
if (isDef(staticClass) || isDef(dynamicClass)) {
return concat(staticClass, stringifyClass(dynamicClass));
}
/* istanbul ignore next */
return '';
}
function concat(a, b) {
return a ? b ? a + ' ' + b : a : b || '';
}
function stringifyClass(value) {
if (isUndef(value)) {
return '';
}
if (typeof value === 'string') {
return value;
}
var res = '';
if (Array.isArray(value)) {
var stringified;
for (var i = 0, l = value.length; i < l; i++) {
if (isDef(value[i])) {
if (isDef(stringified = stringifyClass(value[i])) && stringified !== '') {
res += stringified + ' ';
}
}
}
return res.slice(0, -1);
}
if (isObject(value)) {
for (var key in value) {
if (value[key]) {
res += key + ' ';
}
}
return res.slice(0, -1);
}
/* istanbul ignore next */
return res;
}
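// Illustrative examples (editorial):
//   stringifyClass('a b')                 // 'a b'
//   stringifyClass(['a', { b: true }])    // 'a b'
//   stringifyClass({ a: true, b: 0 })     // 'a'
// genClassForVnode additionally merges staticClass/class data between a
// component placeholder vnode and the root element it renders.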
/* */
var namespaceMap = {
svg: 'http://www.w3.org/2000/svg',
math: 'http://www.w3.org/1998/Math/MathML'
};
var isHTMLTag = makeMap('html,body,base,head,link,meta,style,title,' + 'address,article,aside,footer,header,h1,h2,h3,h4,h5,h6,hgroup,nav,section,' + 'div,dd,dl,dt,figcaption,figure,hr,img,li,main,ol,p,pre,ul,' + 'a,b,abbr,bdi,bdo,br,cite,code,data,dfn,em,i,kbd,mark,q,rp,rt,rtc,ruby,' + 's,samp,small,span,strong,sub,sup,time,u,var,wbr,area,audio,map,track,video,' + 'embed,object,param,source,canvas,script,noscript,del,ins,' + 'caption,col,colgroup,table,thead,tbody,td,th,tr,' + 'button,datalist,fieldset,form,input,label,legend,meter,optgroup,option,' + 'output,progress,select,textarea,' + 'details,dialog,menu,menuitem,summary,' + 'content,element,shadow,template');
// this map is intentionally selective, only covering SVG elements that may
// contain child elements.
var isSVG = makeMap('svg,animate,circle,clippath,cursor,defs,desc,ellipse,filter,font-face,' + 'foreignObject,g,glyph,image,line,marker,mask,missing-glyph,path,pattern,' + 'polygon,polyline,rect,switch,symbol,text,textpath,tspan,use,view', true);
var isPreTag = function isPreTag(tag) {
return tag === 'pre';
};
var isReservedTag = function isReservedTag(tag) {
return isHTMLTag(tag) || isSVG(tag);
};
function getTagNamespace(tag) {
if (isSVG(tag)) {
return 'svg';
}
// basic support for MathML
// note it doesn't support other MathML elements being component roots
if (tag === 'math') {
return 'math';
}
}
var unknownElementCache = Object.create(null);
function isUnknownElement(tag) {
/* istanbul ignore if */
if (!inBrowser) {
return true;
}
if (isReservedTag(tag)) {
return false;
}
tag = tag.toLowerCase();
/* istanbul ignore if */
if (unknownElementCache[tag] != null) {
return unknownElementCache[tag];
}
var el = document.createElement(tag);
if (tag.indexOf('-') > -1) {
// http://stackoverflow.com/a/28210364/1070244
return unknownElementCache[tag] = el.constructor === window.HTMLUnknownElement || el.constructor === window.HTMLElement;
} else {
return unknownElementCache[tag] = /HTMLUnknownElement/.test(el.toString());
}
}
/* */
/**
* Query an element selector if it's not an element already.
*/
function query(el) {
if (typeof el === 'string') {
var selected = document.querySelector(el);
if (!selected) {
"development" !== 'production' && warn('Cannot find element: ' + el);
return document.createElement('div');
}
return selected;
} else {
return el;
}
}
/* */
function createElement$1(tagName, vnode) {
var elm = document.createElement(tagName);
if (tagName !== 'select') {
return elm;
}
// false or null will remove the attribute but undefined will not
if (vnode.data && vnode.data.attrs && vnode.data.attrs.multiple !== undefined) {
elm.setAttribute('multiple', 'multiple');
}
return elm;
}
function createElementNS(namespace, tagName) {
return document.createElementNS(namespaceMap[namespace], tagName);
}
function createTextNode(text) {
return document.createTextNode(text);
}
function createComment(text) {
return document.createComment(text);
}
function insertBefore(parentNode, newNode, referenceNode) {
parentNode.insertBefore(newNode, referenceNode);
}
function removeChild(node, child) {
node.removeChild(child);
}
function appendChild(node, child) {
node.appendChild(child);
}
function parentNode(node) {
return node.parentNode;
}
function nextSibling(node) {
return node.nextSibling;
}
function tagName(node) {
return node.tagName;
}
function setTextContent(node, text) {
node.textContent = text;
}
function setAttribute(node, key, val) {
node.setAttribute(key, val);
}
var nodeOps = Object.freeze({
createElement: createElement$1,
createElementNS: createElementNS,
createTextNode: createTextNode,
createComment: createComment,
insertBefore: insertBefore,
removeChild: removeChild,
appendChild: appendChild,
parentNode: parentNode,
nextSibling: nextSibling,
tagName: tagName,
setTextContent: setTextContent,
setAttribute: setAttribute
});
/* */
var ref = {
create: function create(_, vnode) {
registerRef(vnode);
},
update: function update(oldVnode, vnode) {
if (oldVnode.data.ref !== vnode.data.ref) {
registerRef(oldVnode, true);
registerRef(vnode);
}
},
destroy: function destroy(vnode) {
registerRef(vnode, true);
}
};
function registerRef(vnode, isRemoval) {
var key = vnode.data.ref;
if (!key) {
return;
}
var vm = vnode.context;
var ref = vnode.componentInstance || vnode.elm;
var refs = vm.$refs;
if (isRemoval) {
if (Array.isArray(refs[key])) {
remove(refs[key], ref);
} else if (refs[key] === ref) {
refs[key] = undefined;
}
} else {
if (vnode.data.refInFor) {
if (Array.isArray(refs[key]) && refs[key].indexOf(ref) < 0) {
refs[key].push(ref);
} else {
refs[key] = [ref];
}
} else {
refs[key] = ref;
}
}
}
/**
* Virtual DOM patching algorithm based on Snabbdom by
* Simon Friis Vindum (@paldepind)
* Licensed under the MIT License
* https://github.com/paldepind/snabbdom/blob/master/LICENSE
*
* modified by Evan You (@yyx990803)
 */

/*
* Not type-checking this because this file is perf-critical and the cost
* of making flow understand it is not worth it.
*/
var emptyNode = new VNode('', {}, []);
var hooks = ['create', 'activate', 'update', 'remove', 'destroy'];
function sameVnode(a, b) {
return a.key === b.key && a.tag === b.tag && a.isComment === b.isComment && isDef(a.data) === isDef(b.data) && sameInputType(a, b);
}
// Some browsers do not support dynamically changing type for <input>
// so they need to be treated as different nodes
function sameInputType(a, b) {
if (a.tag !== 'input') {
return true;
}
var i;
var typeA = isDef(i = a.data) && isDef(i = i.attrs) && i.type;
var typeB = isDef(i = b.data) && isDef(i = i.attrs) && i.type;
return typeA === typeB;
}
function createKeyToOldIdx(children, beginIdx, endIdx) {
var i, key;
var map = {};
for (i = beginIdx; i <= endIdx; ++i) {
key = children[i].key;
if (isDef(key)) {
map[key] = i;
}
}
return map;
}
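// Illustrative sketch (editorial): for old children keyed 'a', 'b', 'c'
// starting at index 0 this returns { a: 0, b: 1, c: 2 }; updateChildren uses
// the map to find a movable old vnode once both ends of the lists stop matching.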
function createPatchFunction(backend) {
var i, j;
var cbs = {};
var modules = backend.modules;
var nodeOps = backend.nodeOps;
for (i = 0; i < hooks.length; ++i) {
cbs[hooks[i]] = [];
for (j = 0; j < modules.length; ++j) {
if (isDef(modules[j][hooks[i]])) {
cbs[hooks[i]].push(modules[j][hooks[i]]);
}
}
}
function emptyNodeAt(elm) {
return new VNode(nodeOps.tagName(elm).toLowerCase(), {}, [], undefined, elm);
}
function createRmCb(childElm, listeners) {
function remove$$1() {
if (--remove$$1.listeners === 0) {
removeNode(childElm);
}
}
remove$$1.listeners = listeners;
return remove$$1;
}
function removeNode(el) {
var parent = nodeOps.parentNode(el);
// element may have already been removed due to v-html / v-text
if (isDef(parent)) {
nodeOps.removeChild(parent, el);
}
}
var inPre = 0;
function createElm(vnode, insertedVnodeQueue, parentElm, refElm, nested) {
vnode.isRootInsert = !nested; // for transition enter check
if (createComponent(vnode, insertedVnodeQueue, parentElm, refElm)) {
return;
}
var data = vnode.data;
var children = vnode.children;
var tag = vnode.tag;
if (isDef(tag)) {
if (true) {
if (data && data.pre) {
inPre++;
}
if (!inPre && !vnode.ns && !(config.ignoredElements.length && config.ignoredElements.indexOf(tag) > -1) && config.isUnknownElement(tag)) {
warn('Unknown custom element: <' + tag + '> - did you ' + 'register the component correctly? For recursive components, ' + 'make sure to provide the "name" option.', vnode.context);
}
}
vnode.elm = vnode.ns ? nodeOps.createElementNS(vnode.ns, tag) : nodeOps.createElement(tag, vnode);
setScope(vnode);
/* istanbul ignore if */
{
createChildren(vnode, children, insertedVnodeQueue);
if (isDef(data)) {
invokeCreateHooks(vnode, insertedVnodeQueue);
}
insert(parentElm, vnode.elm, refElm);
}
if ("development" !== 'production' && data && data.pre) {
inPre--;
}
} else if (isTrue(vnode.isComment)) {
vnode.elm = nodeOps.createComment(vnode.text);
insert(parentElm, vnode.elm, refElm);
} else {
vnode.elm = nodeOps.createTextNode(vnode.text);
insert(parentElm, vnode.elm, refElm);
}
}
function createComponent(vnode, insertedVnodeQueue, parentElm, refElm) {
var i = vnode.data;
if (isDef(i)) {
var isReactivated = isDef(vnode.componentInstance) && i.keepAlive;
if (isDef(i = i.hook) && isDef(i = i.init)) {
i(vnode, false /* hydrating */, parentElm, refElm);
}
// after calling the init hook, if the vnode is a child component
// it should've created a child instance and mounted it. the child
// component also has set the placeholder vnode's elm.
// in that case we can just return the element and be done.
if (isDef(vnode.componentInstance)) {
initComponent(vnode, insertedVnodeQueue);
if (isTrue(isReactivated)) {
reactivateComponent(vnode, insertedVnodeQueue, parentElm, refElm);
}
return true;
}
}
}
function initComponent(vnode, insertedVnodeQueue) {
if (isDef(vnode.data.pendingInsert)) {
insertedVnodeQueue.push.apply(insertedVnodeQueue, vnode.data.pendingInsert);
vnode.data.pendingInsert = null;
}
vnode.elm = vnode.componentInstance.$el;
if (isPatchable(vnode)) {
invokeCreateHooks(vnode, insertedVnodeQueue);
setScope(vnode);
} else {
// empty component root.
// skip all element-related modules except for ref (#3455)
registerRef(vnode);
// make sure to invoke the insert hook
insertedVnodeQueue.push(vnode);
}
}
function reactivateComponent(vnode, insertedVnodeQueue, parentElm, refElm) {
var i;
// hack for #4339: a reactivated component with inner transition
// does not trigger because the inner node's created hooks are not called
// again. It's not ideal to involve module-specific logic in here but
// there doesn't seem to be a better way to do it.
var innerNode = vnode;
while (innerNode.componentInstance) {
innerNode = innerNode.componentInstance._vnode;
if (isDef(i = innerNode.data) && isDef(i = i.transition)) {
for (i = 0; i < cbs.activate.length; ++i) {
cbs.activate[i](emptyNode, innerNode);
}
insertedVnodeQueue.push(innerNode);
break;
}
}
// unlike a newly created component,
// a reactivated keep-alive component doesn't insert itself
insert(parentElm, vnode.elm, refElm);
}
function insert(parent, elm, ref) {
if (isDef(parent)) {
if (isDef(ref)) {
if (ref.parentNode === parent) {
nodeOps.insertBefore(parent, elm, ref);
}
} else {
nodeOps.appendChild(parent, elm);
}
}
}
function createChildren(vnode, children, insertedVnodeQueue) {
if (Array.isArray(children)) {
for (var i = 0; i < children.length; ++i) {
createElm(children[i], insertedVnodeQueue, vnode.elm, null, true);
}
} else if (isPrimitive(vnode.text)) {
nodeOps.appendChild(vnode.elm, nodeOps.createTextNode(vnode.text));
}
}
function isPatchable(vnode) {
while (vnode.componentInstance) {
vnode = vnode.componentInstance._vnode;
}
return isDef(vnode.tag);
}
function invokeCreateHooks(vnode, insertedVnodeQueue) {
for (var i$1 = 0; i$1 < cbs.create.length; ++i$1) {
cbs.create[i$1](emptyNode, vnode);
}
i = vnode.data.hook; // Reuse variable
if (isDef(i)) {
if (isDef(i.create)) {
i.create(emptyNode, vnode);
}
if (isDef(i.insert)) {
insertedVnodeQueue.push(vnode);
}
}
}
// set scope id attribute for scoped CSS.
// this is implemented as a special case to avoid the overhead
// of going through the normal attribute patching process.
function setScope(vnode) {
var i;
var ancestor = vnode;
while (ancestor) {
if (isDef(i = ancestor.context) && isDef(i = i.$options._scopeId)) {
nodeOps.setAttribute(vnode.elm, i, '');
}
ancestor = ancestor.parent;
}
// slot content should also get the scopeId from the host instance.
if (isDef(i = activeInstance) && i !== vnode.context && isDef(i = i.$options._scopeId)) {
nodeOps.setAttribute(vnode.elm, i, '');
}
}
function addVnodes(parentElm, refElm, vnodes, startIdx, endIdx, insertedVnodeQueue) {
for (; startIdx <= endIdx; ++startIdx) {
createElm(vnodes[startIdx], insertedVnodeQueue, parentElm, refElm);
}
}
function invokeDestroyHook(vnode) {
var i, j;
var data = vnode.data;
if (isDef(data)) {
if (isDef(i = data.hook) && isDef(i = i.destroy)) {
i(vnode);
}
for (i = 0; i < cbs.destroy.length; ++i) {
cbs.destroy[i](vnode);
}
}
if (isDef(i = vnode.children)) {
for (j = 0; j < vnode.children.length; ++j) {
invokeDestroyHook(vnode.children[j]);
}
}
}
function removeVnodes(parentElm, vnodes, startIdx, endIdx) {
for (; startIdx <= endIdx; ++startIdx) {
var ch = vnodes[startIdx];
if (isDef(ch)) {
if (isDef(ch.tag)) {
removeAndInvokeRemoveHook(ch);
invokeDestroyHook(ch);
} else {
// Text node
removeNode(ch.elm);
}
}
}
}
function removeAndInvokeRemoveHook(vnode, rm) {
if (isDef(rm) || isDef(vnode.data)) {
var i;
var listeners = cbs.remove.length + 1;
if (isDef(rm)) {
// we have a recursively passed down rm callback
// increase the listeners count
rm.listeners += listeners;
} else {
// directly removing
rm = createRmCb(vnode.elm, listeners);
}
// recursively invoke hooks on child component root node
if (isDef(i = vnode.componentInstance) && isDef(i = i._vnode) && isDef(i.data)) {
removeAndInvokeRemoveHook(i, rm);
}
for (i = 0; i < cbs.remove.length; ++i) {
cbs.remove[i](vnode, rm);
}
if (isDef(i = vnode.data.hook) && isDef(i = i.remove)) {
i(vnode, rm);
} else {
rm();
}
} else {
removeNode(vnode.elm);
}
}
function updateChildren(parentElm, oldCh, newCh, insertedVnodeQueue, removeOnly) {
var oldStartIdx = 0;
var newStartIdx = 0;
var oldEndIdx = oldCh.length - 1;
var oldStartVnode = oldCh[0];
var oldEndVnode = oldCh[oldEndIdx];
var newEndIdx = newCh.length - 1;
var newStartVnode = newCh[0];
var newEndVnode = newCh[newEndIdx];
var oldKeyToIdx, idxInOld, elmToMove, refElm;
// removeOnly is a special flag used only by <transition-group>
// to ensure removed elements stay in correct relative positions
// during leaving transitions
var canMove = !removeOnly;
while (oldStartIdx <= oldEndIdx && newStartIdx <= newEndIdx) {
if (isUndef(oldStartVnode)) {
oldStartVnode = oldCh[++oldStartIdx]; // Vnode has been moved left
} else if (isUndef(oldEndVnode)) {
oldEndVnode = oldCh[--oldEndIdx];
} else if (sameVnode(oldStartVnode, newStartVnode)) {
patchVnode(oldStartVnode, newStartVnode, insertedVnodeQueue);
oldStartVnode = oldCh[++oldStartIdx];
newStartVnode = newCh[++newStartIdx];
} else if (sameVnode(oldEndVnode, newEndVnode)) {
patchVnode(oldEndVnode, newEndVnode, insertedVnodeQueue);
oldEndVnode = oldCh[--oldEndIdx];
newEndVnode = newCh[--newEndIdx];
} else if (sameVnode(oldStartVnode, newEndVnode)) {
// Vnode moved right
patchVnode(oldStartVnode, newEndVnode, insertedVnodeQueue);
canMove && nodeOps.insertBefore(parentElm, oldStartVnode.elm, nodeOps.nextSibling(oldEndVnode.elm));
oldStartVnode = oldCh[++oldStartIdx];
newEndVnode = newCh[--newEndIdx];
} else if (sameVnode(oldEndVnode, newStartVnode)) {
// Vnode moved left
patchVnode(oldEndVnode, newStartVnode, insertedVnodeQueue);
canMove && nodeOps.insertBefore(parentElm, oldEndVnode.elm, oldStartVnode.elm);
oldEndVnode = oldCh[--oldEndIdx];
newStartVnode = newCh[++newStartIdx];
} else {
if (isUndef(oldKeyToIdx)) {
oldKeyToIdx = createKeyToOldIdx(oldCh, oldStartIdx, oldEndIdx);
}
idxInOld = isDef(newStartVnode.key) ? oldKeyToIdx[newStartVnode.key] : null;
if (isUndef(idxInOld)) {
// New element
createElm(newStartVnode, insertedVnodeQueue, parentElm, oldStartVnode.elm);
newStartVnode = newCh[++newStartIdx];
} else {
elmToMove = oldCh[idxInOld];
/* istanbul ignore if */
if ("development" !== 'production' && !elmToMove) {
warn('It seems there are duplicate keys that are causing an update error. ' + 'Make sure each v-for item has a unique key.');
}
if (sameVnode(elmToMove, newStartVnode)) {
patchVnode(elmToMove, newStartVnode, insertedVnodeQueue);
oldCh[idxInOld] = undefined;
canMove && nodeOps.insertBefore(parentElm, newStartVnode.elm, oldStartVnode.elm);
newStartVnode = newCh[++newStartIdx];
} else {
// same key but different element. treat as new element
createElm(newStartVnode, insertedVnodeQueue, parentElm, oldStartVnode.elm);
newStartVnode = newCh[++newStartIdx];
}
}
}
}
if (oldStartIdx > oldEndIdx) {
refElm = isUndef(newCh[newEndIdx + 1]) ? null : newCh[newEndIdx + 1].elm;
addVnodes(parentElm, refElm, newCh, newStartIdx, newEndIdx, insertedVnodeQueue);
} else if (newStartIdx > newEndIdx) {
removeVnodes(parentElm, oldCh, oldStartIdx, oldEndIdx);
}
}
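// Editorial summary of the loop above: old and new children are compared from
// both ends (start/start, end/end, start/end, end/start); a hit patches the
// pair and advances the matching pointers. Only when all four probes miss is
// the key map consulted or a brand-new element created; leftover new vnodes
// are then added and leftover old vnodes removed.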
function patchVnode(oldVnode, vnode, insertedVnodeQueue, removeOnly) {
if (oldVnode === vnode) {
return;
}
// reuse element for static trees.
// note we only do this if the vnode is cloned -
// if the new node is not cloned it means the render functions have been
// reset by the hot-reload-api and we need to do a proper re-render.
if (isTrue(vnode.isStatic) && isTrue(oldVnode.isStatic) && vnode.key === oldVnode.key && (isTrue(vnode.isCloned) || isTrue(vnode.isOnce))) {
vnode.elm = oldVnode.elm;
vnode.componentInstance = oldVnode.componentInstance;
return;
}
var i;
var data = vnode.data;
if (isDef(data) && isDef(i = data.hook) && isDef(i = i.prepatch)) {
i(oldVnode, vnode);
}
var elm = vnode.elm = oldVnode.elm;
var oldCh = oldVnode.children;
var ch = vnode.children;
if (isDef(data) && isPatchable(vnode)) {
for (i = 0; i < cbs.update.length; ++i) {
cbs.update[i](oldVnode, vnode);
}
if (isDef(i = data.hook) && isDef(i = i.update)) {
i(oldVnode, vnode);
}
}
if (isUndef(vnode.text)) {
if (isDef(oldCh) && isDef(ch)) {
if (oldCh !== ch) {
updateChildren(elm, oldCh, ch, insertedVnodeQueue, removeOnly);
}
} else if (isDef(ch)) {
if (isDef(oldVnode.text)) {
nodeOps.setTextContent(elm, '');
}
addVnodes(elm, null, ch, 0, ch.length - 1, insertedVnodeQueue);
} else if (isDef(oldCh)) {
removeVnodes(elm, oldCh, 0, oldCh.length - 1);
} else if (isDef(oldVnode.text)) {
nodeOps.setTextContent(elm, '');
}
} else if (oldVnode.text !== vnode.text) {
nodeOps.setTextContent(elm, vnode.text);
}
if (isDef(data)) {
if (isDef(i = data.hook) && isDef(i = i.postpatch)) {
i(oldVnode, vnode);
}
}
}
function invokeInsertHook(vnode, queue, initial) {
// delay insert hooks for component root nodes, invoke them after the
// element is really inserted
if (isTrue(initial) && isDef(vnode.parent)) {
vnode.parent.data.pendingInsert = queue;
} else {
for (var i = 0; i < queue.length; ++i) {
queue[i].data.hook.insert(queue[i]);
}
}
}
var bailed = false;
// list of modules that can skip create hook during hydration because they
// are already rendered on the client or have no need for initialization
var isRenderedModule = makeMap('attrs,style,class,staticClass,staticStyle,key');
// Note: this is a browser-only function so we can assume elms are DOM nodes.
function hydrate(elm, vnode, insertedVnodeQueue) {
if (true) {
if (!assertNodeMatch(elm, vnode)) {
return false;
}
}
vnode.elm = elm;
var tag = vnode.tag;
var data = vnode.data;
var children = vnode.children;
if (isDef(data)) {
if (isDef(i = data.hook) && isDef(i = i.init)) {
i(vnode, true /* hydrating */);
}
if (isDef(i = vnode.componentInstance)) {
// child component. it should have hydrated its own tree.
initComponent(vnode, insertedVnodeQueue);
return true;
}
}
if (isDef(tag)) {
if (isDef(children)) {
// empty element, allow client to pick up and populate children
if (!elm.hasChildNodes()) {
createChildren(vnode, children, insertedVnodeQueue);
} else {
var childrenMatch = true;
var childNode = elm.firstChild;
for (var i$1 = 0; i$1 < children.length; i$1++) {
if (!childNode || !hydrate(childNode, children[i$1], insertedVnodeQueue)) {
childrenMatch = false;
break;
}
childNode = childNode.nextSibling;
}
// if childNode is not null, it means the actual childNodes list is
// longer than the virtual children list.
if (!childrenMatch || childNode) {
if ("development" !== 'production' && typeof console !== 'undefined' && !bailed) {
bailed = true;
console.warn('Parent: ', elm);
console.warn('Mismatching childNodes vs. VNodes: ', elm.childNodes, children);
}
return false;
}
}
}
if (isDef(data)) {
for (var key in data) {
if (!isRenderedModule(key)) {
invokeCreateHooks(vnode, insertedVnodeQueue);
break;
}
}
}
} else if (elm.data !== vnode.text) {
elm.data = vnode.text;
}
return true;
}
function assertNodeMatch(node, vnode) {
if (isDef(vnode.tag)) {
return vnode.tag.indexOf('vue-component') === 0 || vnode.tag.toLowerCase() === (node.tagName && node.tagName.toLowerCase());
} else {
return node.nodeType === (vnode.isComment ? 8 : 3);
}
}
return function patch(oldVnode, vnode, hydrating, removeOnly, parentElm, refElm) {
if (isUndef(vnode)) {
if (isDef(oldVnode)) {
invokeDestroyHook(oldVnode);
}
return;
}
var isInitialPatch = false;
var insertedVnodeQueue = [];
if (isUndef(oldVnode)) {
// empty mount (likely as component), create new root element
isInitialPatch = true;
createElm(vnode, insertedVnodeQueue, parentElm, refElm);
} else {
var isRealElement = isDef(oldVnode.nodeType);
if (!isRealElement && sameVnode(oldVnode, vnode)) {
// patch existing root node
patchVnode(oldVnode, vnode, insertedVnodeQueue, removeOnly);
} else {
if (isRealElement) {
// mounting to a real element
// check if this is server-rendered content and if we can perform
// a successful hydration.
if (oldVnode.nodeType === 1 && oldVnode.hasAttribute(SSR_ATTR)) {
oldVnode.removeAttribute(SSR_ATTR);
hydrating = true;
}
if (isTrue(hydrating)) {
if (hydrate(oldVnode, vnode, insertedVnodeQueue)) {
invokeInsertHook(vnode, insertedVnodeQueue, true);
return oldVnode;
} else if (true) {
warn('The client-side rendered virtual DOM tree is not matching ' + 'server-rendered content. This is likely caused by incorrect ' + 'HTML markup, for example nesting block-level elements inside ' + '<p>, or missing <tbody>. Bailing hydration and performing ' + 'full client-side render.');
}
}
// either not server-rendered, or hydration failed.
// create an empty node and replace it
oldVnode = emptyNodeAt(oldVnode);
}
// replacing existing element
var oldElm = oldVnode.elm;
var parentElm$1 = nodeOps.parentNode(oldElm);
createElm(vnode, insertedVnodeQueue,
// extremely rare edge case: do not insert if old element is in a
// leaving transition. Only happens when combining transition +
// keep-alive + HOCs. (#4590)
oldElm._leaveCb ? null : parentElm$1, nodeOps.nextSibling(oldElm));
if (isDef(vnode.parent)) {
// component root element replaced.
// update parent placeholder node element, recursively
var ancestor = vnode.parent;
while (ancestor) {
ancestor.elm = vnode.elm;
ancestor = ancestor.parent;
}
if (isPatchable(vnode)) {
for (var i = 0; i < cbs.create.length; ++i) {
cbs.create[i](emptyNode, vnode.parent);
}
}
}
if (isDef(parentElm$1)) {
removeVnodes(parentElm$1, [oldVnode], 0, 0);
} else if (isDef(oldVnode.tag)) {
invokeDestroyHook(oldVnode);
}
}
}
invokeInsertHook(vnode, insertedVnodeQueue, isInitialPatch);
return vnode.elm;
};
}
/* */
var directives = {
create: updateDirectives,
update: updateDirectives,
destroy: function unbindDirectives(vnode) {
updateDirectives(vnode, emptyNode);
}
};
function updateDirectives(oldVnode, vnode) {
if (oldVnode.data.directives || vnode.data.directives) {
_update(oldVnode, vnode);
}
}
function _update(oldVnode, vnode) {
var isCreate = oldVnode === emptyNode;
var isDestroy = vnode === emptyNode;
var oldDirs = normalizeDirectives$1(oldVnode.data.directives, oldVnode.context);
var newDirs = normalizeDirectives$1(vnode.data.directives, vnode.context);
var dirsWithInsert = [];
var dirsWithPostpatch = [];
var key, oldDir, dir;
for (key in newDirs) {
oldDir = oldDirs[key];
dir = newDirs[key];
if (!oldDir) {
// new directive, bind
callHook$1(dir, 'bind', vnode, oldVnode);
if (dir.def && dir.def.inserted) {
dirsWithInsert.push(dir);
}
} else {
// existing directive, update
dir.oldValue = oldDir.value;
callHook$1(dir, 'update', vnode, oldVnode);
if (dir.def && dir.def.componentUpdated) {
dirsWithPostpatch.push(dir);
}
}
}
if (dirsWithInsert.length) {
var callInsert = function callInsert() {
for (var i = 0; i < dirsWithInsert.length; i++) {
callHook$1(dirsWithInsert[i], 'inserted', vnode, oldVnode);
}
};
if (isCreate) {
mergeVNodeHook(vnode.data.hook || (vnode.data.hook = {}), 'insert', callInsert);
} else {
callInsert();
}
}
if (dirsWithPostpatch.length) {
mergeVNodeHook(vnode.data.hook || (vnode.data.hook = {}), 'postpatch', function () {
for (var i = 0; i < dirsWithPostpatch.length; i++) {
callHook$1(dirsWithPostpatch[i], 'componentUpdated', vnode, oldVnode);
}
});
}
if (!isCreate) {
for (key in oldDirs) {
if (!newDirs[key]) {
// no longer present, unbind
callHook$1(oldDirs[key], 'unbind', oldVnode, oldVnode, isDestroy);
}
}
}
}
var emptyModifiers = Object.create(null);
function normalizeDirectives$1(dirs, vm) {
var res = Object.create(null);
if (!dirs) {
return res;
}
var i, dir;
for (i = 0; i < dirs.length; i++) {
dir = dirs[i];
if (!dir.modifiers) {
dir.modifiers = emptyModifiers;
}
res[getRawDirName(dir)] = dir;
dir.def = resolveAsset(vm.$options, 'directives', dir.name, true);
}
return res;
}
function getRawDirName(dir) {
return dir.rawName || dir.name + "." + Object.keys(dir.modifiers || {}).join('.');
}
function callHook$1(dir, hook, vnode, oldVnode, isDestroy) {
var fn = dir.def && dir.def[hook];
if (fn) {
try {
fn(vnode.elm, dir, vnode, oldVnode, isDestroy);
} catch (e) {
handleError(e, vnode.context, "directive " + dir.name + " " + hook + " hook");
}
}
}
var baseModules = [ref, directives];
/* */
function updateAttrs(oldVnode, vnode) {
if (isUndef(oldVnode.data.attrs) && isUndef(vnode.data.attrs)) {
return;
}
var key, cur, old;
var elm = vnode.elm;
var oldAttrs = oldVnode.data.attrs || {};
var attrs = vnode.data.attrs || {};
// clone observed objects, as the user probably wants to mutate it
if (isDef(attrs.__ob__)) {
attrs = vnode.data.attrs = extend({}, attrs);
}
for (key in attrs) {
cur = attrs[key];
old = oldAttrs[key];
if (old !== cur) {
setAttr(elm, key, cur);
}
}
// #4391: in IE9, setting type can reset value for input[type=radio]
/* istanbul ignore if */
if (isIE9 && attrs.value !== oldAttrs.value) {
setAttr(elm, 'value', attrs.value);
}
for (key in oldAttrs) {
if (isUndef(attrs[key])) {
if (isXlink(key)) {
elm.removeAttributeNS(xlinkNS, getXlinkProp(key));
} else if (!isEnumeratedAttr(key)) {
elm.removeAttribute(key);
}
}
}
}
function setAttr(el, key, value) {
if (isBooleanAttr(key)) {
// set attribute for blank value
// e.g. <option disabled>Select one</option>
if (isFalsyAttrValue(value)) {
el.removeAttribute(key);
} else {
el.setAttribute(key, key);
}
} else if (isEnumeratedAttr(key)) {
el.setAttribute(key, isFalsyAttrValue(value) || value === 'false' ? 'false' : 'true');
} else if (isXlink(key)) {
if (isFalsyAttrValue(value)) {
el.removeAttributeNS(xlinkNS, getXlinkProp(key));
} else {
el.setAttributeNS(xlinkNS, key, value);
}
} else {
if (isFalsyAttrValue(value)) {
el.removeAttribute(key);
} else {
el.setAttribute(key, value);
}
}
}
var attrs = {
create: updateAttrs,
update: updateAttrs
};
/* */
function updateClass(oldVnode, vnode) {
var el = vnode.elm;
var data = vnode.data;
var oldData = oldVnode.data;
if (isUndef(data.staticClass) && isUndef(data.class) && (isUndef(oldData) || isUndef(oldData.staticClass) && isUndef(oldData.class))) {
return;
}
var cls = genClassForVnode(vnode);
// handle transition classes
var transitionClass = el._transitionClasses;
if (isDef(transitionClass)) {
cls = concat(cls, stringifyClass(transitionClass));
}
// set the class
if (cls !== el._prevClass) {
el.setAttribute('class', cls);
el._prevClass = cls;
}
}
var klass = {
create: updateClass,
update: updateClass
};
/* */
var validDivisionCharRE = /[\w).+\-_$\]]/;
function parseFilters(exp) {
var inSingle = false;
var inDouble = false;
var inTemplateString = false;
var inRegex = false;
var curly = 0;
var square = 0;
var paren = 0;
var lastFilterIndex = 0;
var c, prev, i, expression, filters;
for (i = 0; i < exp.length; i++) {
prev = c;
c = exp.charCodeAt(i);
if (inSingle) {
if (c === 0x27 && prev !== 0x5C) {
inSingle = false;
}
} else if (inDouble) {
if (c === 0x22 && prev !== 0x5C) {
inDouble = false;
}
} else if (inTemplateString) {
if (c === 0x60 && prev !== 0x5C) {
inTemplateString = false;
}
} else if (inRegex) {
if (c === 0x2f && prev !== 0x5C) {
inRegex = false;
}
} else if (c === 0x7C && // pipe
exp.charCodeAt(i + 1) !== 0x7C && exp.charCodeAt(i - 1) !== 0x7C && !curly && !square && !paren) {
if (expression === undefined) {
// first filter, end of expression
lastFilterIndex = i + 1;
expression = exp.slice(0, i).trim();
} else {
pushFilter();
}
} else {
switch (c) {
case 0x22:
inDouble = true;break; // "
case 0x27:
inSingle = true;break; // '
case 0x60:
inTemplateString = true;break; // `
case 0x28:
paren++;break; // (
case 0x29:
paren--;break; // )
case 0x5B:
square++;break; // [
case 0x5D:
square--;break; // ]
case 0x7B:
curly++;break; // {
case 0x7D:
curly--;break; // }
}
if (c === 0x2f) {
// /
var j = i - 1;
var p = void 0;
// find first non-whitespace prev char
for (; j >= 0; j--) {
p = exp.charAt(j);
if (p !== ' ') {
break;
}
}
if (!p || !validDivisionCharRE.test(p)) {
inRegex = true;
}
}
}
}
if (expression === undefined) {
expression = exp.slice(0, i).trim();
} else if (lastFilterIndex !== 0) {
pushFilter();
}
function pushFilter() {
(filters || (filters = [])).push(exp.slice(lastFilterIndex, i).trim());
lastFilterIndex = i + 1;
}
if (filters) {
for (i = 0; i < filters.length; i++) {
expression = wrapFilter(expression, filters[i]);
}
}
return expression;
}
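// Illustrative examples (editorial):
//   parseFilters('msg | capitalize')        // '_f("capitalize")(msg)'
//   parseFilters('msg | pad(2) | reverse')  // '_f("reverse")(_f("pad")(msg,2))'
// Pipes inside strings, template literals, regexes, (), [] and {} are ignored,
// and '||' is never treated as a filter separator.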
function wrapFilter(exp, filter) {
var i = filter.indexOf('(');
if (i < 0) {
// _f: resolveFilter
return "_f(\"" + filter + "\")(" + exp + ")";
} else {
var name = filter.slice(0, i);
var args = filter.slice(i + 1);
return "_f(\"" + name + "\")(" + exp + "," + args;
}
}
/* */
function baseWarn(msg) {
console.error("[Vue compiler]: " + msg);
}
function pluckModuleFunction(modules, key) {
return modules ? modules.map(function (m) {
return m[key];
}).filter(function (_) {
return _;
}) : [];
}
function addProp(el, name, value) {
(el.props || (el.props = [])).push({ name: name, value: value });
}
function addAttr(el, name, value) {
(el.attrs || (el.attrs = [])).push({ name: name, value: value });
}
function addDirective(el, name, rawName, value, arg, modifiers) {
(el.directives || (el.directives = [])).push({ name: name, rawName: rawName, value: value, arg: arg, modifiers: modifiers });
}
function addHandler(el, name, value, modifiers, important, warn) {
// warn prevent and passive modifier
/* istanbul ignore if */
if ("development" !== 'production' && warn && modifiers && modifiers.prevent && modifiers.passive) {
warn('passive and prevent can\'t be used together. ' + 'Passive handler can\'t prevent default event.');
}
// check capture modifier
if (modifiers && modifiers.capture) {
delete modifiers.capture;
name = '!' + name; // mark the event as captured
}
if (modifiers && modifiers.once) {
delete modifiers.once;
name = '~' + name; // mark the event as once
}
/* istanbul ignore if */
if (modifiers && modifiers.passive) {
delete modifiers.passive;
name = '&' + name; // mark the event as passive
}
var events;
if (modifiers && modifiers.native) {
delete modifiers.native;
events = el.nativeEvents || (el.nativeEvents = {});
} else {
events = el.events || (el.events = {});
}
var newHandler = { value: value, modifiers: modifiers };
var handlers = events[name];
/* istanbul ignore if */
if (Array.isArray(handlers)) {
important ? handlers.unshift(newHandler) : handlers.push(newHandler);
} else if (handlers) {
events[name] = important ? [newHandler, handlers] : [handlers, newHandler];
} else {
events[name] = newHandler;
}
}
function getBindingAttr(el, name, getStatic) {
var dynamicValue = getAndRemoveAttr(el, ':' + name) || getAndRemoveAttr(el, 'v-bind:' + name);
if (dynamicValue != null) {
return parseFilters(dynamicValue);
} else if (getStatic !== false) {
var staticValue = getAndRemoveAttr(el, name);
if (staticValue != null) {
return JSON.stringify(staticValue);
}
}
}
function getAndRemoveAttr(el, name) {
var val;
if ((val = el.attrsMap[name]) != null) {
var list = el.attrsList;
for (var i = 0, l = list.length; i < l; i++) {
if (list[i].name === name) {
list.splice(i, 1);
break;
}
}
}
return val;
}
/* */
/**
* Cross-platform code generation for component v-model
*/
function genComponentModel(el, value, modifiers) {
var ref = modifiers || {};
var number = ref.number;
var trim = ref.trim;
var baseValueExpression = '$$v';
var valueExpression = baseValueExpression;
if (trim) {
valueExpression = "(typeof " + baseValueExpression + " === 'string'" + "? " + baseValueExpression + ".trim()" + ": " + baseValueExpression + ")";
}
if (number) {
valueExpression = "_n(" + valueExpression + ")";
}
var assignment = genAssignmentCode(value, valueExpression);
el.model = {
value: "(" + value + ")",
expression: "\"" + value + "\"",
callback: "function (" + baseValueExpression + ") {" + assignment + "}"
};
}
/**
* Cross-platform codegen helper for generating v-model value assignment code.
*/
function genAssignmentCode(value, assignment) {
var modelRs = parseModel(value);
if (modelRs.idx === null) {
return value + "=" + assignment;
} else {
return "var $$exp = " + modelRs.exp + ", $$idx = " + modelRs.idx + ";" + "if (!Array.isArray($$exp)){" + value + "=" + assignment + "}" + "else{$$exp.splice($$idx, 1, " + assignment + ")}";
}
}
/**
 * Parse a v-model expression so array element assignment can be transformed:
 *   a[idx] = val  =>  $$a.splice($$idx, 1, val)
 *
 * Possible expression forms handled:
*
* - test
* - test[idx]
* - test[test1[idx]]
* - test["a"][idx]
* - xxx.test[a[a].test1[idx]]
* - test.xxx.a["asa"][test1[idx]]
*
*/
var len;
var str;
var chr;
var index$1;
var expressionPos;
var expressionEndPos;
function parseModel(val) {
str = val;
len = str.length;
index$1 = expressionPos = expressionEndPos = 0;
if (val.indexOf('[') < 0 || val.lastIndexOf(']') < len - 1) {
return {
exp: val,
idx: null
};
}
while (!eof()) {
chr = next();
/* istanbul ignore if */
if (isStringStart(chr)) {
parseString(chr);
} else if (chr === 0x5B) {
parseBracket(chr);
}
}
return {
exp: val.substring(0, expressionPos),
idx: val.substring(expressionPos + 1, expressionEndPos)
};
}
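// Illustrative examples (editorial):
//   parseModel('test')                // { exp: 'test', idx: null }
//   parseModel('test[idx]')           // { exp: 'test', idx: 'idx' }
//   parseModel('test[a["b"]][idx]')   // { exp: 'test[a["b"]]', idx: 'idx' }
// genAssignmentCode uses idx to emit a splice-based assignment so updating an
// array element stays reactive.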
function next() {
return str.charCodeAt(++index$1);
}
function eof() {
return index$1 >= len;
}
function isStringStart(chr) {
return chr === 0x22 || chr === 0x27;
}
function parseBracket(chr) {
var inBracket = 1;
expressionPos = index$1;
while (!eof()) {
chr = next();
if (isStringStart(chr)) {
parseString(chr);
continue;
}
if (chr === 0x5B) {
inBracket++;
}
if (chr === 0x5D) {
inBracket--;
}
if (inBracket === 0) {
expressionEndPos = index$1;
break;
}
}
}
function parseString(chr) {
var stringQuote = chr;
while (!eof()) {
chr = next();
if (chr === stringQuote) {
break;
}
}
}
/* */
var warn$1;
// in some cases, the event used has to be determined at runtime
// so we use some reserved tokens at compile time.
var RANGE_TOKEN = '__r';
var CHECKBOX_RADIO_TOKEN = '__c';
function model(el, dir, _warn) {
warn$1 = _warn;
var value = dir.value;
var modifiers = dir.modifiers;
var tag = el.tag;
var type = el.attrsMap.type;
if (true) {
var dynamicType = el.attrsMap['v-bind:type'] || el.attrsMap[':type'];
if (tag === 'input' && dynamicType) {
warn$1("<input :type=\"" + dynamicType + "\" v-model=\"" + value + "\">:\n" + "v-model does not support dynamic input types. Use v-if branches instead.");
}
// inputs with type="file" are read only and setting the input's
// value will throw an error.
if (tag === 'input' && type === 'file') {
warn$1("<" + el.tag + " v-model=\"" + value + "\" type=\"file\">:\n" + "File inputs are read only. Use a v-on:change listener instead.");
}
}
if (tag === 'select') {
genSelect(el, value, modifiers);
} else if (tag === 'input' && type === 'checkbox') {
genCheckboxModel(el, value, modifiers);
} else if (tag === 'input' && type === 'radio') {
genRadioModel(el, value, modifiers);
} else if (tag === 'input' || tag === 'textarea') {
genDefaultModel(el, value, modifiers);
} else if (!config.isReservedTag(tag)) {
genComponentModel(el, value, modifiers);
// component v-model doesn't need extra runtime
return false;
} else if (true) {
warn$1("<" + el.tag + " v-model=\"" + value + "\">: " + "v-model is not supported on this element type. " + 'If you are working with contenteditable, it\'s recommended to ' + 'wrap a library dedicated for that purpose inside a custom component.');
}
// ensure runtime directive metadata
return true;
}
function genCheckboxModel(el, value, modifiers) {
var number = modifiers && modifiers.number;
var valueBinding = getBindingAttr(el, 'value') || 'null';
var trueValueBinding = getBindingAttr(el, 'true-value') || 'true';
var falseValueBinding = getBindingAttr(el, 'false-value') || 'false';
addProp(el, 'checked', "Array.isArray(" + value + ")" + "?_i(" + value + "," + valueBinding + ")>-1" + (trueValueBinding === 'true' ? ":(" + value + ")" : ":_q(" + value + "," + trueValueBinding + ")"));
addHandler(el, CHECKBOX_RADIO_TOKEN, "var $$a=" + value + "," + '$$el=$event.target,' + "$$c=$$el.checked?(" + trueValueBinding + "):(" + falseValueBinding + ");" + 'if(Array.isArray($$a)){' + "var $$v=" + (number ? '_n(' + valueBinding + ')' : valueBinding) + "," + '$$i=_i($$a,$$v);' + "if($$c){$$i<0&&(" + value + "=$$a.concat($$v))}" + "else{$$i>-1&&(" + value + "=$$a.slice(0,$$i).concat($$a.slice($$i+1)))}" + "}else{" + genAssignmentCode(value, '$$c') + "}", null, true);
}
function genRadioModel(el, value, modifiers) {
var number = modifiers && modifiers.number;
var valueBinding = getBindingAttr(el, 'value') || 'null';
valueBinding = number ? "_n(" + valueBinding + ")" : valueBinding;
addProp(el, 'checked', "_q(" + value + "," + valueBinding + ")");
addHandler(el, CHECKBOX_RADIO_TOKEN, genAssignmentCode(value, valueBinding), null, true);
}
function genSelect(el, value, modifiers) {
var number = modifiers && modifiers.number;
var selectedVal = "Array.prototype.filter" + ".call($event.target.options,function(o){return o.selected})" + ".map(function(o){var val = \"_value\" in o ? o._value : o.value;" + "return " + (number ? '_n(val)' : 'val') + "})";
var assignment = '$event.target.multiple ? $$selectedVal : $$selectedVal[0]';
var code = "var $$selectedVal = " + selectedVal + ";";
code = code + " " + genAssignmentCode(value, assignment);
addHandler(el, 'change', code, null, true);
}
function genDefaultModel(el, value, modifiers) {
var type = el.attrsMap.type;
var ref = modifiers || {};
var lazy = ref.lazy;
var number = ref.number;
var trim = ref.trim;
var needCompositionGuard = !lazy && type !== 'range';
var event = lazy ? 'change' : type === 'range' ? RANGE_TOKEN : 'input';
var valueExpression = '$event.target.value';
if (trim) {
valueExpression = "$event.target.value.trim()";
}
if (number) {
valueExpression = "_n(" + valueExpression + ")";
}
var code = genAssignmentCode(value, valueExpression);
if (needCompositionGuard) {
code = "if($event.target.composing)return;" + code;
}
addProp(el, 'value', "(" + value + ")");
addHandler(el, event, code, null, true);
if (trim || number || type === 'number') {
addHandler(el, 'blur', '$forceUpdate()');
}
}
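// Illustrative sketch (assumed template, not part of the build): for
// <input v-model.trim="msg">, genDefaultModel above adds
//   prop:    value = "(msg)"
//   handler: input = "if($event.target.composing)return;msg=$event.target.value.trim()"
//   handler: blur  = "$forceUpdate()"   (added because of the .trim modifier)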
/* */
// normalize v-model event tokens that can only be determined at runtime.
// it's important to place the event as the first in the array because
// the whole point is ensuring the v-model callback gets called before
// user-attached handlers.
function normalizeEvents(on) {
var event;
/* istanbul ignore if */
if (isDef(on[RANGE_TOKEN])) {
// IE input[type=range] only supports `change` event
event = isIE ? 'change' : 'input';
on[event] = [].concat(on[RANGE_TOKEN], on[event] || []);
delete on[RANGE_TOKEN];
}
if (isDef(on[CHECKBOX_RADIO_TOKEN])) {
// Chrome fires microtasks in between click/change, which leads to #4521
event = isChrome ? 'click' : 'change';
on[event] = [].concat(on[CHECKBOX_RADIO_TOKEN], on[event] || []);
delete on[CHECKBOX_RADIO_TOKEN];
}
}
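// Illustrative sketch (assumed handler data, not part of the build): given
// on = { __r: rangeHandler } on a non-IE browser, normalizeEvents above
// rewrites it to on = { input: [rangeHandler] }, prepending the v-model
// callback before any user-attached 'input' handlers.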
var target$1;
function add$1(event, _handler, once$$1, capture, passive) {
if (once$$1) {
var oldHandler = _handler;
var _target = target$1; // save current target element in closure
_handler = function handler(ev) {
var res = arguments.length === 1 ? oldHandler(ev) : oldHandler.apply(null, arguments);
if (res !== null) {
remove$2(event, _handler, capture, _target);
}
};
}
target$1.addEventListener(event, _handler, supportsPassive ? { capture: capture, passive: passive } : capture);
}
function remove$2(event, handler, capture, _target) {
(_target || target$1).removeEventListener(event, handler, capture);
}
function updateDOMListeners(oldVnode, vnode) {
if (isUndef(oldVnode.data.on) && isUndef(vnode.data.on)) {
return;
}
var on = vnode.data.on || {};
var oldOn = oldVnode.data.on || {};
target$1 = vnode.elm;
normalizeEvents(on);
updateListeners(on, oldOn, add$1, remove$2, vnode.context);
}
var events = {
create: updateDOMListeners,
update: updateDOMListeners
};
/* */
function updateDOMProps(oldVnode, vnode) {
if (isUndef(oldVnode.data.domProps) && isUndef(vnode.data.domProps)) {
return;
}
var key, cur;
var elm = vnode.elm;
var oldProps = oldVnode.data.domProps || {};
var props = vnode.data.domProps || {};
// clone observed objects, as the user probably wants to mutate them
if (isDef(props.__ob__)) {
props = vnode.data.domProps = extend({}, props);
}
for (key in oldProps) {
if (isUndef(props[key])) {
elm[key] = '';
}
}
for (key in props) {
cur = props[key];
// ignore children if the node has textContent or innerHTML,
// as these will throw away existing DOM nodes and cause removal errors
// on subsequent patches (#3360)
if (key === 'textContent' || key === 'innerHTML') {
if (vnode.children) {
vnode.children.length = 0;
}
if (cur === oldProps[key]) {
continue;
}
}
if (key === 'value') {
// store value as _value as well since
// non-string values will be stringified
elm._value = cur;
// avoid resetting cursor position when value is the same
var strCur = isUndef(cur) ? '' : String(cur);
if (shouldUpdateValue(elm, vnode, strCur)) {
elm.value = strCur;
}
} else {
elm[key] = cur;
}
}
}
// check platforms/web/util/attrs.js acceptValue
function shouldUpdateValue(elm, vnode, checkVal) {
return !elm.composing && (vnode.tag === 'option' || isDirty(elm, checkVal) || isInputChanged(elm, checkVal));
}
function isDirty(elm, checkVal) {
// return true when textbox (.number and .trim) loses focus and its value is not equal to the updated value
return document.activeElement !== elm && elm.value !== checkVal;
}
function isInputChanged(elm, newVal) {
var value = elm.value;
var modifiers = elm._vModifiers; // injected by v-model runtime
if (isDef(modifiers) && modifiers.number || elm.type === 'number') {
return toNumber(value) !== toNumber(newVal);
}
if (isDef(modifiers) && modifiers.trim) {
return value.trim() !== newVal.trim();
}
return value !== newVal;
}
var domProps = {
create: updateDOMProps,
update: updateDOMProps
};
/* */
var parseStyleText = cached(function (cssText) {
var res = {};
var listDelimiter = /;(?![^(]*\))/g;
var propertyDelimiter = /:(.+)/;
cssText.split(listDelimiter).forEach(function (item) {
if (item) {
var tmp = item.split(propertyDelimiter);
tmp.length > 1 && (res[tmp[0].trim()] = tmp[1].trim());
}
});
return res;
});
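// Illustrative sketch (assumed input, not part of the build):
//   parseStyleText('color:red;background:url(data:image/png;base64,AAAA)')
//   -> { color: 'red', background: 'url(data:image/png;base64,AAAA)' }
// The negative lookahead in listDelimiter keeps semicolons inside url(...)
// from being treated as declaration separators.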
// merge static and dynamic style data on the same vnode
function normalizeStyleData(data) {
var style = normalizeStyleBinding(data.style);
// static style is pre-processed into an object during compilation
// and is always a fresh object, so it's safe to merge into it
return data.staticStyle ? extend(data.staticStyle, style) : style;
}
// normalize possible array / string values into Object
function normalizeStyleBinding(bindingStyle) {
if (Array.isArray(bindingStyle)) {
return toObject(bindingStyle);
}
if (typeof bindingStyle === 'string') {
return parseStyleText(bindingStyle);
}
return bindingStyle;
}
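// Illustrative sketch (assumed bindings, not part of the build):
//   normalizeStyleBinding([{ color: 'red' }, { fontSize: '12px' }])
//   -> { color: 'red', fontSize: '12px' }       (merged via toObject)
//   normalizeStyleBinding('color:red;font-size:12px')
//   -> { color: 'red', 'font-size': '12px' }    (via parseStyleText)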
/**
* parent component style should be after child's
* so that parent component's style could override it
*/
function getStyle(vnode, checkChild) {
var res = {};
var styleData;
if (checkChild) {
var childNode = vnode;
while (childNode.componentInstance) {
childNode = childNode.componentInstance._vnode;
if (childNode.data && (styleData = normalizeStyleData(childNode.data))) {
extend(res, styleData);
}
}
}
if (styleData = normalizeStyleData(vnode.data)) {
extend(res, styleData);
}
var parentNode = vnode;
while (parentNode = parentNode.parent) {
if (parentNode.data && (styleData = normalizeStyleData(parentNode.data))) {
extend(res, styleData);
}
}
return res;
}
/* */
var cssVarRE = /^--/;
var importantRE = /\s*!important$/;
var setProp = function setProp(el, name, val) {
/* istanbul ignore if */
if (cssVarRE.test(name)) {
el.style.setProperty(name, val);
} else if (importantRE.test(val)) {
el.style.setProperty(name, val.replace(importantRE, ''), 'important');
} else {
var normalizedName = normalize(name);
if (Array.isArray(val)) {
// Support values array created by autoprefixer, e.g.
// {display: ["-webkit-box", "-ms-flexbox", "flex"]}
// Set them one by one, and the browser will only set those it can recognize
for (var i = 0, len = val.length; i < len; i++) {
el.style[normalizedName] = val[i];
}
} else {
el.style[normalizedName] = val;
}
}
};
var prefixes = ['Webkit', 'Moz', 'ms'];
var testEl;
var normalize = cached(function (prop) {
testEl = testEl || document.createElement('div');
prop = camelize(prop);
if (prop !== 'filter' && prop in testEl.style) {
return prop;
}
var upper = prop.charAt(0).toUpperCase() + prop.slice(1);
for (var i = 0; i < prefixes.length; i++) {
var prefixed = prefixes[i] + upper;
if (prefixed in testEl.style) {
return prefixed;
}
}
});
function updateStyle(oldVnode, vnode) {
var data = vnode.data;
var oldData = oldVnode.data;
if (isUndef(data.staticStyle) && isUndef(data.style) && isUndef(oldData.staticStyle) && isUndef(oldData.style)) {
return;
}
var cur, name;
var el = vnode.elm;
var oldStaticStyle = oldData.staticStyle;
var oldStyleBinding = oldData.normalizedStyle || oldData.style || {};
// if static style exists, the style binding has already been merged into it in normalizeStyleData
var oldStyle = oldStaticStyle || oldStyleBinding;
var style = normalizeStyleBinding(vnode.data.style) || {};
// store normalized style under a different key for the next diff
// make sure to clone it if it's reactive, since the user likely wants
// to mutate it.
vnode.data.normalizedStyle = isDef(style.__ob__) ? extend({}, style) : style;
var newStyle = getStyle(vnode, true);
for (name in oldStyle) {
if (isUndef(newStyle[name])) {
setProp(el, name, '');
}
}
for (name in newStyle) {
cur = newStyle[name];
if (cur !== oldStyle[name]) {
// ie9 setting to null has no effect, must use empty string
setProp(el, name, cur == null ? '' : cur);
}
}
}
var style = {
create: updateStyle,
update: updateStyle
};
/* */
/**
* Add class with compatibility for SVG since classList is not supported on
* SVG elements in IE
*/
function addClass(el, cls) {
/* istanbul ignore if */
if (!cls || !(cls = cls.trim())) {
return;
}
/* istanbul ignore else */
if (el.classList) {
if (cls.indexOf(' ') > -1) {
cls.split(/\s+/).forEach(function (c) {
return el.classList.add(c);
});
} else {
el.classList.add(cls);
}
} else {
var cur = " " + (el.getAttribute('class') || '') + " ";
if (cur.indexOf(' ' + cls + ' ') < 0) {
el.setAttribute('class', (cur + cls).trim());
}
}
}
/**
* Remove class with compatibility for SVG since classList is not supported on
* SVG elements in IE
*/
function removeClass(el, cls) {
/* istanbul ignore if */
if (!cls || !(cls = cls.trim())) {
return;
}
/* istanbul ignore else */
if (el.classList) {
if (cls.indexOf(' ') > -1) {
cls.split(/\s+/).forEach(function (c) {
return el.classList.remove(c);
});
} else {
el.classList.remove(cls);
}
} else {
var cur = " " + (el.getAttribute('class') || '') + " ";
var tar = ' ' + cls + ' ';
while (cur.indexOf(tar) >= 0) {
cur = cur.replace(tar, ' ');
}
el.setAttribute('class', cur.trim());
}
}
/* */
function resolveTransition(def$$1) {
if (!def$$1) {
return;
}
/* istanbul ignore else */
if ((typeof def$$1 === 'undefined' ? 'undefined' : _typeof(def$$1)) === 'object') {
var res = {};
if (def$$1.css !== false) {
extend(res, autoCssTransition(def$$1.name || 'v'));
}
extend(res, def$$1);
return res;
} else if (typeof def$$1 === 'string') {
return autoCssTransition(def$$1);
}
}
var autoCssTransition = cached(function (name) {
return {
enterClass: name + "-enter",
enterToClass: name + "-enter-to",
enterActiveClass: name + "-enter-active",
leaveClass: name + "-leave",
leaveToClass: name + "-leave-to",
leaveActiveClass: name + "-leave-active"
};
});
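// Illustrative sketch (assumed transition name, not part of the build):
//   autoCssTransition('fade') ->
//   { enterClass: 'fade-enter', enterToClass: 'fade-enter-to',
//     enterActiveClass: 'fade-enter-active', leaveClass: 'fade-leave',
//     leaveToClass: 'fade-leave-to', leaveActiveClass: 'fade-leave-active' }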
var hasTransition = inBrowser && !isIE9;
var TRANSITION = 'transition';
var ANIMATION = 'animation';
// Transition property/event sniffing
var transitionProp = 'transition';
var transitionEndEvent = 'transitionend';
var animationProp = 'animation';
var animationEndEvent = 'animationend';
if (hasTransition) {
/* istanbul ignore if */
if (window.ontransitionend === undefined && window.onwebkittransitionend !== undefined) {
transitionProp = 'WebkitTransition';
transitionEndEvent = 'webkitTransitionEnd';
}
if (window.onanimationend === undefined && window.onwebkitanimationend !== undefined) {
animationProp = 'WebkitAnimation';
animationEndEvent = 'webkitAnimationEnd';
}
}
// binding to window is necessary to make hot reload work in IE in strict mode
var raf = inBrowser && window.requestAnimationFrame ? window.requestAnimationFrame.bind(window) : setTimeout;
function nextFrame(fn) {
raf(function () {
raf(fn);
});
}
function addTransitionClass(el, cls) {
(el._transitionClasses || (el._transitionClasses = [])).push(cls);
addClass(el, cls);
}
function removeTransitionClass(el, cls) {
if (el._transitionClasses) {
remove(el._transitionClasses, cls);
}
removeClass(el, cls);
}
function whenTransitionEnds(el, expectedType, cb) {
var ref = getTransitionInfo(el, expectedType);
var type = ref.type;
var timeout = ref.timeout;
var propCount = ref.propCount;
if (!type) {
return cb();
}
var event = type === TRANSITION ? transitionEndEvent : animationEndEvent;
var ended = 0;
var end = function end() {
el.removeEventListener(event, onEnd);
cb();
};
var onEnd = function onEnd(e) {
if (e.target === el) {
if (++ended >= propCount) {
end();
}
}
};
setTimeout(function () {
if (ended < propCount) {
end();
}
}, timeout + 1);
el.addEventListener(event, onEnd);
}
var transformRE = /\b(transform|all)(,|$)/;
function getTransitionInfo(el, expectedType) {
var styles = window.getComputedStyle(el);
var transitionDelays = styles[transitionProp + 'Delay'].split(', ');
var transitionDurations = styles[transitionProp + 'Duration'].split(', ');
var transitionTimeout = getTimeout(transitionDelays, transitionDurations);
var animationDelays = styles[animationProp + 'Delay'].split(', ');
var animationDurations = styles[animationProp + 'Duration'].split(', ');
var animationTimeout = getTimeout(animationDelays, animationDurations);
var type;
var timeout = 0;
var propCount = 0;
/* istanbul ignore if */
if (expectedType === TRANSITION) {
if (transitionTimeout > 0) {
type = TRANSITION;
timeout = transitionTimeout;
propCount = transitionDurations.length;
}
} else if (expectedType === ANIMATION) {
if (animationTimeout > 0) {
type = ANIMATION;
timeout = animationTimeout;
propCount = animationDurations.length;
}
} else {
timeout = Math.max(transitionTimeout, animationTimeout);
type = timeout > 0 ? transitionTimeout > animationTimeout ? TRANSITION : ANIMATION : null;
propCount = type ? type === TRANSITION ? transitionDurations.length : animationDurations.length : 0;
}
var hasTransform = type === TRANSITION && transformRE.test(styles[transitionProp + 'Property']);
return {
type: type,
timeout: timeout,
propCount: propCount,
hasTransform: hasTransform
};
}
function getTimeout(delays, durations) {
/* istanbul ignore next */
while (delays.length < durations.length) {
delays = delays.concat(delays);
}
return Math.max.apply(null, durations.map(function (d, i) {
return toMs(d) + toMs(delays[i]);
}));
}
function toMs(s) {
return Number(s.slice(0, -1)) * 1000;
}
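// Illustrative sketch (assumed computed-style values, not part of the build):
//   getTimeout(['0.5s'], ['1s', '2s'])
//   -> delays padded to ['0.5s', '0.5s'], per-property totals [1500, 2500],
//      so the returned timeout is 2500 (ms)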
/* */
function enter(vnode, toggleDisplay) {
var el = vnode.elm;
// call leave callback now
if (isDef(el._leaveCb)) {
el._leaveCb.cancelled = true;
el._leaveCb();
}
var data = resolveTransition(vnode.data.transition);
if (isUndef(data)) {
return;
}
/* istanbul ignore if */
if (isDef(el._enterCb) || el.nodeType !== 1) {
return;
}
var css = data.css;
var type = data.type;
var enterClass = data.enterClass;
var enterToClass = data.enterToClass;
var enterActiveClass = data.enterActiveClass;
var appearClass = data.appearClass;
var appearToClass = data.appearToClass;
var appearActiveClass = data.appearActiveClass;
var beforeEnter = data.beforeEnter;
var enter = data.enter;
var afterEnter = data.afterEnter;
var enterCancelled = data.enterCancelled;
var beforeAppear = data.beforeAppear;
var appear = data.appear;
var afterAppear = data.afterAppear;
var appearCancelled = data.appearCancelled;
var duration = data.duration;
// activeInstance will always be the <transition> component managing this
// transition. One edge case to check is when the <transition> is placed
// as the root node of a child component. In that case we need to check
// <transition>'s parent for appear check.
var context = activeInstance;
var transitionNode = activeInstance.$vnode;
while (transitionNode && transitionNode.parent) {
transitionNode = transitionNode.parent;
context = transitionNode.context;
}
var isAppear = !context._isMounted || !vnode.isRootInsert;
if (isAppear && !appear && appear !== '') {
return;
}
var startClass = isAppear && appearClass ? appearClass : enterClass;
var activeClass = isAppear && appearActiveClass ? appearActiveClass : enterActiveClass;
var toClass = isAppear && appearToClass ? appearToClass : enterToClass;
var beforeEnterHook = isAppear ? beforeAppear || beforeEnter : beforeEnter;
var enterHook = isAppear ? typeof appear === 'function' ? appear : enter : enter;
var afterEnterHook = isAppear ? afterAppear || afterEnter : afterEnter;
var enterCancelledHook = isAppear ? appearCancelled || enterCancelled : enterCancelled;
var explicitEnterDuration = toNumber(isObject(duration) ? duration.enter : duration);
if ("development" !== 'production' && explicitEnterDuration != null) {
checkDuration(explicitEnterDuration, 'enter', vnode);
}
var expectsCSS = css !== false && !isIE9;
var userWantsControl = getHookArgumentsLength(enterHook);
var cb = el._enterCb = once(function () {
if (expectsCSS) {
removeTransitionClass(el, toClass);
removeTransitionClass(el, activeClass);
}
if (cb.cancelled) {
if (expectsCSS) {
removeTransitionClass(el, startClass);
}
enterCancelledHook && enterCancelledHook(el);
} else {
afterEnterHook && afterEnterHook(el);
}
el._enterCb = null;
});
if (!vnode.data.show) {
// remove pending leave element on enter by injecting an insert hook
mergeVNodeHook(vnode.data.hook || (vnode.data.hook = {}), 'insert', function () {
var parent = el.parentNode;
var pendingNode = parent && parent._pending && parent._pending[vnode.key];
if (pendingNode && pendingNode.tag === vnode.tag && pendingNode.elm._leaveCb) {
pendingNode.elm._leaveCb();
}
enterHook && enterHook(el, cb);
});
}
// start enter transition
beforeEnterHook && beforeEnterHook(el);
if (expectsCSS) {
addTransitionClass(el, startClass);
addTransitionClass(el, activeClass);
nextFrame(function () {
addTransitionClass(el, toClass);
removeTransitionClass(el, startClass);
if (!cb.cancelled && !userWantsControl) {
if (isValidDuration(explicitEnterDuration)) {
setTimeout(cb, explicitEnterDuration);
} else {
whenTransitionEnds(el, type, cb);
}
}
});
}
if (vnode.data.show) {
toggleDisplay && toggleDisplay();
enterHook && enterHook(el, cb);
}
if (!expectsCSS && !userWantsControl) {
cb();
}
}
function leave(vnode, rm) {
var el = vnode.elm;
// call enter callback now
if (isDef(el._enterCb)) {
el._enterCb.cancelled = true;
el._enterCb();
}
var data = resolveTransition(vnode.data.transition);
if (isUndef(data)) {
return rm();
}
/* istanbul ignore if */
if (isDef(el._leaveCb) || el.nodeType !== 1) {
return;
}
var css = data.css;
var type = data.type;
var leaveClass = data.leaveClass;
var leaveToClass = data.leaveToClass;
var leaveActiveClass = data.leaveActiveClass;
var beforeLeave = data.beforeLeave;
var leave = data.leave;
var afterLeave = data.afterLeave;
var leaveCancelled = data.leaveCancelled;
var delayLeave = data.delayLeave;
var duration = data.duration;
var expectsCSS = css !== false && !isIE9;
var userWantsControl = getHookArgumentsLength(leave);
var explicitLeaveDuration = toNumber(isObject(duration) ? duration.leave : duration);
if ("development" !== 'production' && isDef(explicitLeaveDuration)) {
checkDuration(explicitLeaveDuration, 'leave', vnode);
}
var cb = el._leaveCb = once(function () {
if (el.parentNode && el.parentNode._pending) {
el.parentNode._pending[vnode.key] = null;
}
if (expectsCSS) {
removeTransitionClass(el, leaveToClass);
removeTransitionClass(el, leaveActiveClass);
}
if (cb.cancelled) {
if (expectsCSS) {
removeTransitionClass(el, leaveClass);
}
leaveCancelled && leaveCancelled(el);
} else {
rm();
afterLeave && afterLeave(el);
}
el._leaveCb = null;
});
if (delayLeave) {
delayLeave(performLeave);
} else {
performLeave();
}
function performLeave() {
// the delayed leave may have already been cancelled
if (cb.cancelled) {
return;
}
// record leaving element
if (!vnode.data.show) {
(el.parentNode._pending || (el.parentNode._pending = {}))[vnode.key] = vnode;
}
beforeLeave && beforeLeave(el);
if (expectsCSS) {
addTransitionClass(el, leaveClass);
addTransitionClass(el, leaveActiveClass);
nextFrame(function () {
addTransitionClass(el, leaveToClass);
removeTransitionClass(el, leaveClass);
if (!cb.cancelled && !userWantsControl) {
if (isValidDuration(explicitLeaveDuration)) {
setTimeout(cb, explicitLeaveDuration);
} else {
whenTransitionEnds(el, type, cb);
}
}
});
}
leave && leave(el, cb);
if (!expectsCSS && !userWantsControl) {
cb();
}
}
}
// only used in dev mode
function checkDuration(val, name, vnode) {
if (typeof val !== 'number') {
warn("<transition> explicit " + name + " duration is not a valid number - " + "got " + JSON.stringify(val) + ".", vnode.context);
} else if (isNaN(val)) {
warn("<transition> explicit " + name + " duration is NaN - " + 'the duration expression might be incorrect.', vnode.context);
}
}
function isValidDuration(val) {
return typeof val === 'number' && !isNaN(val);
}
/**
* Normalize a transition hook's argument length. The hook may be:
* - a merged hook (invoker) with the original in .fns
* - a wrapped component method (check ._length)
* - a plain function (.length)
*/
function getHookArgumentsLength(fn) {
if (isUndef(fn)) {
return false;
}
var invokerFns = fn.fns;
if (isDef(invokerFns)) {
// invoker
return getHookArgumentsLength(Array.isArray(invokerFns) ? invokerFns[0] : invokerFns);
} else {
return (fn._length || fn.length) > 1;
}
}
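// Illustrative sketch (assumed hooks, not part of the build):
//   getHookArgumentsLength(function (el) { /* ... */ })       -> false (length 1)
//   getHookArgumentsLength(function (el, done) { /* ... */ }) -> true  (length 2,
//   the user is expected to call done() to end the transition themselves)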
function _enter(_, vnode) {
if (vnode.data.show !== true) {
enter(vnode);
}
}
var transition = inBrowser ? {
create: _enter,
activate: _enter,
remove: function remove$$1(vnode, rm) {
/* istanbul ignore else */
if (vnode.data.show !== true) {
leave(vnode, rm);
} else {
rm();
}
}
} : {};
var platformModules = [attrs, klass, events, domProps, style, transition];
/* */
// the directive module should be applied last, after all
// built-in modules have been applied.
var modules = platformModules.concat(baseModules);
var patch = createPatchFunction({ nodeOps: nodeOps, modules: modules });
/**
* Not type checking this file because flow doesn't like attaching
* properties to Elements.
*/
/* istanbul ignore if */
if (isIE9) {
// http://www.matts411.com/post/internet-explorer-9-oninput/
document.addEventListener('selectionchange', function () {
var el = document.activeElement;
if (el && el.vmodel) {
trigger(el, 'input');
}
});
}
var model$1 = {
inserted: function inserted(el, binding, vnode) {
if (vnode.tag === 'select') {
var cb = function cb() {
setSelected(el, binding, vnode.context);
};
cb();
/* istanbul ignore if */
if (isIE || isEdge) {
setTimeout(cb, 0);
}
} else if (vnode.tag === 'textarea' || el.type === 'text' || el.type === 'password') {
el._vModifiers = binding.modifiers;
if (!binding.modifiers.lazy) {
// Safari < 10.2 & UIWebView don't fire compositionend when
// switching focus before confirming the composition choice;
// this also fixes the issue where some browsers, e.g. iOS Chrome,
// fire "change" instead of "input" on autocomplete.
el.addEventListener('change', onCompositionEnd);
if (!isAndroid) {
el.addEventListener('compositionstart', onCompositionStart);
el.addEventListener('compositionend', onCompositionEnd);
}
/* istanbul ignore if */
if (isIE9) {
el.vmodel = true;
}
}
}
},
componentUpdated: function componentUpdated(el, binding, vnode) {
if (vnode.tag === 'select') {
setSelected(el, binding, vnode.context);
// in case the options rendered by v-for have changed,
// it's possible that the value is out-of-sync with the rendered options.
// detect such cases and filter out values that no longer have a matching
// option in the DOM.
var needReset = el.multiple ? binding.value.some(function (v) {
return hasNoMatchingOption(v, el.options);
}) : binding.value !== binding.oldValue && hasNoMatchingOption(binding.value, el.options);
if (needReset) {
trigger(el, 'change');
}
}
}
};
function setSelected(el, binding, vm) {
var value = binding.value;
var isMultiple = el.multiple;
if (isMultiple && !Array.isArray(value)) {
"development" !== 'production' && warn("<select multiple v-model=\"" + binding.expression + "\"> " + "expects an Array value for its binding, but got " + Object.prototype.toString.call(value).slice(8, -1), vm);
return;
}
var selected, option;
for (var i = 0, l = el.options.length; i < l; i++) {
option = el.options[i];
if (isMultiple) {
selected = looseIndexOf(value, getValue(option)) > -1;
if (option.selected !== selected) {
option.selected = selected;
}
} else {
if (looseEqual(getValue(option), value)) {
if (el.selectedIndex !== i) {
el.selectedIndex = i;
}
return;
}
}
}
if (!isMultiple) {
el.selectedIndex = -1;
}
}
function hasNoMatchingOption(value, options) {
for (var i = 0, l = options.length; i < l; i++) {
if (looseEqual(getValue(options[i]), value)) {
return false;
}
}
return true;
}
function getValue(option) {
return '_value' in option ? option._value : option.value;
}
function onCompositionStart(e) {
e.target.composing = true;
}
function onCompositionEnd(e) {
// prevent triggering an input event for no reason
if (!e.target.composing) {
return;
}
e.target.composing = false;
trigger(e.target, 'input');
}
function trigger(el, type) {
var e = document.createEvent('HTMLEvents');
e.initEvent(type, true, true);
el.dispatchEvent(e);
}
/* */
// recursively search for possible transition defined inside the component root
function locateNode(vnode) {
return vnode.componentInstance && (!vnode.data || !vnode.data.transition) ? locateNode(vnode.componentInstance._vnode) : vnode;
}
var show = {
bind: function bind(el, ref, vnode) {
var value = ref.value;
vnode = locateNode(vnode);
var transition = vnode.data && vnode.data.transition;
var originalDisplay = el.__vOriginalDisplay = el.style.display === 'none' ? '' : el.style.display;
if (value && transition && !isIE9) {
vnode.data.show = true;
enter(vnode, function () {
el.style.display = originalDisplay;
});
} else {
el.style.display = value ? originalDisplay : 'none';
}
},
update: function update(el, ref, vnode) {
var value = ref.value;
var oldValue = ref.oldValue;
/* istanbul ignore if */
if (value === oldValue) {
return;
}
vnode = locateNode(vnode);
var transition = vnode.data && vnode.data.transition;
if (transition && !isIE9) {
vnode.data.show = true;
if (value) {
enter(vnode, function () {
el.style.display = el.__vOriginalDisplay;
});
} else {
leave(vnode, function () {
el.style.display = 'none';
});
}
} else {
el.style.display = value ? el.__vOriginalDisplay : 'none';
}
},
unbind: function unbind(el, binding, vnode, oldVnode, isDestroy) {
if (!isDestroy) {
el.style.display = el.__vOriginalDisplay;
}
}
};
var platformDirectives = {
model: model$1,
show: show
};
/* */
// Provides transition support for a single element/component.
// supports transition mode (out-in / in-out)
var transitionProps = {
name: String,
appear: Boolean,
css: Boolean,
mode: String,
type: String,
enterClass: String,
leaveClass: String,
enterToClass: String,
leaveToClass: String,
enterActiveClass: String,
leaveActiveClass: String,
appearClass: String,
appearActiveClass: String,
appearToClass: String,
duration: [Number, String, Object]
};
// in case the child is also an abstract component, e.g. <keep-alive>
// we want to recursively retrieve the real component to be rendered
function getRealChild(vnode) {
var compOptions = vnode && vnode.componentOptions;
if (compOptions && compOptions.Ctor.options.abstract) {
return getRealChild(getFirstComponentChild(compOptions.children));
} else {
return vnode;
}
}
function extractTransitionData(comp) {
var data = {};
var options = comp.$options;
// props
for (var key in options.propsData) {
data[key] = comp[key];
}
// events.
// extract listeners and pass them directly to the transition methods
var listeners = options._parentListeners;
for (var key$1 in listeners) {
data[camelize(key$1)] = listeners[key$1];
}
return data;
}
function placeholder(h, rawChild) {
if (/\d-keep-alive$/.test(rawChild.tag)) {
return h('keep-alive', {
props: rawChild.componentOptions.propsData
});
}
}
function hasParentTransition(vnode) {
while (vnode = vnode.parent) {
if (vnode.data.transition) {
return true;
}
}
}
function isSameChild(child, oldChild) {
return oldChild.key === child.key && oldChild.tag === child.tag;
}
var Transition = {
name: 'transition',
props: transitionProps,
abstract: true,
render: function render(h) {
var this$1 = this;
var children = this.$slots.default;
if (!children) {
return;
}
// filter out text nodes (possible whitespaces)
children = children.filter(function (c) {
return c.tag;
});
/* istanbul ignore if */
if (!children.length) {
return;
}
// warn multiple elements
if ("development" !== 'production' && children.length > 1) {
warn('<transition> can only be used on a single element. Use ' + '<transition-group> for lists.', this.$parent);
}
var mode = this.mode;
// warn invalid mode
if ("development" !== 'production' && mode && mode !== 'in-out' && mode !== 'out-in') {
warn('invalid <transition> mode: ' + mode, this.$parent);
}
var rawChild = children[0];
// if this is a component root node and the component's
// parent container node also has transition, skip.
if (hasParentTransition(this.$vnode)) {
return rawChild;
}
// apply transition data to child
// use getRealChild() to ignore abstract components e.g. keep-alive
var child = getRealChild(rawChild);
/* istanbul ignore if */
if (!child) {
return rawChild;
}
if (this._leaving) {
return placeholder(h, rawChild);
}
// ensure a key that is unique to the vnode type and to this transition
// component instance. This key will be used to remove pending leaving nodes
// during entering.
var id = "__transition-" + this._uid + "-";
child.key = child.key == null ? id + child.tag : isPrimitive(child.key) ? String(child.key).indexOf(id) === 0 ? child.key : id + child.key : child.key;
var data = (child.data || (child.data = {})).transition = extractTransitionData(this);
var oldRawChild = this._vnode;
var oldChild = getRealChild(oldRawChild);
// mark v-show
// so that the transition module can hand over the control to the directive
if (child.data.directives && child.data.directives.some(function (d) {
return d.name === 'show';
})) {
child.data.show = true;
}
if (oldChild && oldChild.data && !isSameChild(child, oldChild)) {
// replace old child transition data with fresh one
// important for dynamic transitions!
var oldData = oldChild && (oldChild.data.transition = extend({}, data));
// handle transition mode
if (mode === 'out-in') {
// return placeholder node and queue update when leave finishes
this._leaving = true;
mergeVNodeHook(oldData, 'afterLeave', function () {
this$1._leaving = false;
this$1.$forceUpdate();
});
return placeholder(h, rawChild);
} else if (mode === 'in-out') {
var delayedLeave;
var performLeave = function performLeave() {
delayedLeave();
};
mergeVNodeHook(data, 'afterEnter', performLeave);
mergeVNodeHook(data, 'enterCancelled', performLeave);
mergeVNodeHook(oldData, 'delayLeave', function (leave) {
delayedLeave = leave;
});
}
}
return rawChild;
}
};
/* */
// Provides transition support for list items.
// supports move transitions using the FLIP technique.
// Because the vdom's children update algorithm is "unstable" - i.e.
// it doesn't guarantee the relative positioning of removed elements,
// we force transition-group to update its children in two passes:
// in the first pass, we remove all nodes that need to be removed,
// triggering their leaving transition; in the second pass, we insert/move
// into the final desired state. This way in the second pass removed
// nodes will remain where they should be.
var props = extend({
tag: String,
moveClass: String
}, transitionProps);
delete props.mode;
var TransitionGroup = {
props: props,
render: function render(h) {
var tag = this.tag || this.$vnode.data.tag || 'span';
var map = Object.create(null);
var prevChildren = this.prevChildren = this.children;
var rawChildren = this.$slots.default || [];
var children = this.children = [];
var transitionData = extractTransitionData(this);
for (var i = 0; i < rawChildren.length; i++) {
var c = rawChildren[i];
if (c.tag) {
if (c.key != null && String(c.key).indexOf('__vlist') !== 0) {
children.push(c);
map[c.key] = c;(c.data || (c.data = {})).transition = transitionData;
} else if (true) {
var opts = c.componentOptions;
var name = opts ? opts.Ctor.options.name || opts.tag || '' : c.tag;
warn("<transition-group> children must be keyed: <" + name + ">");
}
}
}
if (prevChildren) {
var kept = [];
var removed = [];
for (var i$1 = 0; i$1 < prevChildren.length; i$1++) {
var c$1 = prevChildren[i$1];
c$1.data.transition = transitionData;
c$1.data.pos = c$1.elm.getBoundingClientRect();
if (map[c$1.key]) {
kept.push(c$1);
} else {
removed.push(c$1);
}
}
this.kept = h(tag, null, kept);
this.removed = removed;
}
return h(tag, null, children);
},
beforeUpdate: function beforeUpdate() {
// force removing pass
this.__patch__(this._vnode, this.kept, false, // hydrating
true // removeOnly (!important, avoids unnecessary moves)
);
this._vnode = this.kept;
},
updated: function updated() {
var children = this.prevChildren;
var moveClass = this.moveClass || (this.name || 'v') + '-move';
if (!children.length || !this.hasMove(children[0].elm, moveClass)) {
return;
}
// we divide the work into three loops to avoid mixing DOM reads and writes
// in each iteration - which helps prevent layout thrashing.
children.forEach(callPendingCbs);
children.forEach(recordPosition);
children.forEach(applyTranslation);
// force reflow to put everything in position
var body = document.body;
var f = body.offsetHeight; // eslint-disable-line
children.forEach(function (c) {
if (c.data.moved) {
var el = c.elm;
var s = el.style;
addTransitionClass(el, moveClass);
s.transform = s.WebkitTransform = s.transitionDuration = '';
el.addEventListener(transitionEndEvent, el._moveCb = function cb(e) {
if (!e || /transform$/.test(e.propertyName)) {
el.removeEventListener(transitionEndEvent, cb);
el._moveCb = null;
removeTransitionClass(el, moveClass);
}
});
}
});
},
methods: {
hasMove: function hasMove(el, moveClass) {
/* istanbul ignore if */
if (!hasTransition) {
return false;
}
if (this._hasMove != null) {
return this._hasMove;
}
// Detect whether an element with the move class applied has
// CSS transitions. Since the element may be inside an entering
// transition at this very moment, we make a clone of it and remove
// all other transition classes applied to ensure only the move class
// is applied.
var clone = el.cloneNode();
if (el._transitionClasses) {
el._transitionClasses.forEach(function (cls) {
removeClass(clone, cls);
});
}
addClass(clone, moveClass);
clone.style.display = 'none';
this.$el.appendChild(clone);
var info = getTransitionInfo(clone);
this.$el.removeChild(clone);
return this._hasMove = info.hasTransform;
}
}
};
function callPendingCbs(c) {
/* istanbul ignore if */
if (c.elm._moveCb) {
c.elm._moveCb();
}
/* istanbul ignore if */
if (c.elm._enterCb) {
c.elm._enterCb();
}
}
function recordPosition(c) {
c.data.newPos = c.elm.getBoundingClientRect();
}
function applyTranslation(c) {
var oldPos = c.data.pos;
var newPos = c.data.newPos;
var dx = oldPos.left - newPos.left;
var dy = oldPos.top - newPos.top;
if (dx || dy) {
c.data.moved = true;
var s = c.elm.style;
s.transform = s.WebkitTransform = "translate(" + dx + "px," + dy + "px)";
s.transitionDuration = '0s';
}
}
var platformComponents = {
Transition: Transition,
TransitionGroup: TransitionGroup
};
/* */
// install platform specific utils
Vue$3.config.mustUseProp = mustUseProp;
Vue$3.config.isReservedTag = isReservedTag;
Vue$3.config.isReservedAttr = isReservedAttr;
Vue$3.config.getTagNamespace = getTagNamespace;
Vue$3.config.isUnknownElement = isUnknownElement;
// install platform runtime directives & components
extend(Vue$3.options.directives, platformDirectives);
extend(Vue$3.options.components, platformComponents);
// install platform patch function
Vue$3.prototype.__patch__ = inBrowser ? patch : noop;
// public mount method
Vue$3.prototype.$mount = function (el, hydrating) {
el = el && inBrowser ? query(el) : undefined;
return mountComponent(this, el, hydrating);
};
// devtools global hook
/* istanbul ignore next */
setTimeout(function () {
if (config.devtools) {
if (devtools) {
devtools.emit('init', Vue$3);
} else if ("development" !== 'production' && isChrome) {
console[console.info ? 'info' : 'log']('Download the Vue Devtools extension for a better development experience:\n' + 'https://github.com/vuejs/vue-devtools');
}
}
if ("development" !== 'production' && config.productionTip !== false && inBrowser && typeof console !== 'undefined') {
console[console.info ? 'info' : 'log']("You are running Vue in development mode.\n" + "Make sure to turn on production mode when deploying for production.\n" + "See more tips at https://vuejs.org/guide/deployment.html");
}
}, 0);
/* */
// check whether current browser encodes a char inside attribute values
function shouldDecode(content, encoded) {
var div = document.createElement('div');
div.innerHTML = "<div a=\"" + content + "\">";
return div.innerHTML.indexOf(encoded) > 0;
}
// #3663
// IE encodes newlines inside attribute values while other browsers don't
var shouldDecodeNewlines = inBrowser ? shouldDecode('\n', '&#10;') : false;
/* */
var isUnaryTag = makeMap('area,base,br,col,embed,frame,hr,img,input,isindex,keygen,' + 'link,meta,param,source,track,wbr');
// Elements that you can, intentionally, leave open
// (and which close themselves)
var canBeLeftOpenTag = makeMap('colgroup,dd,dt,li,options,p,td,tfoot,th,thead,tr,source');
// HTML5 tags https://html.spec.whatwg.org/multipage/indices.html#elements-3
// Phrasing Content https://html.spec.whatwg.org/multipage/dom.html#phrasing-content
var isNonPhrasingTag = makeMap('address,article,aside,base,blockquote,body,caption,col,colgroup,dd,' + 'details,dialog,div,dl,dt,fieldset,figcaption,figure,footer,form,' + 'h1,h2,h3,h4,h5,h6,head,header,hgroup,hr,html,legend,li,menuitem,meta,' + 'optgroup,option,param,rp,rt,source,style,summary,tbody,td,tfoot,th,thead,' + 'title,tr,track');
/* */
var decoder;
function decode(html) {
decoder = decoder || document.createElement('div');
decoder.innerHTML = html;
return decoder.textContent;
}
/**
* Not type-checking this file because it's mostly vendor code.
*/
/*!
* HTML Parser By John Resig (ejohn.org)
* Modified by Juriy "kangax" Zaytsev
* Original code by Erik Arvidsson, Mozilla Public License
* http://erik.eae.net/simplehtmlparser/simplehtmlparser.js
*/
// Regular Expressions for parsing tags and attributes
var singleAttrIdentifier = /([^\s"'<>/=]+)/;
var singleAttrAssign = /(?:=)/;
var singleAttrValues = [
// attr value double quotes
/"([^"]*)"+/.source,
// attr value, single quotes
/'([^']*)'+/.source,
// attr value, no quotes
/([^\s"'=<>`]+)/.source];
var attribute = new RegExp('^\\s*' + singleAttrIdentifier.source + '(?:\\s*(' + singleAttrAssign.source + ')' + '\\s*(?:' + singleAttrValues.join('|') + '))?');
// could use https://www.w3.org/TR/1999/REC-xml-names-19990114/#NT-QName
// but for Vue templates we can enforce a simple charset
var ncname = '[a-zA-Z_][\\w\\-\\.]*';
var qnameCapture = '((?:' + ncname + '\\:)?' + ncname + ')';
var startTagOpen = new RegExp('^<' + qnameCapture);
var startTagClose = /^\s*(\/?)>/;
var endTag = new RegExp('^<\\/' + qnameCapture + '[^>]*>');
var doctype = /^<!DOCTYPE [^>]+>/i;
var comment = /^<!--/;
var conditionalComment = /^<!\[/;
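// Illustrative sketch (assumed attribute text, not part of the build):
//   ' :title="msg"'.match(attribute) captures
//   [1] ':title' (name), [2] '=', [3] 'msg' (double-quoted value);
//   groups 4 and 5 would hold single-quoted / unquoted values instead.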
var IS_REGEX_CAPTURING_BROKEN = false;
'x'.replace(/x(.)?/g, function (m, g) {
IS_REGEX_CAPTURING_BROKEN = g === '';
});
// Special Elements (can contain anything)
var isPlainTextElement = makeMap('script,style,textarea', true);
var reCache = {};
var decodingMap = {
'&lt;': '<',
'&gt;': '>',
'&quot;': '"',
'&amp;': '&',
'&#10;': '\n'
};
var encodedAttr = /&(?:lt|gt|quot|amp);/g;
var encodedAttrWithNewLines = /&(?:lt|gt|quot|amp|#10);/g;
function decodeAttr(value, shouldDecodeNewlines) {
var re = shouldDecodeNewlines ? encodedAttrWithNewLines : encodedAttr;
return value.replace(re, function (match) {
return decodingMap[match];
});
}
function parseHTML(html, options) {
var stack = [];
var expectHTML = options.expectHTML;
var isUnaryTag$$1 = options.isUnaryTag || no;
var canBeLeftOpenTag$$1 = options.canBeLeftOpenTag || no;
var index = 0;
var last, lastTag;
while (html) {
last = html;
// Make sure we're not in a plaintext content element like script/style
if (!lastTag || !isPlainTextElement(lastTag)) {
var textEnd = html.indexOf('<');
if (textEnd === 0) {
// Comment:
if (comment.test(html)) {
var commentEnd = html.indexOf('-->');
if (commentEnd >= 0) {
advance(commentEnd + 3);
continue;
}
}
// http://en.wikipedia.org/wiki/Conditional_comment#Downlevel-revealed_conditional_comment
if (conditionalComment.test(html)) {
var conditionalEnd = html.indexOf(']>');
if (conditionalEnd >= 0) {
advance(conditionalEnd + 2);
continue;
}
}
// Doctype:
var doctypeMatch = html.match(doctype);
if (doctypeMatch) {
advance(doctypeMatch[0].length);
continue;
}
// End tag:
var endTagMatch = html.match(endTag);
if (endTagMatch) {
var curIndex = index;
advance(endTagMatch[0].length);
parseEndTag(endTagMatch[1], curIndex, index);
continue;
}
// Start tag:
var startTagMatch = parseStartTag();
if (startTagMatch) {
handleStartTag(startTagMatch);
continue;
}
}
var text = void 0,
rest$1 = void 0,
next = void 0;
if (textEnd >= 0) {
rest$1 = html.slice(textEnd);
while (!endTag.test(rest$1) && !startTagOpen.test(rest$1) && !comment.test(rest$1) && !conditionalComment.test(rest$1)) {
// < in plain text, be forgiving and treat it as text
next = rest$1.indexOf('<', 1);
if (next < 0) {
break;
}
textEnd += next;
rest$1 = html.slice(textEnd);
}
text = html.substring(0, textEnd);
advance(textEnd);
}
if (textEnd < 0) {
text = html;
html = '';
}
if (options.chars && text) {
options.chars(text);
}
} else {
var stackedTag = lastTag.toLowerCase();
var reStackedTag = reCache[stackedTag] || (reCache[stackedTag] = new RegExp('([\\s\\S]*?)(</' + stackedTag + '[^>]*>)', 'i'));
var endTagLength = 0;
var rest = html.replace(reStackedTag, function (all, text, endTag) {
endTagLength = endTag.length;
if (!isPlainTextElement(stackedTag) && stackedTag !== 'noscript') {
text = text.replace(/<!--([\s\S]*?)-->/g, '$1').replace(/<!\[CDATA\[([\s\S]*?)]]>/g, '$1');
}
if (options.chars) {
options.chars(text);
}
return '';
});
index += html.length - rest.length;
html = rest;
parseEndTag(stackedTag, index - endTagLength, index);
}
if (html === last) {
options.chars && options.chars(html);
if ("development" !== 'production' && !stack.length && options.warn) {
options.warn("Mal-formatted tag at end of template: \"" + html + "\"");
}
break;
}
}
// Clean up any remaining tags
parseEndTag();
function advance(n) {
index += n;
html = html.substring(n);
}
function parseStartTag() {
var start = html.match(startTagOpen);
if (start) {
var match = {
tagName: start[1],
attrs: [],
start: index
};
advance(start[0].length);
var end, attr;
while (!(end = html.match(startTagClose)) && (attr = html.match(attribute))) {
advance(attr[0].length);
match.attrs.push(attr);
}
if (end) {
match.unarySlash = end[1];
advance(end[0].length);
match.end = index;
return match;
}
}
}
function handleStartTag(match) {
var tagName = match.tagName;
var unarySlash = match.unarySlash;
if (expectHTML) {
if (lastTag === 'p' && isNonPhrasingTag(tagName)) {
parseEndTag(lastTag);
}
if (canBeLeftOpenTag$$1(tagName) && lastTag === tagName) {
parseEndTag(tagName);
}
}
var unary = isUnaryTag$$1(tagName) || tagName === 'html' && lastTag === 'head' || !!unarySlash;
var l = match.attrs.length;
var attrs = new Array(l);
for (var i = 0; i < l; i++) {
var args = match.attrs[i];
// hackish work around FF bug https://bugzilla.mozilla.org/show_bug.cgi?id=369778
if (IS_REGEX_CAPTURING_BROKEN && args[0].indexOf('""') === -1) {
if (args[3] === '') {
delete args[3];
}
if (args[4] === '') {
delete args[4];
}
if (args[5] === '') {
delete args[5];
}
}
var value = args[3] || args[4] || args[5] || '';
attrs[i] = {
name: args[1],
value: decodeAttr(value, options.shouldDecodeNewlines)
};
}
if (!unary) {
stack.push({ tag: tagName, lowerCasedTag: tagName.toLowerCase(), attrs: attrs });
lastTag = tagName;
}
if (options.start) {
options.start(tagName, attrs, unary, match.start, match.end);
}
}
function parseEndTag(tagName, start, end) {
var pos, lowerCasedTagName;
if (start == null) {
start = index;
}
if (end == null) {
end = index;
}
if (tagName) {
lowerCasedTagName = tagName.toLowerCase();
}
// Find the closest opened tag of the same type
if (tagName) {
for (pos = stack.length - 1; pos >= 0; pos--) {
if (stack[pos].lowerCasedTag === lowerCasedTagName) {
break;
}
}
} else {
// If no tag name is provided, clean shop
pos = 0;
}
if (pos >= 0) {
// Close all the open elements, up the stack
for (var i = stack.length - 1; i >= pos; i--) {
if ("development" !== 'production' && (i > pos || !tagName) && options.warn) {
options.warn("tag <" + stack[i].tag + "> has no matching end tag.");
}
if (options.end) {
options.end(stack[i].tag, start, end);
}
}
// Remove the open elements from the stack
stack.length = pos;
lastTag = pos && stack[pos - 1].tag;
} else if (lowerCasedTagName === 'br') {
if (options.start) {
options.start(tagName, [], true, start, end);
}
} else if (lowerCasedTagName === 'p') {
if (options.start) {
options.start(tagName, [], false, start, end);
}
if (options.end) {
options.end(tagName, start, end);
}
}
}
}
/* */
var defaultTagRE = /\{\{((?:.|\n)+?)\}\}/g;
var regexEscapeRE = /[-.*+?^${}()|[\]\/\\]/g;
var buildRegex = cached(function (delimiters) {
var open = delimiters[0].replace(regexEscapeRE, '\\$&');
var close = delimiters[1].replace(regexEscapeRE, '\\$&');
return new RegExp(open + '((?:.|\\n)+?)' + close, 'g');
});
function parseText(text, delimiters) {
var tagRE = delimiters ? buildRegex(delimiters) : defaultTagRE;
if (!tagRE.test(text)) {
return;
}
var tokens = [];
var lastIndex = tagRE.lastIndex = 0;
var match, index;
while (match = tagRE.exec(text)) {
index = match.index;
// push text token
if (index > lastIndex) {
tokens.push(JSON.stringify(text.slice(lastIndex, index)));
}
// tag token
var exp = parseFilters(match[1].trim());
tokens.push("_s(" + exp + ")");
lastIndex = index + match[0].length;
}
if (lastIndex < text.length) {
tokens.push(JSON.stringify(text.slice(lastIndex)));
}
return tokens.join('+');
}
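// Illustrative sketch (assumed template text, not part of the build):
//   parseText('Hi {{ name }}!')  -> '"Hi "+_s(name)+"!"'
//   parseText('static text')     -> undefined (no interpolation tags)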
/* */
var onRE = /^@|^v-on:/;
var dirRE = /^v-|^@|^:/;
var forAliasRE = /(.*?)\s+(?:in|of)\s+(.*)/;
var forIteratorRE = /\((\{[^}]*\}|[^,]*),([^,]*)(?:,([^,]*))?\)/;
var argRE = /:(.*)$/;
var bindRE = /^:|^v-bind:/;
var modifierRE = /\.[^.]+/g;
var decodeHTMLCached = cached(decode);
// configurable state
var warn$2;
var delimiters;
var transforms;
var preTransforms;
var postTransforms;
var platformIsPreTag;
var platformMustUseProp;
var platformGetTagNamespace;
/**
* Convert HTML string to AST.
*/
function parse(template, options) {
warn$2 = options.warn || baseWarn;
platformGetTagNamespace = options.getTagNamespace || no;
platformMustUseProp = options.mustUseProp || no;
platformIsPreTag = options.isPreTag || no;
preTransforms = pluckModuleFunction(options.modules, 'preTransformNode');
transforms = pluckModuleFunction(options.modules, 'transformNode');
postTransforms = pluckModuleFunction(options.modules, 'postTransformNode');
delimiters = options.delimiters;
var stack = [];
var preserveWhitespace = options.preserveWhitespace !== false;
var root;
var currentParent;
var inVPre = false;
var inPre = false;
var warned = false;
function warnOnce(msg) {
if (!warned) {
warned = true;
warn$2(msg);
}
}
function endPre(element) {
// check pre state
if (element.pre) {
inVPre = false;
}
if (platformIsPreTag(element.tag)) {
inPre = false;
}
}
parseHTML(template, {
warn: warn$2,
expectHTML: options.expectHTML,
isUnaryTag: options.isUnaryTag,
canBeLeftOpenTag: options.canBeLeftOpenTag,
shouldDecodeNewlines: options.shouldDecodeNewlines,
start: function start(tag, attrs, unary) {
// check namespace.
// inherit parent ns if there is one
var ns = currentParent && currentParent.ns || platformGetTagNamespace(tag);
// handle IE svg bug
/* istanbul ignore if */
if (isIE && ns === 'svg') {
attrs = guardIESVGBug(attrs);
}
var element = {
type: 1,
tag: tag,
attrsList: attrs,
attrsMap: makeAttrsMap(attrs),
parent: currentParent,
children: []
};
if (ns) {
element.ns = ns;
}
if (isForbiddenTag(element) && !isServerRendering()) {
element.forbidden = true;
"development" !== 'production' && warn$2('Templates should only be responsible for mapping the state to the ' + 'UI. Avoid placing tags with side-effects in your templates, such as ' + "<" + tag + ">" + ', as they will not be parsed.');
}
// apply pre-transforms
for (var i = 0; i < preTransforms.length; i++) {
preTransforms[i](element, options);
}
if (!inVPre) {
processPre(element);
if (element.pre) {
inVPre = true;
}
}
if (platformIsPreTag(element.tag)) {
inPre = true;
}
if (inVPre) {
processRawAttrs(element);
} else {
processFor(element);
processIf(element);
processOnce(element);
processKey(element);
// determine whether this is a plain element after
// removing structural attributes
element.plain = !element.key && !attrs.length;
processRef(element);
processSlot(element);
processComponent(element);
for (var i$1 = 0; i$1 < transforms.length; i$1++) {
transforms[i$1](element, options);
}
processAttrs(element);
}
function checkRootConstraints(el) {
if (true) {
if (el.tag === 'slot' || el.tag === 'template') {
warnOnce("Cannot use <" + el.tag + "> as component root element because it may " + 'contain multiple nodes.');
}
if (el.attrsMap.hasOwnProperty('v-for')) {
warnOnce('Cannot use v-for on stateful component root element because ' + 'it renders multiple elements.');
}
}
}
// tree management
if (!root) {
root = element;
checkRootConstraints(root);
} else if (!stack.length) {
// allow root elements with v-if, v-else-if and v-else
if (root.if && (element.elseif || element.else)) {
checkRootConstraints(element);
addIfCondition(root, {
exp: element.elseif,
block: element
});
} else if (true) {
warnOnce("Component template should contain exactly one root element. " + "If you are using v-if on multiple elements, " + "use v-else-if to chain them instead.");
}
}
if (currentParent && !element.forbidden) {
if (element.elseif || element.else) {
processIfConditions(element, currentParent);
} else if (element.slotScope) {
// scoped slot
currentParent.plain = false;
var name = element.slotTarget || '"default"';(currentParent.scopedSlots || (currentParent.scopedSlots = {}))[name] = element;
} else {
currentParent.children.push(element);
element.parent = currentParent;
}
}
if (!unary) {
currentParent = element;
stack.push(element);
} else {
endPre(element);
}
// apply post-transforms
for (var i$2 = 0; i$2 < postTransforms.length; i$2++) {
postTransforms[i$2](element, options);
}
},
end: function end() {
// remove trailing whitespace
var element = stack[stack.length - 1];
var lastNode = element.children[element.children.length - 1];
if (lastNode && lastNode.type === 3 && lastNode.text === ' ' && !inPre) {
element.children.pop();
}
// pop stack
stack.length -= 1;
currentParent = stack[stack.length - 1];
endPre(element);
},
chars: function chars(text) {
if (!currentParent) {
if (true) {
if (text === template) {
warnOnce('Component template requires a root element, rather than just text.');
} else if (text = text.trim()) {
warnOnce("text \"" + text + "\" outside root element will be ignored.");
}
}
return;
}
// IE textarea placeholder bug
/* istanbul ignore if */
if (isIE && currentParent.tag === 'textarea' && currentParent.attrsMap.placeholder === text) {
return;
}
var children = currentParent.children;
text = inPre || text.trim() ? isTextTag(currentParent) ? text : decodeHTMLCached(text)
// only preserve whitespace if it's not right after a starting tag
: preserveWhitespace && children.length ? ' ' : '';
if (text) {
var expression;
if (!inVPre && text !== ' ' && (expression = parseText(text, delimiters))) {
children.push({
type: 2,
expression: expression,
text: text
});
} else if (text !== ' ' || !children.length || children[children.length - 1].text !== ' ') {
children.push({
type: 3,
text: text
});
}
}
}
});
return root;
}
function processPre(el) {
if (getAndRemoveAttr(el, 'v-pre') != null) {
el.pre = true;
}
}
function processRawAttrs(el) {
var l = el.attrsList.length;
if (l) {
var attrs = el.attrs = new Array(l);
for (var i = 0; i < l; i++) {
attrs[i] = {
name: el.attrsList[i].name,
value: JSON.stringify(el.attrsList[i].value)
};
}
} else if (!el.pre) {
// non root node in pre blocks with no attributes
el.plain = true;
}
}
function processKey(el) {
var exp = getBindingAttr(el, 'key');
if (exp) {
if ("development" !== 'production' && el.tag === 'template') {
warn$2("<template> cannot be keyed. Place the key on real elements instead.");
}
el.key = exp;
}
}
function processRef(el) {
var ref = getBindingAttr(el, 'ref');
if (ref) {
el.ref = ref;
el.refInFor = checkInFor(el);
}
}
function processFor(el) {
var exp;
if (exp = getAndRemoveAttr(el, 'v-for')) {
var inMatch = exp.match(forAliasRE);
if (!inMatch) {
"development" !== 'production' && warn$2("Invalid v-for expression: " + exp);
return;
}
el.for = inMatch[2].trim();
var alias = inMatch[1].trim();
var iteratorMatch = alias.match(forIteratorRE);
if (iteratorMatch) {
el.alias = iteratorMatch[1].trim();
el.iterator1 = iteratorMatch[2].trim();
if (iteratorMatch[3]) {
el.iterator2 = iteratorMatch[3].trim();
}
} else {
el.alias = alias;
}
}
}
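// Illustrative sketch (assumed directive value, not part of the build): for
// v-for="(item, index) in list", processFor above sets
//   el.for = 'list', el.alias = 'item', el.iterator1 = 'index'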
function processIf(el) {
var exp = getAndRemoveAttr(el, 'v-if');
if (exp) {
el.if = exp;
addIfCondition(el, {
exp: exp,
block: el
});
} else {
if (getAndRemoveAttr(el, 'v-else') != null) {
el.else = true;
}
var elseif = getAndRemoveAttr(el, 'v-else-if');
if (elseif) {
el.elseif = elseif;
}
}
}
function processIfConditions(el, parent) {
var prev = findPrevElement(parent.children);
if (prev && prev.if) {
addIfCondition(prev, {
exp: el.elseif,
block: el
});
} else if (true) {
warn$2("v-" + (el.elseif ? 'else-if="' + el.elseif + '"' : 'else') + " " + "used on element <" + el.tag + "> without corresponding v-if.");
}
}
function findPrevElement(children) {
var i = children.length;
while (i--) {
if (children[i].type === 1) {
return children[i];
} else {
if ("development" !== 'production' && children[i].text !== ' ') {
warn$2("text \"" + children[i].text.trim() + "\" between v-if and v-else(-if) " + "will be ignored.");
}
children.pop();
}
}
}
function addIfCondition(el, condition) {
if (!el.ifConditions) {
el.ifConditions = [];
}
el.ifConditions.push(condition);
}
function processOnce(el) {
var once$$1 = getAndRemoveAttr(el, 'v-once');
if (once$$1 != null) {
el.once = true;
}
}
function processSlot(el) {
if (el.tag === 'slot') {
el.slotName = getBindingAttr(el, 'name');
if ("development" !== 'production' && el.key) {
warn$2("`key` does not work on <slot> because slots are abstract outlets " + "and can possibly expand into multiple elements. " + "Use the key on a wrapping element instead.");
}
} else {
var slotTarget = getBindingAttr(el, 'slot');
if (slotTarget) {
el.slotTarget = slotTarget === '""' ? '"default"' : slotTarget;
}
if (el.tag === 'template') {
el.slotScope = getAndRemoveAttr(el, 'scope');
}
}
}
function processComponent(el) {
var binding;
if (binding = getBindingAttr(el, 'is')) {
el.component = binding;
}
if (getAndRemoveAttr(el, 'inline-template') != null) {
el.inlineTemplate = true;
}
}
function processAttrs(el) {
var list = el.attrsList;
var i, l, name, rawName, value, modifiers, isProp;
for (i = 0, l = list.length; i < l; i++) {
name = rawName = list[i].name;
value = list[i].value;
if (dirRE.test(name)) {
// mark element as dynamic
el.hasBindings = true;
// modifiers
modifiers = parseModifiers(name);
if (modifiers) {
name = name.replace(modifierRE, '');
}
if (bindRE.test(name)) {
// v-bind
name = name.replace(bindRE, '');
value = parseFilters(value);
isProp = false;
if (modifiers) {
if (modifiers.prop) {
isProp = true;
name = camelize(name);
if (name === 'innerHtml') {
name = 'innerHTML';
}
}
if (modifiers.camel) {
name = camelize(name);
}
if (modifiers.sync) {
addHandler(el, "update:" + camelize(name), genAssignmentCode(value, "$event"));
}
}
if (isProp || platformMustUseProp(el.tag, el.attrsMap.type, name)) {
addProp(el, name, value);
} else {
addAttr(el, name, value);
}
} else if (onRE.test(name)) {
// v-on
name = name.replace(onRE, '');
addHandler(el, name, value, modifiers, false, warn$2);
} else {
// normal directives
name = name.replace(dirRE, '');
// parse arg
var argMatch = name.match(argRE);
var arg = argMatch && argMatch[1];
if (arg) {
name = name.slice(0, -(arg.length + 1));
}
addDirective(el, name, rawName, value, arg, modifiers);
if ("development" !== 'production' && name === 'model') {
checkForAliasModel(el, value);
}
}
} else {
// literal attribute
if (true) {
var expression = parseText(value, delimiters);
if (expression) {
warn$2(name + "=\"" + value + "\": " + 'Interpolation inside attributes has been removed. ' + 'Use v-bind or the colon shorthand instead. For example, ' + 'instead of <div id="{{ val }}">, use <div :id="val">.');
}
}
addAttr(el, name, JSON.stringify(value));
}
}
}
function checkInFor(el) {
var parent = el;
while (parent) {
if (parent.for !== undefined) {
return true;
}
parent = parent.parent;
}
return false;
}
function parseModifiers(name) {
var match = name.match(modifierRE);
if (match) {
var ret = {};
match.forEach(function (m) {
ret[m.slice(1)] = true;
});
return ret;
}
}
function makeAttrsMap(attrs) {
var map = {};
for (var i = 0, l = attrs.length; i < l; i++) {
if ("development" !== 'production' && map[attrs[i].name] && !isIE && !isEdge) {
warn$2('duplicate attribute: ' + attrs[i].name);
}
map[attrs[i].name] = attrs[i].value;
}
return map;
}
// for script (e.g. type="x/template") or style, do not decode content
function isTextTag(el) {
return el.tag === 'script' || el.tag === 'style';
}
function isForbiddenTag(el) {
return el.tag === 'style' || el.tag === 'script' && (!el.attrsMap.type || el.attrsMap.type === 'text/javascript');
}
var ieNSBug = /^xmlns:NS\d+/;
var ieNSPrefix = /^NS\d+:/;
/* istanbul ignore next */
function guardIESVGBug(attrs) {
var res = [];
for (var i = 0; i < attrs.length; i++) {
var attr = attrs[i];
if (!ieNSBug.test(attr.name)) {
attr.name = attr.name.replace(ieNSPrefix, '');
res.push(attr);
}
}
return res;
}
function checkForAliasModel(el, value) {
var _el = el;
while (_el) {
if (_el.for && _el.alias === value) {
warn$2("<" + el.tag + " v-model=\"" + value + "\">: " + "You are binding v-model directly to a v-for iteration alias. " + "This will not be able to modify the v-for source array because " + "writing to the alias is like modifying a function local variable. " + "Consider using an array of objects and use v-model on an object property instead.");
}
_el = _el.parent;
}
}
/* */
var isStaticKey;
var isPlatformReservedTag;
var genStaticKeysCached = cached(genStaticKeys$1);
/**
* Goal of the optimizer: walk the generated template AST tree
* and detect sub-trees that are purely static, i.e. parts of
* the DOM that never needs to change.
*
* Once we detect these sub-trees, we can:
*
* 1. Hoist them into constants, so that we no longer need to
* create fresh nodes for them on each re-render;
* 2. Completely skip them in the patching process.
*/
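// Illustrative sketch only (not part of the original Vue source): given
// <div><p><b>static</b> text</p><p>{{ msg }}</p></div>, the first <p> subtree contains no
// bindings, so it is marked as a static root and genStatic later hoists it into a
// staticRenderFns entry referenced as _m(0); the second <p> stays dynamic because its text
// is an expression. An element whose only child is plain static text is intentionally not
// hoisted (see markStaticRoots below), since hoisting would cost more than re-rendering it.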
function optimize(root, options) {
if (!root) {
return;
}
isStaticKey = genStaticKeysCached(options.staticKeys || '');
isPlatformReservedTag = options.isReservedTag || no;
// first pass: mark all non-static nodes.
markStatic$1(root);
// second pass: mark static roots.
markStaticRoots(root, false);
}
function genStaticKeys$1(keys) {
return makeMap('type,tag,attrsList,attrsMap,plain,parent,children,attrs' + (keys ? ',' + keys : ''));
}
function markStatic$1(node) {
node.static = isStatic(node);
if (node.type === 1) {
// do not make component slot content static. this avoids
// 1. components not able to mutate slot nodes
// 2. static slot content fails for hot-reloading
if (!isPlatformReservedTag(node.tag) && node.tag !== 'slot' && node.attrsMap['inline-template'] == null) {
return;
}
for (var i = 0, l = node.children.length; i < l; i++) {
var child = node.children[i];
markStatic$1(child);
if (!child.static) {
node.static = false;
}
}
}
}
function markStaticRoots(node, isInFor) {
if (node.type === 1) {
if (node.static || node.once) {
node.staticInFor = isInFor;
}
// For a node to qualify as a static root, it should have children that
// are not just static text. Otherwise the cost of hoisting out will
// outweigh the benefits and it's better off to just always render it fresh.
if (node.static && node.children.length && !(node.children.length === 1 && node.children[0].type === 3)) {
node.staticRoot = true;
return;
} else {
node.staticRoot = false;
}
if (node.children) {
for (var i = 0, l = node.children.length; i < l; i++) {
markStaticRoots(node.children[i], isInFor || !!node.for);
}
}
if (node.ifConditions) {
walkThroughConditionsBlocks(node.ifConditions, isInFor);
}
}
}
function walkThroughConditionsBlocks(conditionBlocks, isInFor) {
for (var i = 1, len = conditionBlocks.length; i < len; i++) {
markStaticRoots(conditionBlocks[i].block, isInFor);
}
}
function isStatic(node) {
if (node.type === 2) {
// expression
return false;
}
if (node.type === 3) {
// text
return true;
}
return !!(node.pre || !node.hasBindings && // no dynamic bindings
!node.if && !node.for && // not v-if or v-for or v-else
!isBuiltInTag(node.tag) && // not a built-in
isPlatformReservedTag(node.tag) && // not a component
!isDirectChildOfTemplateFor(node) && Object.keys(node).every(isStaticKey));
}
function isDirectChildOfTemplateFor(node) {
while (node.parent) {
node = node.parent;
if (node.tag !== 'template') {
return false;
}
if (node.for) {
return true;
}
}
return false;
}
/* */
var fnExpRE = /^\s*([\w$_]+|\([^)]*?\))\s*=>|^function\s*\(/;
var simplePathRE = /^\s*[A-Za-z_$][\w$]*(?:\.[A-Za-z_$][\w$]*|\['.*?']|\[".*?"]|\[\d+]|\[[A-Za-z_$][\w$]*])*\s*$/;
// keyCode aliases
var keyCodes = {
esc: 27,
tab: 9,
enter: 13,
space: 32,
up: 38,
left: 37,
right: 39,
down: 40,
'delete': [8, 46]
};
// #4868: modifiers that prevent the execution of the listener
// need to explicitly return null so that we can determine whether to remove
// the listener for .once
var genGuard = function genGuard(condition) {
return "if(" + condition + ")return null;";
};
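// Illustrative only: genGuard("!$event.ctrlKey") expands to the string
// "if(!$event.ctrlKey)return null;". Returning null (rather than undefined) is what lets
// the runtime's .once handling tell "the guard rejected this event" apart from a normal
// invocation, so the listener is not removed prematurely (see #4868 above).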
var modifierCode = {
stop: '$event.stopPropagation();',
prevent: '$event.preventDefault();',
self: genGuard("$event.target !== $event.currentTarget"),
ctrl: genGuard("!$event.ctrlKey"),
shift: genGuard("!$event.shiftKey"),
alt: genGuard("!$event.altKey"),
meta: genGuard("!$event.metaKey"),
left: genGuard("'button' in $event && $event.button !== 0"),
middle: genGuard("'button' in $event && $event.button !== 1"),
right: genGuard("'button' in $event && $event.button !== 2")
};
function genHandlers(events, isNative, warn) {
var res = isNative ? 'nativeOn:{' : 'on:{';
for (var name in events) {
var handler = events[name];
// #5330: warn click.right, since right clicks do not actually fire click events.
if ("development" !== 'production' && name === 'click' && handler && handler.modifiers && handler.modifiers.right) {
warn("Use \"contextmenu\" instead of \"click.right\" since right clicks " + "do not actually fire \"click\" events.");
}
res += "\"" + name + "\":" + genHandler(name, handler) + ",";
}
return res.slice(0, -1) + '}';
}
function genHandler(name, handler) {
if (!handler) {
return 'function(){}';
}
if (Array.isArray(handler)) {
return "[" + handler.map(function (handler) {
return genHandler(name, handler);
}).join(',') + "]";
}
var isMethodPath = simplePathRE.test(handler.value);
var isFunctionExpression = fnExpRE.test(handler.value);
if (!handler.modifiers) {
return isMethodPath || isFunctionExpression ? handler.value : "function($event){" + handler.value + "}"; // inline statement
} else {
var code = '';
var genModifierCode = '';
var keys = [];
for (var key in handler.modifiers) {
if (modifierCode[key]) {
genModifierCode += modifierCode[key];
// left/right
if (keyCodes[key]) {
keys.push(key);
}
} else {
keys.push(key);
}
}
if (keys.length) {
code += genKeyFilter(keys);
}
// Make sure modifiers like prevent and stop get executed after key filtering
if (genModifierCode) {
code += genModifierCode;
}
var handlerCode = isMethodPath ? handler.value + '($event)' : isFunctionExpression ? "(" + handler.value + ")($event)" : handler.value;
return "function($event){" + code + handlerCode + "}";
}
}
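// Illustrative only: for v-on:click.stop="foo" (a simple method path plus the stop
// modifier) genHandler above produces roughly
//   function($event){$event.stopPropagation();foo($event)}
// whereas a plain v-on:click="foo" without modifiers is emitted as just "foo".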
function genKeyFilter(keys) {
return "if(!('button' in $event)&&" + keys.map(genFilterCode).join('&&') + ")return null;";
}
function genFilterCode(key) {
var keyVal = parseInt(key, 10);
if (keyVal) {
return "$event.keyCode!==" + keyVal;
}
var alias = keyCodes[key];
return "_k($event.keyCode," + JSON.stringify(key) + (alias ? ',' + JSON.stringify(alias) : '') + ")";
}
/* */
function bind$1(el, dir) {
el.wrapData = function (code) {
return "_b(" + code + ",'" + el.tag + "'," + dir.value + (dir.modifiers && dir.modifiers.prop ? ',true' : '') + ")";
};
}
/* */
var baseDirectives = {
bind: bind$1,
cloak: noop
};
/* */
// configurable state
var warn$3;
var transforms$1;
var dataGenFns;
var platformDirectives$1;
var isPlatformReservedTag$1;
var staticRenderFns;
var onceCount;
var currentOptions;
function generate(ast, options) {
// save previous staticRenderFns so generate calls can be nested
var prevStaticRenderFns = staticRenderFns;
var currentStaticRenderFns = staticRenderFns = [];
var prevOnceCount = onceCount;
onceCount = 0;
currentOptions = options;
warn$3 = options.warn || baseWarn;
transforms$1 = pluckModuleFunction(options.modules, 'transformCode');
dataGenFns = pluckModuleFunction(options.modules, 'genData');
platformDirectives$1 = options.directives || {};
isPlatformReservedTag$1 = options.isReservedTag || no;
var code = ast ? genElement(ast) : '_c("div")';
staticRenderFns = prevStaticRenderFns;
onceCount = prevOnceCount;
return {
render: "with(this){return " + code + "}",
staticRenderFns: currentStaticRenderFns
};
}
function genElement(el) {
if (el.staticRoot && !el.staticProcessed) {
return genStatic(el);
} else if (el.once && !el.onceProcessed) {
return genOnce(el);
} else if (el.for && !el.forProcessed) {
return genFor(el);
} else if (el.if && !el.ifProcessed) {
return genIf(el);
} else if (el.tag === 'template' && !el.slotTarget) {
return genChildren(el) || 'void 0';
} else if (el.tag === 'slot') {
return genSlot(el);
} else {
// component or element
var code;
if (el.component) {
code = genComponent(el.component, el);
} else {
var data = el.plain ? undefined : genData(el);
var children = el.inlineTemplate ? null : genChildren(el, true);
code = "_c('" + el.tag + "'" + (data ? "," + data : '') + (children ? "," + children : '') + ")";
}
// module transforms
for (var i = 0; i < transforms$1.length; i++) {
code = transforms$1[i](el, code);
}
return code;
}
}
// hoist static sub-trees out
function genStatic(el) {
el.staticProcessed = true;
staticRenderFns.push("with(this){return " + genElement(el) + "}");
return "_m(" + (staticRenderFns.length - 1) + (el.staticInFor ? ',true' : '') + ")";
}
// v-once
function genOnce(el) {
el.onceProcessed = true;
if (el.if && !el.ifProcessed) {
return genIf(el);
} else if (el.staticInFor) {
var key = '';
var parent = el.parent;
while (parent) {
if (parent.for) {
key = parent.key;
break;
}
parent = parent.parent;
}
if (!key) {
"development" !== 'production' && warn$3("v-once can only be used inside v-for that is keyed. ");
return genElement(el);
}
return "_o(" + genElement(el) + "," + onceCount++ + (key ? "," + key : "") + ")";
} else {
return genStatic(el);
}
}
function genIf(el) {
el.ifProcessed = true; // avoid recursion
return genIfConditions(el.ifConditions.slice());
}
function genIfConditions(conditions) {
if (!conditions.length) {
return '_e()';
}
var condition = conditions.shift();
if (condition.exp) {
return "(" + condition.exp + ")?" + genTernaryExp(condition.block) + ":" + genIfConditions(conditions);
} else {
return "" + genTernaryExp(condition.block);
}
// v-if with v-once should generate code like (a)?_m(0):_m(1)
function genTernaryExp(el) {
return el.once ? genOnce(el) : genElement(el);
}
}
function genFor(el) {
var exp = el.for;
var alias = el.alias;
var iterator1 = el.iterator1 ? "," + el.iterator1 : '';
var iterator2 = el.iterator2 ? "," + el.iterator2 : '';
if ("development" !== 'production' && maybeComponent(el) && el.tag !== 'slot' && el.tag !== 'template' && !el.key) {
warn$3("<" + el.tag + " v-for=\"" + alias + " in " + exp + "\">: component lists rendered with " + "v-for should have explicit keys. " + "See https://vuejs.org/guide/list.html#key for more info.", true /* tip */
);
}
el.forProcessed = true; // avoid recursion
return "_l((" + exp + ")," + "function(" + alias + iterator1 + iterator2 + "){" + "return " + genElement(el) + '})';
}
function genData(el) {
var data = '{';
// directives first.
// directives may mutate the el's other properties before they are generated.
var dirs = genDirectives(el);
if (dirs) {
data += dirs + ',';
}
// key
if (el.key) {
data += "key:" + el.key + ",";
}
// ref
if (el.ref) {
data += "ref:" + el.ref + ",";
}
if (el.refInFor) {
data += "refInFor:true,";
}
// pre
if (el.pre) {
data += "pre:true,";
}
// record original tag name for components using "is" attribute
if (el.component) {
data += "tag:\"" + el.tag + "\",";
}
// module data generation functions
for (var i = 0; i < dataGenFns.length; i++) {
data += dataGenFns[i](el);
}
// attributes
if (el.attrs) {
data += "attrs:{" + genProps(el.attrs) + "},";
}
// DOM props
if (el.props) {
data += "domProps:{" + genProps(el.props) + "},";
}
// event handlers
if (el.events) {
data += genHandlers(el.events, false, warn$3) + ",";
}
if (el.nativeEvents) {
data += genHandlers(el.nativeEvents, true, warn$3) + ",";
}
// slot target
if (el.slotTarget) {
data += "slot:" + el.slotTarget + ",";
}
// scoped slots
if (el.scopedSlots) {
data += genScopedSlots(el.scopedSlots) + ",";
}
// component v-model
if (el.model) {
data += "model:{value:" + el.model.value + ",callback:" + el.model.callback + ",expression:" + el.model.expression + "},";
}
// inline-template
if (el.inlineTemplate) {
var inlineTemplate = genInlineTemplate(el);
if (inlineTemplate) {
data += inlineTemplate + ",";
}
}
data = data.replace(/,$/, '') + '}';
// v-bind data wrap
if (el.wrapData) {
data = el.wrapData(data);
}
return data;
}
function genDirectives(el) {
var dirs = el.directives;
if (!dirs) {
return;
}
var res = 'directives:[';
var hasRuntime = false;
var i, l, dir, needRuntime;
for (i = 0, l = dirs.length; i < l; i++) {
dir = dirs[i];
needRuntime = true;
var gen = platformDirectives$1[dir.name] || baseDirectives[dir.name];
if (gen) {
// compile-time directive that manipulates AST.
// returns true if it also needs a runtime counterpart.
needRuntime = !!gen(el, dir, warn$3);
}
if (needRuntime) {
hasRuntime = true;
res += "{name:\"" + dir.name + "\",rawName:\"" + dir.rawName + "\"" + (dir.value ? ",value:(" + dir.value + "),expression:" + JSON.stringify(dir.value) : '') + (dir.arg ? ",arg:\"" + dir.arg + "\"" : '') + (dir.modifiers ? ",modifiers:" + JSON.stringify(dir.modifiers) : '') + "},";
}
}
if (hasRuntime) {
return res.slice(0, -1) + ']';
}
}
function genInlineTemplate(el) {
var ast = el.children[0];
if ("development" !== 'production' && (el.children.length > 1 || ast.type !== 1)) {
warn$3('Inline-template components must have exactly one child element.');
}
if (ast.type === 1) {
var inlineRenderFns = generate(ast, currentOptions);
return "inlineTemplate:{render:function(){" + inlineRenderFns.render + "},staticRenderFns:[" + inlineRenderFns.staticRenderFns.map(function (code) {
return "function(){" + code + "}";
}).join(',') + "]}";
}
}
function genScopedSlots(slots) {
return "scopedSlots:_u([" + Object.keys(slots).map(function (key) {
return genScopedSlot(key, slots[key]);
}).join(',') + "])";
}
function genScopedSlot(key, el) {
if (el.for && !el.forProcessed) {
return genForScopedSlot(key, el);
}
return "{key:" + key + ",fn:function(" + String(el.attrsMap.scope) + "){" + "return " + (el.tag === 'template' ? genChildren(el) || 'void 0' : genElement(el)) + "}}";
}
function genForScopedSlot(key, el) {
var exp = el.for;
var alias = el.alias;
var iterator1 = el.iterator1 ? "," + el.iterator1 : '';
var iterator2 = el.iterator2 ? "," + el.iterator2 : '';
el.forProcessed = true; // avoid recursion
return "_l((" + exp + ")," + "function(" + alias + iterator1 + iterator2 + "){" + "return " + genScopedSlot(key, el) + '})';
}
function genChildren(el, checkSkip) {
var children = el.children;
if (children.length) {
var el$1 = children[0];
// optimize single v-for
if (children.length === 1 && el$1.for && el$1.tag !== 'template' && el$1.tag !== 'slot') {
return genElement(el$1);
}
var normalizationType = checkSkip ? getNormalizationType(children) : 0;
return "[" + children.map(genNode).join(',') + "]" + (normalizationType ? "," + normalizationType : '');
}
}
// determine the normalization needed for the children array.
// 0: no normalization needed
// 1: simple normalization needed (possible 1-level deep nested array)
// 2: full normalization needed
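// Illustrative only: a child rendered by v-for (or a <template>/<slot>) can return a nested
// array of vnodes, so its presence forces full normalization (2); a child that may be a
// component only needs a simple one-level flatten (1); plain reserved elements need none (0).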
function getNormalizationType(children) {
var res = 0;
for (var i = 0; i < children.length; i++) {
var el = children[i];
if (el.type !== 1) {
continue;
}
if (needsNormalization(el) || el.ifConditions && el.ifConditions.some(function (c) {
return needsNormalization(c.block);
})) {
res = 2;
break;
}
if (maybeComponent(el) || el.ifConditions && el.ifConditions.some(function (c) {
return maybeComponent(c.block);
})) {
res = 1;
}
}
return res;
}
function needsNormalization(el) {
return el.for !== undefined || el.tag === 'template' || el.tag === 'slot';
}
function maybeComponent(el) {
return !isPlatformReservedTag$1(el.tag);
}
function genNode(node) {
if (node.type === 1) {
return genElement(node);
} else {
return genText(node);
}
}
function genText(text) {
return "_v(" + (text.type === 2 ? text.expression // no need for () because already wrapped in _s()
: transformSpecialNewlines(JSON.stringify(text.text))) + ")";
}
function genSlot(el) {
var slotName = el.slotName || '"default"';
var children = genChildren(el);
var res = "_t(" + slotName + (children ? "," + children : '');
var attrs = el.attrs && "{" + el.attrs.map(function (a) {
return camelize(a.name) + ":" + a.value;
}).join(',') + "}";
var bind$$1 = el.attrsMap['v-bind'];
if ((attrs || bind$$1) && !children) {
res += ",null";
}
if (attrs) {
res += "," + attrs;
}
if (bind$$1) {
res += (attrs ? '' : ',null') + "," + bind$$1;
}
return res + ')';
}
// componentName is el.component, take it as argument to shun flow's pessimistic refinement
function genComponent(componentName, el) {
var children = el.inlineTemplate ? null : genChildren(el, true);
return "_c(" + componentName + "," + genData(el) + (children ? "," + children : '') + ")";
}
function genProps(props) {
var res = '';
for (var i = 0; i < props.length; i++) {
var prop = props[i];
res += "\"" + prop.name + "\":" + transformSpecialNewlines(prop.value) + ",";
}
return res.slice(0, -1);
}
// #3895, #4268
function transformSpecialNewlines(text) {
return text.replace(/\u2028/g, '\\u2028').replace(/\u2029/g, '\\u2029');
}
/* */
// these keywords should not appear inside expressions, but operators like
// typeof, instanceof and in are allowed
var prohibitedKeywordRE = new RegExp('\\b' + ('do,if,for,let,new,try,var,case,else,with,await,break,catch,class,const,' + 'super,throw,while,yield,delete,export,import,return,switch,default,' + 'extends,finally,continue,debugger,function,arguments').split(',').join('\\b|\\b') + '\\b');
// these unary operators should not be used as property/method names
var unaryOperatorsRE = new RegExp('\\b' + 'delete,typeof,void'.split(',').join('\\s*\\([^\\)]*\\)|\\b') + '\\s*\\([^\\)]*\\)');
// check valid identifier for v-for
var identRE = /[A-Za-z_$][\w$]*/;
// strip strings in expressions
var stripStringRE = /'(?:[^'\\]|\\.)*'|"(?:[^"\\]|\\.)*"|`(?:[^`\\]|\\.)*\$\{|\}(?:[^`\\]|\\.)*`|`(?:[^`\\]|\\.)*`/g;
// detect problematic expressions in a template
function detectErrors(ast) {
var errors = [];
if (ast) {
checkNode(ast, errors);
}
return errors;
}
function checkNode(node, errors) {
if (node.type === 1) {
for (var name in node.attrsMap) {
if (dirRE.test(name)) {
var value = node.attrsMap[name];
if (value) {
if (name === 'v-for') {
checkFor(node, "v-for=\"" + value + "\"", errors);
} else if (onRE.test(name)) {
checkEvent(value, name + "=\"" + value + "\"", errors);
} else {
checkExpression(value, name + "=\"" + value + "\"", errors);
}
}
}
}
if (node.children) {
for (var i = 0; i < node.children.length; i++) {
checkNode(node.children[i], errors);
}
}
} else if (node.type === 2) {
checkExpression(node.expression, node.text, errors);
}
}
function checkEvent(exp, text, errors) {
var stipped = exp.replace(stripStringRE, '');
var keywordMatch = stipped.match(unaryOperatorsRE);
if (keywordMatch && stipped.charAt(keywordMatch.index - 1) !== '$') {
errors.push("avoid using JavaScript unary operator as property name: " + "\"" + keywordMatch[0] + "\" in expression " + text.trim());
}
checkExpression(exp, text, errors);
}
function checkFor(node, text, errors) {
checkExpression(node.for || '', text, errors);
checkIdentifier(node.alias, 'v-for alias', text, errors);
checkIdentifier(node.iterator1, 'v-for iterator', text, errors);
checkIdentifier(node.iterator2, 'v-for iterator', text, errors);
}
function checkIdentifier(ident, type, text, errors) {
if (typeof ident === 'string' && !identRE.test(ident)) {
errors.push("invalid " + type + " \"" + ident + "\" in expression: " + text.trim());
}
}
function checkExpression(exp, text, errors) {
try {
new Function("return " + exp);
} catch (e) {
var keywordMatch = exp.replace(stripStringRE, '').match(prohibitedKeywordRE);
if (keywordMatch) {
errors.push("avoid using JavaScript keyword as property name: " + "\"" + keywordMatch[0] + "\" in expression " + text.trim());
} else {
errors.push("invalid expression: " + text.trim());
}
}
}
/* */
function baseCompile(template, options) {
var ast = parse(template.trim(), options);
optimize(ast, options);
var code = generate(ast, options);
return {
ast: ast,
render: code.render,
staticRenderFns: code.staticRenderFns
};
}
function makeFunction(code, errors) {
try {
return new Function(code);
} catch (err) {
errors.push({ err: err, code: code });
return noop;
}
}
function createCompiler(baseOptions) {
var functionCompileCache = Object.create(null);
function compile(template, options) {
var finalOptions = Object.create(baseOptions);
var errors = [];
var tips = [];
finalOptions.warn = function (msg, tip$$1) {
(tip$$1 ? tips : errors).push(msg);
};
if (options) {
// merge custom modules
if (options.modules) {
finalOptions.modules = (baseOptions.modules || []).concat(options.modules);
}
// merge custom directives
if (options.directives) {
finalOptions.directives = extend(Object.create(baseOptions.directives), options.directives);
}
// copy other options
for (var key in options) {
if (key !== 'modules' && key !== 'directives') {
finalOptions[key] = options[key];
}
}
}
var compiled = baseCompile(template, finalOptions);
if (true) {
errors.push.apply(errors, detectErrors(compiled.ast));
}
compiled.errors = errors;
compiled.tips = tips;
return compiled;
}
function compileToFunctions(template, options, vm) {
options = options || {};
/* istanbul ignore if */
if (true) {
// detect possible CSP restriction
try {
new Function('return 1');
} catch (e) {
if (e.toString().match(/unsafe-eval|CSP/)) {
warn('It seems you are using the standalone build of Vue.js in an ' + 'environment with Content Security Policy that prohibits unsafe-eval. ' + 'The template compiler cannot work in this environment. Consider ' + 'relaxing the policy to allow unsafe-eval or pre-compiling your ' + 'templates into render functions.');
}
}
}
// check cache
var key = options.delimiters ? String(options.delimiters) + template : template;
if (functionCompileCache[key]) {
return functionCompileCache[key];
}
// compile
var compiled = compile(template, options);
// check compilation errors/tips
if (true) {
if (compiled.errors && compiled.errors.length) {
warn("Error compiling template:\n\n" + template + "\n\n" + compiled.errors.map(function (e) {
return "- " + e;
}).join('\n') + '\n', vm);
}
if (compiled.tips && compiled.tips.length) {
compiled.tips.forEach(function (msg) {
return tip(msg, vm);
});
}
}
// turn code into functions
var res = {};
var fnGenErrors = [];
res.render = makeFunction(compiled.render, fnGenErrors);
var l = compiled.staticRenderFns.length;
res.staticRenderFns = new Array(l);
for (var i = 0; i < l; i++) {
res.staticRenderFns[i] = makeFunction(compiled.staticRenderFns[i], fnGenErrors);
}
// check function generation errors.
// this should only happen if there is a bug in the compiler itself.
// mostly for codegen development use
/* istanbul ignore if */
if (true) {
if ((!compiled.errors || !compiled.errors.length) && fnGenErrors.length) {
warn("Failed to generate render function:\n\n" + fnGenErrors.map(function (ref) {
var err = ref.err;
var code = ref.code;
return err.toString() + " in\n\n" + code + "\n";
}).join('\n'), vm);
}
}
return functionCompileCache[key] = res;
}
return {
compile: compile,
compileToFunctions: compileToFunctions
};
}
/* */
function transformNode(el, options) {
var warn = options.warn || baseWarn;
var staticClass = getAndRemoveAttr(el, 'class');
if ("development" !== 'production' && staticClass) {
var expression = parseText(staticClass, options.delimiters);
if (expression) {
warn("class=\"" + staticClass + "\": " + 'Interpolation inside attributes has been removed. ' + 'Use v-bind or the colon shorthand instead. For example, ' + 'instead of <div class="{{ val }}">, use <div :class="val">.');
}
}
if (staticClass) {
el.staticClass = JSON.stringify(staticClass);
}
var classBinding = getBindingAttr(el, 'class', false /* getStatic */);
if (classBinding) {
el.classBinding = classBinding;
}
}
function genData$1(el) {
var data = '';
if (el.staticClass) {
data += "staticClass:" + el.staticClass + ",";
}
if (el.classBinding) {
data += "class:" + el.classBinding + ",";
}
return data;
}
var klass$1 = {
staticKeys: ['staticClass'],
transformNode: transformNode,
genData: genData$1
};
/* */
function transformNode$1(el, options) {
var warn = options.warn || baseWarn;
var staticStyle = getAndRemoveAttr(el, 'style');
if (staticStyle) {
/* istanbul ignore if */
if (true) {
var expression = parseText(staticStyle, options.delimiters);
if (expression) {
warn("style=\"" + staticStyle + "\": " + 'Interpolation inside attributes has been removed. ' + 'Use v-bind or the colon shorthand instead. For example, ' + 'instead of <div style="{{ val }}">, use <div :style="val">.');
}
}
el.staticStyle = JSON.stringify(parseStyleText(staticStyle));
}
var styleBinding = getBindingAttr(el, 'style', false /* getStatic */);
if (styleBinding) {
el.styleBinding = styleBinding;
}
}
function genData$2(el) {
var data = '';
if (el.staticStyle) {
data += "staticStyle:" + el.staticStyle + ",";
}
if (el.styleBinding) {
data += "style:(" + el.styleBinding + "),";
}
return data;
}
var style$1 = {
staticKeys: ['staticStyle'],
transformNode: transformNode$1,
genData: genData$2
};
var modules$1 = [klass$1, style$1];
/* */
function text(el, dir) {
if (dir.value) {
addProp(el, 'textContent', "_s(" + dir.value + ")");
}
}
/* */
function html(el, dir) {
if (dir.value) {
addProp(el, 'innerHTML', "_s(" + dir.value + ")");
}
}
var directives$1 = {
model: model,
text: text,
html: html
};
/* */
var baseOptions = {
expectHTML: true,
modules: modules$1,
directives: directives$1,
isPreTag: isPreTag,
isUnaryTag: isUnaryTag,
mustUseProp: mustUseProp,
canBeLeftOpenTag: canBeLeftOpenTag,
isReservedTag: isReservedTag,
getTagNamespace: getTagNamespace,
staticKeys: genStaticKeys(modules$1)
};
var ref$1 = createCompiler(baseOptions);
var compileToFunctions = ref$1.compileToFunctions;
/* */
var idToTemplate = cached(function (id) {
var el = query(id);
return el && el.innerHTML;
});
var mount = Vue$3.prototype.$mount;
Vue$3.prototype.$mount = function (el, hydrating) {
el = el && query(el);
/* istanbul ignore if */
if (el === document.body || el === document.documentElement) {
"development" !== 'production' && warn("Do not mount Vue to <html> or <body> - mount to normal elements instead.");
return this;
}
var options = this.$options;
// resolve template/el and convert to render function
if (!options.render) {
var template = options.template;
if (template) {
if (typeof template === 'string') {
if (template.charAt(0) === '#') {
template = idToTemplate(template);
/* istanbul ignore if */
if ("development" !== 'production' && !template) {
warn("Template element not found or is empty: " + options.template, this);
}
}
} else if (template.nodeType) {
template = template.innerHTML;
} else {
if (true) {
warn('invalid template option:' + template, this);
}
return this;
}
} else if (el) {
template = getOuterHTML(el);
}
if (template) {
/* istanbul ignore if */
if ("development" !== 'production' && config.performance && mark) {
mark('compile');
}
var ref = compileToFunctions(template, {
shouldDecodeNewlines: shouldDecodeNewlines,
delimiters: options.delimiters
}, this);
var render = ref.render;
var staticRenderFns = ref.staticRenderFns;
options.render = render;
options.staticRenderFns = staticRenderFns;
/* istanbul ignore if */
if ("development" !== 'production' && config.performance && mark) {
mark('compile end');
measure(this._name + " compile", 'compile', 'compile end');
}
}
}
return mount.call(this, el, hydrating);
};
/**
* Get outerHTML of elements, taking care
* of SVG elements in IE as well.
*/
function getOuterHTML(el) {
if (el.outerHTML) {
return el.outerHTML;
} else {
var container = document.createElement('div');
container.appendChild(el.cloneNode(true));
return container.innerHTML;
}
}
Vue$3.compile = compileToFunctions;
module.exports = Vue$3;
/* WEBPACK VAR INJECTION */
}).call(exports, __webpack_require__("./node_modules/webpack/buildin/global.js"));
/***/
},
/***/"./node_modules/webpack/buildin/global.js":
/***/function node_modulesWebpackBuildinGlobalJs(module, exports) {
var g;
// This works in non-strict mode
g = function () {
return this;
}();
try {
// This works if eval is allowed (see CSP)
g = g || Function("return this")() || (1, eval)("this");
} catch (e) {
// This works if the window reference is available
if ((typeof window === 'undefined' ? 'undefined' : _typeof(window)) === "object") g = window;
}
// g can still be undefined, but nothing to do about it...
// We return undefined, instead of nothing here, so it's
// easier to handle this case. if(!global) { ...}
module.exports = g;
/***/
},
/***/"./resources/assets/js/App.vue":
/***/function resourcesAssetsJsAppVue(module, exports, __webpack_require__) {
var Component = __webpack_require__("./node_modules/vue-loader/lib/component-normalizer.js")(
/* script */
__webpack_require__("./node_modules/babel-loader/lib/index.js?{\"cacheDirectory\":true,\"presets\":[[\"env\",{\"modules\":false,\"targets\":{\"browsers\":[\"> 2%\"],\"uglify\":true}}]]}!./node_modules/vue-loader/lib/selector.js?type=script&index=0!./resources/assets/js/App.vue"),
/* template */
__webpack_require__("./node_modules/vue-loader/lib/template-compiler/index.js?{\"id\":\"data-v-290ad3a6\"}!./node_modules/vue-loader/lib/selector.js?type=template&index=0!./resources/assets/js/App.vue"),
/* scopeId */
null,
/* cssModules */
null);
Component.options.__file = "/var/www/html/recipebox/resources/assets/js/App.vue";
if (Component.esModule && Object.keys(Component.esModule).some(function (key) {
return key !== "default" && key !== "__esModule";
})) {
console.error("named exports are not supported in *.vue files.");
}
if (Component.options.functional) {
console.error("[vue-loader] App.vue: functional components are not supported with templates, they should use render functions.");
}
/* hot reload */
if (false) {
(function () {
var hotAPI = require("vue-hot-reload-api");
hotAPI.install(require("vue"), false);
if (!hotAPI.compatible) return;
module.hot.accept();
if (!module.hot.data) {
hotAPI.createRecord("data-v-290ad3a6", Component.options);
} else {
hotAPI.reload("data-v-290ad3a6", Component.options);
}
})();
}
module.exports = Component.exports;
/***/
},
/***/"./resources/assets/js/app.js":
/***/function resourcesAssetsJsAppJs(module, __webpack_exports__, __webpack_require__) {
"use strict";
Object.defineProperty(__webpack_exports__, "__esModule", { value: true });
/* harmony import */var __WEBPACK_IMPORTED_MODULE_0_vue__ = __webpack_require__("./node_modules/vue/dist/vue.common.js");
/* harmony import */var __WEBPACK_IMPORTED_MODULE_0_vue___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_vue__);
/* harmony import */var __WEBPACK_IMPORTED_MODULE_1__App_vue__ = __webpack_require__("./resources/assets/js/App.vue");
/* harmony import */var __WEBPACK_IMPORTED_MODULE_1__App_vue___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_1__App_vue__);
/* harmony import */var __WEBPACK_IMPORTED_MODULE_2__router__ = __webpack_require__("./resources/assets/js/router/index.js");
var app = new __WEBPACK_IMPORTED_MODULE_0_vue___default.a({
el: '#root',
template: '<app></app>',
components: { App: __WEBPACK_IMPORTED_MODULE_1__App_vue___default.a },
router: __WEBPACK_IMPORTED_MODULE_2__router__["a" /* default */]
});
/***/
},
/***/"./resources/assets/js/helpers/api.js":
/***/function resourcesAssetsJsHelpersApiJs(module, __webpack_exports__, __webpack_require__) {
"use strict";
/* harmony export (immutable) */
__webpack_exports__["a"] = post;
/* harmony import */var __WEBPACK_IMPORTED_MODULE_0_axios__ = __webpack_require__("./node_modules/axios/index.js");
/* harmony import */var __WEBPACK_IMPORTED_MODULE_0_axios___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_axios__);
function post(url, data) {
return __WEBPACK_IMPORTED_MODULE_0_axios___default.a({
method: 'POST',
url: url,
data: data,
headers: {
'Authorization': ''
}
});
}
/***/
},
/***/"./resources/assets/js/helpers/flash.js":
/***/function resourcesAssetsJsHelpersFlashJs(module, __webpack_exports__, __webpack_require__) {
"use strict";
/* harmony default export */
__webpack_exports__["a"] = {
state: {
success: null,
error: null
},
setSuccess: function setSuccess(message) {
var _this = this;
this.state.success = message;
setTimeout(function () {
_this.removeSuccess();
}, 3000);
},
setError: function setError(message) {
var _this2 = this;
this.state.error = message;
setTimeout(function () {
_this2.removeError();
}, 10000);
},
removeSuccess: function removeSuccess() {
this.state.success = null;
},
removeError: function removeError() {
this.state.error = null;
}
};
/***/
},
/***/"./resources/assets/js/router/index.js":
/***/function resourcesAssetsJsRouterIndexJs(module, __webpack_exports__, __webpack_require__) {
"use strict";
/* harmony import */
var __WEBPACK_IMPORTED_MODULE_0_vue__ = __webpack_require__("./node_modules/vue/dist/vue.common.js");
/* harmony import */var __WEBPACK_IMPORTED_MODULE_0_vue___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_0_vue__);
/* harmony import */var __WEBPACK_IMPORTED_MODULE_1_vue_router__ = __webpack_require__("./node_modules/vue-router/dist/vue-router.esm.js");
/* harmony import */var __WEBPACK_IMPORTED_MODULE_2__views_Auth_Login_vue__ = __webpack_require__("./resources/assets/js/views/Auth/Login.vue");
/* harmony import */var __WEBPACK_IMPORTED_MODULE_2__views_Auth_Login_vue___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_2__views_Auth_Login_vue__);
/* harmony import */var __WEBPACK_IMPORTED_MODULE_3__views_Auth_Register_vue__ = __webpack_require__("./resources/assets/js/views/Auth/Register.vue");
/* harmony import */var __WEBPACK_IMPORTED_MODULE_3__views_Auth_Register_vue___default = __webpack_require__.n(__WEBPACK_IMPORTED_MODULE_3__views_Auth_Register_vue__);
__WEBPACK_IMPORTED_MODULE_0_vue___default.a.use(__WEBPACK_IMPORTED_MODULE_1_vue_router__["a" /* default */]);
var router = new __WEBPACK_IMPORTED_MODULE_1_vue_router__["a" /* default */]({
routes: [{ path: '/register', component: __WEBPACK_IMPORTED_MODULE_3__views_Auth_Register_vue___default.a }, { path: '/login', component: __WEBPACK_IMPORTED_MODULE_2__views_Auth_Login_vue___default.a }]
});
/* harmony default export */__webpack_exports__["a"] = router;
/***/
},
/***/"./resources/assets/js/store/auth.js":
/***/function resourcesAssetsJsStoreAuthJs(module, __webpack_exports__, __webpack_require__) {
"use strict";
/**
* Created by akke on 6/26/17.
*/
/* harmony default export */
__webpack_exports__["a"] = {
state: {
api_token: null,
user_id: null
},
initialize: function initialize() {
this.state.api_token = localStorage.getItem('api_token');
this.state.user_id = localStorage.getItem('user_id');
},
set: function set(api_token, user_id) {
localStorage.setItem('api_token', api_token);
localStorage.setItem('user_id', user_id);
},
remove: function remove() {
localStorage.removeItem('api_token');
localStorage.removeItem('user_id');
this.initialize();
}
};
/***/
},
/***/"./resources/assets/js/views/Auth/Login.vue":
/***/function resourcesAssetsJsViewsAuthLoginVue(module, exports, __webpack_require__) {
var Component = __webpack_require__("./node_modules/vue-loader/lib/component-normalizer.js")(
/* script */
__webpack_require__("./node_modules/babel-loader/lib/index.js?{\"cacheDirectory\":true,\"presets\":[[\"env\",{\"modules\":false,\"targets\":{\"browsers\":[\"> 2%\"],\"uglify\":true}}]]}!./node_modules/vue-loader/lib/selector.js?type=script&index=0!./resources/assets/js/views/Auth/Login.vue"),
/* template */
__webpack_require__("./node_modules/vue-loader/lib/template-compiler/index.js?{\"id\":\"data-v-fc9dd3f2\"}!./node_modules/vue-loader/lib/selector.js?type=template&index=0!./resources/assets/js/views/Auth/Login.vue"),
/* scopeId */
null,
/* cssModules */
null);
Component.options.__file = "/var/www/html/recipebox/resources/assets/js/views/Auth/Login.vue";
if (Component.esModule && Object.keys(Component.esModule).some(function (key) {
return key !== "default" && key !== "__esModule";
})) {
console.error("named exports are not supported in *.vue files.");
}
if (Component.options.functional) {
console.error("[vue-loader] Login.vue: functional components are not supported with templates, they should use render functions.");
}
/* hot reload */
if (false) {
(function () {
var hotAPI = require("vue-hot-reload-api");
hotAPI.install(require("vue"), false);
if (!hotAPI.compatible) return;
module.hot.accept();
if (!module.hot.data) {
hotAPI.createRecord("data-v-fc9dd3f2", Component.options);
} else {
hotAPI.reload("data-v-fc9dd3f2", Component.options);
}
})();
}
module.exports = Component.exports;
/***/
},
/***/"./resources/assets/js/views/Auth/Register.vue":
/***/function resourcesAssetsJsViewsAuthRegisterVue(module, exports, __webpack_require__) {
var Component = __webpack_require__("./node_modules/vue-loader/lib/component-normalizer.js")(
/* script */
__webpack_require__("./node_modules/babel-loader/lib/index.js?{\"cacheDirectory\":true,\"presets\":[[\"env\",{\"modules\":false,\"targets\":{\"browsers\":[\"> 2%\"],\"uglify\":true}}]]}!./node_modules/vue-loader/lib/selector.js?type=script&index=0!./resources/assets/js/views/Auth/Register.vue"),
/* template */
__webpack_require__("./node_modules/vue-loader/lib/template-compiler/index.js?{\"id\":\"data-v-1d037175\"}!./node_modules/vue-loader/lib/selector.js?type=template&index=0!./resources/assets/js/views/Auth/Register.vue"),
/* scopeId */
null,
/* cssModules */
null);
Component.options.__file = "/var/www/html/recipebox/resources/assets/js/views/Auth/Register.vue";
if (Component.esModule && Object.keys(Component.esModule).some(function (key) {
return key !== "default" && key !== "__esModule";
})) {
console.error("named exports are not supported in *.vue files.");
}
if (Component.options.functional) {
console.error("[vue-loader] Register.vue: functional components are not supported with templates, they should use render functions.");
}
/* hot reload */
if (false) {
(function () {
var hotAPI = require("vue-hot-reload-api");
hotAPI.install(require("vue"), false);
if (!hotAPI.compatible) return;
module.hot.accept();
if (!module.hot.data) {
hotAPI.createRecord("data-v-1d037175", Component.options);
} else {
hotAPI.reload("data-v-1d037175", Component.options);
}
})();
}
module.exports = Component.exports;
/***/
},
/***/"./resources/assets/sass/app.scss":
/***/function resourcesAssetsSassAppScss(module, exports) {
// removed by extract-text-webpack-plugin
/***/},
/***/0:
/***/function _(module, exports, __webpack_require__) {
__webpack_require__("./resources/assets/js/app.js");
module.exports = __webpack_require__("./resources/assets/sass/app.scss");
/***/
}
/******/ });
//# sourceMappingURL=app.d2aa8f2d2c6ee55b8886.js.map
|
*/
function throwIfCancellationRequested(config) {
if (config.cancelToken) {
|
__init__.py
|
from dataclasses import fields
from warnings import warn
__all__ = ['dataslots', 'with_slots']
def with_slots(*args, **kwargs):
warn("Use dataslots decorator instead of with_slots", category=PendingDeprecationWarning, stacklevel=2)
return dataslots(*args, **kwargs)
def dataslots(_cls=None, *, add_dict=False, add_weakref=False):
"""
Decorator to add __slots__ to a class created by dataclass. Returns a new class object, as it's not
possible to add __slots__ after class creation.
"""
def _slots_setstate(self, state):
|
def wrap(cls):
cls_dict = dict(cls.__dict__)
# Create only missing slots
inherited_slots = set().union(*(getattr(c, '__slots__', set()) for c in cls.mro()))
field_names = set(tuple(f.name for f in fields(cls)))
if add_dict:
field_names.add('__dict__')
if add_weakref:
field_names.add('__weakref__')
cls_dict['__slots__'] = tuple(field_names - inherited_slots)
# Erase field names from class __dict__
for f in field_names:
cls_dict.pop(f, None)
# Erase __dict__ and __weakref__
cls_dict.pop('__dict__', None)
cls_dict.pop('__weakref__', None)
# Pickle fix for frozen dataclass as mentioned in https://bugs.python.org/issue36424
# Use only if __getstate__ and __setstate__ are not declared and frozen=True
if all(param not in cls_dict for param in ['__getstate__', '__setstate__']) and \
cls.__dataclass_params__.frozen:
cls_dict['__setstate__'] = _slots_setstate
# Prepare new class with slots
new_cls = type(cls)(cls.__name__, cls.__bases__, cls_dict)
new_cls.__qualname__ = getattr(cls, '__qualname__')
return new_cls
return wrap if _cls is None else wrap(_cls)
|
for param_dict in filter(None, state):
for slot, value in param_dict.items():
object.__setattr__(self, slot, value)
|
webview.js
|
'use strict';
module.exports = (Franz, options) => {
function
|
() {
let directCount = 0;
let indirectCount = 0;
let roomInfoContainer = document.querySelectorAll('.unread');
Array.prototype.forEach.call(roomInfoContainer, function (room) {
directCount += 1;
});
Franz.setBadge(directCount, indirectCount);
}
Franz.loop(getMessages);
}
|
getMessages
|
auth.js
|
// todo: add auth schema's
// {
// connection: "Username-Password-Authentication",
// email: "[email protected]",
// password: "Password123!",
// user_metadata: {
// name: "John",
// surname: "Doe",
// },
// email_verified: false,
// verify_email: false,
// app_metadata: {},
// };
// const signUpSchema = Joi.object().keys({
// connection: Joi.string().required(),
// email: Joi.string().email().required(),
// password: Joi.string().required(),
|
// }),
// email_verified: Joi.boolean(),
// verify_email: Joi.boolean(),
// });
|
// user_metadata: Joi.object().keys({
// name: Joi.string(),
// surname: Joi.string(),
|
application.py
|
# Copyright (C) 2017 TU Dresden
# Licensed under the ISC license (see LICENSE.txt)
#
# Authors: Christian Menard
from mocasin.util import logging
from mocasin.simulate.channel import RuntimeChannel
from mocasin.simulate.process import RuntimeDataflowProcess
from mocasin.simulate.adapter import SimulateLoggerAdapter
log = logging.getLogger(__name__)
class RuntimeApplication(object):
"""Represents the runtime instance of an application.
Attributes:
name (str): the application name
system (System): the system the application is supposed to be
executed on
"""
def __init__(self, name, system):
"""Initialize a RuntimeApplication
Args:
name (str): the application name
system (System): the system the application is supposed to be
executed on
"""
self.name = name
self.system = system
@property
def env(self):
"""The simpy environment"""
return self.system.env
class RuntimeDataflowApplication(RuntimeApplication):
"""Represents the runtime instance of a dataflow application.
Attributes:
mapping (Mapping): a mapping object for this application
_processes (list of RuntimeProcess): a list of runtime processes that
belong to this application
_channels (list of RuntimeChannel): a list of runtime channels that
belong to this application
Args:
name (str): the application name
graph (DataflowGraph): the graph denoting the dataflow application
mapping (Mapping): a mapping to the platform implemented by system
trace (DataflowTrace): the trace representing the execution
behavior of the application
system (System): the system the application is supposed to be
executed on
wait_for_initial_tokens (bool): If true, the application's processes
only start if initial tokens (first reads in the trace) are
available. Otherwise, they would start and immediately block.
"""
def __init__(
self,
name,
graph,
app_trace,
system,
wait_for_initial_tokens=False,
):
super().__init__(name, system)
self.graph = graph
self.trace = app_trace
self._is_new = True
self._is_running = False
self._is_paused = False
self._is_finished = False
# a dict mapping each process to a processor/scheduler
# leave it uninitialized for now, it will be set by calling run()
self._process_mappings = None
log.debug("initialize new runtime application: %s", name)
# Instantiate all channels
self._channels = {}
for c in graph.channels():
self._channels[c.name] = RuntimeChannel(c.name, c.token_size, self)
# Instantiate all processes
self._processes = {}
for p in graph.processes():
proc = RuntimeDataflowProcess(p.name, self, wait_for_initial_tokens)
self._processes[p.name] = proc
for c in p.incoming_channels:
rc = self._channels[c.name]
proc.connect_to_incomming_channel(rc)
for c in p.outgoing_channels:
rc = self._channels[c.name]
proc.connect_to_outgoing_channel(rc)
self._log = SimulateLoggerAdapter(log, self.name, self.env)
def processes(self):
"""Get a list of all processes
:returns: a list of the application's processes
:rtype: list[RuntimeProcess]
"""
return self._processes.values()
def channels(self):
"""Get a list of all channels
:returns: a list of the application's channels
:rtype: list[RuntimeChannel]
"""
return self._channels.values()
def find_process(self, process_name):
"""Find a process by name"""
return self._processes[process_name]
def find_channel(self, channel_name):
"""Find a channel by name"""
return self._channels[channel_name]
def run(self, mapping):
"""Start execution of this application
Yields:
~simpy.events.Event: an event that is triggered when the
application finishes execution.
"""
assert self.is_new()
assert not self._process_mappings
self._is_new = False
self._is_running = True
self._log.info(f"Application {self.name} starts")
# map all processes and channels
# first some sanity checks
if mapping.graph != self.graph:
raise RuntimeError("dataflow graph and mapping incompatible")
if mapping.platform != self.system.platform:
raise RuntimeError(
f"Mapping {self.name} to an incompatible platform"
)
# map all channels:
for channel in self.graph.channels():
info = mapping.channel_info(channel)
self.find_channel(channel.name).update_mapping_info(info)
# map all processes
self._process_mappings = {}
for process in self.graph.processes():
info = mapping.process_info(process)
runtime_process = self.find_process(process.name)
self._process_mappings[runtime_process] = info.affinity
# start all the processes
for process, processor in self._process_mappings.items():
self.system.start_process(process, processor)
# create an event that is triggered when all processes completed and
# wait for this event
finished = self.env.all_of([p.finished for p in self.processes()])
|
self._log.info(f"Application {self.name} terminates")
self._is_running = False
self._is_finished = True
def kill(self):
"""Stop execution of this application
This method kills each running process of this application. The
processes might not stop immediately as operations such as producing
or consuming tokens are considered atomic and cannot be interrupted.
The simpy process managing run will terminate as soon as all processes
terminated.
Examples:
Usage::
app_finished = env.process(app.run())
yield env.timeout(1000000000) # wait 1ms
app.kill()
# wait until the application stopped completely
yield app_finished
"""
for p in self.processes():
p.kill()
def _is_state_valid(self):
"""Check that the application is exactly in one state."""
tup = (
self._is_new,
self._is_running,
self._is_paused,
self._is_finished,
)
return sum(tup) == 1
def is_new(self):
"""Check if the application has not yet started."""
assert self._is_state_valid()
return self._is_new
def is_running(self):
"""Check if the application is running."""
assert self._is_state_valid()
return self._is_running
def is_paused(self):
"""Check if the application is paused."""
assert self._is_state_valid()
return self._is_paused
def is_finished(self):
"""Check if the application is finished."""
assert self._is_state_valid()
return self._is_finished
def update_mapping(self, mapping):
"""Update the mapping used by this application, causing a migration of
processes.
Args:
mapping (Mapping): an updated mapping to be used by the application
"""
assert self.is_running()
self._log.debug("Update mapping")
# iterate over all processes
for process in self._process_mappings.keys():
current_processor = self._process_mappings[process]
dataflow_process = self.graph.find_process(process.name)
new_processor = mapping.process_info(dataflow_process).affinity
# move the processes
if current_processor != new_processor:
self._log.debug(
f"Move process {process.full_name} from {current_processor}"
f" to {new_processor}"
)
self._process_mappings[process] = new_processor
self.system.move_process(
process, current_processor, new_processor
)
# and also update the channel mappings
self._update_channel_mappings(mapping)
def _update_channel_mappings(self, mapping):
# iterate over all channels
for name, channel in self._channels.items():
dataflow_channel = self.graph.find_channel(name)
mapping_info = mapping.channel_info(dataflow_channel)
self._log.debug(
f"Update channel of {channel.name} primitive to "
f"{mapping_info.primitive.name}"
)
channel.update_mapping_info(mapping_info)
def pause(self):
"""Pause the execution of this application
The application can be resumed later by calling resume()
"""
assert self.is_running()
self._is_running = False
self._is_paused = True
self._log.debug("Pause")
# simply pause all processes
for process, current_processor in self._process_mappings.items():
self.system.pause_process(process, current_processor)
def resume(self, mapping=None):
"""Resume the execution of a paused application
Args:
mapping (Mapping, optional): an optional updated application mapping.
If None, the application is resumed with its old mapping.
"""
assert self.is_paused()
self._is_paused = False
self._is_running = True
self._log.debug("Resume")
if mapping:
# if a mapping is provided, we first need to update all channels
self._update_channel_mappings(mapping)
# and then we resume all processes on their new processors
for process in self._process_mappings.keys():
dataflow_process = self.graph.find_process(process.name)
new_processor = mapping.process_info(dataflow_process).affinity
self._process_mappings[process] = new_processor
self.system.resume_process(process, new_processor)
else:
# if no mapping is provided, then we resume all processes according
# to the old mapping
for process, processor in self._process_mappings.items():
self.system.resume_process(process, processor)
def get_progress(self):
"""Calculate how far this application has advanced its computation
The application progress is calculated as the average over the progress
of all processes. Note that the resulting progress can be lower than
expected if some of the processes are currently running. This is
because processes only update their status at certain points
(preemption, segment completion) and not continuously.
Returns:
float: completion ratio
"""
process_progress = [p.get_progress() for p in self._processes.values()]
return sum(process_progress) / len(process_progress)
|
finished.callbacks.append(self._app_finished_callback)
yield finished
def _app_finished_callback(self, event):
|
middleware.go
|
package account
import (
"errors"
"fmt"
"log"
"net/http"
"net/url"
"strings"
"github.com/Alkemic/webrss/repository"
)
const (
sessionCookieName = "session"
sessionUsernameKey = "userID"
backParamName = "back"
LoginPageURL = "/login"
LogoutPageURL = "/logout"
)
var ErrMissingUserID = errors.New("missing user id in session data")
type sessionRepository interface {
Get(sessionID string) (map[string]interface{}, error)
Set(sessionID string, data map[string]interface{}) error
Delete(sessionID string) error
}
func
|
(loginURL string, req *http.Request) string {
return loginURL + "?" + backParamName + "=" + url.PathEscape(req.RequestURI)
}
type Middleware struct {
log *log.Logger
settingsRepo settingsRepository
sessionRepo sessionRepository
}
func NewAuthenticateMiddleware(log *log.Logger, settingsRepo settingsRepository, sessionRepo sessionRepository) *Middleware {
return &Middleware{
log: log,
settingsRepo: settingsRepo,
sessionRepo: sessionRepo,
}
}
func (m *Middleware) getUsername(sessionID string) (string, error) {
sessionData, err := m.sessionRepo.Get(sessionID)
if err != nil {
return "", fmt.Errorf("cannot get session: %w", err)
}
rawUsername, ok := sessionData[sessionUsernameKey]
if !ok || rawUsername == "" {
return "", ErrMissingUserID
}
username, ok := rawUsername.(string)
if !ok {
return "", ErrMissingUserID
}
return username, nil
}
func (m *Middleware) LoginRequiredMiddleware(f http.HandlerFunc) http.HandlerFunc {
return func(rw http.ResponseWriter, req *http.Request) {
if req.URL.Path == LoginPageURL || req.URL.Path == LogoutPageURL {
f(rw, req)
return
}
sessionID, err := getSessionID(req)
if err != nil {
m.log.Println("cannot get session:", err)
http.Error(rw, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
username, err := m.getUsername(sessionID)
if err != nil {
if errors.Is(err, repository.ErrNotFound) || errors.Is(err, ErrMissingUserID) {
if strings.Contains(req.Header.Get("Accept"), "application/json") {
http.Error(rw, http.StatusText(http.StatusForbidden), http.StatusForbidden)
return
}
http.Redirect(rw, req, buildLoginUrl(LoginPageURL, req), http.StatusFound)
return
}
m.log.Println("cannot get user id:", err)
http.Error(rw, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
user, err := m.settingsRepo.GetUser(req.Context())
if err != nil {
m.log.Println("cannot get user:", err)
http.Error(rw, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
if user.Name != username {
m.log.Println("wrong user in session")
http.Error(rw, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
return
}
SetUser(req, user)
f(rw, req)
}
}
|
buildLoginUrl
|
run_tracker.py
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
print_function, unicode_literals)
import json
import os
import sys
import threading
import time
import urllib
from contextlib import contextmanager
from urlparse import urlparse
import httplib
from pants.base.config import Config
from pants.base.run_info import RunInfo
from pants.base.worker_pool import SubprocPool, WorkerPool
from pants.base.workunit import WorkUnit
from pants.goal.aggregated_timings import AggregatedTimings
from pants.goal.artifact_cache_stats import ArtifactCacheStats
from pants.reporting.report import Report
class RunTracker(object):
"""Tracks and times the execution of a pants run.
Also manages background work.
Use like this:
run_tracker.start()
with run_tracker.new_workunit('compile'):
with run_tracker.new_workunit('java'):
...
with run_tracker.new_workunit('scala'):
...
run_tracker.close()
Can track execution against multiple 'roots', e.g., one for the main thread and another for
background threads.
"""
# The name of the tracking root for the main thread (and the foreground worker threads).
DEFAULT_ROOT_NAME = 'main'
# The name of the tracking root for the background worker threads.
BACKGROUND_ROOT_NAME = 'background'
@classmethod
def from_config(cls, config):
if not isinstance(config, Config):
raise ValueError('Expected a Config object, given %s of type %s' % (config, type(config)))
info_dir = RunInfo.dir(config)
stats_upload_url = config.getdefault('stats_upload_url', default=None)
stats_upload_timeout = config.getdefault('stats_upload_timeout', default=2)
num_foreground_workers = config.getdefault('num_foreground_workers', default=8)
num_background_workers = config.getdefault('num_background_workers', default=8)
    return cls(info_dir,
               stats_upload_url=stats_upload_url,
               stats_upload_timeout=stats_upload_timeout,
               num_foreground_workers=num_foreground_workers,
               num_background_workers=num_background_workers)
def __init__(self,
info_dir,
stats_upload_url=None,
stats_upload_timeout=2,
num_foreground_workers=8,
num_background_workers=8):
self.run_timestamp = time.time() # A double, so we get subsecond precision for ids.
cmd_line = ' '.join(['./pants'] + sys.argv[1:])
# run_id is safe for use in paths.
millis = (self.run_timestamp * 1000) % 1000
run_id = 'pants_run_%s_%d' % \
(time.strftime('%Y_%m_%d_%H_%M_%S', time.localtime(self.run_timestamp)), millis)
self.info_dir = os.path.join(info_dir, run_id)
self.run_info = RunInfo(os.path.join(self.info_dir, 'info'))
self.run_info.add_basic_info(run_id, self.run_timestamp)
self.run_info.add_info('cmd_line', cmd_line)
self.stats_url = stats_upload_url
self.stats_timeout = stats_upload_timeout
# Create a 'latest' symlink, after we add_infos, so we're guaranteed that the file exists.
link_to_latest = os.path.join(os.path.dirname(self.info_dir), 'latest')
if os.path.lexists(link_to_latest):
os.unlink(link_to_latest)
os.symlink(self.info_dir, link_to_latest)
# Time spent in a workunit, including its children.
self.cumulative_timings = AggregatedTimings(os.path.join(self.info_dir, 'cumulative_timings'))
# Time spent in a workunit, not including its children.
self.self_timings = AggregatedTimings(os.path.join(self.info_dir, 'self_timings'))
# Hit/miss stats for the artifact cache.
self.artifact_cache_stats = \
ArtifactCacheStats(os.path.join(self.info_dir, 'artifact_cache_stats'))
# Number of threads for foreground work.
self._num_foreground_workers = num_foreground_workers
# Number of threads for background work.
self._num_background_workers = num_background_workers
# We report to this Report.
self.report = None
# self._threadlocal.current_workunit contains the current workunit for the calling thread.
# Note that multiple threads may share a name (e.g., all the threads in a pool).
self._threadlocal = threading.local()
# For main thread work. Created on start().
self._main_root_workunit = None
# For concurrent foreground work. Created lazily if needed.
# Associated with the main thread's root workunit.
self._foreground_worker_pool = None
# For background work. Created lazily if needed.
self._background_worker_pool = None
self._background_root_workunit = None
# Trigger subproc pool init while our memory image is still clean (see SubprocPool docstring)
SubprocPool.foreground()
self._aborted = False
def register_thread(self, parent_workunit):
"""Register the parent workunit for all work in the calling thread.
Multiple threads may have the same parent (e.g., all the threads in a pool).
"""
self._threadlocal.current_workunit = parent_workunit
def is_under_main_root(self, workunit):
"""Is the workunit running under the main thread's root."""
return workunit.root() == self._main_root_workunit
def start(self, report):
"""Start tracking this pants run.
report: an instance of pants.reporting.Report."""
self.report = report
self.report.open()
self._main_root_workunit = WorkUnit(run_tracker=self, parent=None, labels=[],
name=RunTracker.DEFAULT_ROOT_NAME, cmd=None)
self.register_thread(self._main_root_workunit)
self._main_root_workunit.start()
self.report.start_workunit(self._main_root_workunit)
def set_root_outcome(self, outcome):
"""Useful for setup code that doesn't have a reference to a workunit."""
self._main_root_workunit.set_outcome(outcome)
@contextmanager
def new_workunit(self, name, labels=None, cmd=''):
"""Creates a (hierarchical) subunit of work for the purpose of timing and reporting.
- name: A short name for this work. E.g., 'resolve', 'compile', 'scala', 'zinc'.
- labels: An optional iterable of labels. The reporters can use this to decide how to
display information about this work.
- cmd: An optional longer string representing this work.
E.g., the cmd line of a compiler invocation.
Use like this:
with run_tracker.new_workunit(name='compile', labels=[WorkUnit.TASK]) as workunit:
<do scoped work here>
<set the outcome on workunit if necessary>
Note that the outcome will automatically be set to failure if an exception is raised
in a workunit, and to success otherwise, so usually you only need to set the
outcome explicitly if you want to set it to warning.
"""
parent = self._threadlocal.current_workunit
with self.new_workunit_under_parent(name, parent=parent, labels=labels, cmd=cmd) as workunit:
self._threadlocal.current_workunit = workunit
try:
yield workunit
finally:
self._threadlocal.current_workunit = parent
@contextmanager
def new_workunit_under_parent(self, name, parent, labels=None, cmd=''):
"""Creates a (hierarchical) subunit of work for the purpose of timing and reporting.
- name: A short name for this work. E.g., 'resolve', 'compile', 'scala', 'zinc'.
- parent: The new workunit is created under this parent.
- labels: An optional iterable of labels. The reporters can use this to decide how to
display information about this work.
- cmd: An optional longer string representing this work.
E.g., the cmd line of a compiler invocation.
Task code should not typically call this directly.
"""
workunit = WorkUnit(run_tracker=self, parent=parent, name=name, labels=labels, cmd=cmd)
workunit.start()
try:
self.report.start_workunit(workunit)
yield workunit
except KeyboardInterrupt:
workunit.set_outcome(WorkUnit.ABORTED)
self._aborted = True
raise
except:
workunit.set_outcome(WorkUnit.FAILURE)
raise
else:
workunit.set_outcome(WorkUnit.SUCCESS)
finally:
self.report.end_workunit(workunit)
workunit.end()
def log(self, level, *msg_elements):
"""Log a message against the current workunit."""
self.report.log(self._threadlocal.current_workunit, level, *msg_elements)
def upload_stats(self):
"""Send timing results to URL specified in pants.ini"""
def error(msg):
      # Report already closed, so just print the error.
print("WARNING: Failed to upload stats to %s due to %s" % (self.stats_url, msg), file=sys.stderr)
if self.stats_url:
params = {
'run_info': json.dumps(self.run_info.get_as_dict()),
'cumulative_timings': json.dumps(self.cumulative_timings.get_all()),
'self_timings': json.dumps(self.self_timings.get_all()),
'artifact_cache_stats': json.dumps(self.artifact_cache_stats.get_all())
}
headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
url = urlparse(self.stats_url)
try:
if url.scheme == 'https':
http_conn = httplib.HTTPSConnection(url.netloc, timeout=self.stats_timeout)
else:
http_conn = httplib.HTTPConnection(url.netloc, timeout=self.stats_timeout)
http_conn.request('POST', url.path, urllib.urlencode(params), headers)
resp = http_conn.getresponse()
if resp.status != 200:
error("HTTP error code: %d" % resp.status)
except Exception as e:
error("Error: %s" % e)
def end(self):
"""This pants run is over, so stop tracking it.
Note: If end() has been called once, subsequent calls are no-ops.
"""
if self._background_worker_pool:
if self._aborted:
self.log(Report.INFO, "Aborting background workers.")
self._background_worker_pool.abort()
else:
self.log(Report.INFO, "Waiting for background workers to finish.")
self._background_worker_pool.shutdown()
self.report.end_workunit(self._background_root_workunit)
self._background_root_workunit.end()
if self._foreground_worker_pool:
if self._aborted:
self.log(Report.INFO, "Aborting foreground workers.")
self._foreground_worker_pool.abort()
else:
self.log(Report.INFO, "Waiting for foreground workers to finish.")
self._foreground_worker_pool.shutdown()
SubprocPool.shutdown(self._aborted)
self.report.end_workunit(self._main_root_workunit)
self._main_root_workunit.end()
outcome = self._main_root_workunit.outcome()
if self._background_root_workunit:
outcome = min(outcome, self._background_root_workunit.outcome())
outcome_str = WorkUnit.outcome_string(outcome)
log_level = WorkUnit.choose_for_outcome(outcome, Report.ERROR, Report.ERROR,
Report.WARN, Report.INFO, Report.INFO)
self.log(log_level, outcome_str)
if self.run_info.get_info('outcome') is None:
try:
self.run_info.add_info('outcome', outcome_str)
except IOError:
pass # If the goal is clean-all then the run info dir no longer exists...
self.report.close()
self.upload_stats()
def
|
(self):
if self._foreground_worker_pool is None: # Initialize lazily.
self._foreground_worker_pool = WorkerPool(parent_workunit=self._main_root_workunit,
run_tracker=self,
num_workers=self._num_foreground_workers)
return self._foreground_worker_pool
def get_background_root_workunit(self):
if self._background_root_workunit is None:
self._background_root_workunit = WorkUnit(run_tracker=self, parent=None, labels=[],
name='background', cmd=None)
self._background_root_workunit.start()
self.report.start_workunit(self._background_root_workunit)
return self._background_root_workunit
def background_worker_pool(self):
if self._background_worker_pool is None: # Initialize lazily.
self._background_worker_pool = WorkerPool(parent_workunit=self.get_background_root_workunit(),
run_tracker=self,
num_workers=self._num_background_workers)
return self._background_worker_pool
|
foreground_worker_pool
|
batsim.rs
|
extern crate env_logger;
extern crate log;
extern crate zmq;
extern crate serde_json;
use std::collections::HashMap;
use self::serde_json::Error;
use json_protocol::*;
use std::fmt;
/// Base trait to implement scheduler for batsim.
pub trait Scheduler {
    /// When the simulation is started, batsim will call `simulation_begins` to give the
    /// scheduler information on the simulation, such as the number of resources available,
    /// an optional configuration and the original timestamp.
#[warn(unused_variables)]
fn simulation_begins(
&mut self,
timestamp: &f64,
nb_resources: i32,
config: serde_json::Value,
) -> Option<Vec<BatsimEvent>>;
    /// When batsim receives a job from the submitter, it informs the scheduler.
    /// This function can return an array of Batsim events to send back to batsim.
#[warn(unused_variables)]
fn on_job_submission(
&mut self,
timestamp: &f64,
job: Job,
profile: Option<Profile>,
) -> Option<Vec<BatsimEvent>>;
/// When a job is finished batsim will inform the scheduler with this function.
///
    /// * `self` Takes a mutable reference to the scheduler so we can update it.
    /// * `timestamp` The timestamp at which the event occurred.
/// * `job_id` The string id of the terminated job.
/// * `status` The return status of the job.
#[warn(unused_variables)]
fn on_job_completed(
&mut self,
timestamp: &f64,
job_id: String,
job_state: String,
return_code: i32,
alloc: String,
) -> Option<Vec<BatsimEvent>>;
    /// When the scheduler kills one or several jobs, batsim acknowledges it by sending back
    /// the ids of the killed jobs.
#[warn(unused_variables)]
fn on_job_killed(&mut self, timestamp: &f64, job_ids: Vec<String>) -> Option<Vec<BatsimEvent>>;
    /// This function is called after all events of a message have been handled,
    /// just before the resulting events are sent back to batsim.
#[warn(unused_variables)]
fn on_message_received_end(&mut self, timestamp: &mut f64) -> Option<Vec<BatsimEvent>>;
    /// This function is called at the reception of a message,
    /// before looping through each event and calling the `on_*` functions.
#[warn(unused_variables)]
fn on_message_received_begin(&mut self, timestamp: &f64) -> Option<Vec<BatsimEvent>>;
    /// This function is called at the end of the simulation,
    /// giving the scheduler a chance to do any final bookkeeping
    /// (e.g. report the time it spent scheduling).
#[warn(unused_variables)]
fn on_simulation_ends(&mut self, timestamp: &f64);
}
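// A minimal sketch (not part of the original crate) of implementing the trait
// above: `RejectOnly` is a hypothetical scheduler that rejects every submitted
// job. It relies only on the `Scheduler` trait and on `reject_job_event`,
// which is defined later in this file.
pub struct RejectOnly;
impl Scheduler for RejectOnly {
    fn simulation_begins(
        &mut self,
        _timestamp: &f64,
        _nb_resources: i32,
        _config: serde_json::Value,
    ) -> Option<Vec<BatsimEvent>> {
        None
    }
    fn on_job_submission(
        &mut self,
        timestamp: &f64,
        job: Job,
        _profile: Option<Profile>,
    ) -> Option<Vec<BatsimEvent>> {
        // Immediately reject the job instead of scheduling it.
        Some(vec![reject_job_event(*timestamp, &job)])
    }
    fn on_job_completed(
        &mut self,
        _timestamp: &f64,
        _job_id: String,
        _job_state: String,
        _return_code: i32,
        _alloc: String,
    ) -> Option<Vec<BatsimEvent>> {
        None
    }
    fn on_job_killed(&mut self, _timestamp: &f64, _job_ids: Vec<String>) -> Option<Vec<BatsimEvent>> {
        None
    }
    fn on_message_received_end(&mut self, _timestamp: &mut f64) -> Option<Vec<BatsimEvent>> {
        None
    }
    fn on_message_received_begin(&mut self, _timestamp: &f64) -> Option<Vec<BatsimEvent>> {
        None
    }
    fn on_simulation_ends(&mut self, _timestamp: &f64) {}
}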
pub struct Batsim<'a> {
//zmq_context: zmq::Context,
zmq_socket: zmq::Socket,
time: f64,
nb_resources: i32,
scheduler: &'a mut Scheduler,
}
impl Job {
    /// Split the job id in two parts `(workload id, job id)` as defined in batsim.
pub fn split_id(id: &String) -> (String, String) {
let workload;
let decimal_id;
let indx = id.find('!').unwrap();
{
let (w_id, job_id) = id.split_at(indx + 1);
decimal_id = String::from(job_id);
workload = String::from(w_id);
}
(workload, decimal_id)
}
}
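// Illustrative example (not from the original source): for an id of the form
// "workload!number", e.g. "w0!42", `Job::split_id` returns
// ("w0!".to_string(), "42".to_string()); the workload part keeps the trailing
// '!' separator because of the `split_at(indx + 1)` above.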
impl Clone for Job {
fn clone(&self) -> Job {
Job {
id: self.id.clone(),
res: self.res,
profile: self.profile.clone(),
subtime: self.subtime,
walltime: self.walltime,
other: self.other.clone(),
}
}
}
impl fmt::Display for Job {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"(id: {}\t,res: {},\t subtime: {},\t walltime: {})",
self.id, self.res, self.subtime, self.walltime
)
}
}
impl<'a> Batsim<'a> {
/// Constructs a new `Batsim`.
pub fn new(scheduler: &'a mut Scheduler, endpointport: Option<i64>) -> Batsim<'a> {
let socket_url = format!("tcp://*:{}", endpointport.unwrap_or(28000));
let context = zmq::Context::new();
let socket = context.socket(zmq::REP).unwrap();
assert!(socket.bind(socket_url.as_str()).is_ok());
Batsim {
scheduler: scheduler,
//zmq_context: context,
zmq_socket: socket,
time: -1.0,
nb_resources: -1,
}
}
pub fn init(&mut self) -> Result<(), Error> {
let init_message: BatsimMessage = self.get_next_message().unwrap();
let init_event: &BatsimEvent = &init_message.events[0];
let mut res = self.get_nop().unwrap();
match *init_event {
BatsimEvent::SIMULATION_BEGINS {
ref data,
ref timestamp,
} => {
let mut temp_timestamp = *timestamp;
self.nb_resources = data.nb_resources;
match self.scheduler.simulation_begins(
&mut temp_timestamp,
data.nb_resources,
data.config.clone(),
) {
Some(mut events) => res.events.append(&mut events),
None => {}
}
self.time = temp_timestamp;
}
_ => panic!("We should receive a SIMULATION BEGIN at this point"),
};
res.now = self.time;
try!(self.send_message(res));
Ok(())
}
pub fn get_next_message(&self) -> Option<BatsimMessage> {
let msg = self.pull_network_message().unwrap();
//Batsim will send us the init message with
//the data we need to initialize the scheduler.
Some(read_batsim_message(&msg).unwrap())
}
pub fn pull_network_message(&self) -> Result<String, Error> {
let msg = match self.zmq_socket.recv_msg(0) {
Ok(msg) => msg,
Err(why) => panic!("{:?}", why),
};
Ok(String::from(msg.as_str().unwrap()))
}
pub fn get_nop(&self) -> Result<BatsimMessage, Error>
|
pub fn send_message(&self, message: BatsimMessage) -> Result<(), Error> {
let message_json = serde_json::to_string(&message).unwrap();
self.zmq_socket.send_str(message_json.as_str(), 0).unwrap();
Ok(())
}
pub fn run_simulation(&mut self) -> Result<(), Error> {
match self.init() {
Ok(()) => {}
_ => panic!("Could not initialize the simulation, aborting."),
};
let mut next: Option<BatsimMessage> = self.get_next_message();
'main: while let Some(msg) = next {
trace!("Received msg: {:?}", msg);
self.time = msg.now;
let mut schedule_timestamp: f64 = msg.now;
let mut res = self.get_nop().unwrap();
match self
.scheduler
.on_message_received_begin(&schedule_timestamp)
{
Some(mut events) => res.events.append(&mut events),
None => {}
};
for event in msg.events {
match event {
BatsimEvent::SIMULATION_BEGINS { .. } => {
panic!(
"Received simulation_begins,
                        this should not happen at this point"
);
}
BatsimEvent::JOB_SUBMITTED {
ref data,
ref timestamp,
} => {
match self.scheduler.on_job_submission(
timestamp,
data.job.clone(),
data.profile.clone(),
) {
Some(mut events) => res.events.append(&mut events),
None => {}
};
}
//
BatsimEvent::SIMULATION_ENDS { ref timestamp } => {
self.scheduler.on_simulation_ends(timestamp);
next = None;
try!(self.send_message(res));
continue 'main;
}
BatsimEvent::JOB_COMPLETED {
ref data,
ref timestamp,
} => {
match self.scheduler.on_job_completed(
timestamp,
data.job_id.clone(),
data.job_state.clone(),
data.return_code.clone(),
data.alloc.clone(),
) {
Some(mut events) => res.events.append(&mut events),
None => {}
};
}
BatsimEvent::JOB_KILLED {
ref data,
ref timestamp,
} => {
match self
.scheduler
.on_job_killed(timestamp, data.job_ids.clone())
{
Some(mut events) => res.events.append(&mut events),
None => {}
};
}
BatsimEvent::NOTIFY {
ref timestamp,
ref data,
} => {
trace!("Notify: {:?}", data);
}
e => panic!("Unexpected event {:?}", e),
}
}
match self
.scheduler
.on_message_received_end(&mut schedule_timestamp)
{
Some(mut events) => res.events.append(&mut events),
None => {}
};
res.now = schedule_timestamp;
trace!("Send msg to batsim: {:?}", res);
try!(self.send_message(res));
next = self.get_next_message();
}
Ok(())
}
}
pub fn allocate_job_event(time: f64, job: &Job, allocation: String, mapping: Option<HashMap<String, String>>) -> BatsimEvent {
BatsimEvent::EXECUTE_JOB {
timestamp: time,
data: ExecuteJob {
job_id: job.id.clone(),
alloc: allocation,
mapping: mapping,
},
}
}
pub fn reject_job_event(time: f64, job: &Job) -> BatsimEvent {
BatsimEvent::REJECT_JOB {
timestamp: time,
data: RejectJob {
job_id: job.id.clone(),
},
}
}
pub fn notify_event(time: f64, n_type: String) -> BatsimEvent {
BatsimEvent::NOTIFY {
timestamp: time,
data: Notify {
notify_type: n_type.clone(),
},
}
}
pub fn submit_job_event(time: f64, job: &Job, profile: Option<&Profile>) -> BatsimEvent {
let p = match profile {
Some(prof) => Some(prof.clone()),
None => None,
};
BatsimEvent::SUBMIT_JOB {
timestamp: time,
data: SubmitJob {
job_id: job.id.clone(),
job: job.clone(),
profile: p,
},
}
}
pub fn kill_jobs_event(time: f64, jobs: Vec<&Job>) -> BatsimEvent {
let mut job_ids = vec![];
for job in jobs {
job_ids.push(job.id.clone());
}
BatsimEvent::KILL_JOB {
timestamp: time,
data: KillJob { job_ids: job_ids },
}
}
/// Convert a JSON formatted string into a typed rust struct `BatsimMessage`.
pub fn read_batsim_message(msg_str: &str) -> Result<BatsimMessage, Error> {
let message: BatsimMessage = match serde_json::from_str(msg_str) {
Ok(value) => value,
Err(why) => panic!("{:?} full str: {}", why, msg_str),
};
Ok(message)
}
|
{
Ok(BatsimMessage {
now: self.time,
events: Vec::new(),
})
}
|
actuator_status_provider.py
|
"""Stub method for providing current device actuator status."""
# Copyright 2019 WolkAbout Technology s.r.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Tuple
from typing import Union
from wolk_gateway_module.model.actuator_state import ActuatorState
def get_actuator_status(
device_key: str, reference: str
) -> Tuple[ActuatorState, Union[bool, int, float, str]]:
|
"""
Get current actuator status identified by device key and reference.
    Reads the status of the actuator from the device
and returns it as a tuple containing the actuator state and current value.
Must be implemented as non blocking.
Must be implemented as thread safe.
:param device_key: Device key to which the actuator belongs to
:type device_key: str
:param reference: Actuator reference
:type reference: str
:returns: (state, value)
:rtype: (ActuatorState, bool or int or float or str)
"""
raise NotImplementedError
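# Illustrative sketch (not part of the stub): a concrete implementation might
# read a cached value and report it as ready. `device_cache` is hypothetical,
# and ActuatorState.READY is assumed to be a member of the imported enum.
#
#     def get_actuator_status(device_key, reference):
#         value = device_cache[device_key][reference]
#         return ActuatorState.READY, value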
|
|
email.go
|
package notify
import (
"github.com/TimothyYe/godns/internal/settings"
log "github.com/sirupsen/logrus"
"gopkg.in/gomail.v2"
)
type EmailNotify struct {
conf *settings.Settings
}
func
|
(conf *settings.Settings) INotify {
return &EmailNotify{conf: conf}
}
func (n *EmailNotify) Send(domain, currentIP string) error {
log.Debug("Sending notification to: ", n.conf.Notify.Mail.SendTo)
m := gomail.NewMessage()
m.SetHeader("From", n.conf.Notify.Mail.SMTPUsername)
m.SetHeader("To", n.conf.Notify.Mail.SendTo)
m.SetHeader("Subject", "GoDNS Notification")
log.Debug("currentIP:", currentIP)
log.Debug("domain:", domain)
m.SetBody("text/html", buildTemplate(currentIP, domain, mailTemplate))
d := gomail.NewDialer(
n.conf.Notify.Mail.SMTPServer,
n.conf.Notify.Mail.SMTPPort,
n.conf.Notify.Mail.SMTPUsername,
n.conf.Notify.Mail.SMTPPassword)
	// Send the email using the configured SMTP settings.
if err := d.DialAndSend(m); err != nil {
return err
}
return nil
}
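// Usage sketch (illustrative): NewEmailNotify is the constructor completed
// elsewhere in this file, and conf is an assumed *settings.Settings:
//
//	notifier := NewEmailNotify(conf)
//	if err := notifier.Send("example.com", "203.0.113.7"); err != nil {
//		log.Error("mail notification failed: ", err)
//	}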
var mailTemplate = `
<html>
<body>
<div role="section">
<div style="background-color: #281557;">
<div class="layout one-col" style="Margin: 0 auto;max-width: 600px;min-width: 320px; width: 320px;width: calc(28000% - 167400px);overflow-wrap: break-word;word-wrap: break-word;word-break: break-word;">
<div class="layout__inner" style="border-collapse: collapse;display: table;width: 100%;">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" role="presentation"><tr class="layout-full-width" style="background-color: #281557;"><td class="layout__edges"> </td><td style="width: 600px" class="w560"><![endif]-->
<div class="column" style="max-width: 600px;min-width: 320px; width: 320px;width: calc(28000% - 167400px);text-align: left;color: #8e959c;font-size: 14px;line-height: 21px;font-family: sans-serif;">
<div style="Margin-left: 20px;Margin-right: 20px;">
<div style="mso-line-height-rule: exactly;line-height: 10px;font-size: 1px;"> </div>
</div>
</div>
<!--[if (mso)|(IE)]></td><td class="layout__edges"> </td></tr></table><![endif]-->
</div>
</div>
</div>
<div style="background-color: #281557;">
<div class="layout one-col" style="Margin: 0 auto;max-width: 600px;min-width: 320px; width: 320px;width: calc(28000% - 167400px);overflow-wrap: break-word;word-wrap: break-word;word-break: break-word;">
<div class="layout__inner" style="border-collapse: collapse;display: table;width: 100%;">
<!--[if (mso)|(IE)]><table width="100%" cellpadding="0" cellspacing="0" role="presentation"><tr class="layout-full-width" style="background-color: #281557;"><td class="layout__edges"> </td><td style="width: 600px" class="w560"><![endif]-->
<div class="column" style="max-width: 600px;min-width: 320px; width: 320px;width: calc(28000% - 167400px);text-align: left;color: #8e959c;font-size: 14px;line-height: 21px;font-family: sans-serif;">
<div style="Margin-left: 20px;Margin-right: 20px;">
<div style="mso-line-height-rule: exactly;line-height: 50px;font-size: 1px;"> </div>
</div>
<div style="Margin-left: 20px;Margin-right: 20px;">
<div style="mso-line-height-rule: exactly;mso-text-raise: 4px;">
<h1 class="size-28" style="Margin-top: 0;Margin-bottom: 0;font-style: normal;font-weight: normal;color: #000;font-size: 24px;line-height: 32px;font-family: avenir,sans-serif;text-align: center;"
lang="x-size-28">
<span class="font-avenir">
<span style="color:#ffffff">Your IP address has been changed to</span>
</span>
</h1>
<h1 class="size-48" style="Margin-top: 20px;Margin-bottom: 0;font-style: normal;font-weight: normal;color: #000;font-size: 36px;line-height: 43px;font-family: avenir,sans-serif;text-align: center;"
lang="x-size-48">
<span class="font-avenir">
<strong>
<span style="color:#ffffff">{{ .CurrentIP }}</span>
</strong>
</span>
</h1>
<h2 class="size-28" style="Margin-top: 20px;Margin-bottom: 16px;font-style: normal;font-weight: normal;color: #e31212;font-size: 24px;line-height: 32px;font-family: Avenir,sans-serif;text-align: center;"
lang="x-size-28">
<font color="#ffffff">
<strong>Domain {{ .Domain }} is updated</strong>
</font>
</h2>
</div>
</div>
<div style="Margin-left: 20px;Margin-right: 20px;">
<div style="mso-line-height-rule: exactly;line-height: 15px;font-size: 1px;"> </div>
</div>
<div style="Margin-left: 20px;Margin-right: 20px;">
<div style="mso-line-height-rule: exactly;line-height: 35px;font-size: 1px;"> </div>
</div>
</div>
<!--[if (mso)|(IE)]></td><td class="layout__edges"> </td></tr></table><![endif]-->
</div>
</div>
</div>
<div style="mso-line-height-rule: exactly;line-height: 20px;font-size: 20px;"> </div>
<div style="mso-line-height-rule: exactly;" role="contentinfo">
<div class="layout email-footer" style="Margin: 0 auto;max-width: 600px;min-width: 320px; width: 320px;width: calc(28000% - 167400px);overflow-wrap: break-word;word-wrap: break-word;word-break: break-word;">
<div class="layout__inner" style="border-collapse: collapse;display: table;width: 100%;">
<!--[if (mso)|(IE)]><table align="center" cellpadding="0" cellspacing="0" role="presentation"><tr class="layout-email-footer"><td style="width: 400px;" valign="top" class="w360"><![endif]-->
<div class="column wide" style="text-align: left;font-size: 12px;line-height: 19px;color: #adb3b9;font-family: sans-serif;Float: left;max-width: 400px;min-width: 320px; width: 320px;width: calc(8000% - 47600px);">
<div style="Margin-left: 20px;Margin-right: 20px;Margin-top: 10px;Margin-bottom: 10px;">
<div style="font-size: 12px;line-height: 19px;">
</div>
<div style="font-size: 12px;line-height: 19px;Margin-top: 18px;">
</div>
<!--[if mso]> <![endif]-->
</div>
</div>
<!--[if (mso)|(IE)]></td><td style="width: 200px;" valign="top" class="w160"><![endif]-->
<div class="column narrow" style="text-align: left;font-size: 12px;line-height: 19px;color: #adb3b9;font-family: sans-serif;Float: left;max-width: 320px;min-width: 200px; width: 320px;width: calc(72200px - 12000%);">
<div style="Margin-left: 20px;Margin-right: 20px;Margin-top: 10px;Margin-bottom: 10px;">
</div>
</div>
<!--[if (mso)|(IE)]></td></tr></table><![endif]-->
</div>
</div>
</div>
<div style="mso-line-height-rule: exactly;line-height: 40px;font-size: 40px;"> </div>
</body>
</div>
</html>
`
|
NewEmailNotify
|
main.rs
|
use code_generator::CodeGenerator;
use syntax_analyzer::ast_node::*;
use std::env;
use std::fs::File;
use std::io::{stdin, stdout, BufRead, BufReader, BufWriter, Read, Write};
fn main() {
let mut reader: Box<dyn BufRead> = match env::args().nth(1) {
None => Box::new(BufReader::new(stdin())),
Some(filename) => Box::new(BufReader::new(
File::open(filename).expect("cannot open file"),
)),
};
let mut writer: Box<dyn Write> = match env::args().nth(2) {
None => Box::new(BufWriter::new(stdout())),
Some(filename) => Box::new(BufWriter::new(
File::create(filename).expect("cannot create file"),
)),
};
let mut ast_str = String::new();
|
let code = CodeGenerator::generate(&ast).unwrap();
writer.write(code.as_bytes()).expect("write error");
}
|
reader.read_to_string(&mut ast_str).expect("read error");
let ast = ASTReader::read_ast(ast_str.lines());
|
flask_sendgrid.py
|
# -*- coding: utf-8 -*-
"""Flask extension pacakge for Sendgrid"""
from . import FlaskExtension
from sendgrid import SendGridClient, Mail
class SendGrid(FlaskExtension):
"""A helper class for managing a the SendGrid API calls"""
EXTENSION_NAME = 'sendgrid'
def __init__(self, app=None):
super(SendGrid, self).__init__(app=app)
def _create_instance(self, app):
client = SendGridClient(
app.config.get('SENDGRID_USERNAME'),
app.config.get('SENDGRID_PASSWORD'))
return client
def
|
(self, body=None, subject=None, recipient=None, sender=None):
"""Sends an email"""
mail = Mail(to=recipient,
from_email=sender,
subject=subject,
text=body)
self.instance.send(mail)
|
send_mail
|
elements.py
|
import re
from collections import defaultdict
from datetime import datetime
from elasticsearch_dsl import Keyword, Text
from protean import BaseAggregate, BaseValueObject
from protean.core.model import BaseModel
from protean.fields import DateTime, Integer, String
from protean.fields import Text as ProteanText
from protean.fields import ValueObject
class Person(BaseAggregate):
first_name = String(max_length=50, required=True)
last_name = String(max_length=50, required=True)
age = Integer(default=21)
created_at = DateTime(default=datetime.now())
class Alien(BaseAggregate):
first_name = String(max_length=50, required=True)
last_name = String(max_length=50, required=True)
age = Integer(default=21)
class User(BaseAggregate):
email = String(max_length=255, required=True, unique=True)
password = String(max_length=3026)
class Email(BaseValueObject):
REGEXP = r"\"?([-a-zA-Z0-9.`?{}]+@\w+\.\w+)\"?"
# This is the external facing data attribute
address = String(max_length=254, required=True)
def clean(self):
"""Business rules of Email address"""
errors = defaultdict(list)
if not bool(re.match(Email.REGEXP, self.address)):
errors["address"].append("is invalid")
return errors
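# Illustrative sketch (not from the original fixtures), assuming construction
# succeeds and validation is driven by clean():
#   Email(address="user@example.com").clean()  ->  {} (no errors)
#   Email(address="not-an-email").clean()      ->  {"address": ["is invalid"]}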
class ComplexUser(BaseAggregate):
email = ValueObject(Email, required=True)
password = String(required=True, max_length=255)
class Provider(BaseAggregate):
name = ProteanText()
about = ProteanText()
class ProviderCustomModel(BaseModel):
id = Keyword()
name = Text(fields={"raw": Keyword()})
about = Text()
class
|
:
schema = "providers"
class Receiver(BaseAggregate):
name = String()
age = Integer()
|
Meta
|
settings.py
|
"""
All settings commands are collected here
"""
import json
from vkbottle.user import Blueprint, Message
from utils.edit_msg import edit_msg
from utils.emojis import ENABLED, DISABLED, ERROR
from filters import ForEveryoneRule
bp = Blueprint("Settings command")
@bp.on.message(ForEveryoneRule("settings"), text="<prefix>для всех <command>")
async def for_everyone_handler(message: Message
|
    Command for changing the availability of other commands for other people
"""
with open("commands_for_everyone.json", "r", encoding="utf-8") as file:
content = json.load(file)
if command == "default":
with open("commands_for_everyone.json", "w", encoding="utf-8") as file:
content = {
"advancements": True,
"blank": True,
"bomb": True,
"code": False,
"demotivator": True,
"info": True,
"interactive_commands": True,
"ping": True,
"random_case": True,
"settings": False,
"show_config": False,
}
file.write(json.dumps(content, indent=4))
await edit_msg(
bp.api,
message,
f"{ENABLED} | Настройки для всех вернуты к значению по умолчанию",
)
elif command == "none":
with open("commands_for_everyone.json", "w", encoding="utf-8") as file:
for allowed_command in content:
content[allowed_command] = False
file.write(json.dumps(content, indent=4))
await edit_msg(
bp.api, message, f"{DISABLED} | Все команды для всех выключены"
)
elif command not in content:
await edit_msg(bp.api, message, f"{ERROR} | Такой команды нет ")
else:
if content[command]:
content[command] = False
with open(
"commands_for_everyone.json", "w", encoding="utf-8"
) as file:
content[command] = False
file.write(json.dumps(content, indent=4))
await edit_msg(
bp.api, message, f"{DISABLED} | Команда {command} отключена "
)
else:
content[command] = True
with open(
"commands_for_everyone.json", "w", encoding="utf-8"
) as file:
content[command] = True
file.write(json.dumps(content, indent=4))
await edit_msg(
bp.api,
message,
f"Команда {command} включена " + ENABLED,
)
@bp.on.message(ForEveryoneRule("settings"), text="<prefix>для всех")
async def show_for_everyone_handler(message: Message):
"""
    Command for checking which commands are available to other people
"""
with open("commands_for_everyone.json", "r", encoding="utf-8") as file:
content = json.load(file)
text = "Команды для всех:\n"
for command in content:
if content[command]:
text += f"{command} | {ENABLED}\n"
else:
text += f"{command} | {DISABLED}\n"
await edit_msg(bp.api, message, text)
@bp.on.message(ForEveryoneRule("settings"), text="<prefix>время бомбы <time>")
async def set_bomb_time_handler(message: Message, time):
"""
    Command for configuring the bomb timer (!бомба)
"""
try:
time = int(time)
except ValueError:
await edit_msg(
bp.api,
message,
"Время бомбы - не число! " + ERROR,
)
return
if time < 1:
await edit_msg(
bp.api,
message,
"Время бомбы не может быть меньше 1! " + ERROR,
)
else:
with open("config.json", "r", encoding="utf-8") as file:
content = json.load(file)
with open("config.json", "w", encoding="utf-8") as file:
content["bomb_time"] = int(message.text.split()[2])
file.write(json.dumps(content, indent=4))
await edit_msg(
bp.api,
message,
f"{ENABLED} | Время бомбы изменено на "
f"{content['bomb_time']} секунд ",
)
@bp.on.message(
ForEveryoneRule("settings"), text="<prefix>время удаления <time>"
)
async def set_delete_time_handler(message: Message, time):
"""
    Command for configuring the deletion delay for all executed commands
"""
try:
time = int(time)
except ValueError:
await edit_msg(
bp.api,
message,
"Время удаления - не число! " + ERROR,
)
return
if time < 0:
await edit_msg(
bp.api,
message,
"Время удаления не может быть меньше 0! " + ERROR,
)
else:
with open("config.json", "r", encoding="utf-8") as file:
content = json.load(file)
with open("config.json", "w", encoding="utf-8") as file:
content["delete_after"] = int(message.text.split()[2])
file.write(json.dumps(content, indent=4))
await edit_msg(
bp.api,
message,
f"{ENABLED} | Время удаления изменено на "
f"{content['delete_after']} секунд",
)
@bp.on.message(
ForEveryoneRule("settings"), text="<prefix>префикс <prefix_new>"
)
async def set_prefix_handler(message: Message, prefix_new):
"""
    Command for changing the bot prefix
"""
with open("config.json", "r", encoding="utf-8") as file:
content = json.load(file)
with open("config.json", "w", encoding="utf-8") as file:
content["prefix"] = prefix_new
file.write(json.dumps(content, indent=4))
await edit_msg(
bp.api,
message,
f'{ENABLED} | Ваш префикс изменился на "{content["prefix"]}"!',
)
@bp.on.message(ForEveryoneRule("settings"), text="<prefix>инфо лс")
async def info_in_dm_handler(message: Message):
"""
    Command for changing how info about users (!инфо) is delivered (chat or DM)
"""
with open("config.json", "r", encoding="utf-8") as file:
content = json.load(file)
if content["send_info_in_dm"]:
content["send_info_in_dm"] = False
with open("config.json", "w", encoding="utf-8") as file:
file.write(json.dumps(content, indent=4))
await edit_msg(
bp.api,
message,
"👥 | Теперь информация будет присылаться в чат",
)
else:
content["send_info_in_dm"] = True
with open("config.json", "w", encoding="utf-8") as file:
file.write(json.dumps(content, indent=4))
await edit_msg(
bp.api,
message,
"👤 | Теперь информация будет присылаться в лс",
)
@bp.on.message(ForEveryoneRule("settings"), text="<prefix>ред")
async def edit_or_del_handler(message: Message):
"""
    Command for choosing whether command output is edited or sent as new messages
"""
with open("config.json", "r", encoding="utf-8") as file:
content = json.load(file)
if content["edit_or_send"] == "edit":
content["edit_or_send"] = "send"
with open("config.json", "w", encoding="utf-8") as file:
file.write(json.dumps(content, indent=4))
await edit_msg(
bp.api,
message,
f"{DISABLED} | Теперь сообщения будут отправляться, а не "
"редактироваться",
)
else:
content["edit_or_send"] = "edit"
with open("config.json", "w", encoding="utf-8") as file:
file.write(json.dumps(content, indent=4))
await edit_msg(
bp.api,
message,
f"{ENABLED} | Теперь сообщения будут редактироваться, а не "
"отправляться",
)
@bp.on.message(ForEveryoneRule("settings"), text="<prefix>debug")
async def debug_mode_handler(message: Message):
"""
    Command for enabling and disabling debug mode
"""
with open("config.json", "r", encoding="utf-8") as file:
content = json.load(file)
if content["debug"]:
content["debug"] = False
with open("config.json", "w", encoding="utf-8") as file:
file.write(json.dumps(content, indent=4))
await edit_msg(bp.api, message, f"{DISABLED} | Debug-режим выключен")
else:
content["debug"] = True
with open("config.json", "w", encoding="utf-8") as file:
file.write(json.dumps(content, indent=4))
await edit_msg(bp.api, message, f"{ENABLED} | Debug-режим включен")
|
, command):
"""
|
cherrypick-must-have-milestone.go
|
/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package mungers
import (
"fmt"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/test-infra/mungegithub/features"
"k8s.io/test-infra/mungegithub/github"
"k8s.io/test-infra/mungegithub/options"
"github.com/golang/glog"
githubapi "github.com/google/go-github/github"
)
const (
pickMustHaveMilestoneFormat = "Removing label `%s` because no release milestone was set. This is an invalid state and thus this PR is not being considered for cherry-pick to any release branch. Please add an appropriate release milestone and then re-add the label."
)
var (
pickMustHaveMilestoneBody = fmt.Sprintf(pickMustHaveMilestoneFormat, cpCandidateLabel)
)
// PickMustHaveMilestone will remove the cherrypick-candidate label from
// any PR that does not have a 'release' milestone set.
type PickMustHaveMilestone struct{}
func init()
|
// Name is the name usable in --pr-mungers
func (PickMustHaveMilestone) Name() string { return "cherrypick-must-have-milestone" }
// RequiredFeatures is a slice of 'features' that must be provided
func (PickMustHaveMilestone) RequiredFeatures() []string { return []string{} }
// Initialize will initialize the munger
func (PickMustHaveMilestone) Initialize(config *github.Config, features *features.Features) error {
return nil
}
// EachLoop is called at the start of every munge loop
func (PickMustHaveMilestone) EachLoop() error { return nil }
// RegisterOptions registers options for this munger; returns any that require a restart when changed.
func (PickMustHaveMilestone) RegisterOptions(opts *options.Options) sets.String { return nil }
// Munge is the workhorse the will actually make updates to the PR
func (PickMustHaveMilestone) Munge(obj *github.MungeObject) {
if !obj.IsPR() {
return
}
if !obj.HasLabel(cpCandidateLabel) {
return
}
releaseMilestone, ok := obj.ReleaseMilestone()
if !ok {
return
}
hasLabel := obj.HasLabel(cpCandidateLabel)
if hasLabel && releaseMilestone == "" {
obj.WriteComment(pickMustHaveMilestoneBody)
obj.RemoveLabel(cpCandidateLabel)
}
}
func (PickMustHaveMilestone) isStaleIssueComment(obj *github.MungeObject, comment *githubapi.IssueComment) bool {
if !obj.IsRobot(comment.User) {
return false
}
if *comment.Body != pickMustHaveMilestoneBody {
return false
}
milestone, ok := obj.ReleaseMilestone()
if !ok {
return false
}
stale := milestone != ""
if stale {
glog.V(6).Infof("Found stale PickMustHaveMilestone comment")
}
return stale
}
// StaleIssueComments returns a slice of stale issue comments.
func (p PickMustHaveMilestone) StaleIssueComments(obj *github.MungeObject, comments []*githubapi.IssueComment) []*githubapi.IssueComment {
return forEachCommentTest(obj, comments, p.isStaleIssueComment)
}
|
{
p := PickMustHaveMilestone{}
RegisterMungerOrDie(p)
RegisterStaleIssueComments(p)
}
|
main.rs
|
// Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! Clidoc generates documentation for host tool commands consisting of their --help output.
use {
anyhow::{bail, Context, Result},
argh::FromArgs,
flate2::{write::GzEncoder, Compression},
log::{debug, info, LevelFilter},
std::{
collections::HashSet,
env,
ffi::{OsStr, OsString},
fs::{self, File},
io::{BufWriter, Write},
path::{Path, PathBuf},
process::Command,
sync::Once,
},
tar::Builder,
};
|
use simplelog::{Config, SimpleLogger};
/// CliDoc generates documentation for core Fuchsia developer tools.
#[derive(Debug, FromArgs)]
struct Opt {
// Default input dir is parent dir of this tool, containing host tools exes
// $FUCHSIA_DIR/out/default/host_x64 or $FUCHSIA_DIR/out/default/host-tools
/// set the input folder
#[argh(
option,
short = 'i',
default = "env::current_exe().unwrap().parent().unwrap().to_path_buf()"
)]
in_dir: PathBuf,
/// set the output directory
#[argh(option, short = 'o', default = "PathBuf::from(\".\".to_string())")]
out_dir: PathBuf,
/// reduce text output
#[argh(switch)]
quiet: bool,
/// increase text output
#[argh(switch, short = 'v')]
verbose: bool,
    /// path for tarball - if set, the output will be compressed as a tarball
/// and intermediate files will be cleaned up
/// For example: "clidoc_out.tar.gz". Note that .tar.gz is not automatically
/// added as a file extension.
#[argh(option)]
tarball_dir: Option<PathBuf>,
/// commands to run, otherwise defaults to internal list of commands.
/// relative paths are on the input_path. Absolute paths are used as-is.
#[argh(positional)]
cmd_list: Vec<PathBuf>,
}
// Formatting styles for codeblocks.
const CODEBLOCK_START: &str = "```none {: style=\"white-space: break-spaces;\" \
.devsite-disable-click-to-copy}\n";
const CODEBLOCK_END: &str = "```\n";
const HEADER: &str = r#"<!-- DO NOT EDIT THIS FILE DIRECTLY
This file is generated using clidoc by parsing the help output of this tool.
Please edit the help output or clidoc's processing of that output to make changes
to this file.
-->
"#;
// TODO(fxb/69336): Move allow list to its own separate config file.
const ALLOW_LIST: &'static [&'static str] = &[
"blobfs-compression",
"bootserver",
"cmc",
"fconfig",
"ffx",
"fidl-format",
"fidlc",
"fidlcat",
"fidlgen",
"fpublish",
"fremote",
"fserve",
"fssh",
"fvdl",
"minfs",
"pm",
"symbol-index",
"symbolize",
"symbolizer",
"triage",
"zbi",
"zxdb",
];
fn main() -> Result<()> {
let opt: Opt = argh::from_env();
run(opt)
}
static INIT_LOGGER: Once = Once::new();
fn set_up_logger(opt: &Opt) {
INIT_LOGGER.call_once(|| {
if opt.verbose {
SimpleLogger::init(LevelFilter::Debug, Config::default())
.expect("Set logger to debug level");
debug!("Debug logging enabled.");
} else if opt.quiet {
SimpleLogger::init(LevelFilter::Warn, Config::default())
.expect("Set logger to warn level");
} else {
SimpleLogger::init(LevelFilter::Info, Config::default())
.expect("Set logger to info level");
}
});
}
fn run(opt: Opt) -> Result<()> {
if opt.quiet && opt.verbose {
bail!("cannot use --quiet and --verbose together");
}
set_up_logger(&opt);
// Set the directory for the command executables.
let input_path = &opt.in_dir;
info!("Input dir: {}", input_path.display());
// Set the directory to output documentation to.
let output_path = &opt.out_dir;
info!("Output dir: {}", output_path.display());
let mut cmd_paths: Vec<PathBuf>;
if opt.cmd_list.is_empty() {
debug!("Building cmd list from defaults");
// Create a set of SDK tools to generate documentation for.
let allow_list: HashSet<OsString> =
ALLOW_LIST.iter().cloned().map(OsString::from).collect();
// Create a vector of full paths to each command in the allow_list.
cmd_paths = get_command_paths(&input_path, &allow_list)?;
} else {
// Use the commands passed on the command line. If they are relative paths,
// make them absolute based on the input_path.
cmd_paths = Vec::new();
for p in opt.cmd_list {
if p.is_absolute() {
cmd_paths.push(p);
} else {
cmd_paths.push(input_path.join(p));
}
}
debug!("Using cmds from opt.cmd_list: {:?}", cmd_paths);
}
// Create the directory for doc files if it doesn't exist.
create_output_dir(&output_path)
.context(format!("Unable to create output directory {:?}", output_path))?;
// Write documentation output for each command.
for cmd_path in cmd_paths.iter() {
write_formatted_output(&cmd_path, output_path).context(format!(
"Unable to write generate doc for {:?} to {:?}",
cmd_path, output_path
))?;
}
info!("Generated documentation at dir: {}", &output_path.display());
if let Some(tardir) = opt.tarball_dir {
info!("Tarballing output at {:?}", tardir);
let tar_gz = File::create(tardir)?;
let enc = GzEncoder::new(tar_gz, Compression::default());
let mut tar = Builder::new(enc);
tar.append_dir_all(".", output_path.to_str().expect("Get file name of outdir"))?;
info!("Cleaning up {:?}", output_path);
fs::remove_dir_all(output_path)?
}
Ok(())
}
/// Helper function for write_formatted_output.
///
/// Recursively calls `cmd_name`'s subcommands and writes to `output_writer`.
fn recurse_cmd_output<W: Write>(
cmd_name: &str,
cmd_path: &PathBuf,
output_writer: &mut W,
cmds_sequence: &Vec<&String>,
) -> Result<()> {
// Create vector to collect subcommands.
let mut cmds_list: Vec<String> = Vec::new();
let mut inside_command_section = false;
// Track command level starting from 0, to set command headers' formatting.
let cmd_level = cmds_sequence.len();
// Write out the header.
let cmd_heading_formatting = "#".repeat(cmd_level + 1);
writeln!(output_writer, "{} {}\n", cmd_heading_formatting, cmd_name)?;
writeln!(output_writer, "{}", CODEBLOCK_START)?;
debug!("Processing {:?} {:?}", cmd_path, cmds_sequence);
// Get terminal output for cmd <subcommands> --help for a given command.
let lines: Vec<String> = help_output_for(&cmd_path, &cmds_sequence)?;
for line in lines {
// TODO(fxb/69457): Capture all section headers in addition to "Commands" and "Options".
match line.to_lowercase().as_str() {
"subcommands:" | "commands:" => {
write_heading(output_writer, &line)?;
inside_command_section = true;
}
"options:" => {
write_heading(output_writer, &line)?;
inside_command_section = false;
}
// Command section ends at a blank line (or end of file).
"" => {
writeln!(output_writer, "")?;
inside_command_section = false;
}
// Collect sub-commands into a vector.
_ if inside_command_section => {
// Command name is the first word on the line.
if let Some(command) = line.split_whitespace().next() {
match command.as_ref() {
"commands" | "subcommands" => {
debug!("skipping {:?} to avoid recursion", command);
}
_ => {
cmds_list.push(command.to_string());
}
}
writeln!(output_writer, "{}", line)?;
}
}
_ => {
if line.contains(&cmd_path.as_path().display().to_string()) {
let line_no_path =
line.replace(&cmd_path.as_path().display().to_string(), &cmd_name);
                    // Write line after stripping the full path preceding the command name.
writeln!(output_writer, "{}", line_no_path)?;
} else if !line.contains("sdk WARN:") && !line.contains("See 'ffx help <command>'")
{
// TODO(fxb/71456): Remove filtering ffx repeated line after documentation standardized.
// Write non-header lines unedited.
writeln!(output_writer, "{}", line)?;
}
}
}
}
// Close preformatting at the end.
writeln!(output_writer, "{}", CODEBLOCK_END)?;
cmds_list.sort();
for cmd in cmds_list {
// Copy current command sequence and append newest command.
let mut cmds_sequence = cmds_sequence.clone();
cmds_sequence.push(&cmd);
recurse_cmd_output(&cmd, &cmd_path, output_writer, &cmds_sequence)?;
}
Ok(())
}
fn write_heading<W: Write>(output_writer: &mut W, heading: &String) -> Result<()> {
// End preformatting before writing a section header.
writeln!(output_writer, "{}", CODEBLOCK_END)?;
// Write the section heading.
writeln!(output_writer, "__{}__\n", heading)?;
// Begin preformatting for next section of non-headers.
writeln!(output_writer, "{}", CODEBLOCK_START)?;
Ok(())
}
/// Write output of cmd at `cmd_path` to new cmd.md file at `output_path`.
fn write_formatted_output(cmd_path: &PathBuf, output_path: &PathBuf) -> Result<()> {
// Get name of command from full path to the command executable.
let cmd_name = cmd_path.file_name().expect("Could not get file name for command");
let output_md_path = md_path(&cmd_name, &output_path);
debug!("Generating docs for {:?} to {:?}", cmd_path, output_md_path);
// Create vector for commands to call in sequence.
let cmd_sequence = Vec::new();
// Create a buffer writer to format and write consecutive lines to a file.
let file = File::create(&output_md_path).context(format!("create {:?}", output_md_path))?;
let output_writer = &mut BufWriter::new(file);
let cmd_name = cmd_name.to_str().expect("Could not convert cmd_name from OsStr to str");
writeln!(output_writer, "{}", HEADER)?;
    // Write output for cmd and all of its subcommands.
recurse_cmd_output(&cmd_name, &cmd_path, output_writer, &cmd_sequence)
}
/// Generate a vector of full paths to each command in the allow_list.
fn get_command_paths(input_path: &Path, allow_list: &HashSet<OsString>) -> Result<Vec<PathBuf>> {
// Build a set of all file names in the input_path dir.
let mut files = HashSet::new();
if let Ok(paths) = fs::read_dir(&input_path) {
for path in paths {
if let Ok(path) = path {
files.insert(path.file_name());
}
}
}
// Get the intersection of all files and commands in the allow_list.
let commands: HashSet<_> = files.intersection(&allow_list).collect();
info!("Including tools: {:?}", commands);
// Build full paths to allowed commands found in the input_path dir.
let mut cmd_paths = Vec::new();
for c in commands.iter() {
let path = Path::new(&input_path).join(c);
cmd_paths.push(path);
}
Ok(cmd_paths)
}
/// Create the output dir if doesn't exist, recursively creating subdirs in path.
fn create_output_dir(path: &Path) -> Result<()> {
if !path.exists() {
fs::create_dir_all(path)
.with_context(|| format!("Unable to create output directory {}", path.display()))?;
info!("Created directory {}", path.display());
}
Ok(())
}
/// Get cmd --help output when given a full path to a cmd.
fn help_output_for(tool: &Path, subcommands: &Vec<&String>) -> Result<Vec<String>> {
let output = Command::new(&tool)
.args(&*subcommands)
.arg("--help")
.output()
.context(format!("Command failed for {:?}", &tool.display()))?;
let stdout = output.stdout;
let stderr = output.stderr;
// Convert string outputs to vector of lines.
let stdout_string = String::from_utf8(stdout).expect("Help string from utf8");
let mut combined_lines = stdout_string.lines().map(String::from).collect::<Vec<_>>();
let stderr_string = String::from_utf8(stderr).expect("Help string from utf8");
let stderr_lines = stderr_string.lines().map(String::from).collect::<Vec<_>>();
combined_lines.extend(stderr_lines);
Ok(combined_lines)
}
/// Given a cmd name and a dir, create a full path ending in cmd.md.
fn md_path(file_stem: &OsStr, dir: &PathBuf) -> PathBuf {
let mut path = Path::new(dir).join(file_stem);
path.set_extension("md");
path
}
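// Illustrative example (not from the original source):
// md_path(OsStr::new("ffx"), &PathBuf::from("docs")) yields the PathBuf "docs/ffx.md".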
#[cfg(test)]
mod tests {
use {super::*, flate2::read::GzDecoder, tar::Archive};
#[test]
fn run_test_commands() {
let tmp_dir = tempfile::Builder::new().prefix("clidoc-test-out").tempdir().unwrap();
let argv = [
"-v",
"-o",
&tmp_dir.path().to_str().unwrap(),
"clidoc_test_data/tool_with_subcommands.sh",
];
let cmd = "clidoc-test";
let opt = Opt::from_args(&[cmd], &argv).unwrap();
let generated = tmp_dir.path().join("tool_with_subcommands.md");
let expected = &opt.in_dir.join("clidoc_test_data/tool_with_subcommands.md");
run(opt).expect("tool_with_subcommands could not be generated");
let generated_contents = fs::read_to_string(generated).unwrap();
let expected_contents = fs::read_to_string(expected).unwrap();
assert_eq!(generated_contents, expected_contents);
}
#[test]
fn run_test_archive_and_cleanup() {
let tmp_dir = tempfile::Builder::new().prefix("clidoc-tar-test").tempdir().unwrap();
let argv = [
"--tarball-dir",
"clidoc_out.tar.gz",
"-v",
"-o",
&tmp_dir.path().to_str().unwrap(),
"clidoc_test_data/tool_with_subcommands.sh",
];
let cmd = "clidoc-test-archive";
let opt = Opt::from_args(&[cmd], &argv).unwrap();
run(opt).expect("tool_with_subcommands could not be generated");
// With the tarball-dir flag set, the md file should be zipped
// and not exist.
assert!(!tmp_dir.path().join("tool_with_subcommands.md").exists());
let tar_gz = File::open("clidoc_out.tar.gz").expect("open tarball");
let tar = GzDecoder::new(tar_gz);
let mut archive = Archive::new(tar);
archive.unpack(".").expect("extract tar");
assert!(Path::new("tool_with_subcommands.md").exists());
}
}
| |
button.component.spec.ts
|
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { ButtonComponent } from './button.component';
describe('ButtonComponent', () => {
let component: ButtonComponent;
let fixture: ComponentFixture<ButtonComponent>;
let element: HTMLElement = null;
beforeEach(async () => {
await TestBed.configureTestingModule({
declarations: [ ButtonComponent ]
})
.compileComponents();
});
beforeEach(() => {
fixture = TestBed.createComponent(ButtonComponent);
component = fixture.componentInstance;
fixture.detectChanges();
element = fixture.nativeElement;
});
it('should create', () => {
expect(component).toBeTruthy();
});
describe('color methods', () => {
it('should return true for HEX', () => {
const result = component.isHex('#713FC2');
expect(result).toBeTrue();
});
it('should return false for color name', () => {
const result = component.isHex('primary');
expect(result).toBeFalse();
});
});
describe('bindings', () => {
it('color bind', () => {
component.color = 'danger';
fixture.detectChanges();
const bindingValue = element.getAttribute('data-bui-color');
expect(bindingValue).toEqual('danger');
});
it('design bind', () => {
component.design = 'flat';
fixture.detectChanges();
const bindingValue = element.getAttribute('data-bui-design');
expect(bindingValue).toEqual('flat');
});
});
});
| ||
ftpsender.py
|
#!/usr/bin/env python
# stdlib imports
from ftplib import FTP, error_perm
import os.path
import shutil
import tempfile
# local
from .sender import Sender
class FTPSender(Sender):
'''Class for sending and deleting files and directories via FTP.
    FTPSender copies local files and/or the contents of a local directory to a
    directory on a remote FTP server, creating the remote directory tree if
    necessary.
Required properties:
- remote_host Name of FTP server.
- remote_directory String path on remote_host where local files should
be copied to.
Optional properties:
- user String user name, for FTP servers where anonymous login is not
allowed.
- password String password, for FTP servers where anonymous login is
not allowed.
Usage:
sender = FTPSender(properties={'remote_host':'ftp.gov',
'remote_directory':'/pub/incoming/event1'},
local_directory = '/home/user/event1')
sender.send() => Creates remote url: ftp://ftp.gov/pub/incoming/event1 with contents of /home/user/event1 in it.
OR
sender = FTPSender(properties={'remote_host':'ftp.gov',
'remote_directory':'/pub/incoming/event1'},
local_directory = '/home/user/event1/version1')
sender.send() => Creates remote url: ftp://ftp.gov/pub/incoming/event1 with contents of /home/user/event1/version1 in it.
OR
sender = FTPSender(properties={'remote_host':'ftp.gov',
'remote_directory':'/pub/incoming/event1'},
local_files = ['/home/user/event1/version1/file1.txt','/home/user/event1/version1/file2.txt'])
sender.send() => Creates remote files: ftp://ftp.gov/pub/incoming/event1/file1.txt AND
                             ftp://ftp.gov/pub/incoming/event1/file2.txt
'''
_required_properties = ['remote_directory', 'remote_host']
_optional_properties = ['user', 'password']
def send(self):
'''
Send any files or folders that have been passed to constructor.
Returns:
            Tuple of number of files sent to the remote FTP server and a message
describing success.
Raises:
Exception when files cannot be sent to remote FTP server for any
reason.
'''
remote_host = self._properties['remote_host']
remote_folder = self._properties['remote_directory']
try:
# this should put us at the top level folder
ftp = self._setup()
# send any files we want
nfiles = 0
for f in self._local_files:
self.__sendfile(f, ftp)
nfiles += 1
# send everything in the directories we specified
if self._local_directory is not None:
local_directory = self._local_directory
allfiles = self.getAllLocalFiles()
for filename in allfiles:
try:
self._copy_file_with_path(
ftp, filename, remote_folder,
local_folder=local_directory)
nfiles += 1
                    except Exception:
                        # Skip files that fail to copy; they are not counted.
                        pass
ftp.quit()
return (nfiles, f'{int(nfiles):d} files were sent successfully to {remote_host} {remote_folder}')
except Exception as obj:
raise Exception(
                f'Could not send to {remote_host}. Error "{str(obj)}"')
def cancel(self):
"""
Create a cancel file (named as indicated in constructor "cancelfile"
parameter) in remote_directory on remote_host.
Args:
cancel_content: String containing text that should be written to
the cancelfile.
Returns:
A string message describing what has occurred.
"""
remote_host = self._properties['remote_host']
remote_folder = self._properties['remote_directory']
ftp = self._setup()
# Create local .cancel file, then copy it to ftp server
tempdir = tempfile.mkdtemp()
try:
tfile = os.path.join(tempdir, self._cancelfile) # local file
f = open(tfile, 'wt')
f.close()
ftp.cwd(remote_folder)
self.__sendfile(tfile, ftp)
except Exception as e:
raise Exception(
f'Could not create .cancel file on {remote_host}/{remote_folder}')
finally:
shutil.rmtree(tempdir)
        return (f'{self._cancelfile} file successfully placed on {remote_host} {remote_folder}')
def _setup(self):
"""Initiate an ftp connection with properties passed to constructor.
Navigate to/create directory (as necessary) specified by
remote_directory property.
Returns:
Instance of the ftplib.FTP class.
"""
host = self._properties['remote_host']
remote_folder = self._properties['remote_directory']
# attempt to login to remote host
try:
dirparts = self._split(remote_folder)
ftp = FTP(host)
if 'user' in self._properties:
user = self._properties['user']
else:
user = ''
if 'password' in self._properties:
password = self._properties['password']
else:
password = ''
if user == '':
ftp.login()
else:
ftp.login(user, password)
except error_perm as msg:
raise Exception(f'Could not login to remote host {host}')
# attempt to cd to remote directory
try:
self._create_remote_directory(ftp, remote_folder)
except Exception as e:
ftp.quit()
raise Exception(
f'Could not navigate to directory "{remote_folder}" on remote host {host}')
return ftp
def _create_remote_directory(self, ftp, remote_directory):
"""Create directory (recursively) on remote_host.
Args:
ftp: ftplib.FTP instance.
remote_directory: String path of directory on remote system which
needs to be created.
Raises:
Exception when unable to create remote_directory.
"""
# attempt to cd to remote directory
ftp.cwd('/')
try:
ftp.cwd(remote_directory)
except error_perm as msg:
dirparts = self._split(remote_directory)
for directory in dirparts:
try:
ftp.cwd(directory)
except error_perm as msg:
try:
ftp.mkd(directory)
ftp.cwd(directory)
except error_perm as msg:
raise Exception(
f'Unable to create subdirectory {directory}.')
def _copy_file_with_path(self, ftp, local_file, remote_folder,
local_folder=None):
"""
Copy local_file to remote_folder, preserving relative path and creating
required sub-directories.
Usage:
local_file: /home/user/data/events/us2016abcd/data_files/datafile.txt
remote_folder: /data/archive/events
local_folder: /home/user/data/events/us2016abcd
would create:
/data/archive/events/us2016abcd/data_files/datafile.txt
local_file: /home/user/data/events/us2016abcd/data_files/datafile.txt
remote_folder: /data/archive/events/us2016abcd
local_folder: None
would create:
/data/archive/events/us2016abcd/datafile.txt
Args:
local_file: Local file to copy.
remote_folder: Remote folder to copy local files to.
local_folder: Top of local directory where file copying started.
If None, local_file should be copied to a file of the same
name (not preserving path) into remote_folder.
"""
if local_folder is None:
ftp.cwd(remote_folder)
            self.__sendfile(local_file, ftp)
else:
local_parts = local_file.replace(local_folder, '').strip(
os.path.sep).split(os.path.sep)
remote_parts = self._split(remote_folder)
all_parts = remote_parts + local_parts
remote_file = '/' + '/'.join(all_parts)
print(remote_file)
remfolder, remfile = self._path_split(remote_file)
try:
ftp.cwd(remfolder)
except error_perm as ep:
self._create_remote_directory(ftp, remfolder)
self.__sendfile(local_file, ftp)
ftp.cwd(remote_folder)
def __sendfile(self, filename, ftp):
|
def _join(self, *path_parts):
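        # e.g. _join('a', 'b', 'c') -> '/a/b/c'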
return '/' + '/'.join(path_parts)
def _split(self, path):
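        # e.g. _split('/a/b/c') -> ['a', 'b', 'c']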
return path.strip('/').split('/')
def _path_split(self, path):
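        # e.g. _path_split('/a/b/c.txt') -> ('/a/b', 'c.txt')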
parts = path.strip('/').split('/')
fname = parts[-1]
fpath = '/' + '/'.join(parts[0:-1])
return (fpath, fname)
|
'''Internal function used to send a file using an FTP object.
Args:
filename: Local filename
ftp: Instance of FTP object.
'''
# in case somebody is polling for this file,
# make a temporary file first, then rename it
        # so the poller doesn't grab it before it's finished transferring.
        fdir, fname = os.path.split(filename)  # split the local path into (directory, basename)
        tmpfile = fname + '.tmp'
cmd = "STOR " + tmpfile
# we don't tell the ftp server about the local path to the file
# actually send the file
ftp.storbinary(cmd, open(filename, "rb"), 1024)
# rename it to the desired destination
        ftp.rename(tmpfile, fname)
|
macros.rs
|
//! A bunch of methods and structures more or less related to resolving macros and
//! interface provided by `Resolver` to macro expander.
use crate::imports::ImportResolver;
use crate::Namespace::*;
use crate::{AmbiguityError, AmbiguityErrorMisc, AmbiguityKind, BuiltinMacroState, Determinacy};
use crate::{CrateLint, ParentScope, ResolutionError, Resolver, Scope, ScopeSet, Weak};
use crate::{ModuleKind, ModuleOrUniformRoot, NameBinding, PathResult, Segment, ToNameBinding};
use rustc_ast::{self as ast, NodeId};
use rustc_ast_lowering::ResolverAstLowering;
use rustc_ast_pretty::pprust;
use rustc_attr::StabilityLevel;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::ptr_key::PtrKey;
use rustc_data_structures::sync::Lrc;
use rustc_errors::struct_span_err;
use rustc_expand::base::{Indeterminate, ResolverExpand, SyntaxExtension, SyntaxExtensionKind};
use rustc_expand::compile_declarative_macro;
use rustc_expand::expand::{AstFragment, Invocation, InvocationKind};
use rustc_feature::is_builtin_attr_name;
use rustc_hir::def::{self, DefKind, NonMacroAttrKind};
use rustc_hir::def_id;
use rustc_hir::PrimTy;
use rustc_middle::middle::stability;
use rustc_middle::ty;
use rustc_session::lint::builtin::{LEGACY_DERIVE_HELPERS, SOFT_UNSTABLE, UNUSED_MACROS};
use rustc_session::lint::BuiltinLintDiagnostics;
use rustc_session::parse::feature_err;
use rustc_session::Session;
use rustc_span::edition::Edition;
use rustc_span::hygiene::{self, ExpnData, ExpnId, ExpnKind};
use rustc_span::hygiene::{AstPass, MacroKind};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use std::cell::Cell;
use std::{mem, ptr};
type Res = def::Res<NodeId>;
/// Binding produced by a `macro_rules` item.
/// Not modularized, can shadow previous `macro_rules` bindings, etc.
#[derive(Debug)]
pub struct MacroRulesBinding<'a> {
crate binding: &'a NameBinding<'a>,
/// `macro_rules` scope into which the `macro_rules` item was planted.
crate parent_macro_rules_scope: MacroRulesScopeRef<'a>,
crate ident: Ident,
}
/// The scope introduced by a `macro_rules!` macro.
/// This starts at the macro's definition and ends at the end of the macro's parent
/// module (named or unnamed), or even further if it escapes with `#[macro_use]`.
/// Some macro invocations need to introduce `macro_rules` scopes too because they
/// can potentially expand into macro definitions.
#[derive(Copy, Clone, Debug)]
pub enum MacroRulesScope<'a> {
/// Empty "root" scope at the crate start containing no names.
Empty,
/// The scope introduced by a `macro_rules!` macro definition.
Binding(&'a MacroRulesBinding<'a>),
/// The scope introduced by a macro invocation that can potentially
/// create a `macro_rules!` macro definition.
Invocation(ExpnId),
}
/// `macro_rules!` scopes are always kept by reference and inside a cell.
/// The reason is that we update scopes with value `MacroRulesScope::Invocation(invoc_id)`
/// in-place after `invoc_id` gets expanded.
/// This helps to avoid uncontrollable growth of `macro_rules!` scope chains,
/// which usually grow linearly with the number of macro invocations
/// in a module (including derives) and hurt performance.
pub(crate) type MacroRulesScopeRef<'a> = PtrKey<'a, Cell<MacroRulesScope<'a>>>;
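// (Illustrative sketch for the `MacroRulesScopeRef` above, not actual resolver code:) because the
// scope sits in a `Cell`, an `Invocation(expn_id)` entry can be overwritten in place once `expn_id`
// has expanded, e.g. `scope_ref.set(MacroRulesScope::Binding(binding))`, keeping scope chains short.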
// Macro namespace is separated into two sub-namespaces, one for bang macros and
// one for attribute-like macros (attributes, derives).
// We ignore resolutions from one sub-namespace when searching names in scope for another.
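// For example, a `MacroKind::Bang` candidate never satisfies an `Attr`/`Derive` requirement,
// while a `None` on either side matches anything.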
fn sub_namespace_match(candidate: Option<MacroKind>, requirement: Option<MacroKind>) -> bool {
#[derive(PartialEq)]
enum SubNS {
Bang,
AttrLike,
}
let sub_ns = |kind| match kind {
MacroKind::Bang => SubNS::Bang,
MacroKind::Attr | MacroKind::Derive => SubNS::AttrLike,
};
let candidate = candidate.map(sub_ns);
let requirement = requirement.map(sub_ns);
// "No specific sub-namespace" means "matches anything" for both requirements and candidates.
candidate.is_none() || requirement.is_none() || candidate == requirement
}
// We don't want to format a path using pretty-printing,
// `format!("{}", path)`, because that tries to insert
// line-breaks and is slow.
fn fast_print_path(path: &ast::Path) -> Symbol {
if path.segments.len() == 1 {
path.segments[0].ident.name
} else {
let mut path_str = String::with_capacity(64);
for (i, segment) in path.segments.iter().enumerate() {
if i != 0 {
path_str.push_str("::");
}
if segment.ident.name != kw::PathRoot {
path_str.push_str(&segment.ident.as_str())
}
}
Symbol::intern(&path_str)
}
}
/// The code common between processing `#![register_tool]` and `#![register_attr]`.
fn registered_idents(
sess: &Session,
attrs: &[ast::Attribute],
attr_name: Symbol,
descr: &str,
) -> FxHashSet<Ident> {
let mut registered = FxHashSet::default();
for attr in sess.filter_by_name(attrs, attr_name) {
for nested_meta in attr.meta_item_list().unwrap_or_default() {
match nested_meta.ident() {
Some(ident) => {
if let Some(old_ident) = registered.replace(ident) {
let msg = format!("{} `{}` was already registered", descr, ident);
sess.struct_span_err(ident.span, &msg)
.span_label(old_ident.span, "already registered here")
.emit();
}
}
None => {
let msg = format!("`{}` only accepts identifiers", attr_name);
let span = nested_meta.span();
sess.struct_span_err(span, &msg).span_label(span, "not an identifier").emit();
}
}
}
}
registered
}
crate fn registered_attrs_and_tools(
sess: &Session,
attrs: &[ast::Attribute],
) -> (FxHashSet<Ident>, FxHashSet<Ident>) {
let registered_attrs = registered_idents(sess, attrs, sym::register_attr, "attribute");
let mut registered_tools = registered_idents(sess, attrs, sym::register_tool, "tool");
// We implicitly add `rustfmt` and `clippy` to known tools,
// but it's not an error to register them explicitly.
let predefined_tools = [sym::clippy, sym::rustfmt];
registered_tools.extend(predefined_tools.iter().cloned().map(Ident::with_dummy_span));
(registered_attrs, registered_tools)
}
impl<'a> ResolverExpand for Resolver<'a> {
fn next_node_id(&mut self) -> NodeId {
self.next_node_id()
}
fn resolve_dollar_crates(&mut self) {
hygiene::update_dollar_crate_names(|ctxt| {
let ident = Ident::new(kw::DollarCrate, DUMMY_SP.with_ctxt(ctxt));
match self.resolve_crate_root(ident).kind {
ModuleKind::Def(.., name) if name != kw::Empty => name,
_ => kw::Crate,
}
});
}
fn visit_ast_fragment_with_placeholders(&mut self, expansion: ExpnId, fragment: &AstFragment) {
// Integrate the new AST fragment into all the definition and module structures.
// We are inside the `expansion` now, but other parent scope components are still the same.
let parent_scope = ParentScope { expansion, ..self.invocation_parent_scopes[&expansion] };
let output_macro_rules_scope = self.build_reduced_graph(fragment, parent_scope);
self.output_macro_rules_scopes.insert(expansion, output_macro_rules_scope);
parent_scope.module.unexpanded_invocations.borrow_mut().remove(&expansion);
}
fn register_builtin_macro(&mut self, name: Symbol, ext: SyntaxExtensionKind) {
if self.builtin_macros.insert(name, BuiltinMacroState::NotYetSeen(ext)).is_some() {
self.session
.diagnostic()
.bug(&format!("built-in macro `{}` was already registered", name));
}
}
// Create a new Expansion with a definition site of the provided module, or
// a fake empty `#[no_implicit_prelude]` module if no module is provided.
fn expansion_for_ast_pass(
&mut self,
call_site: Span,
pass: AstPass,
features: &[Symbol],
parent_module_id: Option<NodeId>,
) -> ExpnId {
let expn_id = ExpnId::fresh(Some(ExpnData::allow_unstable(
ExpnKind::AstPass(pass),
call_site,
self.session.edition(),
features.into(),
None,
)));
let parent_scope = if let Some(module_id) = parent_module_id {
let parent_def_id = self.local_def_id(module_id);
self.definitions.add_parent_module_of_macro_def(expn_id, parent_def_id.to_def_id());
self.module_map[&parent_def_id]
} else {
self.definitions.add_parent_module_of_macro_def(
expn_id,
def_id::DefId::local(def_id::CRATE_DEF_INDEX),
);
self.empty_module
};
self.ast_transform_scopes.insert(expn_id, parent_scope);
expn_id
}
fn resolve_imports(&mut self) {
ImportResolver { r: self }.resolve_imports()
}
fn resolve_macro_invocation(
&mut self,
invoc: &Invocation,
eager_expansion_root: ExpnId,
force: bool,
) -> Result<Lrc<SyntaxExtension>, Indeterminate> {
let invoc_id = invoc.expansion_data.id;
let parent_scope = match self.invocation_parent_scopes.get(&invoc_id) {
Some(parent_scope) => *parent_scope,
None => {
// If there's no entry in the table, then we are resolving an eagerly expanded
// macro, which should inherit its parent scope from its eager expansion root -
// the macro that requested this eager expansion.
let parent_scope = *self
.invocation_parent_scopes
.get(&eager_expansion_root)
.expect("non-eager expansion without a parent scope");
self.invocation_parent_scopes.insert(invoc_id, parent_scope);
parent_scope
}
};
let (path, kind, inner_attr, derives) = match invoc.kind {
InvocationKind::Attr { ref attr, ref derives, .. } => (
&attr.get_normal_item().path,
MacroKind::Attr,
attr.style == ast::AttrStyle::Inner,
self.arenas.alloc_ast_paths(derives),
),
InvocationKind::Bang { ref mac, .. } => (&mac.path, MacroKind::Bang, false, &[][..]),
InvocationKind::Derive { ref path, .. } => (path, MacroKind::Derive, false, &[][..]),
};
// Derives are not included when `invocations` are collected, so we have to add them here.
let parent_scope = &ParentScope { derives, ..parent_scope };
let require_inert = !invoc.fragment_kind.supports_macro_expansion();
let node_id = self.lint_node_id(eager_expansion_root);
let (ext, res) = self.smart_resolve_macro_path(
path,
kind,
require_inert,
inner_attr,
parent_scope,
node_id,
force,
)?;
let span = invoc.span();
invoc_id.set_expn_data(ext.expn_data(
parent_scope.expansion,
span,
fast_print_path(path),
res.opt_def_id(),
));
if let Res::Def(_, _) = res {
let normal_module_def_id = self.macro_def_scope(invoc_id).nearest_parent_mod;
self.definitions.add_parent_module_of_macro_def(invoc_id, normal_module_def_id);
// Gate macro attributes in `#[derive]` output.
if !self.session.features_untracked().macro_attributes_in_derive_output
&& kind == MacroKind::Attr
&& ext.builtin_name != Some(sym::derive)
{
let mut expn_id = parent_scope.expansion;
loop {
// Helper attr table is a quick way to determine whether the attr is `derive`.
if self.helper_attrs.contains_key(&expn_id) {
feature_err(
&self.session.parse_sess,
sym::macro_attributes_in_derive_output,
path.span,
"macro attributes in `#[derive]` output are unstable",
)
.emit();
break;
} else {
let expn_data = expn_id.expn_data();
match expn_data.kind {
ExpnKind::Root
| ExpnKind::Macro(MacroKind::Bang | MacroKind::Derive, _) => {
break;
}
_ => expn_id = expn_data.parent,
}
}
}
}
}
Ok(ext)
}
fn check_unused_macros(&mut self) {
for (_, &(node_id, span)) in self.unused_macros.iter() {
self.lint_buffer.buffer_lint(UNUSED_MACROS, node_id, span, "unused macro definition");
}
}
fn lint_node_id(&self, expn_id: ExpnId) -> NodeId {
// FIXME - make this more precise. This currently returns the NodeId of the
// nearest closing item - we should try to return the closest parent of the ExpnId
self.invocation_parents
.get(&expn_id)
.map_or(ast::CRATE_NODE_ID, |id| self.def_id_to_node_id[*id])
}
fn has_derive_copy(&self, expn_id: ExpnId) -> bool {
self.containers_deriving_copy.contains(&expn_id)
}
fn resolve_derives(
&mut self,
expn_id: ExpnId,
derives: Vec<ast::Path>,
force: bool,
) -> Result<(), Indeterminate> {
// Block expansion of the container until we resolve all derives in it.
// This is required for two reasons:
// - Derive helper attributes are in scope for the item to which the `#[derive]`
// is applied, so they have to be produced by the container's expansion rather
// than by individual derives.
// - Derives in the container need to know whether one of them is a built-in `Copy`.
// FIXME: Try to cache intermediate results to avoid resolving same derives multiple times.
let parent_scope = self.invocation_parent_scopes[&expn_id];
let mut exts = Vec::new();
let mut helper_attrs = Vec::new();
let mut has_derive_copy = false;
for path in derives {
exts.push((
match self.resolve_macro_path(
&path,
Some(MacroKind::Derive),
&parent_scope,
true,
force,
) {
Ok((Some(ext), _)) => {
let span =
path.segments.last().unwrap().ident.span.normalize_to_macros_2_0();
helper_attrs
.extend(ext.helper_attrs.iter().map(|name| Ident::new(*name, span)));
has_derive_copy |= ext.builtin_name == Some(sym::Copy);
ext
}
Ok(_) | Err(Determinacy::Determined) => self.dummy_ext(MacroKind::Derive),
Err(Determinacy::Undetermined) => return Err(Indeterminate),
},
path,
))
}
self.derive_resolutions.insert(expn_id, exts);
self.helper_attrs.insert(expn_id, helper_attrs);
// Mark this derive as having `Copy` either if it has `Copy` itself or if its parent derive
// has `Copy`, to support cases like `#[derive(Clone, Copy)] #[derive(Debug)]`.
if has_derive_copy || self.has_derive_copy(parent_scope.expansion) {
self.containers_deriving_copy.insert(expn_id);
}
Ok(())
}
fn take_derive_resolutions(
&mut self,
expn_id: ExpnId,
) -> Option<Vec<(Lrc<SyntaxExtension>, ast::Path)>> {
self.derive_resolutions.remove(&expn_id)
}
// The function that implements the resolution logic of `#[cfg_accessible(path)]`.
// Returns true if the path can certainly be resolved in one of three namespaces,
// returns false if the path certainly cannot be resolved in any of the three namespaces.
// Returns `Indeterminate` if we cannot give a certain answer yet.
fn cfg_accessible(&mut self, expn_id: ExpnId, path: &ast::Path) -> Result<bool, Indeterminate> {
let span = path.span;
let path = &Segment::from_path(path);
let parent_scope = self.invocation_parent_scopes[&expn_id];
let mut indeterminate = false;
for ns in [TypeNS, ValueNS, MacroNS].iter().copied() {
match self.resolve_path(path, Some(ns), &parent_scope, false, span, CrateLint::No) {
PathResult::Module(ModuleOrUniformRoot::Module(_)) => return Ok(true),
PathResult::NonModule(partial_res) if partial_res.unresolved_segments() == 0 => {
return Ok(true);
}
PathResult::Indeterminate => indeterminate = true,
// FIXME: `resolve_path` is not ready to report partially resolved paths
// correctly, so we just report an error if the path was reported as unresolved.
// This needs to be fixed for `cfg_accessible` to be useful.
PathResult::NonModule(..) | PathResult::Failed { .. } => {}
PathResult::Module(_) => panic!("unexpected path resolution"),
}
}
if indeterminate {
return Err(Indeterminate);
}
self.session
.struct_span_err(span, "not sure whether the path is accessible or not")
.span_note(span, "`cfg_accessible` is not fully implemented")
.emit();
Ok(false)
}
}
impl<'a> Resolver<'a> {
/// Resolve macro path with error reporting and recovery.
/// Uses dummy syntax extensions for unresolved macros or macros with unexpected resolutions
/// for better error recovery.
fn smart_resolve_macro_path(
&mut self,
path: &ast::Path,
kind: MacroKind,
require_inert: bool,
inner_attr: bool,
parent_scope: &ParentScope<'a>,
node_id: NodeId,
force: bool,
) -> Result<(Lrc<SyntaxExtension>, Res), Indeterminate> {
let (ext, res) = match self.resolve_macro_path(path, Some(kind), parent_scope, true, force)
{
Ok((Some(ext), res)) => (ext, res),
Ok((None, res)) => (self.dummy_ext(kind), res),
Err(Determinacy::Determined) => (self.dummy_ext(kind), Res::Err),
Err(Determinacy::Undetermined) => return Err(Indeterminate),
};
// Report errors for the resolved macro.
for segment in &path.segments {
if let Some(args) = &segment.args {
self.session.span_err(args.span(), "generic arguments in macro path");
}
if kind == MacroKind::Attr && segment.ident.as_str().starts_with("rustc") {
self.session.span_err(
segment.ident.span,
"attributes starting with `rustc` are reserved for use by the `rustc` compiler",
);
}
}
match res {
Res::Def(DefKind::Macro(_), def_id) => {
if let Some(def_id) = def_id.as_local() {
self.unused_macros.remove(&def_id);
if self.proc_macro_stubs.contains(&def_id) {
self.session.span_err(
path.span,
"can't use a procedural macro from the same crate that defines it",
);
}
}
}
Res::NonMacroAttr(..) | Res::Err => {}
_ => panic!("expected `DefKind::Macro` or `Res::NonMacroAttr`"),
};
self.check_stability_and_deprecation(&ext, path, node_id);
let unexpected_res = if ext.macro_kind() != kind {
Some((kind.article(), kind.descr_expected()))
} else if require_inert && matches!(res, Res::Def(..)) {
Some(("a", "non-macro attribute"))
} else {
None
};
if let Some((article, expected)) = unexpected_res {
let path_str = pprust::path_to_string(path);
let msg = format!("expected {}, found {} `{}`", expected, res.descr(), path_str);
self.session
.struct_span_err(path.span, &msg)
.span_label(path.span, format!("not {} {}", article, expected))
.emit();
return Ok((self.dummy_ext(kind), Res::Err));
}
// We are trying to avoid reporting this error if other related errors were reported.
if res != Res::Err
&& inner_attr
&& !self.session.features_untracked().custom_inner_attributes
{
let msg = match res {
Res::Def(..) => "inner macro attributes are unstable",
Res::NonMacroAttr(..) => "custom inner attributes are unstable",
_ => unreachable!(),
};
if path == &sym::test {
self.session.parse_sess.buffer_lint(SOFT_UNSTABLE, path.span, node_id, msg);
} else {
feature_err(&self.session.parse_sess, sym::custom_inner_attributes, path.span, msg)
.emit();
}
}
Ok((ext, res))
}
pub fn resolve_macro_path(
&mut self,
path: &ast::Path,
kind: Option<MacroKind>,
parent_scope: &ParentScope<'a>,
trace: bool,
force: bool,
) -> Result<(Option<Lrc<SyntaxExtension>>, Res), Determinacy> {
let path_span = path.span;
let mut path = Segment::from_path(path);
// Possibly apply the macro helper hack
if kind == Some(MacroKind::Bang)
&& path.len() == 1
&& path[0].ident.span.ctxt().outer_expn_data().local_inner_macros
{
let root = Ident::new(kw::DollarCrate, path[0].ident.span);
path.insert(0, Segment::from_ident(root));
}
let res = if path.len() > 1 {
let res = match self.resolve_path(
&path,
Some(MacroNS),
parent_scope,
false,
path_span,
CrateLint::No,
) {
PathResult::NonModule(path_res) if path_res.unresolved_segments() == 0 => {
Ok(path_res.base_res())
}
PathResult::Indeterminate if !force => return Err(Determinacy::Undetermined),
PathResult::NonModule(..)
| PathResult::Indeterminate
| PathResult::Failed { .. } => Err(Determinacy::Determined),
PathResult::Module(..) => unreachable!(),
};
if trace {
let kind = kind.expect("macro kind must be specified if tracing is enabled");
self.multi_segment_macro_resolutions.push((
path,
path_span,
kind,
*parent_scope,
res.ok(),
));
}
self.prohibit_imported_non_macro_attrs(None, res.ok(), path_span);
res
} else {
let scope_set = kind.map_or(ScopeSet::All(MacroNS, false), ScopeSet::Macro);
let binding = self.early_resolve_ident_in_lexical_scope(
path[0].ident,
scope_set,
parent_scope,
false,
force,
path_span,
);
if let Err(Determinacy::Undetermined) = binding {
return Err(Determinacy::Undetermined);
}
if trace {
let kind = kind.expect("macro kind must be specified if tracing is enabled");
self.single_segment_macro_resolutions.push((
path[0].ident,
kind,
*parent_scope,
binding.ok(),
));
}
let res = binding.map(|binding| binding.res());
self.prohibit_imported_non_macro_attrs(binding.ok(), res.ok(), path_span);
res
};
res.map(|res| (self.get_macro(res), res))
}
// Resolve an identifier in lexical scope.
// This is a variation of `fn resolve_ident_in_lexical_scope` that can be run during
// expansion and import resolution (perhaps they can be merged in the future).
// The function is used for resolving initial segments of macro paths (e.g., `foo` in
// `foo::bar!();` or `foo!();`) and also for import paths on 2018 edition.
crate fn early_resolve_ident_in_lexical_scope(
&mut self,
orig_ident: Ident,
scope_set: ScopeSet,
parent_scope: &ParentScope<'a>,
record_used: bool,
force: bool,
path_span: Span,
) -> Result<&'a NameBinding<'a>, Determinacy> {
bitflags::bitflags! {
struct Flags: u8 {
|
const MODULE = 1 << 1;
const MISC_SUGGEST_CRATE = 1 << 2;
const MISC_SUGGEST_SELF = 1 << 3;
const MISC_FROM_PRELUDE = 1 << 4;
}
}
assert!(force || !record_used); // `record_used` implies `force`
// Make sure `self`, `super` etc produce an error when passed to here.
if orig_ident.is_path_segment_keyword() {
return Err(Determinacy::Determined);
}
let (ns, macro_kind, is_import) = match scope_set {
ScopeSet::All(ns, is_import) => (ns, None, is_import),
ScopeSet::AbsolutePath(ns) => (ns, None, false),
ScopeSet::Macro(macro_kind) => (MacroNS, Some(macro_kind), false),
};
// This is *the* result, resolution from the scope closest to the resolved identifier.
// However, sometimes this result is "weak" because it comes from a glob import or
// a macro expansion, and in this case it cannot shadow names from outer scopes, e.g.
// mod m { ... } // solution in outer scope
// {
// use prefix::*; // imports another `m` - innermost solution
// // weak, cannot shadow the outer `m`, need to report ambiguity error
// m::mac!();
// }
// So we have to save the innermost solution and continue searching in outer scopes
// to detect potential ambiguities.
let mut innermost_result: Option<(&NameBinding<'_>, Flags)> = None;
let mut determinacy = Determinacy::Determined;
// Go through all the scopes and try to resolve the name.
let break_result = self.visit_scopes(
scope_set,
parent_scope,
orig_ident.span.ctxt(),
|this, scope, use_prelude, ctxt| {
let ident = Ident::new(orig_ident.name, orig_ident.span.with_ctxt(ctxt));
let ok = |res, span, arenas| {
Ok((
(res, ty::Visibility::Public, span, ExpnId::root()).to_name_binding(arenas),
Flags::empty(),
))
};
let result = match scope {
Scope::DeriveHelpers(expn_id) => {
if let Some(attr) = this
.helper_attrs
.get(&expn_id)
.and_then(|attrs| attrs.iter().rfind(|i| ident == **i))
{
let binding = (
Res::NonMacroAttr(NonMacroAttrKind::DeriveHelper),
ty::Visibility::Public,
attr.span,
expn_id,
)
.to_name_binding(this.arenas);
Ok((binding, Flags::empty()))
} else {
Err(Determinacy::Determined)
}
}
Scope::DeriveHelpersCompat => {
let mut result = Err(Determinacy::Determined);
for derive in parent_scope.derives {
let parent_scope = &ParentScope { derives: &[], ..*parent_scope };
match this.resolve_macro_path(
derive,
Some(MacroKind::Derive),
parent_scope,
true,
force,
) {
Ok((Some(ext), _)) => {
if ext.helper_attrs.contains(&ident.name) {
result = ok(
Res::NonMacroAttr(NonMacroAttrKind::DeriveHelperCompat),
derive.span,
this.arenas,
);
break;
}
}
Ok(_) | Err(Determinacy::Determined) => {}
Err(Determinacy::Undetermined) => {
result = Err(Determinacy::Undetermined)
}
}
}
result
}
Scope::MacroRules(macro_rules_scope) => match macro_rules_scope.get() {
MacroRulesScope::Binding(macro_rules_binding)
if ident == macro_rules_binding.ident =>
{
Ok((macro_rules_binding.binding, Flags::MACRO_RULES))
}
MacroRulesScope::Invocation(_) => Err(Determinacy::Undetermined),
_ => Err(Determinacy::Determined),
},
Scope::CrateRoot => {
let root_ident = Ident::new(kw::PathRoot, ident.span);
let root_module = this.resolve_crate_root(root_ident);
let binding = this.resolve_ident_in_module_ext(
ModuleOrUniformRoot::Module(root_module),
ident,
ns,
parent_scope,
record_used,
path_span,
);
match binding {
Ok(binding) => Ok((binding, Flags::MODULE | Flags::MISC_SUGGEST_CRATE)),
Err((Determinacy::Undetermined, Weak::No)) => {
return Some(Err(Determinacy::determined(force)));
}
Err((Determinacy::Undetermined, Weak::Yes)) => {
Err(Determinacy::Undetermined)
}
Err((Determinacy::Determined, _)) => Err(Determinacy::Determined),
}
}
Scope::Module(module) => {
let adjusted_parent_scope = &ParentScope { module, ..*parent_scope };
let binding = this.resolve_ident_in_module_unadjusted_ext(
ModuleOrUniformRoot::Module(module),
ident,
ns,
adjusted_parent_scope,
true,
record_used,
path_span,
);
match binding {
Ok(binding) => {
let misc_flags = if ptr::eq(module, this.graph_root) {
Flags::MISC_SUGGEST_CRATE
} else if module.is_normal() {
Flags::MISC_SUGGEST_SELF
} else {
Flags::empty()
};
Ok((binding, Flags::MODULE | misc_flags))
}
Err((Determinacy::Undetermined, Weak::No)) => {
return Some(Err(Determinacy::determined(force)));
}
Err((Determinacy::Undetermined, Weak::Yes)) => {
Err(Determinacy::Undetermined)
}
Err((Determinacy::Determined, _)) => Err(Determinacy::Determined),
}
}
Scope::RegisteredAttrs => match this.registered_attrs.get(&ident).cloned() {
Some(ident) => ok(
Res::NonMacroAttr(NonMacroAttrKind::Registered),
ident.span,
this.arenas,
),
None => Err(Determinacy::Determined),
},
Scope::MacroUsePrelude => {
match this.macro_use_prelude.get(&ident.name).cloned() {
Some(binding) => Ok((binding, Flags::MISC_FROM_PRELUDE)),
None => Err(Determinacy::determined(
this.graph_root.unexpanded_invocations.borrow().is_empty(),
)),
}
}
Scope::BuiltinAttrs => {
if is_builtin_attr_name(ident.name) {
ok(
Res::NonMacroAttr(NonMacroAttrKind::Builtin(ident.name)),
DUMMY_SP,
this.arenas,
)
} else {
Err(Determinacy::Determined)
}
}
Scope::ExternPrelude => match this.extern_prelude_get(ident, !record_used) {
Some(binding) => Ok((binding, Flags::empty())),
None => Err(Determinacy::determined(
this.graph_root.unexpanded_invocations.borrow().is_empty(),
)),
},
Scope::ToolPrelude => match this.registered_tools.get(&ident).cloned() {
Some(ident) => ok(Res::ToolMod, ident.span, this.arenas),
None => Err(Determinacy::Determined),
},
Scope::StdLibPrelude => {
let mut result = Err(Determinacy::Determined);
if let Some(prelude) = this.prelude {
if let Ok(binding) = this.resolve_ident_in_module_unadjusted(
ModuleOrUniformRoot::Module(prelude),
ident,
ns,
parent_scope,
false,
path_span,
) {
if use_prelude || this.is_builtin_macro(binding.res()) {
result = Ok((binding, Flags::MISC_FROM_PRELUDE));
}
}
}
result
}
Scope::BuiltinTypes => match PrimTy::from_name(ident.name) {
Some(prim_ty) => ok(Res::PrimTy(prim_ty), DUMMY_SP, this.arenas),
None => Err(Determinacy::Determined),
},
};
match result {
Ok((binding, flags))
if sub_namespace_match(binding.macro_kind(), macro_kind) =>
{
if !record_used {
return Some(Ok(binding));
}
if let Some((innermost_binding, innermost_flags)) = innermost_result {
// Found another solution, if the first one was "weak", report an error.
let (res, innermost_res) = (binding.res(), innermost_binding.res());
if res != innermost_res {
let is_builtin = |res| {
matches!(res, Res::NonMacroAttr(NonMacroAttrKind::Builtin(..)))
};
let derive_helper =
Res::NonMacroAttr(NonMacroAttrKind::DeriveHelper);
let derive_helper_compat =
Res::NonMacroAttr(NonMacroAttrKind::DeriveHelperCompat);
let ambiguity_error_kind = if is_import {
Some(AmbiguityKind::Import)
} else if is_builtin(innermost_res) || is_builtin(res) {
Some(AmbiguityKind::BuiltinAttr)
} else if innermost_res == derive_helper_compat
|| res == derive_helper_compat && innermost_res != derive_helper
{
Some(AmbiguityKind::DeriveHelper)
} else if innermost_flags.contains(Flags::MACRO_RULES)
&& flags.contains(Flags::MODULE)
&& !this.disambiguate_macro_rules_vs_modularized(
innermost_binding,
binding,
)
|| flags.contains(Flags::MACRO_RULES)
&& innermost_flags.contains(Flags::MODULE)
&& !this.disambiguate_macro_rules_vs_modularized(
binding,
innermost_binding,
)
{
Some(AmbiguityKind::MacroRulesVsModularized)
} else if innermost_binding.is_glob_import() {
Some(AmbiguityKind::GlobVsOuter)
} else if innermost_binding
.may_appear_after(parent_scope.expansion, binding)
{
Some(AmbiguityKind::MoreExpandedVsOuter)
} else {
None
};
if let Some(kind) = ambiguity_error_kind {
let misc = |f: Flags| {
if f.contains(Flags::MISC_SUGGEST_CRATE) {
AmbiguityErrorMisc::SuggestCrate
} else if f.contains(Flags::MISC_SUGGEST_SELF) {
AmbiguityErrorMisc::SuggestSelf
} else if f.contains(Flags::MISC_FROM_PRELUDE) {
AmbiguityErrorMisc::FromPrelude
} else {
AmbiguityErrorMisc::None
}
};
this.ambiguity_errors.push(AmbiguityError {
kind,
ident: orig_ident,
b1: innermost_binding,
b2: binding,
misc1: misc(innermost_flags),
misc2: misc(flags),
});
return Some(Ok(innermost_binding));
}
}
} else {
// Found the first solution.
innermost_result = Some((binding, flags));
}
}
Ok(..) | Err(Determinacy::Determined) => {}
Err(Determinacy::Undetermined) => determinacy = Determinacy::Undetermined,
}
None
},
);
if let Some(break_result) = break_result {
return break_result;
}
// The first found solution was the only one, return it.
if let Some((binding, _)) = innermost_result {
return Ok(binding);
}
Err(Determinacy::determined(determinacy == Determinacy::Determined || force))
}
crate fn finalize_macro_resolutions(&mut self) {
let check_consistency = |this: &mut Self,
path: &[Segment],
span,
kind: MacroKind,
initial_res: Option<Res>,
res: Res| {
if let Some(initial_res) = initial_res {
if res != initial_res {
// Make sure compilation does not succeed if preferred macro resolution
// has changed after the macro had been expanded. In theory all such
// situations should be reported as errors, so this is a bug.
this.session.delay_span_bug(span, "inconsistent resolution for a macro");
}
} else {
// It's possible that the macro was unresolved (indeterminate) and silently
// expanded into a dummy fragment for recovery during expansion.
// Now, post-expansion, the resolution may succeed, but we can't change the
// past and need to report an error.
// However, non-speculative `resolve_path` can successfully return private items
// even if speculative `resolve_path` returned nothing previously, so we skip this
// less informative error if the privacy error is reported elsewhere.
if this.privacy_errors.is_empty() {
let msg = format!(
"cannot determine resolution for the {} `{}`",
kind.descr(),
Segment::names_to_string(path)
);
let msg_note = "import resolution is stuck, try simplifying macro imports";
this.session.struct_span_err(span, &msg).note(msg_note).emit();
}
}
};
let macro_resolutions = mem::take(&mut self.multi_segment_macro_resolutions);
for (mut path, path_span, kind, parent_scope, initial_res) in macro_resolutions {
// FIXME: Path resolution will ICE if segment IDs present.
for seg in &mut path {
seg.id = None;
}
match self.resolve_path(
&path,
Some(MacroNS),
&parent_scope,
true,
path_span,
CrateLint::No,
) {
PathResult::NonModule(path_res) if path_res.unresolved_segments() == 0 => {
let res = path_res.base_res();
check_consistency(self, &path, path_span, kind, initial_res, res);
}
path_res @ PathResult::NonModule(..) | path_res @ PathResult::Failed { .. } => {
let (span, label) = if let PathResult::Failed { span, label, .. } = path_res {
(span, label)
} else {
(
path_span,
format!(
"partially resolved path in {} {}",
kind.article(),
kind.descr()
),
)
};
self.report_error(
span,
ResolutionError::FailedToResolve { label, suggestion: None },
);
}
PathResult::Module(..) | PathResult::Indeterminate => unreachable!(),
}
}
let macro_resolutions = mem::take(&mut self.single_segment_macro_resolutions);
for (ident, kind, parent_scope, initial_binding) in macro_resolutions {
match self.early_resolve_ident_in_lexical_scope(
ident,
ScopeSet::Macro(kind),
&parent_scope,
true,
true,
ident.span,
) {
Ok(binding) => {
let initial_res = initial_binding.map(|initial_binding| {
self.record_use(ident, MacroNS, initial_binding, false);
initial_binding.res()
});
let res = binding.res();
let seg = Segment::from_ident(ident);
check_consistency(self, &[seg], ident.span, kind, initial_res, res);
if res == Res::NonMacroAttr(NonMacroAttrKind::DeriveHelperCompat) {
self.lint_buffer.buffer_lint_with_diagnostic(
LEGACY_DERIVE_HELPERS,
self.lint_node_id(parent_scope.expansion),
ident.span,
"derive helper attribute is used before it is introduced",
BuiltinLintDiagnostics::LegacyDeriveHelpers(binding.span),
);
}
}
Err(..) => {
let expected = kind.descr_expected();
let msg = format!("cannot find {} `{}` in this scope", expected, ident);
let mut err = self.session.struct_span_err(ident.span, &msg);
self.unresolved_macro_suggestions(&mut err, kind, &parent_scope, ident);
err.emit();
}
}
}
let builtin_attrs = mem::take(&mut self.builtin_attrs);
for (ident, parent_scope) in builtin_attrs {
let _ = self.early_resolve_ident_in_lexical_scope(
ident,
ScopeSet::Macro(MacroKind::Attr),
&parent_scope,
true,
true,
ident.span,
);
}
}
fn check_stability_and_deprecation(
&mut self,
ext: &SyntaxExtension,
path: &ast::Path,
node_id: NodeId,
) {
let span = path.span;
if let Some(stability) = &ext.stability {
if let StabilityLevel::Unstable { reason, issue, is_soft } = stability.level {
let feature = stability.feature;
if !self.active_features.contains(&feature) && !span.allows_unstable(feature) {
let lint_buffer = &mut self.lint_buffer;
let soft_handler =
|lint, span, msg: &_| lint_buffer.buffer_lint(lint, node_id, span, msg);
stability::report_unstable(
self.session,
feature,
reason,
issue,
is_soft,
span,
soft_handler,
);
}
}
}
if let Some(depr) = &ext.deprecation {
let path = pprust::path_to_string(&path);
let (message, lint) = stability::deprecation_message(depr, "macro", &path);
stability::early_report_deprecation(
&mut self.lint_buffer,
&message,
depr.suggestion,
lint,
span,
node_id,
);
}
}
fn prohibit_imported_non_macro_attrs(
&self,
binding: Option<&'a NameBinding<'a>>,
res: Option<Res>,
span: Span,
) {
if let Some(Res::NonMacroAttr(kind)) = res {
if kind != NonMacroAttrKind::Tool && binding.map_or(true, |b| b.is_import()) {
let msg =
format!("cannot use {} {} through an import", kind.article(), kind.descr());
let mut err = self.session.struct_span_err(span, &msg);
if let Some(binding) = binding {
err.span_note(binding.span, &format!("the {} imported here", kind.descr()));
}
err.emit();
}
}
}
crate fn check_reserved_macro_name(&mut self, ident: Ident, res: Res) {
// Reserve some names that are not quite covered by the general check
// performed on `Resolver::builtin_attrs`.
if ident.name == sym::cfg || ident.name == sym::cfg_attr {
let macro_kind = self.get_macro(res).map(|ext| ext.macro_kind());
if macro_kind.is_some() && sub_namespace_match(macro_kind, Some(MacroKind::Attr)) {
self.session.span_err(
ident.span,
&format!("name `{}` is reserved in attribute namespace", ident),
);
}
}
}
/// Compile the macro into a `SyntaxExtension` and possibly replace
    /// its expander with a pre-defined one for built-in macros.
crate fn compile_macro(&mut self, item: &ast::Item, edition: Edition) -> SyntaxExtension {
let mut result = compile_declarative_macro(
&self.session,
self.session.features_untracked(),
item,
edition,
);
if let Some(builtin_name) = result.builtin_name {
// The macro was marked with `#[rustc_builtin_macro]`.
if let Some(builtin_macro) = self.builtin_macros.get_mut(&builtin_name) {
// The macro is a built-in, replace its expander function
// while still taking everything else from the source code.
// If we already loaded this builtin macro, give a better error message than 'no such builtin macro'.
match mem::replace(builtin_macro, BuiltinMacroState::AlreadySeen(item.span)) {
BuiltinMacroState::NotYetSeen(ext) => result.kind = ext,
BuiltinMacroState::AlreadySeen(span) => {
struct_span_err!(
self.session,
item.span,
E0773,
"attempted to define built-in macro more than once"
)
.span_note(span, "previously defined here")
.emit();
}
}
} else {
let msg = format!("cannot find a built-in macro with name `{}`", item.ident);
self.session.span_err(item.span, &msg);
}
}
result
}
}
|
const MACRO_RULES = 1 << 0;
|
softplus_op_test.py
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Softplus and SoftplusGrad."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import nn_ops
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
class SoftplusTest(test.TestCase):
def _npSoftplus(self, np_features):
np_features = np.asarray(np_features)
zero = np.asarray(0).astype(np_features.dtype)
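    # softplus(x) = log(1 + exp(x)); np.logaddexp(zero, x) evaluates this identity
    # in a numerically stable way for large |x|.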
return np.logaddexp(zero, np_features)
def
|
(self, np_features, use_gpu=False):
np_softplus = self._npSoftplus(np_features)
with self.test_session(use_gpu=use_gpu):
softplus = nn_ops.softplus(np_features)
tf_softplus = softplus.eval()
self.assertAllCloseAccordingToType(np_softplus, tf_softplus)
self.assertTrue(np.all(tf_softplus > 0))
self.assertShapeEqual(np_softplus, softplus)
def testNumbers(self):
for t in [np.float16, np.float32, np.float64]:
self._testSoftplus(
np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
use_gpu=False)
self._testSoftplus(
np.array([[-9, 7, -5, 3, -1], [1, -3, 5, -7, 9]]).astype(t),
use_gpu=True)
log_eps = np.log(np.finfo(t).eps)
one = t(1)
ten = t(10)
self._testSoftplus(
[
log_eps, log_eps - one, log_eps + one, log_eps - ten,
log_eps + ten, -log_eps, -log_eps - one, -log_eps + one,
-log_eps - ten, -log_eps + ten
],
use_gpu=False)
self._testSoftplus(
[
log_eps, log_eps - one, log_eps + one, log_eps - ten,
            log_eps + ten, -log_eps, -log_eps - one, -log_eps + one,
-log_eps - ten, -log_eps + ten
],
use_gpu=True)
def testGradient(self):
with self.test_session():
x = constant_op.constant(
[-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9],
shape=[2, 5],
name="x")
y = nn_ops.softplus(x, name="softplus")
x_init = np.asarray(
[[-0.9, -0.7, -0.5, -0.3, -0.1], [0.1, 0.3, 0.5, 0.7, 0.9]],
dtype=np.float32,
order="F")
err = gradient_checker.compute_gradient_error(
x, [2, 5], y, [2, 5], x_init_value=x_init)
print("softplus (float) gradient err = ", err)
self.assertLess(err, 1e-4)
if __name__ == "__main__":
test.main()
|
_testSoftplus
|
cfat5005.py
|
"""
This file offers the methods to automatically retrieve the graph c-fat500-5.
The graph is automatically retrieved from the NetworkRepository repository.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-03 22:54:22.066913
The undirected graph c-fat500-5 has 500 nodes and 23191 unweighted edges,
of which none are self-loops. The graph is quite dense as it has a density
of 0.18590 and is connected, as it has a single component. The graph median
node degree is 92, the mean node degree is 92.76 and the node degree mode
is 92. The top 5 most central nodes are 499 (degree 95), 498 (degree 95),
483 (degree 95), 482 (degree 95) and 467 (degree 95).
References
---------------------
Please cite the following if you use the data:
@inproceedings{nr,
title = {The Network Data Repository with Interactive Graph Analytics and Visualization},
author={Ryan A. Rossi and Nesreen K. Ahmed},
booktitle = {AAAI},
url={http://networkrepository.com},
year={2015}
}
@misc{dimacs,
author={{DIMACS}},
title={DIMACS Challenge},
note={http://dimacs.rutgers.edu/Challenges/}}
@article{rossi2014coloring,
title={Coloring Large Complex Networks},
author={Ryan A. Rossi and Nesreen K. Ahmed},
booktitle={Social Network Analysis and Mining},
pages={1--51},
year={2014}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.networkrepository import CFat5005
# Then load the graph
graph = CFat5005()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
        # You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
        # Whether to show a loading bar.
verbose=True
)
|
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def CFat5005(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/networkrepository",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
"""Return new instance of the c-fat500-5 graph.
The graph is automatically retrieved from the NetworkRepository repository.
Parameters
-------------------
directed: bool = False,
        Whether to load the graph as directed or undirected.
By default false.
verbose: int = 2,
        Whether to show loading bars during the retrieval and building
of the graph.
cache_path: str = "graphs",
Where to store the downloaded graphs.
additional_graph_kwargs: Dict,
Additional graph kwargs.
Returns
-----------------------
    Instance of the c-fat500-5 graph.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-03 22:54:22.066913
The undirected graph c-fat500-5 has 500 nodes and 23191 unweighted edges,
of which none are self-loops. The graph is quite dense as it has a density
of 0.18590 and is connected, as it has a single component. The graph median
node degree is 92, the mean node degree is 92.76 and the node degree mode
is 92. The top 5 most central nodes are 499 (degree 95), 498 (degree 95),
483 (degree 95), 482 (degree 95) and 467 (degree 95).
References
---------------------
Please cite the following if you use the data:
@inproceedings{nr,
title = {The Network Data Repository with Interactive Graph Analytics and Visualization},
author={Ryan A. Rossi and Nesreen K. Ahmed},
booktitle = {AAAI},
url={http://networkrepository.com},
year={2015}
}
@misc{dimacs,
author={{DIMACS}},
title={DIMACS Challenge},
note={http://dimacs.rutgers.edu/Challenges/}}
@article{rossi2014coloring,
title={Coloring Large Complex Networks},
author={Ryan A. Rossi and Nesreen K. Ahmed},
booktitle={Social Network Analysis and Mining},
pages={1--51},
year={2014}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.networkrepository import CFat5005
# Then load the graph
graph = CFat5005()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
        # You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
        # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
return AutomaticallyRetrievedGraph(
graph_name="CFat5005",
dataset="networkrepository",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
|
# Remember that, if you need, you can enable the memory-time trade-offs:
|
lib.rs
|
#![allow(clippy::unused_unit, unused_qualifications, missing_debug_implementations)]
#![cfg_attr(not(feature = "std"), no_std)]
pub use pallet::*;
#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;
#[cfg(feature = "runtime-benchmarks")]
mod benchmarking;
mod types;
#[frame_support::pallet]
pub mod pallet {
pub use crate::types::{
DisputeResolver, FeeHandler, PaymentDetail, PaymentHandler, PaymentState,
};
use frame_support::{dispatch::DispatchResultWithPostInfo, pallet_prelude::*};
use frame_system::pallet_prelude::*;
use orml_traits::{MultiCurrency, MultiReservableCurrency};
use sp_runtime::Percent;
type BalanceOf<T> =
<<T as Config>::Asset as MultiCurrency<<T as frame_system::Config>::AccountId>>::Balance;
type AssetIdOf<T> =
<<T as Config>::Asset as MultiCurrency<<T as frame_system::Config>::AccountId>>::CurrencyId;
#[pallet::config]
pub trait Config: frame_system::Config {
/// Because this pallet emits events, it depends on the runtime's definition of an event.
type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;
        /// The type of assets this pallet can hold in payments
type Asset: MultiReservableCurrency<Self::AccountId>;
/// Dispute resolution account
type DisputeResolver: DisputeResolver<Self::AccountId>;
/// Fee handler trait
type FeeHandler: FeeHandler<Self::AccountId>;
        /// Incentive percentage - amount withheld from the sender
#[pallet::constant]
type IncentivePercentage: Get<Percent>;
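        // For example (sketch): with `IncentivePercentage` at 10%, creating a payment of 100
        // units additionally reserves 10 units (plus the fee) from the creator until release/cancel.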
}
#[pallet::pallet]
#[pallet::generate_store(pub(super) trait Store)]
pub struct Pallet<T>(_);
#[pallet::storage]
#[pallet::getter(fn rates)]
    /// Payments created by a user. A `StorageDoubleMap` is chosen since there is no use case for
    /// listing payments by provider/currency; a payment will only be referenced by its creator in
    /// any transaction of interest.
    /// The storage map keys are the creator and the recipient, which also ensures
    /// that for any (sender, recipient) pair only a single payment is active. The history of payments is not stored.
pub(super) type Payment<T: Config> = StorageDoubleMap<
_,
Blake2_128Concat,
T::AccountId, // payment creator
Blake2_128Concat,
T::AccountId, // payment recipient
PaymentDetail<AssetIdOf<T>, BalanceOf<T>, T::AccountId>,
>;
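    // (Sketch for the `Payment` storage above:) `Payment::<T>::get(creator, recipient)` yields the
    // single active `PaymentDetail` for that (creator, recipient) pair, or `None` if there is none.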
#[pallet::event]
#[pallet::generate_deposit(pub(super) fn deposit_event)]
pub enum Event<T: Config> {
/// A new payment has been created
PaymentCreated(T::AccountId, AssetIdOf<T>, BalanceOf<T>),
/// Payment amount released to the recipient
PaymentReleased(T::AccountId, T::AccountId),
/// Payment has been cancelled by the creator
PaymentCancelled(T::AccountId, T::AccountId),
}
#[pallet::error]
pub enum Error<T> {
/// The selected payment does not exist
InvalidPayment,
/// The selected payment cannot be released
PaymentAlreadyReleased,
/// The selected payment already exists and is in process
PaymentAlreadyInProcess,
/// Action permitted only for whitelisted users
InvalidAction,
}
#[pallet::hooks]
impl<T: Config> Hooks<BlockNumberFor<T>> for Pallet<T> {}
#[pallet::call]
impl<T: Config> Pallet<T> {
        /// This allows any user to create a new payment that releases only to the specified recipient.
/// The only action is to store the details of this payment in storage and reserve
/// the specified amount.
#[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
pub fn create(
origin: OriginFor<T>,
recipient: T::AccountId,
asset: AssetIdOf<T>,
amount: BalanceOf<T>,
) -> DispatchResultWithPostInfo {
let who = ensure_signed(origin)?;
<Self as PaymentHandler<T::AccountId, AssetIdOf<T>, BalanceOf<T>>>::create_payment(
who, recipient, asset, amount,
)?;
Ok(().into())
}
        /// Release any created payment; this will transfer the reserved amount from the
/// creator of the payment to the assigned recipient
#[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
pub fn release(origin: OriginFor<T>, to: T::AccountId) -> DispatchResultWithPostInfo {
let who = ensure_signed(origin)?;
<Self as PaymentHandler<T::AccountId, AssetIdOf<T>, BalanceOf<T>>>::release_payment(
who, to,
)?;
Ok(().into())
}
        /// Cancel a payment in created state. This will release the reserved amount back to the
        /// creator of the payment. This extrinsic can only be called by the recipient
        /// of the payment.
#[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
pub fn cancel(origin: OriginFor<T>, creator: T::AccountId) -> DispatchResultWithPostInfo
|
/// Allow admins to set state of a payment
/// This extrinsic is used to resolve disputes between the creator and
/// recipient of the payment.
#[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
pub fn resolve(
origin: OriginFor<T>,
from: T::AccountId,
recipient: T::AccountId,
new_state: PaymentState,
) -> DispatchResultWithPostInfo {
use PaymentState::*;
let who = ensure_signed(origin)?;
// ensure the caller is the assigned resolver
if let Some(payment) = Payment::<T>::get(from.clone(), recipient.clone()) {
ensure!(who == payment.resolver_account, Error::<T>::InvalidAction)
}
// try to update the payment to new state
match new_state {
Cancelled => {
<Self as PaymentHandler<T::AccountId, AssetIdOf<T>, BalanceOf<T>>>::cancel_payment(
from, recipient,
)
}
Released => <Self as PaymentHandler<
T::AccountId,
AssetIdOf<T>,
BalanceOf<T>,
>>::release_payment(from, recipient),
Created | NeedsReview => Err(Error::<T>::InvalidAction.into()),
}?;
Ok(().into())
}
}
impl<T: Config> PaymentHandler<T::AccountId, AssetIdOf<T>, BalanceOf<T>> for Pallet<T> {
/// The function will create a new payment. When a new payment is created, an amount + incentive
    /// is reserved from the payment creator. The incentive amount is reserved in the creator's account.
    /// The amount is transferred to the payment recipient but kept in a reserved state. Only when the release action
    /// is triggered is the amount released to the recipient and the incentive released to the creator.
fn create_payment(
from: T::AccountId,
recipient: T::AccountId,
asset: AssetIdOf<T>,
amount: BalanceOf<T>,
) -> DispatchResult {
Payment::<T>::try_mutate(
from.clone(),
recipient.clone(),
|maybe_payment| -> DispatchResult {
let incentive_amount = T::IncentivePercentage::get() * amount;
let (fee_recipient, fee_percent) = T::FeeHandler::apply_fees(&from, &recipient);
let fee_amount = fee_percent * amount;
let new_payment = Some(PaymentDetail {
asset,
amount,
incentive_amount,
state: PaymentState::Created,
resolver_account: T::DisputeResolver::get_origin(),
fee_detail: (fee_recipient, fee_amount),
});
match maybe_payment {
Some(x) => {
// do not overwrite an in-process payment!
// ensure the payment is not in created state, it should
// be in released/cancelled, in which case it can be overwritten
ensure!(
x.state != PaymentState::Created,
Error::<T>::PaymentAlreadyInProcess
);
// reserve the incentive + fees amount from the payment creator
T::Asset::reserve(asset, &from, incentive_amount + fee_amount)?;
// transfer amount to recipient
T::Asset::transfer(asset, &from, &recipient, amount)?;
                        // reserve the amount in the recipient's account
T::Asset::reserve(asset, &recipient, amount)?;
*maybe_payment = new_payment
},
None => {
                        // reserve the incentive + fee amount from the payment creator
T::Asset::reserve(asset, &from, incentive_amount + fee_amount)?;
// transfer amount to recipient
T::Asset::transfer(asset, &from, &recipient, amount)?;
                        // reserve the amount in the recipient's account
T::Asset::reserve(asset, &recipient, amount)?;
*maybe_payment = new_payment
},
}
Self::deposit_event(Event::PaymentCreated(from, asset, amount));
Ok(())
},
)
}
/// The function will release an existing payment, a release action will remove the reserve
/// placed on both the incentive amount and the transfer amount and it will be "released" to the respective account.
fn release_payment(from: T::AccountId, to: T::AccountId) -> DispatchResult {
use PaymentState::*;
        // update the payment detail in storage
Payment::<T>::try_mutate(
from.clone(),
to.clone(),
|maybe_payment| -> DispatchResult {
let payment = maybe_payment.as_mut().ok_or(Error::<T>::InvalidPayment)?;
// ensure the payment is in created state
ensure!(payment.state == Created, Error::<T>::PaymentAlreadyReleased);
// unreserve the incentive amount back to the creator
T::Asset::unreserve(
payment.asset,
&from,
payment.incentive_amount + payment.fee_detail.1,
);
                // unreserve the amount to the recipient
T::Asset::unreserve(payment.asset, &to, payment.amount);
// transfer fee amount to marketplace
T::Asset::transfer(
payment.asset,
&from, // fee is paid by payment creator
&payment.fee_detail.0, // account of fee recipient
payment.fee_detail.1, // amount of fee
)?;
payment.state = PaymentState::Released;
Ok(())
},
)?;
Self::deposit_event(Event::PaymentReleased(from, to));
Ok(())
}
    /// This function allows the user to cancel a payment. When cancelling a payment, the steps are:
/// - Unreserve the incentive amount
/// - Unreserve the payment amount
    /// - Transfer amount from recipient to sender
fn cancel_payment(from: T::AccountId, to: T::AccountId) -> DispatchResult {
        // update the payment detail in storage
Payment::<T>::try_mutate(
from.clone(),
to.clone(),
|maybe_payment| -> DispatchResult {
let payment = maybe_payment.take().ok_or(Error::<T>::InvalidPayment)?;
// ensure the payment is in created state
ensure!(
payment.state == PaymentState::Created,
Error::<T>::PaymentAlreadyReleased
);
// unreserve the incentive amount from the owner account
T::Asset::unreserve(
payment.asset,
&from,
payment.incentive_amount + payment.fee_detail.1,
);
T::Asset::unreserve(payment.asset, &to, payment.amount);
// transfer amount to creator
match T::Asset::transfer(payment.asset, &to, &from, payment.amount) {
Ok(_) =>
*maybe_payment =
Some(PaymentDetail { state: PaymentState::Cancelled, ..payment }),
Err(_) =>
*maybe_payment =
Some(PaymentDetail { state: PaymentState::NeedsReview, ..payment }),
}
Ok(())
},
)?;
        Self::deposit_event(Event::PaymentCancelled(from, to));
Ok(())
}
fn get_payment_details(
from: T::AccountId,
to: T::AccountId,
) -> Option<PaymentDetail<AssetIdOf<T>, BalanceOf<T>, T::AccountId>> {
Payment::<T>::get(from, to)
}
}
}
|
{
let who = ensure_signed(origin)?;
<Self as PaymentHandler<T::AccountId, AssetIdOf<T>, BalanceOf<T>>>::cancel_payment(
            creator, who, // the caller must be the payment recipient; the creator cannot cancel
)?;
Ok(().into())
}
|