file_name | prefix | suffix | middle
---|---|---|---
stringlengths 3–137 | stringlengths 0–918k | stringlengths 0–962k | stringlengths 0–812k
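Each row splits one source file into a `prefix`, a `suffix`, and the held-out `middle`: the usual fill-in-the-middle (FIM) layout. A minimal sketch of loading and reassembling a row, assuming the dump is loadable with the `datasets` library (the dataset id below is a placeholder, not a real path):

```python
from datasets import load_dataset  # pip install datasets

# Placeholder id: substitute the actual path of this dump.
ds = load_dataset("user/fim-code-dump", split="train")

row = ds[0]
# Concatenating prefix + middle + suffix recovers the original file text.
source = row["prefix"] + row["middle"] + row["suffix"]
print(row["file_name"], len(source))
```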
LanguageENG.ts
|
// eslint-disable-next-line no-unused-vars
import { ILanguage } from './ILanguage';
|
titlePT: {
text: 'Title in Portuguese',
title: 'The title of the report in Portuguese',
required: 'This field is required',
min: '5 character minimum',
max: '95 characters maximum',
},
titleENG: {
text: 'Title in English',
title: 'The title of the report in English',
required: 'This field is required',
min: '5 character minimum',
max: '95 characters maximum',
},
textPT: {
text: 'Text in Portuguese',
title: 'The text of the report in Portuguese',
required: 'This field is required',
min: '5 character minimum',
max: '950 characters maximum',
},
textENG: {
text: 'Text in English',
title: 'The text of the report in English',
required: 'This field is required',
min: '5 character minimum',
max: '950 characters maximum',
},
button: {
text: 'Save changes',
title: 'Click here to save changes',
},
created: {
name: 'Last changes by',
date: 'in',
},
feedback: {
success: 'Successfully saved your data',
failure: 'Sorry but no data was saved',
},
} as ILanguage;
|
export default {
title: 'Static data from the introduction of the report',
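The `ILanguage` import above is not included in this dump. A minimal sketch of what that interface could look like, inferred only from the fields used in this file (hypothetical; the real `./ILanguage` may differ):

```typescript
// Hypothetical shape inferred from usage above; the real ./ILanguage may differ.
interface IField {
  text: string;
  title: string;
  required: string;
  min: string;
  max: string;
}

export interface ILanguage {
  title: string;
  titlePT: IField;
  titleENG: IField;
  textPT: IField;
  textENG: IField;
  button: { text: string; title: string };
  created: { name: string; date: string };
  feedback: { success: string; failure: string };
}
```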
|
parallel-codegen-closures.rs
|
// run-pass
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(stable_features)]
// Tests parallel codegen - this can fail if the symbol for the anonymous
// closure in `sum` pollutes the second codegen unit from the first.
// compile-flags: -C codegen_units=2
#![feature(iter_arith)]
mod a {
fn foo()
|
}
mod b {
fn bar() {
let x = ["a", "bob", "c"];
let len: usize = x.iter().map(|s| s.len()).sum();
}
}
fn main() {
}
|
{
let x = ["a", "bob", "c"];
let len: usize = x.iter().map(|s| s.len()).sum();
}
|
api.go
|
/*
Copyright 2020 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v3
import (
"bufio"
"errors"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strings"
"github.com/spf13/pflag"
"sigs.k8s.io/kubebuilder/v3/pkg/config"
"sigs.k8s.io/kubebuilder/v3/pkg/model"
"sigs.k8s.io/kubebuilder/v3/pkg/model/resource"
"sigs.k8s.io/kubebuilder/v3/pkg/plugin"
goPlugin "sigs.k8s.io/kubebuilder/v3/pkg/plugins/golang"
"sigs.k8s.io/kubebuilder/v3/pkg/plugins/golang/v3/scaffolds"
"sigs.k8s.io/kubebuilder/v3/pkg/plugins/internal/cmdutil"
"sigs.k8s.io/kubebuilder/v3/pkg/plugins/internal/util"
"sigs.k8s.io/kubebuilder/v3/plugins/addon"
)
const (
// KbDeclarativePatternVersion is the sigs.k8s.io/kubebuilder-declarative-pattern version
// (used only to gen api with --pattern=addon)
// TODO: remove this when a better solution for using addons is implemented.
KbDeclarativePatternVersion = "b84d99da021778217217885dd9582ed3cc879ebe"
// defaultCRDVersion is the default CRD API version to scaffold.
defaultCRDVersion = "v1"
)
// DefaultMainPath is default file path of main.go
const DefaultMainPath = "main.go"
type createAPISubcommand struct {
config config.Config
// pattern indicates that we should use a plugin to build according to a pattern
pattern string
options *goPlugin.Options
resource resource.Resource
// Check if we have to scaffold resource and/or controller
resourceFlag *pflag.Flag
controllerFlag *pflag.Flag
// force indicates that the resource should be created even if it already exists
force bool
// runMake indicates whether to run make or not after scaffolding APIs
runMake bool
}
var (
_ plugin.CreateAPISubcommand = &createAPISubcommand{}
_ cmdutil.RunOptions = &createAPISubcommand{}
)
func (p createAPISubcommand) UpdateContext(ctx *plugin.Context) {
ctx.Description = `Scaffold a Kubernetes API by creating a Resource definition and / or a Controller.
create resource will prompt the user whether it should scaffold the Resource and / or Controller. To only
scaffold a Controller for an existing Resource, select "n" for Resource. To only define
the schema for a Resource without writing a Controller, select "n" for Controller.
After the scaffold is written, api will run make on the project.
`
ctx.Examples = fmt.Sprintf(` # Create a frigates API with Group: ship, Version: v1beta1 and Kind: Frigate
%s create api --group ship --version v1beta1 --kind Frigate
# Edit the API Scheme
nano api/v1beta1/frigate_types.go
# Edit the Controller
nano controllers/frigate/frigate_controller.go
# Edit the Controller Test
nano controllers/frigate/frigate_controller_test.go
# Install CRDs into the Kubernetes cluster using kubectl apply
make install
# Regenerate code and run against the Kubernetes cluster configured by ~/.kube/config
make run
`,
ctx.CommandName)
}
func (p *createAPISubcommand) BindFlags(fs *pflag.FlagSet) {
fs.BoolVar(&p.runMake, "make", true, "if true, run `make generate` after generating files")
// TODO: remove this when a better solution for using addons is implemented.
if os.Getenv("KUBEBUILDER_ENABLE_PLUGINS") != "" {
fs.StringVar(&p.pattern, "pattern", "",
"generates an API following an extension pattern (addon)")
}
fs.BoolVar(&p.force, "force", false,
"attempt to create resource even if it already exists")
p.options = &goPlugin.Options{}
fs.StringVar(&p.options.Group, "group", "", "resource Group")
p.options.Domain = p.config.GetDomain()
fs.StringVar(&p.options.Version, "version", "", "resource Version")
fs.StringVar(&p.options.Kind, "kind", "", "resource Kind")
fs.StringVar(&p.options.Plural, "plural", "", "resource irregular plural form")
fs.BoolVar(&p.options.DoAPI, "resource", true,
"if set, generate the resource without prompting the user")
p.resourceFlag = fs.Lookup("resource")
fs.StringVar(&p.options.CRDVersion, "crd-version", defaultCRDVersion,
"version of CustomResourceDefinition to scaffold. Options: [v1, v1beta1]")
fs.BoolVar(&p.options.Namespaced, "namespaced", true, "resource is namespaced")
fs.BoolVar(&p.options.DoController, "controller", true,
"if set, generate the controller without prompting the user")
p.controllerFlag = fs.Lookup("controller")
}
|
p.config = c
}
func (p *createAPISubcommand) Run() error {
// TODO: re-evaluate whether y/n input still makes sense. We should probably always
// scaffold the resource and controller.
reader := bufio.NewReader(os.Stdin)
if !p.resourceFlag.Changed {
fmt.Println("Create Resource [y/n]")
p.options.DoAPI = util.YesNo(reader)
}
if !p.controllerFlag.Changed {
fmt.Println("Create Controller [y/n]")
p.options.DoController = util.YesNo(reader)
}
// Create the resource from the options
p.resource = p.options.NewResource(p.config)
return cmdutil.Run(p)
}
func (p *createAPISubcommand) Validate() error {
if err := p.options.Validate(); err != nil {
return err
}
if err := p.resource.Validate(); err != nil {
return err
}
// check if main.go is present in the root directory
if _, err := os.Stat(DefaultMainPath); os.IsNotExist(err) {
return fmt.Errorf("%s file should present in the root directory", DefaultMainPath)
}
// In case we want to scaffold a resource API we need to do some checks
if p.options.DoAPI {
// Check that resource doesn't have the API scaffolded or flag force was set
if res, err := p.config.GetResource(p.resource.GVK); err == nil && res.HasAPI() && !p.force {
return errors.New("API resource already exists")
}
// Check that the provided group can be added to the project
if !p.config.IsMultiGroup() && p.config.ResourcesLength() != 0 && !p.config.HasGroup(p.resource.Group) {
return fmt.Errorf("multiple groups are not allowed by default, " +
"to enable multi-group visit kubebuilder.io/migration/multi-group.html")
}
// Check CRDVersion against all other CRDVersions in p.config for compatibility.
if !p.config.IsCRDVersionCompatible(p.resource.API.CRDVersion) {
return fmt.Errorf("only one CRD version can be used for all resources, cannot add %q",
p.resource.API.CRDVersion)
}
}
return nil
}
func (p *createAPISubcommand) GetScaffolder() (cmdutil.Scaffolder, error) {
// Load the boilerplate
bp, err := ioutil.ReadFile(filepath.Join("hack", "boilerplate.go.txt")) // nolint:gosec
if err != nil {
return nil, fmt.Errorf("unable to load boilerplate: %v", err)
}
// Load the requested plugins
plugins := make([]model.Plugin, 0)
switch strings.ToLower(p.pattern) {
case "":
// Default pattern
case "addon":
plugins = append(plugins, &addon.Plugin{})
default:
return nil, fmt.Errorf("unknown pattern %q", p.pattern)
}
return scaffolds.NewAPIScaffolder(p.config, string(bp), p.resource, p.force, plugins), nil
}
func (p *createAPISubcommand) PostScaffold() error {
// Load the requested plugins
switch strings.ToLower(p.pattern) {
case "":
// Default pattern
case "addon":
// Ensure that we are pinning sigs.k8s.io/kubebuilder-declarative-pattern version
// TODO: find a better way to inject this version (e.g. tools.go).
err := util.RunCmd("Get kubebuilder-declarative-pattern dependency", "go", "get",
"sigs.k8s.io/kubebuilder-declarative-pattern@"+KbDeclarativePatternVersion)
if err != nil {
return err
}
default:
return fmt.Errorf("unknown pattern %q", p.pattern)
}
if p.runMake { // TODO: check if API was scaffolded
return util.RunCmd("Running make", "make", "generate")
}
return nil
}
|
func (p *createAPISubcommand) InjectConfig(c config.Config) {
|
env_test.go
|
package env_test
import (
"errors"
"fmt"
"net/http"
"os"
"reflect"
"strconv"
"testing"
"time"
"github.com/magicalbanana/env"
"github.com/stretchr/testify/assert"
)
type Config struct {
Some string `env:"somevar"`
Other bool `env:"othervar"`
Port int `env:"PORT"`
UintVal uint `env:"UINTVAL"`
NotAnEnv string
DatabaseURL string `env:"DATABASE_URL" envDefault:"postgres://localhost:5432/db"`
Strings []string `env:"STRINGS"`
SepStrings []string `env:"SEPSTRINGS" envSeparator:":"`
Numbers []int `env:"NUMBERS"`
Numbers64 []int64 `env:"NUMBERS64"`
Bools []bool `env:"BOOLS"`
Duration time.Duration `env:"DURATION"`
Float32 float32 `env:"FLOAT32"`
Float64 float64 `env:"FLOAT64"`
Float32s []float32 `env:"FLOAT32S"`
Float64s []float64 `env:"FLOAT64S"`
}
type ParentStruct struct {
InnerStruct *InnerStruct
unexported *InnerStruct
Ignored *http.Client
}
type InnerStruct struct {
Inner string `env:"innervar"`
}
func TestParsesEnv(t *testing.T) {
os.Setenv("somevar", "somevalue")
os.Setenv("othervar", "true")
os.Setenv("PORT", "8080")
os.Setenv("STRINGS", "string1,string2,string3")
os.Setenv("SEPSTRINGS", "string1:string2:string3")
os.Setenv("NUMBERS", "1,2,3,4")
os.Setenv("NUMBERS64", "1,2,2147483640,-2147483640")
os.Setenv("BOOLS", "t,TRUE,0,1")
os.Setenv("DURATION", "1s")
os.Setenv("FLOAT32", "3.40282346638528859811704183484516925440e+38")
os.Setenv("FLOAT64", "1.797693134862315708145274237317043567981e+308")
os.Setenv("FLOAT32S", "1.0,2.0,3.0")
os.Setenv("FLOAT64S", "1.0,2.0,3.0")
os.Setenv("UINTVAL", "44")
defer os.Clearenv()
cfg := Config{}
assert.NoError(t, env.Parse(&cfg))
assert.Equal(t, "somevalue", cfg.Some)
assert.Equal(t, true, cfg.Other)
assert.Equal(t, 8080, cfg.Port)
assert.Equal(t, uint(44), cfg.UintVal)
assert.Equal(t, []string{"string1", "string2", "string3"}, cfg.Strings)
assert.Equal(t, []string{"string1", "string2", "string3"}, cfg.SepStrings)
assert.Equal(t, []int{1, 2, 3, 4}, cfg.Numbers)
assert.Equal(t, []int64{1, 2, 2147483640, -2147483640}, cfg.Numbers64)
assert.Equal(t, []bool{true, true, false, true}, cfg.Bools)
d, _ := time.ParseDuration("1s")
assert.Equal(t, d, cfg.Duration)
f32 := float32(3.40282346638528859811704183484516925440e+38)
assert.Equal(t, f32, cfg.Float32)
f64 := float64(1.797693134862315708145274237317043567981e+308)
assert.Equal(t, f64, cfg.Float64)
assert.Equal(t, []float32{float32(1.0), float32(2.0), float32(3.0)}, cfg.Float32s)
assert.Equal(t, []float64{float64(1.0), float64(2.0), float64(3.0)}, cfg.Float64s)
}
func TestParsesEnvInner(t *testing.T) {
os.Setenv("innervar", "someinnervalue")
defer os.Clearenv()
cfg := ParentStruct{
InnerStruct: &InnerStruct{},
unexported: &InnerStruct{},
}
assert.NoError(t, env.Parse(&cfg))
assert.Equal(t, "someinnervalue", cfg.InnerStruct.Inner)
}
func TestParsesEnvInnerNil(t *testing.T) {
os.Setenv("innervar", "someinnervalue")
defer os.Clearenv()
cfg := ParentStruct{}
assert.NoError(t, env.Parse(&cfg))
}
func TestEmptyVars(t *testing.T) {
cfg := Config{}
assert.NoError(t, env.Parse(&cfg))
assert.Equal(t, "", cfg.Some)
assert.Equal(t, false, cfg.Other)
assert.Equal(t, 0, cfg.Port)
assert.Equal(t, uint(0), cfg.UintVal)
assert.Equal(t, 0, len(cfg.Strings))
assert.Equal(t, 0, len(cfg.SepStrings))
assert.Equal(t, 0, len(cfg.Numbers))
assert.Equal(t, 0, len(cfg.Bools))
}
func TestPassAnInvalidPtr(t *testing.T) {
var thisShouldBreak int
assert.Error(t, env.Parse(&thisShouldBreak))
}
func TestPassReference(t *testing.T) {
cfg := Config{}
assert.Error(t, env.Parse(cfg))
}
func TestInvalidBool(t *testing.T) {
os.Setenv("othervar", "should-be-a-bool")
defer os.Clearenv()
cfg := Config{}
assert.Error(t, env.Parse(&cfg))
}
func TestInvalidInt(t *testing.T) {
os.Setenv("PORT", "should-be-an-int")
defer os.Clearenv()
cfg := Config{}
assert.Error(t, env.Parse(&cfg))
}
func TestInvalidUint(t *testing.T) {
os.Setenv("UINTVAL", "-44")
defer os.Clearenv()
cfg := Config{}
assert.Error(t, env.Parse(&cfg))
}
func TestInvalidBoolsSlice(t *testing.T) {
type config struct {
BadBools []bool `env:"BADBOOLS"`
}
os.Setenv("BADBOOLS", "t,f,TRUE,faaaalse")
cfg := &config{}
assert.Error(t, env.Parse(cfg))
}
func TestInvalidDuration(t *testing.T) {
os.Setenv("DURATION", "should-be-a-valid-duration")
defer os.Clearenv()
cfg := Config{}
assert.Error(t, env.Parse(&cfg))
}
func TestParsesDefaultConfig(t *testing.T) {
cfg := Config{}
assert.NoError(t, env.Parse(&cfg))
assert.Equal(t, "postgres://localhost:5432/db", cfg.DatabaseURL)
}
func TestParseStructWithoutEnvTag(t *testing.T) {
cfg := Config{}
assert.NoError(t, env.Parse(&cfg))
assert.Empty(t, cfg.NotAnEnv)
}
func TestParseStructWithInvalidFieldKind(t *testing.T) {
type config struct {
WontWorkByte byte `env:"BLAH"`
}
os.Setenv("BLAH", "a")
cfg := config{}
assert.Error(t, env.Parse(&cfg))
}
func TestUnsupportedSliceType(t *testing.T) {
type config struct {
WontWork []map[int]int `env:"WONTWORK"`
}
os.Setenv("WONTWORK", "1,2,3")
defer os.Clearenv()
cfg := &config{}
assert.Error(t, env.Parse(cfg))
}
func TestBadSeparator(t *testing.T) {
type config struct {
WontWork []int `env:"WONTWORK" envSeparator:":"`
}
cfg := &config{}
os.Setenv("WONTWORK", "1,2,3,4")
defer os.Clearenv()
assert.Error(t, env.Parse(cfg))
}
func TestNoErrorRequiredSet(t *testing.T) {
type config struct {
IsRequired string `env:"IS_REQUIRED,required"`
}
cfg := &config{}
os.Setenv("IS_REQUIRED", "val")
defer os.Clearenv()
assert.NoError(t, env.Parse(cfg))
assert.Equal(t, "val", cfg.IsRequired)
}
func TestErrorRequiredNotSet(t *testing.T) {
type config struct {
IsRequired string `env:"IS_REQUIRED,required"`
}
cfg := &config{}
assert.Error(t, env.Parse(cfg))
}
func TestCustomParser(t *testing.T) {
type foo struct {
name string
}
type config struct {
Var foo `env:"VAR"`
}
os.Setenv("VAR", "test")
customParserFunc := func(v string) (interface{}, error) {
return foo{name: v}, nil
}
cfg := &config{}
err := env.ParseWithFuncs(cfg, map[reflect.Type]env.ParserFunc{
reflect.TypeOf(foo{}): customParserFunc,
})
assert.NoError(t, err)
assert.Equal(t, cfg.Var.name, "test")
}
func TestParseWithFuncsNoPtr(t *testing.T) {
type foo struct{}
err := env.ParseWithFuncs(foo{}, nil)
assert.Error(t, err)
assert.Equal(t, err, env.ErrNotAStructPtr)
}
func TestParseWithFuncsInvalidType(t *testing.T) {
var c int
err := env.ParseWithFuncs(&c, nil)
assert.Error(t, err)
assert.Equal(t, err, env.ErrNotAStructPtr)
}
func TestCustomParserError(t *testing.T) {
type foo struct {
name string
}
type config struct {
Var foo `env:"VAR"`
}
os.Setenv("VAR", "test")
customParserFunc := func(v string) (interface{}, error) {
return nil, errors.New("something broke")
}
cfg := &config{}
err := env.ParseWithFuncs(cfg, map[reflect.Type]env.ParserFunc{
reflect.TypeOf(foo{}): customParserFunc,
})
assert.Empty(t, cfg.Var.name, "Var.name should not be filled out when parsing fails")
assert.Error(t, err)
assert.Equal(t, err.Error(), "Custom parser error: something broke")
}
func TestCustomParserBasicType(t *testing.T) {
type ConstT int32
type config struct {
Const ConstT `env:"CONST_VAL"`
}
exp := ConstT(123)
os.Setenv("CONST_VAL", fmt.Sprintf("%d", exp))
customParserFunc := func(v string) (interface{}, error) {
i, err := strconv.Atoi(v)
if err != nil {
return nil, err
}
r := ConstT(i)
return r, nil
}
cfg := &config{}
err := env.ParseWithFuncs(cfg, map[reflect.Type]env.ParserFunc{
reflect.TypeOf(ConstT(0)): customParserFunc,
})
assert.NoError(t, err)
assert.Equal(t, exp, cfg.Const)
}
func TestCustomParserBasicInvalid(t *testing.T) {
type ConstT int32
type config struct {
Const ConstT `env:"CONST_VAL"`
}
os.Setenv("CONST_VAL", "foobar")
expErr := errors.New("Random error")
customParserFunc := func(_ string) (interface{}, error) {
return nil, expErr
}
cfg := &config{}
err := env.ParseWithFuncs(cfg, map[reflect.Type]env.ParserFunc{
reflect.TypeOf(ConstT(0)): customParserFunc,
})
assert.Empty(t, cfg.Const)
assert.Error(t, err)
assert.Equal(t, expErr, err)
}
func TestCustomParserBasicUnsupported(t *testing.T) {
type ConstT int32
type config struct {
Const ConstT `env:"CONST_VAL"`
}
exp := ConstT(123)
os.Setenv("CONST_VAL", fmt.Sprintf("%d", exp))
cfg := &config{}
err := env.Parse(cfg)
assert.Zero(t, cfg.Const)
assert.Error(t, err)
assert.Equal(t, env.ErrUnsupportedType, err)
}
func TestUnsupportedStructType(t *testing.T) {
type config struct {
Foo http.Client `env:"FOO"`
}
os.Setenv("FOO", "foo")
cfg := &config{}
err := env.Parse(cfg)
assert.Error(t, err)
assert.Equal(t, env.ErrUnsupportedType, err)
}
func TestEmptyOption(t *testing.T) {
type config struct {
Var string `env:"VAR,"`
}
cfg := &config{}
os.Setenv("VAR", "val")
defer os.Clearenv()
assert.NoError(t, env.Parse(cfg))
assert.Equal(t, "val", cfg.Var)
}
func
|
(t *testing.T) {
type config struct {
Var string `env:"VAR,not_supported!"`
}
cfg := &config{}
assert.Error(t, env.Parse(cfg))
}
func ExampleParse() {
type config struct {
Home string `env:"HOME"`
Port int `env:"PORT" envDefault:"3000"`
IsProduction bool `env:"PRODUCTION"`
}
os.Setenv("HOME", "/tmp/fakehome")
cfg := config{}
env.Parse(&cfg)
fmt.Println(cfg)
// Output: {/tmp/fakehome 3000 false}
}
func ExampleParseRequiredField() {
type config struct {
Home string `env:"HOME"`
Port int `env:"PORT" envDefault:"3000"`
IsProduction bool `env:"PRODUCTION"`
SecretKey string `env:"SECRET_KEY,required"`
}
os.Setenv("HOME", "/tmp/fakehome")
cfg := config{}
err := env.Parse(&cfg)
fmt.Println(err)
// Output: Required environment variable SECRET_KEY is not set
}
func ExampleParseMultipleOptions() {
type config struct {
Home string `env:"HOME"`
Port int `env:"PORT" envDefault:"3000"`
IsProduction bool `env:"PRODUCTION"`
SecretKey string `env:"SECRET_KEY,required,option1"`
}
os.Setenv("HOME", "/tmp/fakehome")
cfg := config{}
err := env.Parse(&cfg)
fmt.Println(err)
// Output: Env tag option option1 not supported.
}
|
TestErrorOptionNotRecognized
|
aws-lambda-alias_provisionedconcurrencyconfiguration.go
|
package lambda
import (
"github.com/awslabs/goformation/v4/cloudformation/policies"
)
// Alias_ProvisionedConcurrencyConfiguration AWS CloudFormation Resource (AWS::Lambda::Alias.ProvisionedConcurrencyConfiguration)
// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lambda-alias-provisionedconcurrencyconfiguration.html
type Alias_ProvisionedConcurrencyConfiguration struct {
// ProvisionedConcurrentExecutions AWS CloudFormation Property
// Required: true
// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lambda-alias-provisionedconcurrencyconfiguration.html#cfn-lambda-alias-provisionedconcurrencyconfiguration-provisionedconcurrentexecutions
ProvisionedConcurrentExecutions int `json:"ProvisionedConcurrentExecutions"`
// AWSCloudFormationDeletionPolicy represents a CloudFormation DeletionPolicy
AWSCloudFormationDeletionPolicy policies.DeletionPolicy `json:"-"`
|
AWSCloudFormationMetadata map[string]interface{} `json:"-"`
}
// AWSCloudFormationType returns the AWS CloudFormation resource type
func (r *Alias_ProvisionedConcurrencyConfiguration) AWSCloudFormationType() string {
return "AWS::Lambda::Alias.ProvisionedConcurrencyConfiguration"
}
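A minimal sketch of using this type when building a template with goformation. The `lambda.Alias` wrapper fields are assumptions based on the CloudFormation `AWS::Lambda::Alias` schema, not something shown in this file:

```go
package main

import (
	"fmt"

	"github.com/awslabs/goformation/v4/cloudformation"
	"github.com/awslabs/goformation/v4/cloudformation/lambda"
)

func main() {
	template := cloudformation.NewTemplate()
	// Field names on lambda.Alias are assumed from the CloudFormation schema.
	template.Resources["LiveAlias"] = &lambda.Alias{
		FunctionName:    "my-function",
		FunctionVersion: "1",
		Name:            "live",
		ProvisionedConcurrencyConfig: &lambda.Alias_ProvisionedConcurrencyConfiguration{
			ProvisionedConcurrentExecutions: 5,
		},
	}
	out, err := template.JSON()
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
```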
|
// AWSCloudFormationDependsOn stores the logical ID of the resources to be created before this resource
AWSCloudFormationDependsOn []string `json:"-"`
// AWSCloudFormationMetadata stores structured data associated with this resource
|
links.js
|
export default [
{
path: "/",
text: "home",
},
{
path: "/books",
text: "books",
},
{
path: "/blog",
text: "blog",
},
{
path: "/contact",
text: "contact",
},
|
]
|
|
app.module.ts
|
import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { AppComponent } from './app.component';
import { UiButtonDirective } from './ui-button.directive';
|
@NgModule({
declarations: [
AppComponent,
UiButtonDirective,
WhenDirective
],
imports: [
BrowserModule
],
providers: [],
bootstrap: [AppComponent]
})
export class AppModule { }
|
import { WhenDirective } from './when.directive';
|
virtual_network_gateway_paged.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
|
class VirtualNetworkGatewayPaged(Paged):
"""
A paging container for iterating over a list of :class:`VirtualNetworkGateway <azure.mgmt.network.v2017_09_01.models.VirtualNetworkGateway>` objects
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'current_page': {'key': 'value', 'type': '[VirtualNetworkGateway]'}
}
def __init__(self, *args, **kwargs):
super(VirtualNetworkGatewayPaged, self).__init__(*args, **kwargs)
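A minimal sketch of how this paged container is typically consumed. The client setup is an assumption; only the iteration behavior over `VirtualNetworkGatewayPaged` is implied by this file:

```python
# Assumed client wiring; only the paged iteration below is implied by this file.
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.network import NetworkManagementClient

credentials = ServicePrincipalCredentials(client_id="...", secret="...", tenant="...")
client = NetworkManagementClient(credentials, "<subscription-id>")

# list() returns a VirtualNetworkGatewayPaged; iterating it follows next_link
# transparently, yielding one VirtualNetworkGateway model at a time.
for gateway in client.virtual_network_gateways.list("<resource-group>"):
    print(gateway.name)
```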
|
from msrest.paging import Paged
|
deprecated_cmd.go
|
// Copyright 2018 Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// DEPRECATED - These commands are deprecated and will be removed in future releases.
package cmd
import (
"errors"
"fmt"
"io"
"io/ioutil"
"net/url"
"os"
"sort"
"strings"
"text/tabwriter"
"time"
"github.com/ghodss/yaml"
"github.com/hashicorp/go-multierror"
"github.com/spf13/cobra"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
kubeSchema "k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/runtime/serializer"
"k8s.io/client-go/discovery"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
clientcmdapi "k8s.io/client-go/tools/clientcmd/api"
"istio.io/api/networking/v1alpha3"
"istio.io/pkg/log"
"istio.io/istio/galley/pkg/config/schema/collection"
"istio.io/istio/galley/pkg/config/schema/collections"
"istio.io/istio/galley/pkg/config/util/pilotadapter"
"istio.io/istio/istioctl/pkg/util/handlers"
"istio.io/istio/pilot/pkg/config/kube/crd"
"istio.io/istio/pilot/pkg/config/kube/crd/controller"
"istio.io/istio/pilot/pkg/model"
kubecfg "istio.io/istio/pkg/kube"
)
const (
// Headings for short format listing of unknown types
unknownShortOutputHeading = "NAME\tKIND\tNAMESPACE\tAGE"
)
var (
istioContext string
istioAPIServer string
getAllNamespaces bool
// Create a model.ConfigStore (or sortedConfigStore)
clientFactory = newClient
// sortWeight defines the output order for "get all". We show the V3 types first.
sortWeight = map[string]int{
collections.IstioNetworkingV1Alpha3Gateways.Resource().Kind(): 10,
collections.IstioNetworkingV1Alpha3Virtualservices.Resource().Kind(): 5,
collections.IstioNetworkingV1Alpha3Destinationrules.Resource().Kind(): 3,
collections.IstioNetworkingV1Alpha3Serviceentries.Resource().Kind(): 1,
}
// mustList tracks which Istio types we SHOULD NOT silently ignore if we can't list.
// The user wants reasonable error messages when doing `get all` against a different
// server version.
mustList = map[string]bool{
collections.IstioNetworkingV1Alpha3Gateways.Resource().Kind(): true,
collections.IstioNetworkingV1Alpha3Virtualservices.Resource().Kind(): true,
collections.IstioNetworkingV1Alpha3Destinationrules.Resource().Kind(): true,
collections.IstioNetworkingV1Alpha3Serviceentries.Resource().Kind(): true,
collections.IstioConfigV1Alpha2Httpapispecs.Resource().Kind(): true,
collections.IstioConfigV1Alpha2Httpapispecbindings.Resource().Kind(): true,
collections.IstioMixerV1ConfigClientQuotaspecs.Resource().Kind(): true,
collections.IstioMixerV1ConfigClientQuotaspecbindings.Resource().Kind(): true,
collections.IstioAuthenticationV1Alpha1Policies.Resource().Kind(): true,
collections.IstioRbacV1Alpha1Serviceroles.Resource().Kind(): true,
collections.IstioRbacV1Alpha1Servicerolebindings.Resource().Kind(): true,
collections.IstioRbacV1Alpha1Rbacconfigs.Resource().Kind(): true,
}
gatewayKind = collections.IstioNetworkingV1Alpha3Gateways.Resource().Kind()
virtualServiceKind = collections.IstioNetworkingV1Alpha3Virtualservices.Resource().Kind()
destinationRuleKind = collections.IstioNetworkingV1Alpha3Destinationrules.Resource().Kind()
serviceEntryKind = collections.IstioNetworkingV1Alpha3Serviceentries.Resource().Kind()
// Headings for short format listing specific to type
shortOutputHeadings = map[string]string{
gatewayKind: "GATEWAY NAME\tHOSTS\tNAMESPACE\tAGE",
virtualServiceKind: "VIRTUAL-SERVICE NAME\tGATEWAYS\tHOSTS\t#HTTP\t#TCP\tNAMESPACE\tAGE",
destinationRuleKind: "DESTINATION-RULE NAME\tHOST\tSUBSETS\tNAMESPACE\tAGE",
serviceEntryKind: "SERVICE-ENTRY NAME\tHOSTS\tPORTS\tNAMESPACE\tAGE",
}
// Formatters for short format listing specific to type
shortOutputters = map[string]func(model.Config, io.Writer){
gatewayKind: printShortGateway,
virtualServiceKind: printShortVirtualService,
destinationRuleKind: printShortDestinationRule,
serviceEntryKind: printShortServiceEntry,
}
// all resources will be migrated out of config.istio.io to their own api group mapping to package path.
// TODO(xiaolanz) legacy group exists until we find out a client for mixer
legacyIstioAPIGroupVersion = kubeSchema.GroupVersion{
Group: "config.istio.io",
Version: "v1alpha2",
}
postCmd = &cobra.Command{
Use: "create",
Deprecated: "Use `kubectl create` instead (see https://kubernetes.io/docs/tasks/tools/install-kubectl)",
Short: "Create policies and rules",
Example: "istioctl create -f example-routing.yaml",
RunE: func(c *cobra.Command, args []string) error {
if len(args) != 0 {
c.Println(c.UsageString())
return fmt.Errorf("create takes no arguments")
}
varr, others, err := readInputs()
if err != nil {
return err
}
if len(varr) == 0 && len(others) == 0 {
return errors.New("nothing to create")
}
for _, config := range varr {
if config.Namespace, err = handlers.HandleNamespaces(config.Namespace, namespace, defaultNamespace); err != nil {
return err
}
var configClient model.ConfigStore
if configClient, err = clientFactory(); err != nil {
return err
}
var rev string
if rev, err = configClient.Create(config); err != nil {
return err
}
c.Printf("Created config %v at revision %v\n", config.Key(), rev)
}
if len(others) > 0 {
if err = preprocMixerConfig(others); err != nil {
return err
}
otherClient, resources, oerr := prepareClientForOthers(others)
if oerr != nil {
return oerr
}
var errs *multierror.Error
var updated crd.IstioKind
for _, config := range others {
resource, ok := resources[config.Kind]
if !ok {
errs = multierror.Append(errs, fmt.Errorf("kind %s is not known", config.Kind))
continue
}
err = otherClient.Post().
Namespace(config.Namespace).
Resource(resource.Name).
Body(&config).
Do().
Into(&updated)
if err != nil {
errs = multierror.Append(errs, err)
continue
}
key := model.Key(config.Kind, config.Name, config.Namespace)
fmt.Printf("Created config %s at revision %v\n", key, updated.ResourceVersion)
}
if errs != nil {
return errs
}
}
return nil
},
}
putCmd = &cobra.Command{
Use: "replace",
Deprecated: "Use `kubectl apply` instead (see https://kubernetes.io/docs/tasks/tools/install-kubectl)",
Short: "Replace existing policies and rules",
Example: "istioctl replace -f example-routing.yaml",
RunE: func(c *cobra.Command, args []string) error {
if len(args) != 0 {
c.Println(c.UsageString())
return fmt.Errorf("replace takes no arguments")
}
varr, others, err := readInputs()
if err != nil {
return err
}
if len(varr) == 0 && len(others) == 0 {
return errors.New("nothing to replace")
}
for _, config := range varr {
if config.Namespace, err = handlers.HandleNamespaces(config.Namespace, namespace, defaultNamespace); err != nil {
return err
}
var configClient model.ConfigStore
if configClient, err = clientFactory(); err != nil {
return err
}
// fill up revision
if config.ResourceVersion == "" {
current := configClient.Get(config.Type, config.Name, config.Namespace)
if current != nil {
config.ResourceVersion = current.ResourceVersion
}
}
var newRev string
if newRev, err = configClient.Update(config); err != nil {
return err
}
fmt.Printf("Updated config %v to revision %v\n", config.Key(), newRev)
}
if len(others) > 0 {
if err = preprocMixerConfig(others); err != nil {
return err
}
otherClient, resources, oerr := prepareClientForOthers(others)
if oerr != nil {
return oerr
}
var errs *multierror.Error
var current crd.IstioKind
var updated crd.IstioKind
for _, config := range others {
resource, ok := resources[config.Kind]
if !ok {
errs = multierror.Append(errs, fmt.Errorf("kind %s is not known", config.Kind))
continue
}
if config.ResourceVersion == "" {
err = otherClient.Get().
Namespace(config.Namespace).
Name(config.Name).
Resource(resource.Name).
Do().
Into(&current)
if err == nil && current.ResourceVersion != "" {
config.ResourceVersion = current.ResourceVersion
}
}
err = otherClient.Put().
Namespace(config.Namespace).
Name(config.Name).
Resource(resource.Name).
Body(&config).
Do().
Into(&updated)
if err != nil {
errs = multierror.Append(errs, err)
continue
}
key := model.Key(config.Kind, config.Name, config.Namespace)
fmt.Printf("Updated config %s to revision %v\n", key, updated.ResourceVersion)
}
if errs != nil {
return errs
}
}
return nil
},
}
getCmd = &cobra.Command{
Use: "get <type> [<name>]",
Deprecated: "Use `kubectl get` instead (see https://kubernetes.io/docs/tasks/tools/install-kubectl)",
Short: "Retrieve policies and rules",
Example: `# List all virtual services
istioctl get virtualservices
# List all destination rules
istioctl get destinationrules
# Get a specific virtual service named bookinfo
istioctl get virtualservice bookinfo
`,
RunE: func(c *cobra.Command, args []string) error {
configClient, err := clientFactory()
if err != nil {
return err
}
if len(args) < 1 {
c.Println(c.UsageString())
return fmt.Errorf("specify the type of resource to get. Types are %v",
strings.Join(supportedTypes(configClient), ", "))
}
getByName := len(args) > 1
if getAllNamespaces && getByName {
return errors.New("a resource cannot be retrieved by name across all namespaces")
}
var typs collection.Schemas
if !getByName && strings.EqualFold(args[0], "all") {
typs = pilotadapter.ConvertPilotSchemasToGalley(configClient.ConfigDescriptor())
} else {
typ, err := protoSchema(configClient, args[0])
if err != nil {
c.Println(c.UsageString())
return err
}
typs = collection.SchemasFor(typ)
}
var ns string
if getAllNamespaces {
ns = v1.NamespaceAll
} else {
ns = handlers.HandleNamespace(namespace, defaultNamespace)
}
var errs error
var configs []model.Config
if getByName {
config := configClient.Get(typs.All()[0].Resource().Kind(), args[1], ns)
if config != nil {
configs = append(configs, *config)
}
} else {
for _, s := range typs.All() {
kind := s.Resource().Kind()
typeConfigs, err := configClient.List(kind, ns)
if err == nil {
configs = append(configs, typeConfigs...)
} else {
if mustList[kind] {
errs = multierror.Append(errs, multierror.Prefix(err, fmt.Sprintf("Can't list %v:", kind)))
}
}
}
}
if len(configs) == 0 {
c.Println("No resources found.")
return errs
}
var outputters = map[string]func(io.Writer, model.ConfigStore, []model.Config){
"yaml": printYamlOutput,
"short": printShortOutput,
}
if outputFunc, ok := outputters[outputFormat]; ok {
outputFunc(c.OutOrStdout(), configClient, configs)
} else {
return fmt.Errorf("unknown output format %v. Types are yaml|short", outputFormat)
}
return errs
},
ValidArgs: configTypeResourceNames(collections.Istio),
ArgAliases: configTypePluralResourceNames(collections.Istio),
}
deleteCmd = &cobra.Command{
Use: "delete <type> <name> [<name2> ... <nameN>]",
Deprecated: "Use `kubectl delete` instead (see https://kubernetes.io/docs/tasks/tools/install-kubectl)",
Short: "Delete policies or rules",
Example: `# Delete a rule using the definition in example-routing.yaml.
istioctl delete -f example-routing.yaml
# Delete the virtual service bookinfo
istioctl delete virtualservice bookinfo
`,
RunE: func(c *cobra.Command, args []string) error {
configClient, errs := clientFactory()
if errs != nil {
return errs
}
// If we did not receive a file option, get names of resources to delete from command line
if file == "" {
if len(args) < 2 {
c.Println(c.UsageString())
return fmt.Errorf("provide configuration type and name or -f option")
}
typ, err := protoSchema(configClient, args[0])
if err != nil {
return err
}
ns := handlers.HandleNamespace(namespace, defaultNamespace)
for i := 1; i < len(args); i++ {
if err := configClient.Delete(typ.Resource().Kind(), args[i], ns); err != nil {
errs = multierror.Append(errs,
fmt.Errorf("cannot delete %s: %v", args[i], err))
} else {
c.Printf("Deleted config: %v %v\n", args[0], args[i])
}
}
return errs
}
// As we did get a file option, make sure the command line did not include any resources to delete
if len(args) != 0 {
c.Println(c.UsageString())
return fmt.Errorf("delete takes no arguments when the file option is used")
}
varr, others, err := readInputs()
if err != nil {
return err
}
if len(varr) == 0 && len(others) == 0 {
return errors.New("nothing to delete")
}
for _, config := range varr {
if config.Namespace, err = handlers.HandleNamespaces(config.Namespace, namespace, defaultNamespace); err != nil {
return err
}
// compute key if necessary
if err = configClient.Delete(config.Type, config.Name, config.Namespace); err != nil {
errs = multierror.Append(errs, fmt.Errorf("cannot delete %s: %v", config.Key(), err))
} else {
c.Printf("Deleted config: %v\n", config.Key())
}
}
if errs != nil {
return errs
}
if len(others) > 0 {
if err = preprocMixerConfig(others); err != nil {
return err
}
otherClient, resources, oerr := prepareClientForOthers(others)
if oerr != nil {
return oerr
}
for _, config := range others {
resource, ok := resources[config.Kind]
if !ok {
errs = multierror.Append(errs, fmt.Errorf("kind %s is not known", config.Kind))
continue
}
err = otherClient.Delete().
Namespace(config.Namespace).
Resource(resource.Name).
Name(config.Name).
Do().
Error()
if err != nil {
errs = multierror.Append(errs, fmt.Errorf("failed to delete: %v", err))
continue
}
fmt.Printf("Deleted config: %s\n", model.Key(config.Kind, config.Name, config.Namespace))
}
}
return errs
},
ValidArgs: configTypeResourceNames(collections.Istio),
ArgAliases: configTypePluralResourceNames(collections.Istio),
}
contextCmd = &cobra.Command{
Use: "context-create --api-server http://<ip>:<port>",
Deprecated: `Use kubectl instead (see https://kubernetes.io/docs/tasks/tools/install-kubectl), e.g.
$ kubectl config set-context istio --cluster=istio
$ kubectl config set-cluster istio --server=http://localhost:8080
$ kubectl config use-context istio
`,
Short: "Create a kubeconfig file suitable for use with istioctl in a non-Kubernetes environment",
Example: `# Create a config file for the api server.
istioctl context-create --api-server http://127.0.0.1:8080
`,
RunE: func(c *cobra.Command, args []string) error {
if istioAPIServer == "" {
c.Println(c.UsageString())
return fmt.Errorf("specify the the Istio api server IP")
}
u, err := url.ParseRequestURI(istioAPIServer)
if err != nil {
c.Println(c.UsageString())
return err
}
configAccess := clientcmd.NewDefaultPathOptions()
// use specified kubeconfig file for the location of the config to create or modify
configAccess.GlobalFile = kubeconfig
// gets existing kubeconfig or returns new empty config
config, err := configAccess.GetStartingConfig()
if err != nil {
return err
}
cluster, exists := config.Clusters[istioContext]
if !exists {
cluster = clientcmdapi.NewCluster()
}
cluster.Server = u.String()
config.Clusters[istioContext] = cluster
context, exists := config.Contexts[istioContext]
if !exists {
context = clientcmdapi.NewContext()
}
context.Cluster = istioContext
config.Contexts[istioContext] = context
contextSwitched := false
if config.CurrentContext != "" && config.CurrentContext != istioContext {
contextSwitched = true
}
config.CurrentContext = istioContext
if err = clientcmd.ModifyConfig(configAccess, *config, false); err != nil {
return err
}
if contextSwitched {
fmt.Printf("kubeconfig context switched to %q\n", istioContext)
}
fmt.Println("Context created")
return nil
},
}
)
// The protoSchema is based on the kind (for example "virtualservice" or "destinationrule")
func protoSchema(configClient model.ConfigStore, typ string) (collection.Schema, error) {
if strings.Contains(typ, "-") {
return nil, fmt.Errorf("%q not recognized. Please use non-hyphenated resource name %q",
typ, strings.ReplaceAll(typ, "-", ""))
}
for _, s := range pilotadapter.ConvertPilotSchemasToGalley(configClient.ConfigDescriptor()).All() {
switch strings.ToLower(typ) {
case strings.ToLower(s.Resource().Kind()), strings.ToLower(s.Resource().Plural()):
return s, nil
}
}
return nil, fmt.Errorf("configuration type %s not found, the types are %v",
typ, strings.Join(supportedTypes(configClient), ", "))
}
// readInputs reads multiple documents from the input and checks with the schema
func readInputs() ([]model.Config, []crd.IstioKind, error) {
var reader io.Reader
switch file {
case "":
return nil, nil, errors.New("filename not specified (see --filename or -f)")
case "-":
reader = os.Stdin
default:
var err error
var in *os.File
if in, err = os.Open(file); err != nil {
return nil, nil, err
}
defer func() {
if err = in.Close(); err != nil {
log.Errorf("Error: close file from %s, %s", file, err)
}
}()
reader = in
}
input, err := ioutil.ReadAll(reader)
if err != nil {
return nil, nil, err
}
return crd.ParseInputsWithoutValidation(string(input))
}
// Print a simple list of names
func printShortOutput(writer io.Writer, _ model.ConfigStore, configList []model.Config) {
// Sort configList by Type
sort.Slice(configList, func(i, j int) bool { return sortWeight[configList[i].Type] < sortWeight[configList[j].Type] })
var w tabwriter.Writer
w.Init(writer, 10, 4, 3, ' ', 0)
prevType := ""
var outputter func(model.Config, io.Writer)
for _, c := range configList {
if prevType != c.Type {
if prevType != "" {
// Place a newline between types when doing 'get all'
_, _ = fmt.Fprintf(&w, "\n")
}
heading, ok := shortOutputHeadings[c.Type]
if !ok {
heading = unknownShortOutputHeading
}
_, _ = fmt.Fprintf(&w, "%s\n", heading)
prevType = c.Type
if outputter, ok = shortOutputters[c.Type]; !ok {
outputter = printShortConfig
}
}
outputter(c, &w)
}
_ = w.Flush()
}
func kindAsString(config model.Config) string {
return fmt.Sprintf("%s.%s.%s",
config.Type,
config.Group,
config.Version,
)
}
func printShortConfig(config model.Config, w io.Writer) {
_, _ = fmt.Fprintf(w, "%s\t%s\t%s\t%s\n",
config.Name,
kindAsString(config),
config.Namespace,
renderTimestamp(config.CreationTimestamp))
}
func printShortVirtualService(config model.Config, w io.Writer) {
virtualService, ok := config.Spec.(*v1alpha3.VirtualService)
if !ok {
_, _ = fmt.Fprintf(w, "Not a virtualservice: %v", config)
return
}
_, _ = fmt.Fprintf(w, "%s\t%s\t%s\t%5d\t%4d\t%s\t%s\n",
config.Name,
strings.Join(virtualService.Gateways, ","),
strings.Join(virtualService.Hosts, ","),
len(virtualService.Http),
len(virtualService.Tcp),
config.Namespace,
renderTimestamp(config.CreationTimestamp))
}
func printShortDestinationRule(config model.Config, w io.Writer) {
destinationRule, ok := config.Spec.(*v1alpha3.DestinationRule)
if !ok {
_, _ = fmt.Fprintf(w, "Not a destinationrule: %v", config)
return
}
subsets := make([]string, 0)
for _, subset := range destinationRule.Subsets {
subsets = append(subsets, subset.Name)
}
_, _ = fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\n",
config.Name,
destinationRule.Host,
strings.Join(subsets, ","),
config.Namespace,
renderTimestamp(config.CreationTimestamp))
}
func printShortServiceEntry(config model.Config, w io.Writer) {
serviceEntry, ok := config.Spec.(*v1alpha3.ServiceEntry)
if !ok {
_, _ = fmt.Fprintf(w, "Not a serviceentry: %v", config)
return
}
ports := make([]string, 0)
for _, port := range serviceEntry.Ports {
ports = append(ports, fmt.Sprintf("%s/%d", port.Protocol, port.Number))
}
_, _ = fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\n",
config.Name,
strings.Join(serviceEntry.Hosts, ","),
strings.Join(ports, ","),
config.Namespace,
renderTimestamp(config.CreationTimestamp))
}
func printShortGateway(config model.Config, w io.Writer) {
gateway, ok := config.Spec.(*v1alpha3.Gateway)
if !ok {
_, _ = fmt.Fprintf(w, "Not a gateway: %v", config)
return
}
// Determine the servers
servers := make(map[string]bool)
for _, server := range gateway.Servers {
for _, host := range server.Hosts {
servers[host] = true
}
}
hosts := make([]string, 0)
for host := range servers {
hosts = append(hosts, host)
}
_, _ = fmt.Fprintf(w, "%s\t%s\t%s\t%s\n",
config.Name, strings.Join(hosts, ","), config.Namespace,
renderTimestamp(config.CreationTimestamp))
}
// Print as YAML
func printYamlOutput(writer io.Writer, configClient model.ConfigStore, configList []model.Config) {
schema := pilotadapter.ConvertPilotSchemasToGalley(configClient.ConfigDescriptor())
for _, config := range configList {
s, exists := schema.FindByKind(config.Type)
if !exists {
log.Errorf("Unknown kind %q for %v", config.Type, config.Name)
continue
}
obj, err := pilotadapter.ConvertConfigToObject(s, config)
if err != nil {
log.Errorf("Could not decode %v: %v", config.Name, err)
continue
}
bytes, err := yaml.Marshal(obj)
if err != nil {
log.Errorf("Could not convert %v to YAML: %v", config, err)
continue
}
_, _ = fmt.Fprint(writer, string(bytes))
_, _ = fmt.Fprintln(writer, "---")
}
}
func newClient() (model.ConfigStore, error) {
return controller.NewClient(kubeconfig, configContext, pilotadapter.ConvertGalleySchemasToPilot(collections.Istio),
"", &model.DisabledLedger{})
}
func supportedTypes(configClient model.ConfigStore) []string {
return configClient.ConfigDescriptor().Types()
}
func preprocMixerConfig(configs []crd.IstioKind) error {
var err error
for i, config := range configs {
if configs[i].Namespace, err = handlers.HandleNamespaces(config.Namespace, namespace, defaultNamespace); err != nil {
return err
}
if config.APIVersion == "" {
configs[i].APIVersion = legacyIstioAPIGroupVersion.String()
}
// TODO: invokes the mixer validation webhook.
}
return nil
}
func restConfig() (config *rest.Config, err error) {
config, err = kubecfg.BuildClientConfig(kubeconfig, configContext)
if err != nil {
return
}
config.GroupVersion = &legacyIstioAPIGroupVersion
config.APIPath = "/apis"
config.ContentType = runtime.ContentTypeJSON
types := runtime.NewScheme()
schemeBuilder := runtime.NewSchemeBuilder(
func(scheme *runtime.Scheme) error {
metav1.AddToGroupVersion(scheme, legacyIstioAPIGroupVersion)
return nil
})
err = schemeBuilder.AddToScheme(types)
config.NegotiatedSerializer = serializer.WithoutConversionCodecFactory{CodecFactory: serializer.NewCodecFactory(types)}
return
}
func apiResources(config *rest.Config, configs []crd.IstioKind) (map[string]metav1.APIResource, error) {
client, err := discovery.NewDiscoveryClientForConfig(config)
if err != nil {
return nil, err
}
resources, err := client.ServerResourcesForGroupVersion(legacyIstioAPIGroupVersion.String())
if err != nil {
return nil, err
}
kindsSet := map[string]bool{}
for _, config := range configs {
if !kindsSet[config.Kind] {
kindsSet[config.Kind] = true
}
}
result := make(map[string]metav1.APIResource, len(kindsSet))
for _, resource := range resources.APIResources {
if kindsSet[resource.Kind] {
result[resource.Kind] = resource
}
}
return result, nil
}
func restClientForOthers(config *rest.Config) (*rest.RESTClient, error)
|
func prepareClientForOthers(configs []crd.IstioKind) (*rest.RESTClient, map[string]metav1.APIResource, error) {
restConfig, err := restConfig()
if err != nil {
return nil, nil, err
}
resources, err := apiResources(restConfig, configs)
if err != nil {
return nil, nil, err
}
client, err := restClientForOthers(restConfig)
if err != nil {
return nil, nil, err
}
return client, resources, nil
}
func configTypeResourceNames(schemas collection.Schemas) []string {
all := schemas.All()
resourceNames := make([]string, 0, len(all))
for _, s := range all {
resourceNames = append(resourceNames, s.Resource().Kind())
}
return resourceNames
}
func configTypePluralResourceNames(schemas collection.Schemas) []string {
all := schemas.All()
resourceNames := make([]string, 0, len(all))
for _, s := range all {
resourceNames = append(resourceNames, s.Resource().Plural())
}
return resourceNames
}
// renderTimestamp creates a human-readable age similar to docker and kubectl CLI output
func renderTimestamp(ts time.Time) string {
if ts.IsZero() {
return "<unknown>"
}
seconds := int(time.Since(ts).Seconds())
if seconds < -2 {
return fmt.Sprintf("<invalid>")
} else if seconds < 0 {
return fmt.Sprintf("0s")
} else if seconds < 60 {
return fmt.Sprintf("%ds", seconds)
}
minutes := int(time.Since(ts).Minutes())
if minutes < 60 {
return fmt.Sprintf("%dm", minutes)
}
hours := int(time.Since(ts).Hours())
if hours < 24 {
return fmt.Sprintf("%dh", hours)
} else if hours < 365*24 {
return fmt.Sprintf("%dd", hours/24)
}
return fmt.Sprintf("%dy", hours/24/365)
}
func init() {
defaultContext := "istio"
contextCmd.PersistentFlags().StringVar(&istioContext, "context", defaultContext,
"Kubernetes configuration file context name")
contextCmd.PersistentFlags().StringVar(&istioAPIServer, "api-server", "",
"URL for Istio api server")
postCmd.PersistentFlags().StringVarP(&file, "file", "f", "",
"Input file with the content of the configuration objects (if not set, command reads from the standard input)")
putCmd.PersistentFlags().AddFlag(postCmd.PersistentFlags().Lookup("file"))
deleteCmd.PersistentFlags().AddFlag(postCmd.PersistentFlags().Lookup("file"))
getCmd.PersistentFlags().StringVarP(&outputFormat, "output", "o", "short",
"Output format. One of:yaml|short")
getCmd.PersistentFlags().BoolVar(&getAllNamespaces, "all-namespaces", false,
"If present, list the requested object(s) across all namespaces. Namespace in current "+
"context is ignored even if specified with --namespace.")
}
|
{
return rest.RESTClientFor(config)
}
|
main.py
|
import traceback
import argparse
import numpy as np
from src import NeuralNetwork, generateExample, getTensorExample
from typing import *
def
|
() -> argparse.Namespace:
"""Set-up the argument parser
Returns:
argparse.Namespace:
"""
parser = argparse.ArgumentParser(
description='Project 2 for the Deep Learning class (COSC 525). '
'Involves the development of a Convolutional Neural Network.',
add_help=False)
# Required Args
required_args = parser.add_argument_group('Required Arguments')
required_args.add_argument('-d', '--dataset', required=True,
help="The datasets to train the network on. "
"Options: [example1, example2, example3]")
# Optional args
optional_args = parser.add_argument_group('Optional Arguments')
optional_args.add_argument("-h", "--help", action="help", help="Show this help message and exit")
return parser.parse_args()
def main():
"""This is the main function of main.py
Example:
python main.py --dataset example1
"""
# Initializing
args = get_args()
# Load the configurations
dataset_type = args.dataset
if dataset_type in ('example1', 'example2', 'example3'):
example_num = int(dataset_type[-1])
inputs, targets, layers = generateExample(example_num)
getTensorExample(example_num)
else:
raise ValueError('Invalid dataset type')
# ------- Start of Code ------- #
# # Initialize the network # #
netWork = NeuralNetwork(input_size=inputs.shape, loss_function="square_error",
learning_rate=100, input_channels=1)
# Add layers
for layer in layers:
if layer['type'] == 'Conv':
weights = []
for k_ind in range(layer['num_kernels']):
kernels = [k_w.flatten() for k_w in layer['weights'][k_ind]]
kernel_weights = np.concatenate((*kernels,
layer['biases'][k_ind]))
weights.append(kernel_weights)
weights = np.array(weights)
netWork.addConvLayer(num_kernels=layer['num_kernels'],
kernel_size=layer['kernel_size'],
activation=layer['activation'],
weights=weights)
elif layer['type'] == 'Flat':
netWork.addFlattenLayer()
elif layer['type'] == 'MaxPool':
netWork.addMaxPoolLayer(kernel_size=layer['kernel_size'])
elif layer['type'] == 'Dense':
weights = np.array([np.concatenate((layer['weights'].flatten(), layer['bias']))])
netWork.addFCLayer(num_neurons=targets.shape[0],
activation=layer['activation'],
weights=weights)
else:
raise ValueError(f'Invalid layer type: {layer["type"]}')
# # Train the network # #
# First Feed forward
outputs = netWork.calculate(inputs=inputs)
print("----------- Custom Model -----------")
print(f"model output before:\n{outputs}")
# Calculate Loss derivative
loss_der = netWork.loss_derivative(outputs, targets)
loss = netWork.calculate_loss(np.array([inputs]), targets)
netWork.train(np.array([inputs]), targets) # Train the network
outputs = netWork.calculate(inputs=inputs)
print(f"model output after: \n{outputs}")
if example_num == 1:
print('1st convolutional layer, kernel weights:')
print(netWork.layers[0].kernels[0][0][0].weights[:-1].reshape((3, 3)))
print('1st convolutional layer, kernel bias:')
print(np.array([netWork.layers[0].kernels[0][0][0].weights[-1]]))
print('fully connected layer weights:')
print(netWork.layers[2].neurons[0].weights[:-1])
print('fully connected layer bias:')
print(np.array([netWork.layers[2].neurons[0].weights[-1]]))
elif example_num == 2:
print('1st convolutional layer, 1st kernel weights:')
print(netWork.layers[0].kernels[0][0][0].weights[:-1].reshape((3, 3)))
print('1st convolutional layer, 1st kernel bias:')
print(np.array([netWork.layers[0].kernels[0][0][0].weights[-1]]))
print('1st convolutional layer, 2nd kernel weights:')
print(netWork.layers[0].kernels[1][0][0].weights[:-1].reshape((3, 3)))
print('1st convolutional layer, 2nd kernel bias:')
print(np.array([netWork.layers[0].kernels[1][0][0].weights[-1]]))
print('2nd convolutional layer, 1st kernel weights:')
print(netWork.layers[1].kernels[0][0][0].weights[:-1].reshape((2, 3, 3)))
print('2nd convolutional layer, 1st kernel bias:')
print(np.array([netWork.layers[1].kernels[0][0][0].weights[-1]]))
print('fully connected layer weights:')
print(netWork.layers[3].neurons[0].weights[:-1])
print('fully connected layer bias:')
print(np.array([netWork.layers[3].neurons[0].weights[-1]]))
elif example_num == 3:
print('1st convolutional layer, 1st kernel weights:')
print(netWork.layers[0].kernels[0][0][0].weights[:-1].reshape((3, 3)))
print('1st convolutional layer, 1st kernel bias:')
print(np.array([netWork.layers[0].kernels[0][0][0].weights[-1]]))
print('1st convolutional layer, 2nd kernel weights:')
print(netWork.layers[0].kernels[1][0][0].weights[:-1].reshape((3, 3)))
print('1st convolutional layer, 2nd kernel bias:')
print(np.array([netWork.layers[0].kernels[1][0][0].weights[-1]]))
print('fully connected layer weights:')
print(netWork.layers[3].neurons[0].weights[:-1])
print('fully connected layer bias:')
print(np.array([netWork.layers[3].neurons[0].weights[-1]]))
else:
raise ValueError(f'Invalid example number: {example_num}')
if __name__ == '__main__':
try:
main()
except Exception as e:
print(str(e) + '\n' + str(traceback.format_exc()))
raise e
# # First Layer (Convolutional)
# weights_L1 = np.array(
# [np.concatenate((l1k1.flatten(), l1b1)), np.concatenate((l1k2.flatten(), l1b2))])
# netWork.addConvLayer(num_kernels=2, kernel_size=3, activation="logistic", weights=weights_L1)
# # Second Layer (Convolutional)
# weights_L2 = np.array([np.concatenate((l2c1.flatten(), l2c2.flatten(), l2b))])
# netWork.addConvLayer(num_kernels=1, kernel_size=3, activation="logistic", weights=weights_L2)
# # Third Layer (Fully Connected)
# netWork.addFlattenLayer()
# weights_L3 = np.array([np.concatenate((l3.flatten(), l3b))])
# netWork.addFCLayer(num_neurons=1, activation="logistic", weights=weights_L3)
|
get_args
|
__init__.py
|
from JumpScale import j
|
from .Netconfig import Netconfig
j.system.netconfig=Netconfig()
| |
amazon-s3.ts
|
import { AsInputs } from '@pulumi-utils/sdk';
import { PipelineProps } from '../pipeline';
import { CustomResource, Input, Output, ID, CustomResourceOptions, Inputs, output } from '@pulumi/pulumi';
import { IntegrationRef, Tag, TriggerCondition, Variable } from '../common';
import { Integration } from '../integration';
export interface AmazonS3State {
project_name: string;
pipeline_id: number;
/**
* The name of the Amazon S3 Bucket.
*/
bucket_name: string;
/**
* The integration.
*/
integration: IntegrationRef | Integration;
/**
* The name of the action.
*/
name: string;
/**
* Specifies when the action should be executed. Can be one of `ON_EVERY_EXECUTION`, `ON_FAILURE` or `ON_BACK_TO_SUCCESS`. The default value is `ON_EVERY_EXECUTION`.
*/
trigger_time: 'ON_EVERY_EXECUTION' | 'ON_FAILURE' | 'ON_BACK_TO_SUCCESS';
/**
* Access control lists (ACLs) enable you to manage access to buckets and objects. They define which AWS accounts or groups are granted access and the type of access. Can be one of `PRIVATE`, `PUBLIC_READ`, `AWS-EXEC-READ`, `AUTHENTICATED_READ`, `BUCKET_ONWER_READ`, `BUCKET_OWNER_FULL_CONTROL` or `LOG_DELIVERY_WRITE`.
*/
acl?:
| 'PRIVATE'
| 'PUBLIC_READ'
| 'AWS-EXEC-READ'
| 'AUTHENTICATED_READ'
| 'BUCKET_ONWER_READ'
| 'BUCKET_OWNER_FULL_CONTROL'
| 'LOG_DELIVERY_WRITE';
/**
* The numerical ID of the action, after which this action should be added.
*/
after_action_id?: number;
/**
* Specifies how long objects stay in the cache.
*/
cache_control?: string;
/**
* If set to `true`, files are not deleted even if the changeset indicates they should be.
*/
deletion_disabled?: boolean;
/**
* Defines tags for files categorization as a key value pairs list.
*/
deploy_tags?: Tag[];
/**
* The paths and/or files that will be left out during the deployment.
*/
deployment_excludes?: string[];
/**
* The exceptions from the ignore patterns set in `deployment_excludes`.
*/
deployment_includes?: string[];
/**
* When set to `true` the action is disabled. By default it is set to `false`.
*/
disabled?: boolean;
/**
* Specifies the expiration period for the objects (how long they stay in the cache).
*/
expires_date?: string;
/**
* If set to `true`, the execution will proceed, mark the action as a warning and jump to the next action. Doesn't apply to deployment actions.
*/
ignore_errors?: boolean;
/**
* Defines whether the files are deployed from the repository or from the build filesystem. Can be one of `SCM_REPOSITORY` or `BUILD_ARTIFACTS`.
*/
input_type?: 'SCM_REPOSITORY' | 'BUILD_ARTIFACTS';
/**
* The path in the repository.
*/
local_path?: string;
/**
* Set to `true` if you want to use Reduced Redundancy Storage.
*/
reduced_redundancy?: boolean;
/**
* The absolute or relative path on the remote server.
*/
remote_path?: string;
/**
* Number of retries if the action fails.
*/
retry_count?: number;
/**
* Delay time between auto retries in minutes.
*/
retry_delay?: number;
/**
* When set to `true`, the subsequent action defined in the pipeline will run in parallel to the current action.
*/
run_next_parallel?: boolean;
/**
* Defines whether the action should be executed on each failure. Restricted to and required if the `trigger_time` is `ON_FAILURE`.
*/
run_only_on_first_failure?: boolean;
/**
* When set to `true` all files will have their mime-types set to `application/octet-stream`.
*/
skip_content_type_setting?: boolean;
/**
* The timeout in seconds.
*/
timeout?: number;
/**
* The list of trigger conditions to meet so that the action can be triggered.
*/
trigger_conditions?: TriggerCondition[];
/**
 * The list of variables you can use in the action.
*/
variables?: Variable[];
}
export type AmazonS3Args = AsInputs<AmazonS3State>;
export interface AmazonS3Props {
url: string;
html_url: string;
action_id: number;
bucket_name: string;
integration: IntegrationRef | Integration;
name: string;
trigger_time: 'ON_EVERY_EXECUTION' | 'ON_FAILURE' | 'ON_BACK_TO_SUCCESS';
type: 'AMAZON_S3';
acl?:
| 'PRIVATE'
| 'PUBLIC_READ'
| 'AWS-EXEC-READ'
| 'AUTHENTICATED_READ'
| 'BUCKET_ONWER_READ'
| 'BUCKET_OWNER_FULL_CONTROL'
| 'LOG_DELIVERY_WRITE';
after_action_id?: number;
cache_control?: string;
deletion_disabled?: boolean;
deploy_tags?: Tag[];
deployment_excludes?: string[];
deployment_includes?: string[];
disabled?: boolean;
expires_date?: string;
ignore_errors?: boolean;
input_type?: 'SCM_REPOSITORY' | 'BUILD_ARTIFACTS';
local_path?: string;
reduced_redundancy?: boolean;
remote_path?: string;
retry_count?: number;
retry_delay?: number;
run_next_parallel?: boolean;
run_only_on_first_failure?: boolean;
skip_content_type_setting?: boolean;
timeout?: number;
trigger_conditions?: TriggerCondition[];
variables?: Variable[];
pipeline: PipelineProps;
project_name: string;
pipeline_id: number;
}
/**
* Required scopes in Buddy API: `WORKSPACE`, `EXECUTION_MANAGE`, `EXECUTION_INFO`
*/
export class
|
extends CustomResource {
static __pulumiType = 'buddy:action:AmazonS3';
static get(name: string, id: Input<ID>, state?: Partial<AmazonS3State>, opts?: CustomResourceOptions) {
return new AmazonS3(name, state as any, { ...opts, id });
}
static isInstance(obj: any): obj is AmazonS3 {
if (null == obj) {
return false;
}
return obj['__pulumiType'] === AmazonS3.__pulumiType;
}
project_name!: Output<string>;
pipeline_id!: Output<number>;
action_id!: Output<number>;
bucket_name!: Output<string>;
integration!: Output<IntegrationRef | Integration>;
name!: Output<string>;
trigger_time!: Output<'ON_EVERY_EXECUTION' | 'ON_FAILURE' | 'ON_BACK_TO_SUCCESS'>;
type!: Output<'AMAZON_S3'>;
acl!: Output<
| 'PRIVATE'
| 'PUBLIC_READ'
| 'AWS-EXEC-READ'
| 'AUTHENTICATED_READ'
| 'BUCKET_ONWER_READ'
| 'BUCKET_OWNER_FULL_CONTROL'
| 'LOG_DELIVERY_WRITE'
| undefined
>;
after_action_id!: Output<number | undefined>;
cache_control!: Output<string | undefined>;
deletion_disabled!: Output<boolean | undefined>;
deploy_tags!: Output<Tag[] | undefined>;
deployment_excludes!: Output<string[] | undefined>;
deployment_includes!: Output<string[] | undefined>;
disabled!: Output<boolean | undefined>;
expires_date!: Output<string | undefined>;
ignore_errors!: Output<boolean | undefined>;
input_type!: Output<'SCM_REPOSITORY' | 'BUILD_ARTIFACTS' | undefined>;
local_path!: Output<string | undefined>;
reduced_redundancy!: Output<boolean | undefined>;
remote_path!: Output<string | undefined>;
retry_count!: Output<number | undefined>;
retry_delay!: Output<number | undefined>;
run_next_parallel!: Output<boolean | undefined>;
run_only_on_first_failure!: Output<boolean | undefined>;
skip_content_type_setting!: Output<boolean | undefined>;
timeout!: Output<number | undefined>;
trigger_conditions!: Output<TriggerCondition[] | undefined>;
variables!: Output<Variable[] | undefined>;
constructor(name: string, argsOrState: AmazonS3Args | AmazonS3State, opts?: CustomResourceOptions) {
const inputs: Inputs = {};
if (!opts) {
opts = {};
}
if (opts.id) {
const state = argsOrState as AmazonS3State | undefined;
inputs['project_name'] = state?.project_name;
inputs['pipeline_id'] = state?.pipeline_id;
inputs['bucket_name'] = state?.bucket_name;
inputs['integration'] = state?.integration instanceof Integration ? { hash_id: state.integration.hash_id } : state?.integration;
inputs['name'] = state?.name;
inputs['trigger_time'] = state?.trigger_time;
inputs['acl'] = state?.acl;
inputs['after_action_id'] = state?.after_action_id;
inputs['cache_control'] = state?.cache_control;
inputs['deletion_disabled'] = state?.deletion_disabled;
inputs['deploy_tags'] = state?.deploy_tags;
inputs['deployment_excludes'] = state?.deployment_excludes;
inputs['deployment_includes'] = state?.deployment_includes;
inputs['disabled'] = state?.disabled;
inputs['expires_date'] = state?.expires_date;
inputs['ignore_errors'] = state?.ignore_errors;
inputs['input_type'] = state?.input_type;
inputs['local_path'] = state?.local_path;
inputs['reduced_redundancy'] = state?.reduced_redundancy;
inputs['remote_path'] = state?.remote_path;
inputs['retry_count'] = state?.retry_count;
inputs['retry_delay'] = state?.retry_delay;
inputs['run_next_parallel'] = state?.run_next_parallel;
inputs['run_only_on_first_failure'] = state?.run_only_on_first_failure;
inputs['skip_content_type_setting'] = state?.skip_content_type_setting;
inputs['timeout'] = state?.timeout;
inputs['trigger_conditions'] = state?.trigger_conditions;
inputs['variables'] = state?.variables;
} else {
const args = argsOrState as AmazonS3Args | undefined;
if (!args?.project_name) {
throw new Error('Missing required property "project_name"');
}
if (!args?.pipeline_id) {
throw new Error('Missing required property "pipeline_id"');
}
if (!args?.bucket_name) {
throw new Error('Missing required property "bucket_name"');
}
if (!args?.integration) {
throw new Error('Missing required property "integration"');
}
if (!args?.name) {
throw new Error('Missing required property "name"');
}
if (!args?.trigger_time) {
throw new Error('Missing required property "trigger_time"');
}
inputs['bucket_name'] = args.bucket_name;
inputs['integration'] = output(args.integration as Output<IntegrationRef | Integration>).apply(integration =>
integration instanceof Integration ? { hash_id: integration.hash_id } : integration
);
inputs['name'] = args.name;
inputs['trigger_time'] = args.trigger_time;
inputs['acl'] = args.acl;
inputs['after_action_id'] = args.after_action_id;
inputs['cache_control'] = args.cache_control;
inputs['deletion_disabled'] = args.deletion_disabled;
inputs['deploy_tags'] = args.deploy_tags;
inputs['deployment_excludes'] = args.deployment_excludes;
inputs['deployment_includes'] = args.deployment_includes;
inputs['disabled'] = args.disabled;
inputs['expires_date'] = args.expires_date;
inputs['ignore_errors'] = args.ignore_errors;
inputs['input_type'] = args.input_type;
inputs['local_path'] = args.local_path;
inputs['reduced_redundancy'] = args.reduced_redundancy;
inputs['remote_path'] = args.remote_path;
inputs['retry_count'] = args.retry_count;
inputs['retry_delay'] = args.retry_delay;
inputs['run_next_parallel'] = args.run_next_parallel;
inputs['run_only_on_first_failure'] = args.run_only_on_first_failure;
inputs['skip_content_type_setting'] = args.skip_content_type_setting;
inputs['timeout'] = args.timeout;
inputs['trigger_conditions'] = args.trigger_conditions;
inputs['variables'] = args.variables;
inputs['project_name'] = args.project_name;
inputs['pipeline_id'] = args.pipeline_id;
}
if (!opts.version) {
opts.version = require('../package').version;
}
opts.ignoreChanges = ['project_name', 'pipeline_id', ...(opts.ignoreChanges || [])];
inputs['type'] = 'AMAZON_S3';
inputs['url'] = undefined;
inputs['html_url'] = undefined;
inputs['action_id'] = undefined;
super(AmazonS3.__pulumiType, name, inputs, opts);
}
}
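/*
 * A minimal usage sketch (not from the source; the project name, pipeline id,
 * bucket and integration hash below are hypothetical placeholders):
 *
 *     const deploy = new AmazonS3('deploy-to-s3', {
 *         project_name: 'my-project',
 *         pipeline_id: 1,
 *         bucket_name: 'my-bucket',
 *         integration: { hash_id: 'abc123' },
 *         name: 'Upload build artifacts',
 *         trigger_time: 'ON_EVERY_EXECUTION',
 *         input_type: 'BUILD_ARTIFACTS',
 *         local_path: 'dist',
 *         remote_path: '/',
 *     });
 */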
|
AmazonS3
|
demo.test.tsx
|
import React from 'react'
import { shallow, configure } from 'enzyme'
import Adapter from 'enzyme-adapter-react-16'
import { openToast } from '../component/Toast/index'
configure({ adapter: new Adapter() })
test('1 plus 1 equals 2', () => {
expect(1 + 1).toBe(2)
});
test('Jest-React-TypeScript trial run', () => {
const renderer = shallow(<div>hello world</div>)
expect(renderer.text()).toEqual('hello world')
|
// Pass mocked props to check that the component renders correctly
it('open toast', () => {
const onButtonClick = openToast('toast test');
const wrapper = shallow(
<div onClick={() => onButtonClick} />
);
// See the Enzyme docs for detailed usage: http://airbnb.io/enzyme/docs/api/shallow.html
wrapper.find('div').simulate('click');
expect(onButtonClick);
})
})
|
})
describe('test the toast component', () => {
|
k8s_docker_info.go
|
package rke
func loadK8sVersionDockerInfo() map[string][]string
|
{
return map[string][]string{
"1.8": {"1.11.x", "1.12.x", "1.13.x", "17.03.x"},
"1.9": {"1.11.x", "1.12.x", "1.13.x", "17.03.x", "18.06.x", "18.09.x", "19.03.x"},
"1.10": {"1.11.x", "1.12.x", "1.13.x", "17.03.x", "18.06.x", "18.09.x", "19.03.x"},
"1.11": {"1.11.x", "1.12.x", "1.13.x", "17.03.x", "18.06.x", "18.09.x", "19.03.x"},
"1.12": {"1.11.x", "1.12.x", "1.13.x", "17.03.x", "17.06.x", "17.09.x", "18.06.x", "18.09.x", "19.03.x"},
"1.13": {"1.11.x", "1.12.x", "1.13.x", "17.03.x", "17.06.x", "17.09.x", "18.06.x", "18.09.x", "19.03.x"},
"1.14": {"1.13.x", "17.03.x", "17.06.x", "17.09.x", "18.06.x", "18.09.x", "19.03.x"},
"1.15": {"1.13.x", "17.03.x", "17.06.x", "17.09.x", "18.06.x", "18.09.x", "19.03.x"},
"1.16": {"1.13.x", "17.03.x", "17.06.x", "17.09.x", "18.06.x", "18.09.x", "19.03.x"}}
}
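// Example lookup (a minimal sketch over the map above):
//
//	supported := loadK8sVersionDockerInfo()["1.15"]
//	// supported == []string{"1.13.x", "17.03.x", "17.06.x", "17.09.x", "18.06.x", "18.09.x", "19.03.x"}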
|
|
regions.js
|
const regions = [
'countryCM:foreign',
'countryCM:adamaoua',
'countryCM:centre',
'countryCM:est',
'countryCM:extreme-nord',
'countryCM:littoral',
'countryCM:nord',
'countryCM:nord-ouest',
'countryCM:sud',
|
'countryCM:sud-ouest',
'countryCM:ouest'
];
| |
zz_generated.conversion.go
|
// +build !ignore_autogenerated_openshift
// This file was autogenerated by conversion-gen. Do not edit it manually!
package v1
import (
route "github.com/openshift/origin/pkg/route/apis/route"
meta_v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
conversion "k8s.io/apimachinery/pkg/conversion"
runtime "k8s.io/apimachinery/pkg/runtime"
api "k8s.io/kubernetes/pkg/api"
api_v1 "k8s.io/kubernetes/pkg/api/v1"
unsafe "unsafe"
)
func init() {
SchemeBuilder.Register(RegisterConversions)
}
// RegisterConversions adds conversion functions to the given scheme.
// Public to allow building arbitrary schemes.
func RegisterConversions(scheme *runtime.Scheme) error {
return scheme.AddGeneratedConversionFuncs(
Convert_v1_Route_To_route_Route,
Convert_route_Route_To_v1_Route,
Convert_v1_RouteIngress_To_route_RouteIngress,
Convert_route_RouteIngress_To_v1_RouteIngress,
Convert_v1_RouteIngressCondition_To_route_RouteIngressCondition,
Convert_route_RouteIngressCondition_To_v1_RouteIngressCondition,
Convert_v1_RouteList_To_route_RouteList,
Convert_route_RouteList_To_v1_RouteList,
Convert_v1_RoutePort_To_route_RoutePort,
Convert_route_RoutePort_To_v1_RoutePort,
Convert_v1_RouteSpec_To_route_RouteSpec,
Convert_route_RouteSpec_To_v1_RouteSpec,
Convert_v1_RouteStatus_To_route_RouteStatus,
Convert_route_RouteStatus_To_v1_RouteStatus,
Convert_v1_RouteTargetReference_To_route_RouteTargetReference,
Convert_route_RouteTargetReference_To_v1_RouteTargetReference,
Convert_v1_RouterShard_To_route_RouterShard,
Convert_route_RouterShard_To_v1_RouterShard,
Convert_v1_TLSConfig_To_route_TLSConfig,
Convert_route_TLSConfig_To_v1_TLSConfig,
)
}
func autoConvert_v1_Route_To_route_Route(in *Route, out *route.Route, s conversion.Scope) error {
out.ObjectMeta = in.ObjectMeta
if err := Convert_v1_RouteSpec_To_route_RouteSpec(&in.Spec, &out.Spec, s); err != nil {
return err
}
if err := Convert_v1_RouteStatus_To_route_RouteStatus(&in.Status, &out.Status, s); err != nil {
return err
}
return nil
}
// Convert_v1_Route_To_route_Route is an autogenerated conversion function.
func Convert_v1_Route_To_route_Route(in *Route, out *route.Route, s conversion.Scope) error {
return autoConvert_v1_Route_To_route_Route(in, out, s)
}
func autoConvert_route_Route_To_v1_Route(in *route.Route, out *Route, s conversion.Scope) error {
out.ObjectMeta = in.ObjectMeta
if err := Convert_route_RouteSpec_To_v1_RouteSpec(&in.Spec, &out.Spec, s); err != nil {
return err
}
if err := Convert_route_RouteStatus_To_v1_RouteStatus(&in.Status, &out.Status, s); err != nil {
return err
}
return nil
}
// Convert_route_Route_To_v1_Route is an autogenerated conversion function.
func Convert_route_Route_To_v1_Route(in *route.Route, out *Route, s conversion.Scope) error {
return autoConvert_route_Route_To_v1_Route(in, out, s)
}
func autoConvert_v1_RouteIngress_To_route_RouteIngress(in *RouteIngress, out *route.RouteIngress, s conversion.Scope) error {
out.Host = in.Host
out.RouterName = in.RouterName
out.Conditions = *(*[]route.RouteIngressCondition)(unsafe.Pointer(&in.Conditions))
out.WildcardPolicy = route.WildcardPolicyType(in.WildcardPolicy)
out.RouterCanonicalHostname = in.RouterCanonicalHostname
return nil
}
// Convert_v1_RouteIngress_To_route_RouteIngress is an autogenerated conversion function.
func Convert_v1_RouteIngress_To_route_RouteIngress(in *RouteIngress, out *route.RouteIngress, s conversion.Scope) error {
return autoConvert_v1_RouteIngress_To_route_RouteIngress(in, out, s)
}
func autoConvert_route_RouteIngress_To_v1_RouteIngress(in *route.RouteIngress, out *RouteIngress, s conversion.Scope) error {
out.Host = in.Host
out.RouterName = in.RouterName
out.Conditions = *(*[]RouteIngressCondition)(unsafe.Pointer(&in.Conditions))
out.WildcardPolicy = WildcardPolicyType(in.WildcardPolicy)
out.RouterCanonicalHostname = in.RouterCanonicalHostname
return nil
}
// Convert_route_RouteIngress_To_v1_RouteIngress is an autogenerated conversion function.
func Convert_route_RouteIngress_To_v1_RouteIngress(in *route.RouteIngress, out *RouteIngress, s conversion.Scope) error {
return autoConvert_route_RouteIngress_To_v1_RouteIngress(in, out, s)
}
func autoConvert_v1_RouteIngressCondition_To_route_RouteIngressCondition(in *RouteIngressCondition, out *route.RouteIngressCondition, s conversion.Scope) error {
out.Type = route.RouteIngressConditionType(in.Type)
out.Status = api.ConditionStatus(in.Status)
out.Reason = in.Reason
out.Message = in.Message
out.LastTransitionTime = (*meta_v1.Time)(unsafe.Pointer(in.LastTransitionTime))
return nil
}
// Convert_v1_RouteIngressCondition_To_route_RouteIngressCondition is an autogenerated conversion function.
func Convert_v1_RouteIngressCondition_To_route_RouteIngressCondition(in *RouteIngressCondition, out *route.RouteIngressCondition, s conversion.Scope) error {
return autoConvert_v1_RouteIngressCondition_To_route_RouteIngressCondition(in, out, s)
}
func autoConvert_route_RouteIngressCondition_To_v1_RouteIngressCondition(in *route.RouteIngressCondition, out *RouteIngressCondition, s conversion.Scope) error {
out.Type = RouteIngressConditionType(in.Type)
out.Status = api_v1.ConditionStatus(in.Status)
out.Reason = in.Reason
out.Message = in.Message
out.LastTransitionTime = (*meta_v1.Time)(unsafe.Pointer(in.LastTransitionTime))
return nil
}
// Convert_route_RouteIngressCondition_To_v1_RouteIngressCondition is an autogenerated conversion function.
func Convert_route_RouteIngressCondition_To_v1_RouteIngressCondition(in *route.RouteIngressCondition, out *RouteIngressCondition, s conversion.Scope) error {
return autoConvert_route_RouteIngressCondition_To_v1_RouteIngressCondition(in, out, s)
}
func autoConvert_v1_RouteList_To_route_RouteList(in *RouteList, out *route.RouteList, s conversion.Scope) error {
out.ListMeta = in.ListMeta
out.Items = *(*[]route.Route)(unsafe.Pointer(&in.Items))
return nil
}
// Convert_v1_RouteList_To_route_RouteList is an autogenerated conversion function.
func Convert_v1_RouteList_To_route_RouteList(in *RouteList, out *route.RouteList, s conversion.Scope) error {
return autoConvert_v1_RouteList_To_route_RouteList(in, out, s)
}
func autoConvert_route_RouteList_To_v1_RouteList(in *route.RouteList, out *RouteList, s conversion.Scope) error {
out.ListMeta = in.ListMeta
if in.Items == nil {
out.Items = make([]Route, 0)
} else {
out.Items = *(*[]Route)(unsafe.Pointer(&in.Items))
}
return nil
}
// Convert_route_RouteList_To_v1_RouteList is an autogenerated conversion function.
func Convert_route_RouteList_To_v1_RouteList(in *route.RouteList, out *RouteList, s conversion.Scope) error {
return autoConvert_route_RouteList_To_v1_RouteList(in, out, s)
}
func autoConvert_v1_RoutePort_To_route_RoutePort(in *RoutePort, out *route.RoutePort, s conversion.Scope) error {
out.TargetPort = in.TargetPort
return nil
}
// Convert_v1_RoutePort_To_route_RoutePort is an autogenerated conversion function.
func Convert_v1_RoutePort_To_route_RoutePort(in *RoutePort, out *route.RoutePort, s conversion.Scope) error {
return autoConvert_v1_RoutePort_To_route_RoutePort(in, out, s)
}
func autoConvert_route_RoutePort_To_v1_RoutePort(in *route.RoutePort, out *RoutePort, s conversion.Scope) error {
out.TargetPort = in.TargetPort
return nil
}
// Convert_route_RoutePort_To_v1_RoutePort is an autogenerated conversion function.
func Convert_route_RoutePort_To_v1_RoutePort(in *route.RoutePort, out *RoutePort, s conversion.Scope) error {
return autoConvert_route_RoutePort_To_v1_RoutePort(in, out, s)
}
func autoConvert_v1_RouteSpec_To_route_RouteSpec(in *RouteSpec, out *route.RouteSpec, s conversion.Scope) error {
out.Host = in.Host
out.Path = in.Path
if err := Convert_v1_RouteTargetReference_To_route_RouteTargetReference(&in.To, &out.To, s); err != nil {
return err
}
out.AlternateBackends = *(*[]route.RouteTargetReference)(unsafe.Pointer(&in.AlternateBackends))
out.Port = (*route.RoutePort)(unsafe.Pointer(in.Port))
out.TLS = (*route.TLSConfig)(unsafe.Pointer(in.TLS))
out.WildcardPolicy = route.WildcardPolicyType(in.WildcardPolicy)
return nil
}
// Convert_v1_RouteSpec_To_route_RouteSpec is an autogenerated conversion function.
func Convert_v1_RouteSpec_To_route_RouteSpec(in *RouteSpec, out *route.RouteSpec, s conversion.Scope) error {
return autoConvert_v1_RouteSpec_To_route_RouteSpec(in, out, s)
}
func autoConvert_route_RouteSpec_To_v1_RouteSpec(in *route.RouteSpec, out *RouteSpec, s conversion.Scope) error {
out.Host = in.Host
out.Path = in.Path
if err := Convert_route_RouteTargetReference_To_v1_RouteTargetReference(&in.To, &out.To, s); err != nil {
return err
}
out.AlternateBackends = *(*[]RouteTargetReference)(unsafe.Pointer(&in.AlternateBackends))
out.Port = (*RoutePort)(unsafe.Pointer(in.Port))
out.TLS = (*TLSConfig)(unsafe.Pointer(in.TLS))
out.WildcardPolicy = WildcardPolicyType(in.WildcardPolicy)
return nil
}
// Convert_route_RouteSpec_To_v1_RouteSpec is an autogenerated conversion function.
func Convert_route_RouteSpec_To_v1_RouteSpec(in *route.RouteSpec, out *RouteSpec, s conversion.Scope) error {
return autoConvert_route_RouteSpec_To_v1_RouteSpec(in, out, s)
}
func autoConvert_v1_RouteStatus_To_route_RouteStatus(in *RouteStatus, out *route.RouteStatus, s conversion.Scope) error {
out.Ingress = *(*[]route.RouteIngress)(unsafe.Pointer(&in.Ingress))
return nil
}
// Convert_v1_RouteStatus_To_route_RouteStatus is an autogenerated conversion function.
func Convert_v1_RouteStatus_To_route_RouteStatus(in *RouteStatus, out *route.RouteStatus, s conversion.Scope) error {
return autoConvert_v1_RouteStatus_To_route_RouteStatus(in, out, s)
}
func autoConvert_route_RouteStatus_To_v1_RouteStatus(in *route.RouteStatus, out *RouteStatus, s conversion.Scope) error {
if in.Ingress == nil {
out.Ingress = make([]RouteIngress, 0)
} else {
out.Ingress = *(*[]RouteIngress)(unsafe.Pointer(&in.Ingress))
}
return nil
}
// Convert_route_RouteStatus_To_v1_RouteStatus is an autogenerated conversion function.
func Convert_route_RouteStatus_To_v1_RouteStatus(in *route.RouteStatus, out *RouteStatus, s conversion.Scope) error {
return autoConvert_route_RouteStatus_To_v1_RouteStatus(in, out, s)
}
func autoConvert_v1_RouteTargetReference_To_route_RouteTargetReference(in *RouteTargetReference, out *route.RouteTargetReference, s conversion.Scope) error {
out.Kind = in.Kind
out.Name = in.Name
out.Weight = (*int32)(unsafe.Pointer(in.Weight))
return nil
}
// Convert_v1_RouteTargetReference_To_route_RouteTargetReference is an autogenerated conversion function.
func Convert_v1_RouteTargetReference_To_route_RouteTargetReference(in *RouteTargetReference, out *route.RouteTargetReference, s conversion.Scope) error {
return autoConvert_v1_RouteTargetReference_To_route_RouteTargetReference(in, out, s)
}
func autoConvert_route_RouteTargetReference_To_v1_RouteTargetReference(in *route.RouteTargetReference, out *RouteTargetReference, s conversion.Scope) error {
out.Kind = in.Kind
out.Name = in.Name
out.Weight = (*int32)(unsafe.Pointer(in.Weight))
return nil
}
// Convert_route_RouteTargetReference_To_v1_RouteTargetReference is an autogenerated conversion function.
func Convert_route_RouteTargetReference_To_v1_RouteTargetReference(in *route.RouteTargetReference, out *RouteTargetReference, s conversion.Scope) error {
return autoConvert_route_RouteTargetReference_To_v1_RouteTargetReference(in, out, s)
}
func autoConvert_v1_RouterShard_To_route_RouterShard(in *RouterShard, out *route.RouterShard, s conversion.Scope) error {
out.ShardName = in.ShardName
out.DNSSuffix = in.DNSSuffix
return nil
}
// Convert_v1_RouterShard_To_route_RouterShard is an autogenerated conversion function.
func Convert_v1_RouterShard_To_route_RouterShard(in *RouterShard, out *route.RouterShard, s conversion.Scope) error {
return autoConvert_v1_RouterShard_To_route_RouterShard(in, out, s)
}
func
|
(in *route.RouterShard, out *RouterShard, s conversion.Scope) error {
out.ShardName = in.ShardName
out.DNSSuffix = in.DNSSuffix
return nil
}
// Convert_route_RouterShard_To_v1_RouterShard is an autogenerated conversion function.
func Convert_route_RouterShard_To_v1_RouterShard(in *route.RouterShard, out *RouterShard, s conversion.Scope) error {
return autoConvert_route_RouterShard_To_v1_RouterShard(in, out, s)
}
func autoConvert_v1_TLSConfig_To_route_TLSConfig(in *TLSConfig, out *route.TLSConfig, s conversion.Scope) error {
out.Termination = route.TLSTerminationType(in.Termination)
out.Certificate = in.Certificate
out.Key = in.Key
out.CACertificate = in.CACertificate
out.DestinationCACertificate = in.DestinationCACertificate
out.InsecureEdgeTerminationPolicy = route.InsecureEdgeTerminationPolicyType(in.InsecureEdgeTerminationPolicy)
return nil
}
// Convert_v1_TLSConfig_To_route_TLSConfig is an autogenerated conversion function.
func Convert_v1_TLSConfig_To_route_TLSConfig(in *TLSConfig, out *route.TLSConfig, s conversion.Scope) error {
return autoConvert_v1_TLSConfig_To_route_TLSConfig(in, out, s)
}
func autoConvert_route_TLSConfig_To_v1_TLSConfig(in *route.TLSConfig, out *TLSConfig, s conversion.Scope) error {
out.Termination = TLSTerminationType(in.Termination)
out.Certificate = in.Certificate
out.Key = in.Key
out.CACertificate = in.CACertificate
out.DestinationCACertificate = in.DestinationCACertificate
out.InsecureEdgeTerminationPolicy = InsecureEdgeTerminationPolicyType(in.InsecureEdgeTerminationPolicy)
return nil
}
// Convert_route_TLSConfig_To_v1_TLSConfig is an autogenerated conversion function.
func Convert_route_TLSConfig_To_v1_TLSConfig(in *route.TLSConfig, out *TLSConfig, s conversion.Scope) error {
return autoConvert_route_TLSConfig_To_v1_TLSConfig(in, out, s)
}
|
autoConvert_route_RouterShard_To_v1_RouterShard
|
duration_round.go
|
package main
import (
"time"
"fmt"
)
func main()
|
{
d, err := time.ParseDuration("1h15m30.918273645s")
if err != nil {
panic(err)
}
round := []time.Duration{
time.Nanosecond,
time.Microsecond,
time.Millisecond,
time.Second,
2 * time.Second,
time.Minute,
time.Hour,
}
for _, r := range round {
fmt.Printf("d.Round(%6s)=%s\n", r, d.Round(r).String())
}
}
|
|
TouchableItem.js
|
var _interopRequireDefault=require("@babel/runtime/helpers/interopRequireDefault");var _interopRequireWildcard=require("@babel/runtime/helpers/interopRequireWildcard");Object.defineProperty(exports,"__esModule",{value:true});exports.default=void 0;var _extends2=_interopRequireDefault(require("@babel/runtime/helpers/extends"));var _objectWithoutProperties2=_interopRequireDefault(require("@babel/runtime/helpers/objectWithoutProperties"));var _classCallCheck2=_interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));var _createClass2=_interopRequireDefault(require("@babel/runtime/helpers/createClass"));var _possibleConstructorReturn2=_interopRequireDefault(require("@babel/runtime/helpers/possibleConstructorReturn"));var _getPrototypeOf2=_interopRequireDefault(require("@babel/runtime/helpers/getPrototypeOf"));var _inherits2=_interopRequireDefault(require("@babel/runtime/helpers/inherits"));var React=_interopRequireWildcard(require("react"));var _reactNative=require("react-native");var _BorderlessButton=_interopRequireDefault(require("./BorderlessButton"));var _jsxFileName="/Users/satya/Workspace/Callstack/react-navigation-stack/src/vendor/views/TouchableItem.tsx";var ANDROID_VERSION_LOLLIPOP=21;var TouchableItem=function(_React$Component){(0,_inherits2.default)(TouchableItem,_React$Component);function
|
(){(0,_classCallCheck2.default)(this,TouchableItem);return(0,_possibleConstructorReturn2.default)(this,(0,_getPrototypeOf2.default)(TouchableItem).apply(this,arguments));}(0,_createClass2.default)(TouchableItem,[{key:"render",value:function render(){if(_reactNative.Platform.OS==='android'&&_reactNative.Platform.Version>=ANDROID_VERSION_LOLLIPOP){var _this$props=this.props,style=_this$props.style,pressColor=_this$props.pressColor,borderless=_this$props.borderless,children=_this$props.children,rest=(0,_objectWithoutProperties2.default)(_this$props,["style","pressColor","borderless","children"]);return React.createElement(_reactNative.TouchableNativeFeedback,(0,_extends2.default)({},rest,{useForeground:_reactNative.TouchableNativeFeedback.canUseNativeForeground(),background:_reactNative.TouchableNativeFeedback.Ripple(pressColor,borderless),__source:{fileName:_jsxFileName,lineNumber:53}}),React.createElement(_reactNative.View,{style:style,__source:{fileName:_jsxFileName,lineNumber:58}},React.Children.only(children)));}else if(_reactNative.Platform.OS==='ios'){return React.createElement(_BorderlessButton.default,(0,_extends2.default)({hitSlop:{top:10,bottom:10,right:10,left:10},disallowInterruption:true,enabled:!this.props.disabled},this.props,{__source:{fileName:_jsxFileName,lineNumber:63}}),this.props.children);}else{return React.createElement(_reactNative.TouchableOpacity,(0,_extends2.default)({},this.props,{__source:{fileName:_jsxFileName,lineNumber:74}}),this.props.children);}}}]);return TouchableItem;}(React.Component);exports.default=TouchableItem;TouchableItem.defaultProps={borderless:false,pressColor:'rgba(0, 0, 0, .32)'};
//# sourceMappingURL=TouchableItem.js.map
|
TouchableItem
|
environment.py
|
import time,random
from collections import OrderedDict
from simulator import Simulator
class TrafficLight(object):
"""A traffic light that switches periodically."""
valid_states = [True, False] # True = NS open, False = EW open
def __init__(self, state=None, period=None):
self.state = state if state is not None else random.choice(self.valid_states)
self.period = period if period is not None else random.choice([3, 4, 5])
self.last_updated = 0
def reset(self):
self.last_updated = 0
def update(self, t):
if t - self.last_updated >= self.period:
self.state = not self.state # assuming state is boolean
self.last_updated = t
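    # Worked example (illustrative): with period=3, update() flips the light
    # state at t=3, then again at t=6, 9, ... once a full period has elapsed.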
class Environment(object):
"""Environment within which all agents operate."""
valid_actions = [None, 'forward', 'left', 'right']
valid_inputs = {'light': TrafficLight.valid_states, 'oncoming': valid_actions, 'left': valid_actions, 'right': valid_actions}
valid_headings = [(1, 0), (0, -1), (-1, 0), (0, 1)] # ENWS
hard_time_limit = -100 # even if enforce_deadline is False, end trial when deadline reaches this value (to avoid deadlocks)
def __init__(self):
self.done = False
self.t = 0
self.agent_states = OrderedDict()
self.status_text = ""
# Road network
self.grid_size = (8, 6) # (cols, rows)
self.bounds = (1, 1, self.grid_size[0], self.grid_size[1])
self.block_size = 100
self.intersections = OrderedDict()
self.roads = []
for x in xrange(self.bounds[0], self.bounds[2] + 1):
for y in xrange(self.bounds[1], self.bounds[3] + 1):
self.intersections[(x, y)] = TrafficLight() # a traffic light at each intersection
for a in self.intersections:
for b in self.intersections:
if a == b:
continue
if (abs(a[0] - b[0]) + abs(a[1] - b[1])) == 1: # L1 distance = 1
self.roads.append((a, b))
# Dummy agents
self.num_dummies = 3 # no. of dummy agents
for i in xrange(self.num_dummies):
self.create_agent(DummyAgent)
# Primary agent
self.primary_agent = None # to be set explicitly
self.enforce_deadline = False
def create_agent(self, agent_class, *args, **kwargs):
agent = agent_class(self, *args, **kwargs)
self.agent_states[agent] = {'location': random.choice(self.intersections.keys()), 'heading': (0, 1)}
return agent
def set_primary_agent(self, agent, enforce_deadline=False):
self.primary_agent = agent
self.enforce_deadline = enforce_deadline
def reset(self):
self.done = False
self.t = 0
# Reset traffic lights
for traffic_light in self.intersections.itervalues():
traffic_light.reset()
# Pick a start and a destination
start = random.choice(self.intersections.keys())
destination = random.choice(self.intersections.keys())
# Ensure starting location and destination are not too close
while self.compute_dist(start, destination) < 4:
start = random.choice(self.intersections.keys())
destination = random.choice(self.intersections.keys())
start_heading = random.choice(self.valid_headings)
deadline = self.compute_dist(start, destination) * 5
print "Environment.reset(): Trial set up with start = {}, destination = {}, deadline = {}".format(start, destination, deadline)
# Initialize agent(s)
for agent in self.agent_states.iterkeys():
self.agent_states[agent] = {
'location': start if agent is self.primary_agent else random.choice(self.intersections.keys()),
'heading': start_heading if agent is self.primary_agent else random.choice(self.valid_headings),
'destination': destination if agent is self.primary_agent else None,
'deadline': deadline if agent is self.primary_agent else None}
agent.reset(destination=(destination if agent is self.primary_agent else None))
def step(self):
#print "Environment.step(): t = {}".format(self.t) # [debug]
# Update traffic lights
for intersection, traffic_light in self.intersections.iteritems():
traffic_light.update(self.t)
# Update agents
for agent in self.agent_states.iterkeys():
agent.update(self.t)
self.t += 1
if self.primary_agent is not None:
agent_deadline = self.agent_states[self.primary_agent]['deadline']
if agent_deadline <= self.hard_time_limit:
self.done = True
print "Environment.step(): Primary agent hit hard time limit ({})! Trial aborted.".format(self.hard_time_limit)
elif self.enforce_deadline and agent_deadline <= 0:
self.done = True
print "Environment.step(): Primary agent ran out of time! Trial aborted."
self.agent_states[self.primary_agent]['deadline'] = agent_deadline - 1
def sense(self, agent):
assert agent in self.agent_states, "Unknown agent!"
state = self.agent_states[agent]
location = state['location']
heading = state['heading']
light = 'green' if (self.intersections[location].state and heading[1] != 0) or ((not self.intersections[location].state) and heading[0] != 0) else 'red'
# Populate oncoming, left, right
oncoming = None
left = None
right = None
for other_agent, other_state in self.agent_states.iteritems():
if agent == other_agent or location != other_state['location'] or (heading[0] == other_state['heading'][0] and heading[1] == other_state['heading'][1]):
continue
other_heading = other_agent.get_next_waypoint()
if (heading[0] * other_state['heading'][0] + heading[1] * other_state['heading'][1]) == -1:
if oncoming != 'left': # we don't want to override oncoming == 'left'
oncoming = other_heading
elif (heading[1] == other_state['heading'][0] and -heading[0] == other_state['heading'][1]):
                if right != 'forward' and right != 'left': # we don't want to override right == 'forward' or 'left'
right = other_heading
else:
if left != 'forward': # we don't want to override left == 'forward'
left = other_heading
return {'light': light, 'oncoming': oncoming, 'left': left, 'right': right} # TODO: make this a namedtuple
def get_deadline(self, agent):
return self.agent_states[agent]['deadline'] if agent is self.primary_agent else None
|
assert agent in self.agent_states, "Unknown agent!"
assert action in self.valid_actions, "Invalid action!"
state = self.agent_states[agent]
location = state['location']
heading = state['heading']
light = 'green' if (self.intersections[location].state and heading[1] != 0) or ((not self.intersections[location].state) and heading[0] != 0) else 'red'
sense = self.sense(agent)
# Move agent if within bounds and obeys traffic rules
reward = 0 # reward/penalty
move_okay = True
if action == 'forward':
if light != 'green':
move_okay = False
elif action == 'left':
if light == 'green' and (sense['oncoming'] == None or sense['oncoming'] == 'left'):
heading = (heading[1], -heading[0])
else:
move_okay = False
elif action == 'right':
            if light == 'green' or sense['left'] != 'forward':  # 'straight' was never a valid action; right on red is blocked by left cross traffic heading forward
heading = (-heading[1], heading[0])
else:
move_okay = False
if move_okay:
# Valid move (could be null)
if action is not None:
# Valid non-null move
location = ((location[0] + heading[0] - self.bounds[0]) % (self.bounds[2] - self.bounds[0] + 1) + self.bounds[0],
(location[1] + heading[1] - self.bounds[1]) % (self.bounds[3] - self.bounds[1] + 1) + self.bounds[1]) # wrap-around
#if self.bounds[0] <= location[0] <= self.bounds[2] and self.bounds[1] <= location[1] <= self.bounds[3]: # bounded
state['location'] = location
state['heading'] = heading
reward = 2.0 if action == agent.get_next_waypoint() else -0.5 # valid, but is it correct? (as per waypoint)
else:
# Valid null move
reward = 0.0
else:
# Invalid move
reward = -1.0
if agent is self.primary_agent:
if state['location'] == state['destination']:
if state['deadline'] >= 0:
reward += 10 # bonus
self.done = True
print "Environment.act(): Primary agent has reached destination!" # [debug]
self.status_text = "state: {}\naction: {}\nreward: {}".format(agent.get_state(), action, reward)
#print "Environment.act() [POST]: location: {}, heading: {}, action: {}, reward: {}".format(location, heading, action, reward) # [debug]
return reward
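        # Reward summary (from the logic above): +2.0 for a valid move matching
        # the waypoint, -0.5 valid but off-waypoint, 0.0 for idling, -1.0 for an
        # invalid move, plus a +10 bonus for reaching the destination in time.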
def compute_dist(self, a, b):
"""L1 distance between two points."""
return abs(b[0] - a[0]) + abs(b[1] - a[1])
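        # e.g. compute_dist((1, 1), (4, 3)) == 3 + 2 == 5 (illustrative values)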
class Agent(object):
"""Base class for all agents."""
def __init__(self, env):
self.env = env
self.state = None
self.next_waypoint = None
self.color = 'cyan'
def reset(self, destination=None):
pass
def update(self, t):
pass
def get_state(self):
return self.state
def get_next_waypoint(self):
return self.next_waypoint
class DummyAgent(Agent):
color_choices = ['blue', 'cyan', 'magenta', 'orange']
def __init__(self, env):
super(DummyAgent, self).__init__(env) # sets self.env = env, state = None, next_waypoint = None, and a default color
self.next_waypoint = random.choice(Environment.valid_actions[1:])
self.color = random.choice(self.color_choices)
def update(self, t):
inputs = self.env.sense(self)
action_okay = True
if self.next_waypoint == 'right':
if inputs['light'] == 'red' and inputs['left'] == 'forward':
action_okay = False
elif self.next_waypoint == 'forward':
if inputs['light'] == 'red':
action_okay = False
elif self.next_waypoint == 'left':
if inputs['light'] == 'red' or (inputs['oncoming'] == 'forward' or inputs['oncoming'] == 'right'):
action_okay = False
action = None
if action_okay:
action = self.next_waypoint
self.next_waypoint = random.choice(Environment.valid_actions[1:])
reward = self.env.act(self, action)
#print "DummyAgent.update(): t = {}, inputs = {}, action = {}, reward = {}".format(t, inputs, action, reward) # [debug]
#print "DummyAgent.update(): next_waypoint = {}".format(self.next_waypoint) # [debug]
|
def act(self, agent, action):
|
global_options_impl.rs
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use crate::{gen::global_options::GlobalOptions, i_set, s_map, s_set};
const DEFAULT: GlobalOptions<'_> = GlobalOptions {
tco_experimental_features: s_set::SSet::empty(),
tco_migration_flags: s_set::SSet::empty(),
tco_dynamic_view: false,
tco_num_local_workers: None,
tco_parallel_type_checking_threshold: 10,
tco_max_typechecker_worker_memory_mb: None,
tco_defer_class_declaration_threshold: None,
tco_defer_class_memory_mb_threshold: None,
tco_max_times_to_defer_type_checking: None,
tco_prefetch_deferred_files: false,
tco_remote_type_check_threshold: None,
tco_remote_type_check: false,
tco_remote_worker_key: None,
tco_remote_check_id: None,
tco_remote_max_batch_size: 8000,
tco_remote_min_batch_size: 5000,
tco_num_remote_workers: 0,
tco_stream_errors: false,
|
so_remote_worker_vfs_checkout_threshold: 0,
so_naming_sqlite_path: None,
po_auto_namespace_map: &[],
po_codegen: false,
po_deregister_php_stdlib: false,
po_disallow_toplevel_requires: false,
po_disable_nontoplevel_declarations: false,
po_allow_unstable_features: false,
tco_log_inference_constraints: false,
tco_disallow_array_typehint: false,
tco_disallow_array_literal: false,
tco_language_feature_logging: false,
tco_disallow_scrutinee_case_value_type_mismatch: false,
tco_timeout: 0,
tco_disallow_invalid_arraykey: false,
tco_disallow_byref_dynamic_calls: false,
tco_disallow_byref_calls: true,
allowed_fixme_codes_strict: i_set::ISet::empty(),
allowed_fixme_codes_partial: i_set::ISet::empty(),
codes_not_raised_partial: i_set::ISet::empty(),
log_levels: s_map::SMap::empty(),
po_disable_lval_as_an_expression: false,
tco_shallow_class_decl: false,
tco_skip_hierarchy_checks: false,
po_rust_parser_errors: false,
tco_like_type_hints: false,
tco_union_intersection_type_hints: false,
tco_coeffects: true,
tco_coeffects_local: true,
tco_strict_contexts: true,
tco_like_casts: false,
tco_simple_pessimize: 0.0,
tco_complex_coercion: false,
tco_disable_partially_abstract_typeconsts: false,
tco_disallow_partially_abstract_typeconst_definitions: true,
error_codes_treated_strictly: i_set::ISet::empty(),
tco_check_xhp_attribute: false,
tco_check_redundant_generics: false,
tco_disallow_unresolved_type_variables: false,
tco_disallow_trait_reuse: false,
tco_disallow_invalid_arraykey_constraint: false,
po_enable_class_level_where_clauses: false,
po_disable_legacy_soft_typehints: true,
po_allowed_decl_fixme_codes: i_set::ISet::empty(),
po_allow_new_attribute_syntax: false,
tco_global_inference: false,
tco_gi_reinfer_types: &[],
tco_ordered_solving: false,
tco_const_static_props: false,
po_disable_legacy_attribute_syntax: false,
tco_const_attribute: false,
po_const_default_func_args: false,
po_const_default_lambda_args: false,
po_disallow_silence: false,
po_abstract_static_props: false,
po_disable_unset_class_const: false,
po_parser_errors_only: false,
tco_check_attribute_locations: true,
po_disallow_func_ptrs_in_constants: false,
tco_error_php_lambdas: false,
tco_disallow_discarded_nullable_awaitables: false,
po_enable_xhp_class_modifier: false,
po_disable_xhp_element_mangling: false,
po_disable_xhp_children_declarations: false,
glean_service: "",
glean_hostname: "",
glean_port: 0,
glean_reponame: "",
symbol_write_root_path: "",
symbol_write_hhi_path: "",
symbol_write_ignore_paths: &[],
symbol_write_index_paths: &[],
symbol_write_index_paths_file: None,
symbol_write_include_hhi: true,
symbol_write_index_paths_file_output: None,
po_enable_enum_classes: true,
po_disable_modes: false,
po_disable_hh_ignore_error: false,
po_disable_array: false,
po_disable_array_typehint: false,
tco_enable_systemlib_annotations: false,
tco_higher_kinded_types: false,
tco_method_call_inference: false,
tco_report_pos_from_reason: false,
tco_typecheck_sample_rate: 1.0,
tco_enable_sound_dynamic: false,
po_disallow_hash_comments: false,
po_disallow_fun_and_cls_meth_pseudo_funcs: false,
po_disallow_inst_meth: false,
po_enable_readonly_in_emitter: false,
po_escape_brace: false,
tco_use_direct_decl_parser: false,
tco_use_direct_decl_in_tc_loop: false,
tco_ifc_enabled: &[],
po_enable_enum_supertyping: false,
po_interpret_soft_types_as_like_types: false,
tco_enable_strict_string_concat_interp: false,
tco_ignore_unsafe_cast: false,
tco_readonly: false,
tco_enable_expression_trees: false,
tco_enable_modules: false,
tco_allowed_expression_tree_visitors: &[],
tco_math_new_code: false,
tco_typeconst_concrete_concrete_error: false,
tco_enable_strict_const_semantics: false,
tco_meth_caller_only_public_visibility: true,
tco_require_extends_implements_ancestors: false,
tco_strict_value_equality: false,
tco_enforce_sealed_subclasses: false,
tco_everything_sdt: false,
tco_deferments_light: false,
};
impl GlobalOptions<'static> {
pub const DEFAULT: &'static Self = &DEFAULT;
pub const fn default_ref() -> &'static Self {
Self::DEFAULT
}
}
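// Example (a minimal sketch): borrow the shared defaults without constructing
// a new value:
//
//     let opts: &GlobalOptions<'static> = GlobalOptions::default_ref();
//     assert!(!opts.tco_dynamic_view);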
impl Default for &GlobalOptions<'_> {
fn default() -> Self {
GlobalOptions::default_ref()
}
}
impl Eq for GlobalOptions<'_> {}
impl std::hash::Hash for GlobalOptions<'_> {
fn hash<H>(&self, _: &mut H) {
unimplemented!()
}
}
impl no_pos_hash::NoPosHash for GlobalOptions<'_> {
fn hash<H>(&self, _: &mut H) {
unimplemented!()
}
}
impl Ord for GlobalOptions<'_> {
fn cmp(&self, _: &Self) -> std::cmp::Ordering {
unimplemented!()
}
}
|
so_remote_version_specifier: None,
|
buddy.rs
|
use {
crate::{
align_up, error::AllocationError, heap::Heap, slab::Slab, unreachable_unchecked,
util::try_arc_unwrap, MemoryBounds,
},
alloc::{sync::Arc, vec::Vec},
core::{convert::TryFrom as _, mem::replace, ptr::NonNull},
gpu_alloc_types::{AllocationFlags, DeviceMapError, MemoryDevice, MemoryPropertyFlags},
};
#[derive(Debug)]
pub(crate) struct BuddyBlock<M> {
pub memory: Arc<M>,
pub ptr: Option<NonNull<u8>>,
pub size: u64,
pub chunk: usize,
pub offset: u64,
pub index: usize,
}
unsafe impl<M> Sync for BuddyBlock<M> where M: Sync {}
unsafe impl<M> Send for BuddyBlock<M> where M: Send {}
#[derive(Clone, Copy, Debug)]
enum PairState {
Exhausted,
Ready {
ready: Side,
next: usize,
prev: usize,
},
}
impl PairState {
unsafe fn replace_next(&mut self, value: usize) -> usize {
match self {
PairState::Exhausted => unreachable_unchecked(),
PairState::Ready { next, .. } => replace(next, value),
}
}
unsafe fn replace_prev(&mut self, value: usize) -> usize {
match self {
PairState::Exhausted => unreachable_unchecked(),
PairState::Ready { prev, .. } => replace(prev, value),
}
}
}
#[derive(Clone, Copy, Debug)]
enum Side {
Left,
Right,
}
use Side::*;
#[derive(Debug)]
struct PairEntry {
state: PairState,
chunk: usize,
offset: u64,
parent: Option<usize>,
}
struct SizeBlockEntry {
chunk: usize,
offset: u64,
index: usize,
}
#[derive(Debug)]
struct Size {
next_ready: usize,
pairs: Slab<PairEntry>,
}
#[derive(Debug)]
enum Release {
None,
Parent(usize),
Chunk(usize),
}
impl Size {
fn new() -> Self {
Size {
pairs: Slab::new(),
next_ready: 0,
}
}
unsafe fn add_pair_and_acquire_left(
&mut self,
chunk: usize,
offset: u64,
parent: Option<usize>,
) -> SizeBlockEntry {
if self.next_ready < self.pairs.len() {
unreachable_unchecked()
}
let index = self.pairs.insert(PairEntry {
state: PairState::Exhausted,
chunk,
offset,
parent,
});
let entry = self.pairs.get_unchecked_mut(index);
entry.state = PairState::Ready {
next: index,
prev: index,
ready: Right, // Left is allocated.
};
self.next_ready = index;
SizeBlockEntry {
chunk,
offset,
index: index << 1,
}
}
fn acquire(&mut self, size: u64) -> Option<SizeBlockEntry> {
if self.next_ready >= self.pairs.len() {
return None;
}
let next_ready = self.next_ready;
let entry = unsafe { self.pairs.get_unchecked_mut(next_ready) };
let chunk = entry.chunk;
let offset = entry.offset;
let bit = match entry.state {
PairState::Exhausted => unsafe { unreachable_unchecked() },
PairState::Ready { ready, next, prev } => {
entry.state = PairState::Exhausted;
if prev == self.next_ready {
debug_assert_eq!(next, self.next_ready);
self.next_ready = self.pairs.len();
} else {
let prev_entry = unsafe { self.pairs.get_unchecked_mut(prev) };
let prev_next = unsafe { prev_entry.state.replace_next(next) };
debug_assert_eq!(prev_next, self.next_ready);
let next_entry = unsafe { self.pairs.get_unchecked_mut(next) };
let next_prev = unsafe { next_entry.state.replace_prev(prev) };
debug_assert_eq!(next_prev, self.next_ready);
self.next_ready = next;
}
match ready {
Left => 0,
Right => 1,
}
}
};
Some(SizeBlockEntry {
chunk,
offset: offset + bit as u64 * size,
index: (next_ready << 1) | bit as usize,
})
}
fn release(&mut self, index: usize) -> Release {
let side = match index & 1 {
0 => Side::Left,
1 => Side::Right,
_ => unsafe { unreachable_unchecked() },
};
let index = index >> 1;
let len = self.pairs.len();
let entry = self.pairs.get_mut(index);
let chunk = entry.chunk;
let offset = entry.offset;
let parent = entry.parent;
match (entry.state, side) {
|
entry.state = PairState::Ready {
ready: side,
next: index,
prev: index,
};
self.next_ready = index;
} else {
debug_assert!(self.next_ready < len);
let next = self.next_ready;
let next_entry = unsafe { self.pairs.get_unchecked_mut(next) };
let prev = unsafe { next_entry.state.replace_prev(index) };
let prev_entry = unsafe { self.pairs.get_unchecked_mut(prev) };
let prev_next = unsafe { prev_entry.state.replace_next(index) };
debug_assert_eq!(prev_next, next);
let entry = unsafe { self.pairs.get_unchecked_mut(index) };
entry.state = PairState::Ready {
ready: side,
next,
prev,
};
}
Release::None
}
(PairState::Ready { ready: Left, .. }, Left)
| (PairState::Ready { ready: Right, .. }, Right) => {
panic!("Attempt to dealloate already free block")
}
(
PairState::Ready {
ready: Left,
next,
prev,
},
Side::Right,
)
| (
PairState::Ready {
ready: Right,
next,
prev,
},
Side::Left,
) => {
entry.state = PairState::Exhausted;
if prev == index {
debug_assert_eq!(next, index);
self.next_ready = self.pairs.len();
} else {
let prev_entry = unsafe { self.pairs.get_unchecked_mut(prev) };
let prev_next = unsafe { prev_entry.state.replace_next(next) };
debug_assert_eq!(prev_next, index);
let next_entry = unsafe { self.pairs.get_unchecked_mut(next) };
let next_prev = unsafe { next_entry.state.replace_prev(prev) };
debug_assert_eq!(next_prev, index);
self.next_ready = next;
}
match parent {
Some(parent) => Release::Parent(parent),
None => {
debug_assert_eq!(offset, 0);
Release::Chunk(chunk)
}
}
}
}
}
}
#[derive(Debug)]
struct Chunk<M> {
memory: Arc<M>,
ptr: Option<NonNull<u8>>,
size: u64,
}
#[derive(Debug)]
pub(crate) struct BuddyAllocator<M> {
minimal_size: u64,
chunks: Slab<Chunk<M>>,
sizes: Vec<Size>,
memory_type: u32,
props: MemoryPropertyFlags,
atom_mask: u64,
}
unsafe impl<M> Sync for BuddyAllocator<M> where M: Sync {}
unsafe impl<M> Send for BuddyAllocator<M> where M: Send {}
impl<M> BuddyAllocator<M>
where
M: MemoryBounds + 'static,
{
pub fn new(
minimal_size: u64,
initial_dedicated_size: u64,
memory_type: u32,
props: MemoryPropertyFlags,
atom_mask: u64,
) -> Self {
assert!(
minimal_size.is_power_of_two(),
"Minimal allocation size of buddy allocator must be power of two"
);
assert!(
initial_dedicated_size.is_power_of_two(),
"Dedicated allocation size of buddy allocator must be power of two"
);
let initial_sizes = (initial_dedicated_size
.trailing_zeros()
.saturating_sub(minimal_size.trailing_zeros())) as usize;
BuddyAllocator {
minimal_size,
chunks: Slab::new(),
sizes: (0..initial_sizes).map(|_| Size::new()).collect(),
memory_type,
props,
atom_mask: atom_mask | (minimal_size - 1),
}
}
#[cfg_attr(feature = "tracing", tracing::instrument(skip(self, device)))]
pub unsafe fn alloc(
&mut self,
device: &impl MemoryDevice<M>,
size: u64,
align_mask: u64,
flags: AllocationFlags,
heap: &mut Heap,
allocations_remains: &mut u32,
) -> Result<BuddyBlock<M>, AllocationError> {
let align_mask = align_mask | self.atom_mask;
let size = align_up(size, align_mask)
.and_then(|size| size.checked_next_power_of_two())
.ok_or(AllocationError::OutOfDeviceMemory)?;
let size_index = size.trailing_zeros() - self.minimal_size.trailing_zeros();
let size_index =
usize::try_from(size_index).map_err(|_| AllocationError::OutOfDeviceMemory)?;
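        // Worked example (illustrative numbers, not from the source): with
        // minimal_size = 256 and a 1000-byte request, `align_up` plus
        // `checked_next_power_of_two` yield size = 1024, so
        // size_index = 10 - 8 = 2.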
while self.sizes.len() <= size_index {
self.sizes.push(Size::new());
}
let host_visible = self.host_visible();
let mut candidate_size_index = size_index;
let (mut entry, entry_size_index) = loop {
let sizes_len = self.sizes.len();
let candidate_size_entry = &mut self.sizes[candidate_size_index];
let candidate_size = self.minimal_size << candidate_size_index;
if let Some(entry) = candidate_size_entry.acquire(candidate_size) {
break (entry, candidate_size_index);
}
if sizes_len == candidate_size_index + 1 {
                // That's the size of a device allocation.
if *allocations_remains == 0 {
return Err(AllocationError::TooManyObjects);
}
let chunk_size = self.minimal_size << (candidate_size_index + 1);
let mut memory = device.allocate_memory(chunk_size, self.memory_type, flags)?;
*allocations_remains -= 1;
heap.alloc(chunk_size);
let ptr = if host_visible {
match device.map_memory(&mut memory, 0, chunk_size) {
Ok(ptr) => Some(ptr),
Err(DeviceMapError::OutOfDeviceMemory) => {
return Err(AllocationError::OutOfDeviceMemory)
}
Err(DeviceMapError::MapFailed) | Err(DeviceMapError::OutOfHostMemory) => {
return Err(AllocationError::OutOfHostMemory)
}
}
} else {
None
};
let chunk = self.chunks.insert(Chunk {
memory: Arc::new(memory),
ptr,
size: chunk_size,
});
let entry = candidate_size_entry.add_pair_and_acquire_left(chunk, 0, None);
break (entry, candidate_size_index);
}
candidate_size_index += 1;
};
for size_index in (size_index..entry_size_index).rev() {
let size_entry = &mut self.sizes[size_index];
entry =
size_entry.add_pair_and_acquire_left(entry.chunk, entry.offset, Some(entry.index));
}
let chunk_entry = self.chunks.get_unchecked(entry.chunk);
debug_assert!(
entry
.offset
.checked_add(size)
.map_or(false, |end| end <= chunk_entry.size),
"Offset + size is not in chunk bounds"
);
Ok(BuddyBlock {
memory: chunk_entry.memory.clone(),
ptr: chunk_entry
.ptr
.map(|ptr| NonNull::new_unchecked(ptr.as_ptr().add(entry.offset as usize))),
offset: entry.offset,
size,
chunk: entry.chunk,
index: entry.index,
})
}
#[cfg_attr(feature = "tracing", tracing::instrument(skip(self, device)))]
pub unsafe fn dealloc(
&mut self,
device: &impl MemoryDevice<M>,
block: BuddyBlock<M>,
heap: &mut Heap,
allocations_remains: &mut u32,
) {
debug_assert!(block.size.is_power_of_two());
let size_index =
(block.size.trailing_zeros() - self.minimal_size.trailing_zeros()) as usize;
let mut release_index = block.index;
let mut release_size_index = size_index;
loop {
match self.sizes[release_size_index].release(release_index) {
Release::Parent(parent) => {
release_size_index += 1;
release_index = parent;
}
Release::Chunk(chunk) => {
debug_assert_eq!(chunk, block.chunk);
debug_assert_eq!(
self.chunks.get(chunk).size,
self.minimal_size << (release_size_index + 1)
);
let chunk = self.chunks.remove(chunk);
drop(block);
let memory = try_arc_unwrap(chunk.memory)
.expect("Memory shared after last block deallocated");
device.deallocate_memory(memory);
*allocations_remains += 1;
heap.dealloc(chunk.size);
return;
}
Release::None => return,
}
}
}
fn host_visible(&self) -> bool {
self.props.contains(MemoryPropertyFlags::HOST_VISIBLE)
}
}
|
(PairState::Exhausted, side) => {
if self.next_ready == len {
|
Sunset.tsx
|
import { Sunset as FeatherSunset, Props } from 'react-feather';
|
<FeatherSunset data-icon="sunset" {...rootProps} />
);
|
import * as React from 'react';
export const Sunset: React.FC<Props> = ({ ...rootProps }) => (
|
raft_snap_test.go
|
// Copyright 2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package raft
import (
"testing"
pb "github.com/coreos/etcd/raft/raftpb"
)
var (
testingSnap = pb.Snapshot{
Metadata: pb.SnapshotMetadata{
Index: 11, // magic number
Term: 11, // magic number
ConfState: pb.ConfState{Nodes: []uint64{1, 2}},
},
}
)
func TestSendingSnapshotSetPendingSnapshot(t *testing.T) {
storage := NewMemoryStorage()
sm := newTestRaft(1, []uint64{1}, 10, 1, storage)
sm.restore(testingSnap)
sm.becomeCandidate()
sm.becomeLeader()
	// force set the next of node 2, so that
	// node 2 needs a snapshot
sm.prs[2].Next = sm.raftLog.firstIndex()
sm.Step(pb.Message{From: 2, To: 1, Type: pb.MsgAppResp, Index: sm.prs[2].Next - 1, Reject: true})
if sm.prs[2].PendingSnapshot != 11 {
t.Fatalf("PendingSnapshot = %d, want 11", sm.prs[2].PendingSnapshot)
}
}
func TestPendingSnapshotPauseReplication(t *testing.T) {
storage := NewMemoryStorage()
sm := newTestRaft(1, []uint64{1, 2}, 10, 1, storage)
sm.restore(testingSnap)
sm.becomeCandidate()
sm.becomeLeader()
sm.prs[2].becomeSnapshot(11)
sm.Step(pb.Message{From: 1, To: 1, Type: pb.MsgProp, Entries: []pb.Entry{{Data: []byte("somedata")}}})
msgs := sm.readMessages()
if len(msgs) != 0 {
t.Fatalf("len(msgs) = %d, want 0", len(msgs))
}
}
func TestSnapshotFailure(t *testing.T) {
storage := NewMemoryStorage()
sm := newTestRaft(1, []uint64{1, 2}, 10, 1, storage)
sm.restore(testingSnap)
sm.becomeCandidate()
sm.becomeLeader()
sm.prs[2].Next = 1
sm.prs[2].becomeSnapshot(11)
sm.Step(pb.Message{From: 2, To: 1, Type: pb.MsgSnapStatus, Reject: true})
if sm.prs[2].PendingSnapshot != 0 {
t.Fatalf("PendingSnapshot = %d, want 0", sm.prs[2].PendingSnapshot)
}
if sm.prs[2].Next != 1 {
t.Fatalf("Next = %d, want 1", sm.prs[2].Next)
}
	if !sm.prs[2].Paused {
t.Errorf("Paused = %v, want true", sm.prs[2].Paused)
}
}
func TestSnapshotSucceed(t *testing.T)
|
func TestSnapshotAbort(t *testing.T) {
storage := NewMemoryStorage()
sm := newTestRaft(1, []uint64{1, 2}, 10, 1, storage)
sm.restore(testingSnap)
sm.becomeCandidate()
sm.becomeLeader()
sm.prs[2].Next = 1
sm.prs[2].becomeSnapshot(11)
// A successful msgAppResp that has a higher/equal index than the
// pending snapshot should abort the pending snapshot.
sm.Step(pb.Message{From: 2, To: 1, Type: pb.MsgAppResp, Index: 11})
if sm.prs[2].PendingSnapshot != 0 {
t.Fatalf("PendingSnapshot = %d, want 0", sm.prs[2].PendingSnapshot)
}
if sm.prs[2].Next != 12 {
t.Fatalf("Next = %d, want 12", sm.prs[2].Next)
}
}
|
{
storage := NewMemoryStorage()
sm := newTestRaft(1, []uint64{1, 2}, 10, 1, storage)
sm.restore(testingSnap)
sm.becomeCandidate()
sm.becomeLeader()
sm.prs[2].Next = 1
sm.prs[2].becomeSnapshot(11)
sm.Step(pb.Message{From: 2, To: 1, Type: pb.MsgSnapStatus, Reject: false})
if sm.prs[2].PendingSnapshot != 0 {
t.Fatalf("PendingSnapshot = %d, want 0", sm.prs[2].PendingSnapshot)
}
if sm.prs[2].Next != 12 {
t.Fatalf("Next = %d, want 12", sm.prs[2].Next)
}
	if !sm.prs[2].Paused {
t.Errorf("Paused = %v, want true", sm.prs[2].Paused)
}
}
|
res2net.py
|
import math
import torch
import torch.nn as nn
import torch.utils.checkpoint as cp
from mmcv.cnn import (build_conv_layer, build_norm_layer, constant_init,
kaiming_init)
from mmcv.runner import load_checkpoint
from torch.nn.modules.batchnorm import _BatchNorm
from mmdet.utils import get_root_logger
from ..builder import BACKBONES
from .resnet import Bottleneck as _Bottleneck
from .resnet import ResNet
class Bottle2neck(_Bottleneck):
expansion = 4
def __init__(self,
inplanes,
planes,
scales=4,
base_width=26,
base_channels=64,
stage_type='normal',
**kwargs):
"""Bottle2neck block for Res2Net.
If style is "pytorch", the stride-two layer is the 3x3 conv layer, if
it is "caffe", the stride-two layer is the first 1x1 conv layer.
"""
super(Bottle2neck, self).__init__(inplanes, planes, **kwargs)
assert scales > 1, 'Res2Net degenerates to ResNet when scales = 1.'
width = int(math.floor(self.planes * (base_width / base_channels)))
self.norm1_name, norm1 = build_norm_layer(
self.norm_cfg, width * scales, postfix=1)
self.norm3_name, norm3 = build_norm_layer(
self.norm_cfg, self.planes * self.expansion, postfix=3)
self.conv1 = build_conv_layer(
self.conv_cfg,
self.inplanes,
width * scales,
kernel_size=1,
stride=self.conv1_stride,
bias=False)
self.add_module(self.norm1_name, norm1)
if stage_type == 'stage' and self.conv2_stride != 1:
self.pool = nn.AvgPool2d(
kernel_size=3, stride=self.conv2_stride, padding=1)
convs = []
bns = []
fallback_on_stride = False
if self.with_dcn:
fallback_on_stride = self.dcn.pop('fallback_on_stride', False)
if not self.with_dcn or fallback_on_stride:
|
else:
assert self.conv_cfg is None, 'conv_cfg must be None for DCN'
for i in range(scales - 1):
convs.append(
build_conv_layer(
self.dcn,
width,
width,
kernel_size=3,
stride=self.conv2_stride,
padding=self.dilation,
dilation=self.dilation,
bias=False))
bns.append(
build_norm_layer(self.norm_cfg, width, postfix=i + 1)[1])
self.convs = nn.ModuleList(convs)
self.bns = nn.ModuleList(bns)
self.conv3 = build_conv_layer(
self.conv_cfg,
width * scales,
self.planes * self.expansion,
kernel_size=1,
bias=False)
self.add_module(self.norm3_name, norm3)
self.stage_type = stage_type
self.scales = scales
self.width = width
delattr(self, 'conv2')
delattr(self, self.norm2_name)
def forward(self, x):
"""Forward function."""
def _inner_forward(x):
identity = x
out = self.conv1(x)
out = self.norm1(out)
out = self.relu(out)
if self.with_plugins:
out = self.forward_plugin(out, self.after_conv1_plugin_names)
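            # Split the conv1 output into `scales` groups of `width` channels;
            # each group after the first is (except in 'stage' blocks) summed
            # with the previous group's output before its own 3x3 conv,
            # forming Res2Net's hierarchical residual connections.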
spx = torch.split(out, self.width, 1)
sp = self.convs[0](spx[0].contiguous())
sp = self.relu(self.bns[0](sp))
out = sp
for i in range(1, self.scales - 1):
if self.stage_type == 'stage':
sp = spx[i]
else:
sp = sp + spx[i]
sp = self.convs[i](sp.contiguous())
sp = self.relu(self.bns[i](sp))
out = torch.cat((out, sp), 1)
if self.stage_type == 'normal' or self.conv2_stride == 1:
out = torch.cat((out, spx[self.scales - 1]), 1)
elif self.stage_type == 'stage':
out = torch.cat((out, self.pool(spx[self.scales - 1])), 1)
if self.with_plugins:
out = self.forward_plugin(out, self.after_conv2_plugin_names)
out = self.conv3(out)
out = self.norm3(out)
if self.with_plugins:
out = self.forward_plugin(out, self.after_conv3_plugin_names)
if self.downsample is not None:
identity = self.downsample(x)
out += identity
return out
if self.with_cp and x.requires_grad:
out = cp.checkpoint(_inner_forward, x)
else:
out = _inner_forward(x)
out = self.relu(out)
return out
class Res2Layer(nn.Sequential):
"""Res2Layer to build Res2Net style backbone.
Args:
block (nn.Module): block used to build ResLayer.
inplanes (int): inplanes of block.
planes (int): planes of block.
num_blocks (int): number of blocks.
stride (int): stride of the first block. Default: 1
avg_down (bool): Use AvgPool instead of stride conv when
downsampling in the bottle2neck. Default: False
conv_cfg (dict): dictionary to construct and config conv layer.
Default: None
norm_cfg (dict): dictionary to construct and config norm layer.
Default: dict(type='BN')
scales (int): Scales used in Res2Net. Default: 4
base_width (int): Basic width of each scale. Default: 26
"""
def __init__(self,
block,
inplanes,
planes,
num_blocks,
stride=1,
avg_down=True,
conv_cfg=None,
norm_cfg=dict(type='BN'),
scales=4,
base_width=26,
**kwargs):
self.block = block
downsample = None
if stride != 1 or inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.AvgPool2d(
kernel_size=stride,
stride=stride,
ceil_mode=True,
count_include_pad=False),
build_conv_layer(
conv_cfg,
inplanes,
planes * block.expansion,
kernel_size=1,
stride=1,
bias=False),
build_norm_layer(norm_cfg, planes * block.expansion)[1],
)
layers = []
layers.append(
block(
inplanes=inplanes,
planes=planes,
stride=stride,
downsample=downsample,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg,
scales=scales,
base_width=base_width,
stage_type='stage',
**kwargs))
inplanes = planes * block.expansion
for i in range(1, num_blocks):
layers.append(
block(
inplanes=inplanes,
planes=planes,
stride=1,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg,
scales=scales,
base_width=base_width,
**kwargs))
super(Res2Layer, self).__init__(*layers)
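# A minimal usage sketch (not part of the original file): building one
# Res2Layer directly. The channel sizes below are illustrative assumptions.
#
#   layer = Res2Layer(
#       Bottle2neck, inplanes=64, planes=64, num_blocks=3,
#       stride=2, scales=4, base_width=26)
#   # -> nn.Sequential of one 'stage' block followed by two 'normal' blocks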
@BACKBONES.register_module()
class Res2Net(ResNet):
"""Res2Net backbone.
Args:
scales (int): Scales used in Res2Net. Default: 4
base_width (int): Basic width of each scale. Default: 26
depth (int): Depth of res2net, from {50, 101, 152}.
in_channels (int): Number of input image channels. Default: 3.
num_stages (int): Res2net stages. Default: 4.
strides (Sequence[int]): Strides of the first block of each stage.
dilations (Sequence[int]): Dilation of each stage.
out_indices (Sequence[int]): Output from which stages.
style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two
layer is the 3x3 conv layer, otherwise the stride-two layer is
the first 1x1 conv layer.
        deep_stem (bool): Replace the 7x7 conv in the input stem with three
            3x3 convs.
avg_down (bool): Use AvgPool instead of stride conv when
downsampling in the bottle2neck.
frozen_stages (int): Stages to be frozen (stop grad and set eval mode).
-1 means not freezing any parameters.
norm_cfg (dict): Dictionary to construct and config norm layer.
norm_eval (bool): Whether to set norm layers to eval mode, namely,
freeze running stats (mean and var). Note: Effect on Batch Norm
and its variants only.
plugins (list[dict]): List of plugins for stages, each dict contains:
- cfg (dict, required): Cfg dict to build plugin.
- position (str, required): Position inside block to insert
plugin, options are 'after_conv1', 'after_conv2', 'after_conv3'.
- stages (tuple[bool], optional): Stages to apply plugin, length
should be same as 'num_stages'.
with_cp (bool): Use checkpoint or not. Using checkpoint will save some
memory while slowing down the training speed.
zero_init_residual (bool): Whether to use zero init for last norm layer
in resblocks to let them behave as identity.
Example:
>>> from mmdet.models import Res2Net
>>> import torch
>>> self = Res2Net(depth=50, scales=4, base_width=26)
>>> self.eval()
>>> inputs = torch.rand(1, 3, 32, 32)
>>> level_outputs = self.forward(inputs)
>>> for level_out in level_outputs:
... print(tuple(level_out.shape))
(1, 256, 8, 8)
(1, 512, 4, 4)
(1, 1024, 2, 2)
(1, 2048, 1, 1)
"""
arch_settings = {
50: (Bottle2neck, (3, 4, 6, 3)),
101: (Bottle2neck, (3, 4, 23, 3)),
152: (Bottle2neck, (3, 8, 36, 3))
}
def __init__(self,
scales=4,
base_width=26,
style='pytorch',
deep_stem=True,
avg_down=True,
**kwargs):
self.scales = scales
self.base_width = base_width
super(Res2Net, self).__init__(
style='pytorch', deep_stem=True, avg_down=True, **kwargs)
def make_res_layer(self, **kwargs):
return Res2Layer(
scales=self.scales,
base_width=self.base_width,
base_channels=self.base_channels,
**kwargs)
def init_weights(self, pretrained=None):
"""Initialize the weights in backbone.
Args:
pretrained (str, optional): Path to pre-trained weights.
Defaults to None.
"""
if isinstance(pretrained, str):
logger = get_root_logger()
load_checkpoint(self, pretrained, strict=False, logger=logger)
elif pretrained is None:
for m in self.modules():
if isinstance(m, nn.Conv2d):
kaiming_init(m)
elif isinstance(m, (_BatchNorm, nn.GroupNorm)):
constant_init(m, 1)
if self.dcn is not None:
for m in self.modules():
if isinstance(m, Bottle2neck):
# dcn in Res2Net bottle2neck is in ModuleList
for n in m.convs:
if hasattr(n, 'conv_offset'):
constant_init(n.conv_offset, 0)
if self.zero_init_residual:
for m in self.modules():
if isinstance(m, Bottle2neck):
constant_init(m.norm3, 0)
else:
raise TypeError('pretrained must be a str or None')
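# Sketch (an assumption, not from the original file): typical init flow.
#   model = Res2Net(depth=50, scales=4, base_width=26)
#   model.init_weights(pretrained=None)  # kaiming for convs, constant for norms
#   # or pass a checkpoint path string to load pretrained weights instead.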
|
common-settings-isolated-example-panel1.component.ts
|
import { Component, OnInit } from '@angular/core';
import { FormBuilder, Validators } from '@angular/forms';
import { ActivatedRoute, Router } from '@angular/router';
import { SettingsFormService } from '@msft-sme/angular';
import { CommonSettingsIsolatedExamplePanelBaseComponent } from './common-settings-isolated-example-panel-base.component';
import { IsolatedSetting1Model } from './model/isolated-setting1-model';
// TODO: We can simplify this example since each of these panels is the same
@Component({
selector: 'sme-ng2-controls-common-settings-isolated-example-panel1',
templateUrl: './common-settings-isolated-example-panel1.component.html'
})
export class CommonSettingsIsolatedExamplePanel1Component
extends CommonSettingsIsolatedExamplePanelBaseComponent<IsolatedSetting1Model>
implements OnInit {
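    // The super() arguments wire up the shared base panel: default form
    // values, validation error messages, the sample saved data (modelData),
    // and this panel's display name.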
constructor(router: Router, activatedRoute: ActivatedRoute, formbuilder: FormBuilder, settingsFormService: SettingsFormService) {
super(
router,
activatedRoute,
formbuilder,
settingsFormService,
{
name: ''
},
{
name: {
required: 'this is a mandatory field'
}
},
{
name: 'setting 1 name value'
},
'setting 1');
|
}
public ngOnInit() {
this.sampleForm = this.formbuilder.group({
name: [this.modelData.name, Validators.required]
});
super.ngOnInit();
}
}
| |
moment.js
|
//! moment.js
//! version : 2.10.3
//! authors : Tim Wood, Iskren Chernev, Moment.js contributors
//! license : MIT
//! momentjs.com
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
global.moment = factory()
}(this, function () { 'use strict';
var hookCallback;
function utils_hooks__hooks () {
return hookCallback.apply(null, arguments);
}
// This is done to register the method called with moment()
// without creating circular dependencies.
function setHookCallback (callback) {
hookCallback = callback;
}
function isArray(input) {
return Object.prototype.toString.call(input) === '[object Array]';
}
function isDate(input) {
return input instanceof Date || Object.prototype.toString.call(input) === '[object Date]';
}
function map(arr, fn) {
var res = [], i;
for (i = 0; i < arr.length; ++i) {
res.push(fn(arr[i], i));
}
return res;
}
function hasOwnProp(a, b) {
return Object.prototype.hasOwnProperty.call(a, b);
}
function extend(a, b) {
for (var i in b) {
if (hasOwnProp(b, i)) {
a[i] = b[i];
}
}
if (hasOwnProp(b, 'toString')) {
a.toString = b.toString;
}
if (hasOwnProp(b, 'valueOf')) {
a.valueOf = b.valueOf;
}
return a;
}
function create_utc__createUTC (input, format, locale, strict) {
return createLocalOrUTC(input, format, locale, strict, true).utc();
}
function defaultParsingFlags() {
// We need to deep clone this object.
return {
empty : false,
unusedTokens : [],
unusedInput : [],
overflow : -2,
charsLeftOver : 0,
nullInput : false,
invalidMonth : null,
invalidFormat : false,
userInvalidated : false,
iso : false
};
}
function getParsingFlags(m) {
if (m._pf == null) {
m._pf = defaultParsingFlags();
}
return m._pf;
}
function valid__isValid(m) {
if (m._isValid == null) {
var flags = getParsingFlags(m);
m._isValid = !isNaN(m._d.getTime()) &&
flags.overflow < 0 &&
!flags.empty &&
!flags.invalidMonth &&
!flags.nullInput &&
!flags.invalidFormat &&
!flags.userInvalidated;
if (m._strict) {
m._isValid = m._isValid &&
flags.charsLeftOver === 0 &&
flags.unusedTokens.length === 0 &&
flags.bigHour === undefined;
}
}
return m._isValid;
}
function valid__createInvalid (flags) {
var m = create_utc__createUTC(NaN);
if (flags != null) {
extend(getParsingFlags(m), flags);
}
else {
getParsingFlags(m).userInvalidated = true;
}
return m;
}
var momentProperties = utils_hooks__hooks.momentProperties = [];
function copyConfig(to, from) {
var i, prop, val;
if (typeof from._isAMomentObject !== 'undefined') {
to._isAMomentObject = from._isAMomentObject;
}
if (typeof from._i !== 'undefined') {
to._i = from._i;
}
if (typeof from._f !== 'undefined') {
to._f = from._f;
}
if (typeof from._l !== 'undefined') {
to._l = from._l;
}
if (typeof from._strict !== 'undefined') {
to._strict = from._strict;
}
if (typeof from._tzm !== 'undefined') {
to._tzm = from._tzm;
}
if (typeof from._isUTC !== 'undefined') {
to._isUTC = from._isUTC;
}
if (typeof from._offset !== 'undefined') {
to._offset = from._offset;
}
if (typeof from._pf !== 'undefined') {
to._pf = getParsingFlags(from);
}
if (typeof from._locale !== 'undefined') {
to._locale = from._locale;
}
if (momentProperties.length > 0) {
for (i in momentProperties) {
prop = momentProperties[i];
val = from[prop];
if (typeof val !== 'undefined') {
to[prop] = val;
}
}
}
return to;
}
var updateInProgress = false;
// Moment prototype object
function Moment(config) {
copyConfig(this, config);
this._d = new Date(+config._d);
// Prevent infinite loop in case updateOffset creates new moment
// objects.
if (updateInProgress === false) {
updateInProgress = true;
utils_hooks__hooks.updateOffset(this);
updateInProgress = false;
}
}
function isMoment (obj) {
return obj instanceof Moment || (obj != null && obj._isAMomentObject != null);
}
function toInt(argumentForCoercion) {
var coercedNumber = +argumentForCoercion,
value = 0;
if (coercedNumber !== 0 && isFinite(coercedNumber)) {
if (coercedNumber >= 0) {
value = Math.floor(coercedNumber);
} else {
value = Math.ceil(coercedNumber);
}
}
return value;
}
function compareArrays(array1, array2, dontConvert) {
var len = Math.min(array1.length, array2.length),
lengthDiff = Math.abs(array1.length - array2.length),
diffs = 0,
i;
for (i = 0; i < len; i++) {
if ((dontConvert && array1[i] !== array2[i]) ||
(!dontConvert && toInt(array1[i]) !== toInt(array2[i]))) {
diffs++;
}
}
return diffs + lengthDiff;
}
function Locale() {
}
var locales = {};
var globalLocale;
function normalizeLocale(key) {
return key ? key.toLowerCase().replace('_', '-') : key;
}
// pick the locale from the array
// try ['en-au', 'en-gb'] as 'en-au', 'en-gb', 'en', as in move through the list trying each
// substring from most specific to least, but move to the next array item if it's a more specific variant than the current root
function chooseLocale(names) {
var i = 0, j, next, locale, split;
while (i < names.length) {
split = normalizeLocale(names[i]).split('-');
j = split.length;
next = normalizeLocale(names[i + 1]);
next = next ? next.split('-') : null;
while (j > 0) {
locale = loadLocale(split.slice(0, j).join('-'));
if (locale) {
return locale;
}
if (next && next.length >= j && compareArrays(split, next, true) >= j - 1) {
//the next array item is better than a shallower substring of this one
break;
}
j--;
}
i++;
}
return null;
}
function loadLocale(name) {
var oldLocale = null;
// TODO: Find a better way to register and load all the locales in Node
if (!locales[name] && typeof module !== 'undefined' &&
module && module.exports) {
try {
oldLocale = globalLocale._abbr;
require('./locale/' + name);
// because defineLocale currently also sets the global locale, we
// want to undo that for lazy loaded locales
locale_locales__getSetGlobalLocale(oldLocale);
} catch (e) { }
}
return locales[name];
}
// This function will load locale and then set the global locale. If
// no arguments are passed in, it will simply return the current global
// locale key.
function locale_locales__getSetGlobalLocale (key, values) {
var data;
if (key) {
if (typeof values === 'undefined') {
data = locale_locales__getLocale(key);
}
else {
data = defineLocale(key, values);
}
if (data) {
// moment.duration._locale = moment._locale = data;
globalLocale = data;
}
}
return globalLocale._abbr;
}
function defineLocale (name, values) {
if (values !== null) {
values.abbr = name;
if (!locales[name]) {
locales[name] = new Locale();
}
locales[name].set(values);
// backwards compat for now: also set the locale
locale_locales__getSetGlobalLocale(name);
return locales[name];
} else {
// useful for testing
delete locales[name];
return null;
}
}
// returns locale data
function locale_locales__getLocale (key) {
var locale;
if (key && key._locale && key._locale._abbr) {
key = key._locale._abbr;
}
if (!key) {
return globalLocale;
}
if (!isArray(key)) {
//short-circuit everything else
locale = loadLocale(key);
if (locale) {
return locale;
}
key = [key];
}
return chooseLocale(key);
}
var aliases = {};
function addUnitAlias (unit, shorthand) {
var lowerCase = unit.toLowerCase();
aliases[lowerCase] = aliases[lowerCase + 's'] = aliases[shorthand] = unit;
}
function normalizeUnits(units) {
return typeof units === 'string' ? aliases[units] || aliases[units.toLowerCase()] : undefined;
}
function normalizeObjectUnits(inputObject) {
var normalizedInput = {},
normalizedProp,
prop;
for (prop in inputObject) {
if (hasOwnProp(inputObject, prop)) {
normalizedProp = normalizeUnits(prop);
if (normalizedProp) {
normalizedInput[normalizedProp] = inputObject[prop];
}
}
}
return normalizedInput;
}
function makeGetSet (unit, keepTime) {
return function (value) {
if (value != null) {
get_set__set(this, unit, value);
utils_hooks__hooks.updateOffset(this, keepTime);
return this;
} else {
return get_set__get(this, unit);
}
};
}
function get_set__get (mom, unit) {
return mom._d['get' + (mom._isUTC ? 'UTC' : '') + unit]();
}
function get_set__set (mom, unit, value) {
return mom._d['set' + (mom._isUTC ? 'UTC' : '') + unit](value);
}
// MOMENTS
function getSet (units, value) {
var unit;
if (typeof units === 'object') {
for (unit in units) {
this.set(unit, units[unit]);
}
} else {
units = normalizeUnits(units);
if (typeof this[units] === 'function') {
return this[units](value);
}
}
return this;
}
function zeroFill(number, targetLength, forceSign) {
var output = '' + Math.abs(number),
sign = number >= 0;
while (output.length < targetLength) {
output = '0' + output;
}
return (sign ? (forceSign ? '+' : '') : '-') + output;
}
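    // e.g. zeroFill(5, 2) -> '05', zeroFill(-5, 2) -> '-05',
    // zeroFill(5, 2, true) -> '+05'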
var formattingTokens = /(\[[^\[]*\])|(\\)?(Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|Q|YYYYYY|YYYYY|YYYY|YY|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|mm?|ss?|S{1,4}|x|X|zz?|ZZ?|.)/g;
var localFormattingTokens = /(\[[^\[]*\])|(\\)?(LTS|LT|LL?L?L?|l{1,4})/g;
var formatFunctions = {};
var formatTokenFunctions = {};
// token: 'M'
// padded: ['MM', 2]
// ordinal: 'Mo'
// callback: function () { this.month() + 1 }
function addFormatToken (token, padded, ordinal, callback) {
var func = callback;
if (typeof callback === 'string') {
func = function () {
return this[callback]();
};
}
if (token) {
formatTokenFunctions[token] = func;
}
if (padded) {
formatTokenFunctions[padded[0]] = function () {
return zeroFill(func.apply(this, arguments), padded[1], padded[2]);
};
}
if (ordinal) {
formatTokenFunctions[ordinal] = function () {
return this.localeData().ordinal(func.apply(this, arguments), token);
};
}
}
function removeFormattingTokens(input) {
if (input.match(/\[[\s\S]/)) {
return input.replace(/^\[|\]$/g, '');
}
return input.replace(/\\/g, '');
}
function makeFormatFunction(format) {
var array = format.match(formattingTokens), i, length;
for (i = 0, length = array.length; i < length; i++) {
if (formatTokenFunctions[array[i]]) {
array[i] = formatTokenFunctions[array[i]];
} else {
array[i] = removeFormattingTokens(array[i]);
}
}
return function (mom) {
var output = '';
for (i = 0; i < length; i++) {
output += array[i] instanceof Function ? array[i].call(mom, format) : array[i];
}
return output;
};
}
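    // e.g. makeFormatFunction('DD/MM/YYYY') compiles the format once into
    // [tokenFn, '/', tokenFn, '/', tokenFn]; the returned closure then joins
    // literal chunks with each token function's output for a given moment.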
// format date using native date object
function formatMoment(m, format) {
if (!m.isValid()) {
return m.localeData().invalidDate();
}
format = expandFormat(format, m.localeData());
if (!formatFunctions[format]) {
formatFunctions[format] = makeFormatFunction(format);
}
return formatFunctions[format](m);
}
function expandFormat(format, locale) {
var i = 5;
function replaceLongDateFormatTokens(input) {
return locale.longDateFormat(input) || input;
}
localFormattingTokens.lastIndex = 0;
while (i >= 0 && localFormattingTokens.test(format)) {
format = format.replace(localFormattingTokens, replaceLongDateFormatTokens);
localFormattingTokens.lastIndex = 0;
i -= 1;
}
return format;
}
var match1 = /\d/; // 0 - 9
var match2 = /\d\d/; // 00 - 99
var match3 = /\d{3}/; // 000 - 999
var match4 = /\d{4}/; // 0000 - 9999
var match6 = /[+-]?\d{6}/; // -999999 - 999999
var match1to2 = /\d\d?/; // 0 - 99
var match1to3 = /\d{1,3}/; // 0 - 999
var match1to4 = /\d{1,4}/; // 0 - 9999
var match1to6 = /[+-]?\d{1,6}/; // -999999 - 999999
var matchUnsigned = /\d+/; // 0 - inf
var matchSigned = /[+-]?\d+/; // -inf - inf
var matchOffset = /Z|[+-]\d\d:?\d\d/gi; // +00:00 -00:00 +0000 -0000 or Z
var matchTimestamp = /[+-]?\d+(\.\d{1,3})?/; // 123456789 123456789.123
    // any word characters (one or two words) or numbers, including two/three-word months in Arabic.
var matchWord = /[0-9]*['a-z\u00A0-\u05FF\u0700-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+|[\u0600-\u06FF\/]+(\s*?[\u0600-\u06FF]+){1,2}/i;
var regexes = {};
function addRegexToken (token, regex, strictRegex) {
regexes[token] = typeof regex === 'function' ? regex : function (isStrict) {
return (isStrict && strictRegex) ? strictRegex : regex;
};
}
function getParseRegexForToken (token, config) {
if (!hasOwnProp(regexes, token)) {
return new RegExp(unescapeFormat(token));
}
return regexes[token](config._strict, config._locale);
}
// Code from http://stackoverflow.com/questions/3561493/is-there-a-regexp-escape-function-in-javascript
function unescapeFormat(s) {
return s.replace('\\', '').replace(/\\(\[)|\\(\])|\[([^\]\[]*)\]|\\(.)/g, function (matched, p1, p2, p3, p4) {
return p1 || p2 || p3 || p4;
}).replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&');
}
var tokens = {};
function addParseToken (token, callback) {
var i, func = callback;
if (typeof token === 'string') {
token = [token];
}
if (typeof callback === 'number') {
func = function (input, array) {
array[callback] = toInt(input);
};
}
for (i = 0; i < token.length; i++) {
tokens[token[i]] = func;
}
}
function addWeekParseToken (token, callback) {
addParseToken(token, function (input, array, config, token) {
config._w = config._w || {};
callback(input, config._w, config, token);
});
}
function addTimeToArrayFromToken(token, input, config) {
if (input != null && hasOwnProp(tokens, token)) {
tokens[token](input, config._a, config, token);
}
}
var YEAR = 0;
var MONTH = 1;
var DATE = 2;
var HOUR = 3;
var MINUTE = 4;
var SECOND = 5;
var MILLISECOND = 6;
function daysInMonth(year, month) {
return new Date(Date.UTC(year, month + 1, 0)).getUTCDate();
}
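    // e.g. daysInMonth(2016, 1) === 29 -- months are zero-indexed, and day 0
    // of the following month is the last day of the requested month.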
// FORMATTING
addFormatToken('M', ['MM', 2], 'Mo', function () {
return this.month() + 1;
});
addFormatToken('MMM', 0, 0, function (format) {
return this.localeData().monthsShort(this, format);
});
addFormatToken('MMMM', 0, 0, function (format) {
return this.localeData().months(this, format);
});
// ALIASES
addUnitAlias('month', 'M');
// PARSING
addRegexToken('M', match1to2);
addRegexToken('MM', match1to2, match2);
addRegexToken('MMM', matchWord);
addRegexToken('MMMM', matchWord);
addParseToken(['M', 'MM'], function (input, array) {
array[MONTH] = toInt(input) - 1;
});
addParseToken(['MMM', 'MMMM'], function (input, array, config, token) {
var month = config._locale.monthsParse(input, token, config._strict);
// if we didn't find a month name, mark the date as invalid.
if (month != null) {
array[MONTH] = month;
} else {
getParsingFlags(config).invalidMonth = input;
}
});
// LOCALES
var defaultLocaleMonths = 'January_February_March_April_May_June_July_August_September_October_November_December'.split('_');
function localeMonths (m) {
return this._months[m.month()];
}
var defaultLocaleMonthsShort = 'Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec'.split('_');
function localeMonthsShort (m) {
return this._monthsShort[m.month()];
}
function localeMonthsParse (monthName, format, strict) {
var i, mom, regex;
if (!this._monthsParse) {
this._monthsParse = [];
this._longMonthsParse = [];
this._shortMonthsParse = [];
}
for (i = 0; i < 12; i++) {
// make the regex if we don't have it already
mom = create_utc__createUTC([2000, i]);
if (strict && !this._longMonthsParse[i]) {
this._longMonthsParse[i] = new RegExp('^' + this.months(mom, '').replace('.', '') + '$', 'i');
this._shortMonthsParse[i] = new RegExp('^' + this.monthsShort(mom, '').replace('.', '') + '$', 'i');
}
if (!strict && !this._monthsParse[i]) {
regex = '^' + this.months(mom, '') + '|^' + this.monthsShort(mom, '');
this._monthsParse[i] = new RegExp(regex.replace('.', ''), 'i');
}
// test the regex
if (strict && format === 'MMMM' && this._longMonthsParse[i].test(monthName)) {
return i;
} else if (strict && format === 'MMM' && this._shortMonthsParse[i].test(monthName)) {
return i;
} else if (!strict && this._monthsParse[i].test(monthName)) {
return i;
}
}
}
// MOMENTS
function setMonth (mom, value) {
var dayOfMonth;
// TODO: Move this out of here!
if (typeof value === 'string') {
value = mom.localeData().monthsParse(value);
// TODO: Another silent failure?
if (typeof value !== 'number') {
return mom;
}
}
dayOfMonth = Math.min(mom.date(), daysInMonth(mom.year(), value));
mom._d['set' + (mom._isUTC ? 'UTC' : '') + 'Month'](value, dayOfMonth);
return mom;
}
function getSetMonth (value) {
if (value != null) {
setMonth(this, value);
utils_hooks__hooks.updateOffset(this, true);
return this;
} else {
return get_set__get(this, 'Month');
}
}
function getDaysInMonth () {
return daysInMonth(this.year(), this.month());
}
function checkOverflow (m) {
var overflow;
var a = m._a;
if (a && getParsingFlags(m).overflow === -2) {
overflow =
a[MONTH] < 0 || a[MONTH] > 11 ? MONTH :
a[DATE] < 1 || a[DATE] > daysInMonth(a[YEAR], a[MONTH]) ? DATE :
a[HOUR] < 0 || a[HOUR] > 24 || (a[HOUR] === 24 && (a[MINUTE] !== 0 || a[SECOND] !== 0 || a[MILLISECOND] !== 0)) ? HOUR :
a[MINUTE] < 0 || a[MINUTE] > 59 ? MINUTE :
a[SECOND] < 0 || a[SECOND] > 59 ? SECOND :
a[MILLISECOND] < 0 || a[MILLISECOND] > 999 ? MILLISECOND :
-1;
if (getParsingFlags(m)._overflowDayOfYear && (overflow < YEAR || overflow > DATE)) {
overflow = DATE;
}
getParsingFlags(m).overflow = overflow;
}
return m;
}
function warn(msg) {
if (utils_hooks__hooks.suppressDeprecationWarnings === false && typeof console !== 'undefined' && console.warn) {
console.warn('Deprecation warning: ' + msg);
}
}
function deprecate(msg, fn) {
var firstTime = true,
msgWithStack = msg + '\n' + (new Error()).stack;
return extend(function () {
if (firstTime) {
warn(msgWithStack);
firstTime = false;
}
return fn.apply(this, arguments);
}, fn);
}
var deprecations = {};
function deprecateSimple(name, msg) {
if (!deprecations[name]) {
warn(msg);
deprecations[name] = true;
}
}
utils_hooks__hooks.suppressDeprecationWarnings = false;
var from_string__isoRegex = /^\s*(?:[+-]\d{6}|\d{4})-(?:(\d\d-\d\d)|(W\d\d$)|(W\d\d-\d)|(\d\d\d))((T| )(\d\d(:\d\d(:\d\d(\.\d+)?)?)?)?([\+\-]\d\d(?::?\d\d)?|\s*Z)?)?$/;
var isoDates = [
['YYYYYY-MM-DD', /[+-]\d{6}-\d{2}-\d{2}/],
['YYYY-MM-DD', /\d{4}-\d{2}-\d{2}/],
['GGGG-[W]WW-E', /\d{4}-W\d{2}-\d/],
['GGGG-[W]WW', /\d{4}-W\d{2}/],
['YYYY-DDD', /\d{4}-\d{3}/]
];
// iso time formats and regexes
var isoTimes = [
['HH:mm:ss.SSSS', /(T| )\d\d:\d\d:\d\d\.\d+/],
['HH:mm:ss', /(T| )\d\d:\d\d:\d\d/],
['HH:mm', /(T| )\d\d:\d\d/],
['HH', /(T| )\d\d/]
];
var aspNetJsonRegex = /^\/?Date\((\-?\d+)/i;
// date from iso format
function configFromISO(config) {
var i, l,
string = config._i,
match = from_string__isoRegex.exec(string);
if (match) {
getParsingFlags(config).iso = true;
for (i = 0, l = isoDates.length; i < l; i++) {
if (isoDates[i][1].exec(string)) {
// match[5] should be 'T' or undefined
config._f = isoDates[i][0] + (match[6] || ' ');
break;
}
}
for (i = 0, l = isoTimes.length; i < l; i++) {
if (isoTimes[i][1].exec(string)) {
config._f += isoTimes[i][0];
break;
}
}
if (string.match(matchOffset)) {
config._f += 'Z';
}
configFromStringAndFormat(config);
} else {
config._isValid = false;
}
}
// date from iso format or fallback
function configFromString(config) {
var matched = aspNetJsonRegex.exec(config._i);
if (matched !== null) {
config._d = new Date(+matched[1]);
return;
}
configFromISO(config);
if (config._isValid === false) {
delete config._isValid;
utils_hooks__hooks.createFromInputFallback(config);
}
}
utils_hooks__hooks.createFromInputFallback = deprecate(
'moment construction falls back to js Date. This is ' +
'discouraged and will be removed in upcoming major ' +
'release. Please refer to ' +
'https://github.com/moment/moment/issues/1407 for more info.',
function (config) {
config._d = new Date(config._i + (config._useUTC ? ' UTC' : ''));
}
);
function createDate (y, m, d, h, M, s, ms) {
//can't just apply() to create a date:
//http://stackoverflow.com/questions/181348/instantiating-a-javascript-object-by-calling-prototype-constructor-apply
var date = new Date(y, m, d, h, M, s, ms);
//the date constructor doesn't accept years < 1970
if (y < 1970) {
date.setFullYear(y);
}
return date;
}
function createUTCDate (y) {
var date = new Date(Date.UTC.apply(null, arguments));
if (y < 1970) {
date.setUTCFullYear(y);
}
return date;
}
addFormatToken(0, ['YY', 2], 0, function () {
return this.year() % 100;
});
addFormatToken(0, ['YYYY', 4], 0, 'year');
addFormatToken(0, ['YYYYY', 5], 0, 'year');
addFormatToken(0, ['YYYYYY', 6, true], 0, 'year');
// ALIASES
addUnitAlias('year', 'y');
// PARSING
addRegexToken('Y', matchSigned);
addRegexToken('YY', match1to2, match2);
addRegexToken('YYYY', match1to4, match4);
addRegexToken('YYYYY', match1to6, match6);
addRegexToken('YYYYYY', match1to6, match6);
addParseToken(['YYYY', 'YYYYY', 'YYYYYY'], YEAR);
addParseToken('YY', function (input, array) {
array[YEAR] = utils_hooks__hooks.parseTwoDigitYear(input);
});
// HELPERS
function daysInYear(year) {
return isLeapYear(year) ? 366 : 365;
}
function isLeapYear(year) {
return (year % 4 === 0 && year % 100 !== 0) || year % 400 === 0;
}
// HOOKS
utils_hooks__hooks.parseTwoDigitYear = function (input) {
return toInt(input) + (toInt(input) > 68 ? 1900 : 2000);
};
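    // e.g. '68' -> 2068 but '69' -> 1969 (the classic two-digit pivot).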
// MOMENTS
var getSetYear = makeGetSet('FullYear', false);
function getIsLeapYear () {
return isLeapYear(this.year());
}
addFormatToken('w', ['ww', 2], 'wo', 'week');
addFormatToken('W', ['WW', 2], 'Wo', 'isoWeek');
// ALIASES
addUnitAlias('week', 'w');
addUnitAlias('isoWeek', 'W');
// PARSING
addRegexToken('w', match1to2);
addRegexToken('ww', match1to2, match2);
addRegexToken('W', match1to2);
addRegexToken('WW', match1to2, match2);
addWeekParseToken(['w', 'ww', 'W', 'WW'], function (input, week, config, token) {
week[token.substr(0, 1)] = toInt(input);
});
// HELPERS
// firstDayOfWeek 0 = sun, 6 = sat
// the day of the week that starts the week
// (usually sunday or monday)
// firstDayOfWeekOfYear 0 = sun, 6 = sat
// the first week is the week that contains the first
// of this day of the week
// (eg. ISO weeks use thursday (4))
function weekOfYear(mom, firstDayOfWeek, firstDayOfWeekOfYear) {
var end = firstDayOfWeekOfYear - firstDayOfWeek,
daysToDayOfWeek = firstDayOfWeekOfYear - mom.day(),
adjustedMoment;
if (daysToDayOfWeek > end) {
daysToDayOfWeek -= 7;
}
if (daysToDayOfWeek < end - 7) {
daysToDayOfWeek += 7;
}
adjustedMoment = local__createLocal(mom).add(daysToDayOfWeek, 'd');
return {
week: Math.ceil(adjustedMoment.dayOfYear() / 7),
year: adjustedMoment.year()
};
}
// LOCALES
function localeWeek (mom) {
return weekOfYear(mom, this._week.dow, this._week.doy).week;
}
var defaultLocaleWeek = {
dow : 0, // Sunday is the first day of the week.
doy : 6 // The week that contains Jan 1st is the first week of the year.
};
function localeFirstDayOfWeek () {
return this._week.dow;
}
function localeFirstDayOfYear () {
return this._week.doy;
}
// MOMENTS
function getSetWeek (input) {
var week = this.localeData().week(this);
return input == null ? week : this.add((input - week) * 7, 'd');
}
function getSetISOWeek (input) {
var week = weekOfYear(this, 1, 4).week;
return input == null ? week : this.add((input - week) * 7, 'd');
}
addFormatToken('DDD', ['DDDD', 3], 'DDDo', 'dayOfYear');
// ALIASES
addUnitAlias('dayOfYear', 'DDD');
// PARSING
addRegexToken('DDD', match1to3);
addRegexToken('DDDD', match3);
addParseToken(['DDD', 'DDDD'], function (input, array, config) {
config._dayOfYear = toInt(input);
});
// HELPERS
//http://en.wikipedia.org/wiki/ISO_week_date#Calculating_a_date_given_the_year.2C_week_number_and_weekday
function dayOfYearFromWeeks(year, week, weekday, firstDayOfWeekOfYear, firstDayOfWeek) {
var d = createUTCDate(year, 0, 1).getUTCDay();
var daysToAdd;
var dayOfYear;
d = d === 0 ? 7 : d;
weekday = weekday != null ? weekday : firstDayOfWeek;
daysToAdd = firstDayOfWeek - d + (d > firstDayOfWeekOfYear ? 7 : 0) - (d < firstDayOfWeek ? 7 : 0);
dayOfYear = 7 * (week - 1) + (weekday - firstDayOfWeek) + daysToAdd + 1;
return {
year : dayOfYear > 0 ? year : year - 1,
dayOfYear : dayOfYear > 0 ? dayOfYear : daysInYear(year - 1) + dayOfYear
};
}
// MOMENTS
function getSetDayOfYear (input) {
var dayOfYear = Math.round((this.clone().startOf('day') - this.clone().startOf('year')) / 864e5) + 1;
return input == null ? dayOfYear : this.add((input - dayOfYear), 'd');
}
// Pick the first defined of two or three arguments.
function defaults(a, b, c) {
if (a != null) {
return a;
}
if (b != null) {
return b;
}
return c;
}
function currentDateArray(config) {
var now = new Date();
if (config._useUTC) {
return [now.getUTCFullYear(), now.getUTCMonth(), now.getUTCDate()];
}
return [now.getFullYear(), now.getMonth(), now.getDate()];
}
// convert an array to a date.
// the array should mirror the parameters below
// note: all values past the year are optional and will default to the lowest possible value.
    // [year, month, day, hour, minute, second, millisecond]
function configFromArray (config) {
var i, date, input = [], currentDate, yearToUse;
if (config._d) {
return;
}
currentDate = currentDateArray(config);
//compute day of the year from weeks and weekdays
if (config._w && config._a[DATE] == null && config._a[MONTH] == null) {
dayOfYearFromWeekInfo(config);
}
//if the day of the year is set, figure out what it is
if (config._dayOfYear) {
yearToUse = defaults(config._a[YEAR], currentDate[YEAR]);
if (config._dayOfYear > daysInYear(yearToUse)) {
getParsingFlags(config)._overflowDayOfYear = true;
}
date = createUTCDate(yearToUse, 0, config._dayOfYear);
config._a[MONTH] = date.getUTCMonth();
config._a[DATE] = date.getUTCDate();
}
// Default to current date.
// * if no year, month, day of month are given, default to today
// * if day of month is given, default month and year
// * if month is given, default only year
// * if year is given, don't default anything
for (i = 0; i < 3 && config._a[i] == null; ++i) {
config._a[i] = input[i] = currentDate[i];
}
// Zero out whatever was not defaulted, including time
for (; i < 7; i++) {
config._a[i] = input[i] = (config._a[i] == null) ? (i === 2 ? 1 : 0) : config._a[i];
}
// Check for 24:00:00.000
if (config._a[HOUR] === 24 &&
config._a[MINUTE] === 0 &&
config._a[SECOND] === 0 &&
config._a[MILLISECOND] === 0) {
config._nextDay = true;
config._a[HOUR] = 0;
}
config._d = (config._useUTC ? createUTCDate : createDate).apply(null, input);
// Apply timezone offset from input. The actual utcOffset can be changed
// with parseZone.
if (config._tzm != null) {
config._d.setUTCMinutes(config._d.getUTCMinutes() - config._tzm);
}
if (config._nextDay) {
config._a[HOUR] = 24;
}
}
function dayOfYearFromWeekInfo(config) {
var w, weekYear, week, weekday, dow, doy, temp;
w = config._w;
if (w.GG != null || w.W != null || w.E != null) {
dow = 1;
doy = 4;
// TODO: We need to take the current isoWeekYear, but that depends on
// how we interpret now (local, utc, fixed offset). So create
// a now version of current config (take local/utc/offset flags, and
// create now).
weekYear = defaults(w.GG, config._a[YEAR], weekOfYear(local__createLocal(), 1, 4).year);
week = defaults(w.W, 1);
weekday = defaults(w.E, 1);
} else {
dow = config._locale._week.dow;
doy = config._locale._week.doy;
weekYear = defaults(w.gg, config._a[YEAR], weekOfYear(local__createLocal(), dow, doy).year);
week = defaults(w.w, 1);
if (w.d != null) {
// weekday -- low day numbers are considered next week
weekday = w.d;
if (weekday < dow) {
++week;
}
} else if (w.e != null) {
                // local weekday -- counting starts from the beginning of the week
weekday = w.e + dow;
} else {
                // default to the beginning of the week
weekday = dow;
}
}
temp = dayOfYearFromWeeks(weekYear, week, weekday, doy, dow);
config._a[YEAR] = temp.year;
config._dayOfYear = temp.dayOfYear;
}
utils_hooks__hooks.ISO_8601 = function () {};
// date from string and format string
function configFromStringAndFormat(config) {
// TODO: Move this to another part of the creation flow to prevent circular deps
if (config._f === utils_hooks__hooks.ISO_8601) {
configFromISO(config);
return;
}
config._a = [];
getParsingFlags(config).empty = true;
// This array is used to make a Date, either with `new Date` or `Date.UTC`
var string = '' + config._i,
i, parsedInput, tokens, token, skipped,
stringLength = string.length,
totalParsedInputLength = 0;
tokens = expandFormat(config._f, config._locale).match(formattingTokens) || [];
for (i = 0; i < tokens.length; i++) {
token = tokens[i];
parsedInput = (string.match(getParseRegexForToken(token, config)) || [])[0];
if (parsedInput) {
skipped = string.substr(0, string.indexOf(parsedInput));
if (skipped.length > 0) {
getParsingFlags(config).unusedInput.push(skipped);
}
string = string.slice(string.indexOf(parsedInput) + parsedInput.length);
totalParsedInputLength += parsedInput.length;
}
// don't parse if it's not a known token
if (formatTokenFunctions[token]) {
if (parsedInput) {
getParsingFlags(config).empty = false;
}
else {
getParsingFlags(config).unusedTokens.push(token);
}
addTimeToArrayFromToken(token, parsedInput, config);
}
else if (config._strict && !parsedInput) {
getParsingFlags(config).unusedTokens.push(token);
}
}
// add remaining unparsed input length to the string
getParsingFlags(config).charsLeftOver = stringLength - totalParsedInputLength;
if (string.length > 0) {
getParsingFlags(config).unusedInput.push(string);
}
// clear _12h flag if hour is <= 12
if (getParsingFlags(config).bigHour === true &&
config._a[HOUR] <= 12 &&
config._a[HOUR] > 0) {
getParsingFlags(config).bigHour = undefined;
}
// handle meridiem
config._a[HOUR] = meridiemFixWrap(config._locale, config._a[HOUR], config._meridiem);
configFromArray(config);
checkOverflow(config);
}
function meridiemFixWrap (locale, hour, meridiem) {
var isPm;
if (meridiem == null) {
// nothing to do
return hour;
}
if (locale.meridiemHour != null) {
return locale.meridiemHour(hour, meridiem);
} else if (locale.isPM != null) {
// Fallback
isPm = locale.isPM(meridiem);
if (isPm && hour < 12) {
hour += 12;
}
if (!isPm && hour === 12) {
hour = 0;
}
return hour;
} else {
// this is not supposed to happen
return hour;
}
}
function configFromStringAndArray(config) {
var tempConfig,
bestMoment,
scoreToBeat,
i,
currentScore;
if (config._f.length === 0) {
getParsingFlags(config).invalidFormat = true;
config._d = new Date(NaN);
return;
}
for (i = 0; i < config._f.length; i++) {
currentScore = 0;
tempConfig = copyConfig({}, config);
if (config._useUTC != null) {
tempConfig._useUTC = config._useUTC;
}
tempConfig._f = config._f[i];
configFromStringAndFormat(tempConfig);
if (!valid__isValid(tempConfig)) {
continue;
}
// if there is any input that was not parsed add a penalty for that format
currentScore += getParsingFlags(tempConfig).charsLeftOver;
//or tokens
currentScore += getParsingFlags(tempConfig).unusedTokens.length * 10;
getParsingFlags(tempConfig).score = currentScore;
if (scoreToBeat == null || currentScore < scoreToBeat) {
scoreToBeat = currentScore;
bestMoment = tempConfig;
}
}
extend(config, bestMoment || tempConfig);
}
function configFromObject(config) {
if (config._d) {
return;
}
var i = normalizeObjectUnits(config._i);
config._a = [i.year, i.month, i.day || i.date, i.hour, i.minute, i.second, i.millisecond];
configFromArray(config);
}
function createFromConfig (config) {
var input = config._i,
format = config._f,
res;
config._locale = config._locale || locale_locales__getLocale(config._l);
if (input === null || (format === undefined && input === '')) {
return valid__createInvalid({nullInput: true});
}
if (typeof input === 'string') {
config._i = input = config._locale.preparse(input);
}
if (isMoment(input)) {
return new Moment(checkOverflow(input));
} else if (isArray(format)) {
configFromStringAndArray(config);
} else if (format) {
configFromStringAndFormat(config);
} else if (isDate(input)) {
config._d = input;
} else {
configFromInput(config);
}
res = new Moment(checkOverflow(config));
if (res._nextDay) {
// Adding is smart enough around DST
res.add(1, 'd');
res._nextDay = undefined;
}
return res;
}
function configFromInput(config) {
var input = config._i;
if (input === undefined) {
config._d = new Date();
} else if (isDate(input)) {
config._d = new Date(+input);
} else if (typeof input === 'string') {
configFromString(config);
} else if (isArray(input)) {
config._a = map(input.slice(0), function (obj) {
return parseInt(obj, 10);
});
configFromArray(config);
} else if (typeof(input) === 'object') {
configFromObject(config);
} else if (typeof(input) === 'number') {
// from milliseconds
config._d = new Date(input);
} else {
utils_hooks__hooks.createFromInputFallback(config);
}
}
function createLocalOrUTC (input, format, locale, strict, isUTC) {
var c = {};
if (typeof(locale) === 'boolean') {
strict = locale;
locale = undefined;
}
// object construction must be done this way.
// https://github.com/moment/moment/issues/1423
c._isAMomentObject = true;
c._useUTC = c._isUTC = isUTC;
c._l = locale;
c._i = input;
c._f = format;
c._strict = strict;
return createFromConfig(c);
}
function local__createLocal (input, format, locale, strict) {
return createLocalOrUTC(input, format, locale, strict, false);
}
var prototypeMin = deprecate(
'moment().min is deprecated, use moment.min instead. https://github.com/moment/moment/issues/1548',
function () {
var other = local__createLocal.apply(null, arguments);
return other < this ? this : other;
}
);
var prototypeMax = deprecate(
'moment().max is deprecated, use moment.max instead. https://github.com/moment/moment/issues/1548',
function () {
var other = local__createLocal.apply(null, arguments);
return other > this ? this : other;
}
);
// Pick a moment m from moments so that m[fn](other) is true for all
// other. This relies on the function fn to be transitive.
//
// moments should either be an array of moment objects or an array, whose
// first element is an array of moment objects.
function pickBy(fn, moments) {
var res, i;
if (moments.length === 1 && isArray(moments[0])) {
moments = moments[0];
}
if (!moments.length) {
return local__createLocal();
}
res = moments[0];
for (i = 1; i < moments.length; ++i) {
if (moments[i][fn](res)) {
res = moments[i];
}
}
return res;
}
// TODO: Use [].sort instead?
function min () {
var args = [].slice.call(arguments, 0);
return pickBy('isBefore', args);
}
function max () {
var args = [].slice.call(arguments, 0);
return pickBy('isAfter', args);
}
function Duration (duration) {
var normalizedInput = normalizeObjectUnits(duration),
years = normalizedInput.year || 0,
quarters = normalizedInput.quarter || 0,
months = normalizedInput.month || 0,
weeks = normalizedInput.week || 0,
days = normalizedInput.day || 0,
hours = normalizedInput.hour || 0,
minutes = normalizedInput.minute || 0,
seconds = normalizedInput.second || 0,
milliseconds = normalizedInput.millisecond || 0;
// representation for dateAddRemove
this._milliseconds = +milliseconds +
seconds * 1e3 + // 1000
minutes * 6e4 + // 1000 * 60
hours * 36e5; // 1000 * 60 * 60
        // Because dateAddRemove treats 24 hours as different from a
        // day when working around DST, we need to store them separately.
this._days = +days +
weeks * 7;
        // It is impossible to translate months into days without knowing
        // which months you are talking about, so we have to store
// it separately.
this._months = +months +
quarters * 3 +
years * 12;
this._data = {};
this._locale = locale_locales__getLocale();
this._bubble();
}
function isDuration (obj) {
return obj instanceof Duration;
}
function offset (token, separator) {
addFormatToken(token, 0, 0, function () {
var offset = this.utcOffset();
var sign = '+';
if (offset < 0) {
offset = -offset;
sign = '-';
}
return sign + zeroFill(~~(offset / 60), 2) + separator + zeroFill(~~(offset) % 60, 2);
});
}
offset('Z', ':');
offset('ZZ', '');
// PARSING
addRegexToken('Z', matchOffset);
addRegexToken('ZZ', matchOffset);
addParseToken(['Z', 'ZZ'], function (input, array, config) {
config._useUTC = true;
config._tzm = offsetFromString(input);
});
// HELPERS
// timezone chunker
// '+10:00' > ['10', '00']
// '-1530' > ['-15', '30']
var chunkOffset = /([\+\-]|\d\d)/gi;
function offsetFromString(string) {
var matches = ((string || '').match(matchOffset) || []);
var chunk = matches[matches.length - 1] || [];
var parts = (chunk + '').match(chunkOffset) || ['-', 0, 0];
var minutes = +(parts[1] * 60) + toInt(parts[2]);
return parts[0] === '+' ? minutes : -minutes;
}
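    // e.g. offsetFromString('+10:00') -> 600, offsetFromString('-1530') -> -930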
// Return a moment from input, that is local/utc/zone equivalent to model.
function cloneWithOffset(input, model) {
var res, diff;
if (model._isUTC) {
res = model.clone();
diff = (isMoment(input) || isDate(input) ? +input : +local__createLocal(input)) - (+res);
// Use low-level api, because this fn is low-level api.
res._d.setTime(+res._d + diff);
utils_hooks__hooks.updateOffset(res, false);
return res;
} else {
return local__createLocal(input).local();
}
}
function getDateOffset (m) {
        // On Firefox 24, Date#getTimezoneOffset returns a floating point.
// https://github.com/moment/moment/pull/1871
return -Math.round(m._d.getTimezoneOffset() / 15) * 15;
}
// HOOKS
// This function will be called whenever a moment is mutated.
// It is intended to keep the offset in sync with the timezone.
utils_hooks__hooks.updateOffset = function () {};
// MOMENTS
// keepLocalTime = true means only change the timezone, without
// affecting the local hour. So 5:31:26 +0300 --[utcOffset(2, true)]-->
// 5:31:26 +0200 It is possible that 5:31:26 doesn't exist with offset
// +0200, so we adjust the time as needed, to be valid.
//
// Keeping the time actually adds/subtracts (one hour)
// from the actual represented time. That is why we call updateOffset
// a second time. In case it wants us to change the offset again
// _changeInProgress == true case, then we have to adjust, because
// there is no such time in the given timezone.
function getSetOffset (input, keepLocalTime) {
var offset = this._offset || 0,
localAdjust;
if (input != null) {
if (typeof input === 'string') {
input = offsetFromString(input);
}
if (Math.abs(input) < 16) {
input = input * 60;
}
if (!this._isUTC && keepLocalTime) {
localAdjust = getDateOffset(this);
}
this._offset = input;
this._isUTC = true;
if (localAdjust != null) {
this.add(localAdjust, 'm');
}
if (offset !== input) {
if (!keepLocalTime || this._changeInProgress) {
add_subtract__addSubtract(this, create__createDuration(input - offset, 'm'), 1, false);
} else if (!this._changeInProgress) {
this._changeInProgress = true;
utils_hooks__hooks.updateOffset(this, true);
this._changeInProgress = null;
}
}
return this;
} else {
return this._isUTC ? offset : getDateOffset(this);
}
}
function getSetZone (input, keepLocalTime) {
if (input != null) {
if (typeof input !== 'string') {
input = -input;
}
this.utcOffset(input, keepLocalTime);
return this;
} else {
return -this.utcOffset();
}
}
function setOffsetToUTC (keepLocalTime) {
return this.utcOffset(0, keepLocalTime);
}
function setOffsetToLocal (keepLocalTime) {
if (this._isUTC) {
this.utcOffset(0, keepLocalTime);
this._isUTC = false;
if (keepLocalTime) {
this.subtract(getDateOffset(this), 'm');
}
}
return this;
}
function setOffsetToParsedOffset () {
if (this._tzm) {
this.utcOffset(this._tzm);
} else if (typeof this._i === 'string') {
this.utcOffset(offsetFromString(this._i));
}
return this;
}
function hasAlignedHourOffset (input) {
if (!input) {
input = 0;
}
else {
input = local__createLocal(input).utcOffset();
}
return (this.utcOffset() - input) % 60 === 0;
}
function isDaylightSavingTime () {
return (
this.utcOffset() > this.clone().month(0).utcOffset() ||
this.utcOffset() > this.clone().month(5).utcOffset()
);
}
function isDaylightSavingTimeShifted () {
if (this._a) {
var other = this._isUTC ? create_utc__createUTC(this._a) : local__createLocal(this._a);
return this.isValid() && compareArrays(this._a, other.toArray()) > 0;
}
return false;
}
function isLocal () {
return !this._isUTC;
}
function isUtcOffset () {
return this._isUTC;
}
function isUtc () {
return this._isUTC && this._offset === 0;
}
var aspNetRegex = /(\-)?(?:(\d*)\.)?(\d+)\:(\d+)(?:\:(\d+)\.?(\d{3})?)?/;
// from http://docs.closure-library.googlecode.com/git/closure_goog_date_date.js.source.html
// somewhat more in line with 4.4.3.2 2004 spec, but allows decimal anywhere
var create__isoRegex = /^(-)?P(?:(?:([0-9,.]*)Y)?(?:([0-9,.]*)M)?(?:([0-9,.]*)D)?(?:T(?:([0-9,.]*)H)?(?:([0-9,.]*)M)?(?:([0-9,.]*)S)?)?|([0-9,.]*)W)$/;
function create__createDuration (input, key) {
var duration = input,
// matching against regexp is expensive, do it on demand
match = null,
sign,
ret,
diffRes;
if (isDuration(input)) {
duration = {
ms : input._milliseconds,
d : input._days,
M : input._months
};
} else if (typeof input === 'number') {
duration = {};
if (key) {
duration[key] = input;
} else {
duration.milliseconds = input;
}
} else if (!!(match = aspNetRegex.exec(input))) {
sign = (match[1] === '-') ? -1 : 1;
duration = {
y : 0,
d : toInt(match[DATE]) * sign,
h : toInt(match[HOUR]) * sign,
m : toInt(match[MINUTE]) * sign,
s : toInt(match[SECOND]) * sign,
ms : toInt(match[MILLISECOND]) * sign
};
} else if (!!(match = create__isoRegex.exec(input))) {
sign = (match[1] === '-') ? -1 : 1;
duration = {
y : parseIso(match[2], sign),
M : parseIso(match[3], sign),
d : parseIso(match[4], sign),
h : parseIso(match[5], sign),
m : parseIso(match[6], sign),
s : parseIso(match[7], sign),
w : parseIso(match[8], sign)
};
        } else if (duration == null) { // checks for null or undefined
duration = {};
} else if (typeof duration === 'object' && ('from' in duration || 'to' in duration)) {
diffRes = momentsDifference(local__createLocal(duration.from), local__createLocal(duration.to));
duration = {};
duration.ms = diffRes.milliseconds;
duration.M = diffRes.months;
}
ret = new Duration(duration);
if (isDuration(input) && hasOwnProp(input, '_locale')) {
ret._locale = input._locale;
}
return ret;
}
create__createDuration.fn = Duration.prototype;
function parseIso (inp, sign) {
// We'd normally use ~~inp for this, but unfortunately it also
// converts floats to ints.
// inp may be undefined, so careful calling replace on it.
var res = inp && parseFloat(inp.replace(',', '.'));
// apply sign while we're at it
return (isNaN(res) ? 0 : res) * sign;
}
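    // e.g. parseIso('2,5', -1) -> -2.5 (comma decimals are allowed by ISO 8601)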
function positiveMomentsDifference(base, other) {
var res = {milliseconds: 0, months: 0};
res.months = other.month() - base.month() +
(other.year() - base.year()) * 12;
if (base.clone().add(res.months, 'M').isAfter(other)) {
--res.months;
}
res.milliseconds = +other - +(base.clone().add(res.months, 'M'));
return res;
}
function momentsDifference(base, other) {
var res;
other = cloneWithOffset(other, base);
if (base.isBefore(other)) {
res = positiveMomentsDifference(base, other);
} else {
res = positiveMomentsDifference(other, base);
res.milliseconds = -res.milliseconds;
res.months = -res.months;
}
return res;
}
function createAdder(direction, name) {
return function (val, period) {
var dur, tmp;
//invert the arguments, but complain about it
if (period !== null && !isNaN(+period)) {
deprecateSimple(name, 'moment().' + name + '(period, number) is deprecated. Please use moment().' + name + '(number, period).');
tmp = val; val = period; period = tmp;
}
val = typeof val === 'string' ? +val : val;
dur = create__createDuration(val, period);
add_subtract__addSubtract(this, dur, direction);
return this;
};
}
function add_subtract__addSubtract (mom, duration, isAdding, updateOffset) {
var milliseconds = duration._milliseconds,
days = duration._days,
months = duration._months;
updateOffset = updateOffset == null ? true : updateOffset;
if (milliseconds) {
mom._d.setTime(+mom._d + milliseconds * isAdding);
}
if (days) {
get_set__set(mom, 'Date', get_set__get(mom, 'Date') + days * isAdding);
}
if (months) {
setMonth(mom, get_set__get(mom, 'Month') + months * isAdding);
}
if (updateOffset) {
utils_hooks__hooks.updateOffset(mom, days || months);
}
}
var add_subtract__add = createAdder(1, 'add');
var add_subtract__subtract = createAdder(-1, 'subtract');
function moment_calendar__calendar (time) {
// We want to compare the start of today, vs this.
// Getting start-of-today depends on whether we're local/utc/offset or not.
var now = time || local__createLocal(),
sod = cloneWithOffset(now, this).startOf('day'),
diff = this.diff(sod, 'days', true),
format = diff < -6 ? 'sameElse' :
diff < -1 ? 'lastWeek' :
diff < 0 ? 'lastDay' :
diff < 1 ? 'sameDay' :
diff < 2 ? 'nextDay' :
diff < 7 ? 'nextWeek' : 'sameElse';
return this.format(this.localeData().calendar(format, this, local__createLocal(now)));
}
function clone () {
return new Moment(this);
}
function isAfter (input, units) {
var inputMs;
units = normalizeUnits(typeof units !== 'undefined' ? units : 'millisecond');
if (units === 'millisecond') {
input = isMoment(input) ? input : local__createLocal(input);
return +this > +input;
} else {
inputMs = isMoment(input) ? +input : +local__createLocal(input);
return inputMs < +this.clone().startOf(units);
}
}
function isBefore (input, units) {
var inputMs;
units = normalizeUnits(typeof units !== 'undefined' ? units : 'millisecond');
if (units === 'millisecond') {
input = isMoment(input) ? input : local__createLocal(input);
return +this < +input;
} else {
inputMs = isMoment(input) ? +input : +local__createLocal(input);
return +this.clone().endOf(units) < inputMs;
}
}
function isBetween (from, to, units) {
return this.isAfter(from, units) && this.isBefore(to, units);
}
function isSame (input, units) {
var inputMs;
units = normalizeUnits(units || 'millisecond');
if (units === 'millisecond') {
input = isMoment(input) ? input : local__createLocal(input);
return +this === +input;
} else {
inputMs = +local__createLocal(input);
return +(this.clone().startOf(units)) <= inputMs && inputMs <= +(this.clone().endOf(units));
}
}
function absFloor (number) {
if (number < 0) {
return Math.ceil(number);
} else {
return Math.floor(number);
}
}
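    // e.g. absFloor(-1.5) -> -1 (truncates toward zero, unlike Math.floor)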
function diff (input, units, asFloat) {
var that = cloneWithOffset(input, this),
zoneDelta = (that.utcOffset() - this.utcOffset()) * 6e4,
delta, output;
units = normalizeUnits(units);
if (units === 'year' || units === 'month' || units === 'quarter') {
output = monthDiff(this, that);
if (units === 'quarter') {
output = output / 3;
} else if (units === 'year') {
output = output / 12;
}
} else {
delta = this - that;
output = units === 'second' ? delta / 1e3 : // 1000
units === 'minute' ? delta / 6e4 : // 1000 * 60
units === 'hour' ? delta / 36e5 : // 1000 * 60 * 60
units === 'day' ? (delta - zoneDelta) / 864e5 : // 1000 * 60 * 60 * 24, negate dst
units === 'week' ? (delta - zoneDelta) / 6048e5 : // 1000 * 60 * 60 * 24 * 7, negate dst
delta;
}
return asFloat ? output : absFloor(output);
}
function monthDiff (a, b) {
// difference in months
var wholeMonthDiff = ((b.year() - a.year()) * 12) + (b.month() - a.month()),
// b is in (anchor - 1 month, anchor + 1 month)
anchor = a.clone().add(wholeMonthDiff, 'months'),
anchor2, adjust;
if (b - anchor < 0) {
anchor2 = a.clone().add(wholeMonthDiff - 1, 'months');
// linear across the month
adjust = (b - anchor) / (anchor - anchor2);
} else {
anchor2 = a.clone().add(wholeMonthDiff + 1, 'months');
// linear across the month
adjust = (b - anchor) / (anchor2 - anchor);
}
return -(wholeMonthDiff + adjust);
}
utils_hooks__hooks.defaultFormat = 'YYYY-MM-DDTHH:mm:ssZ';
function toString () {
return this.clone().locale('en').format('ddd MMM DD YYYY HH:mm:ss [GMT]ZZ');
}
function moment_format__toISOString () {
var m = this.clone().utc();
if (0 < m.year() && m.year() <= 9999) {
if ('function' === typeof Date.prototype.toISOString) {
// native implementation is ~50x faster, use it when we can
return this.toDate().toISOString();
} else {
return formatMoment(m, 'YYYY-MM-DD[T]HH:mm:ss.SSS[Z]');
}
} else {
return formatMoment(m, 'YYYYYY-MM-DD[T]HH:mm:ss.SSS[Z]');
}
}
function format (inputString) {
var output = formatMoment(this, inputString || utils_hooks__hooks.defaultFormat);
return this.localeData().postformat(output);
}
function from (time, withoutSuffix) {
if (!this.isValid()) {
return this.localeData().invalidDate();
}
return create__createDuration({to: this, from: time}).locale(this.locale()).humanize(!withoutSuffix);
}
function fromNow (withoutSuffix) {
return this.from(local__createLocal(), withoutSuffix);
}
function to (time, withoutSuffix) {
if (!this.isValid()) {
return this.localeData().invalidDate();
}
return create__createDuration({from: this, to: time}).locale(this.locale()).humanize(!withoutSuffix);
}
function toNow (withoutSuffix) {
return this.to(local__createLocal(), withoutSuffix);
}
function locale (key) {
var newLocaleData;
if (key === undefined) {
return this._locale._abbr;
} else {
newLocaleData = locale_locales__getLocale(key);
if (newLocaleData != null) {
this._locale = newLocaleData;
}
return this;
}
}
var lang = deprecate(
'moment().lang() is deprecated. Instead, use moment().localeData() to get the language configuration. Use moment().locale() to change languages.',
function (key) {
if (key === undefined) {
return this.localeData();
} else {
return this.locale(key);
}
}
);
function localeData () {
return this._locale;
}
function startOf (units) {
units = normalizeUnits(units);
// the following switch intentionally omits break keywords
// to utilize falling through the cases.
switch (units) {
case 'year':
this.month(0);
/* falls through */
case 'quarter':
case 'month':
this.date(1);
/* falls through */
case 'week':
case 'isoWeek':
case 'day':
this.hours(0);
/* falls through */
case 'hour':
this.minutes(0);
/* falls through */
case 'minute':
this.seconds(0);
/* falls through */
case 'second':
this.milliseconds(0);
}
// weeks are a special case
if (units === 'week') {
this.weekday(0);
}
if (units === 'isoWeek') {
this.isoWeekday(1);
}
// quarters are also special
if (units === 'quarter') {
this.month(Math.floor(this.month() / 3) * 3);
}
return this;
}
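// e.g. (informal): startOf('month') on 2015-06-15 13:45:30.500 falls through the
// 'month' -> 'day' -> 'hour' -> 'minute' -> 'second' cases above and yields
// 2015-06-01 00:00:00.000.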
function endOf (units) {
units = normalizeUnits(units);
if (units === undefined || units === 'millisecond') {
return this;
}
return this.startOf(units).add(1, (units === 'isoWeek' ? 'week' : units)).subtract(1, 'ms');
}
function to_type__valueOf () {
return +this._d - ((this._offset || 0) * 60000);
}
function unix () {
return Math.floor(+this / 1000);
}
function toDate () {
return this._offset ? new Date(+this) : this._d;
}
function toArray () {
var m = this;
return [m.year(), m.month(), m.date(), m.hour(), m.minute(), m.second(), m.millisecond()];
}
function moment_valid__isValid () {
return valid__isValid(this);
}
function parsingFlags () {
return extend({}, getParsingFlags(this));
}
function invalidAt () {
return getParsingFlags(this).overflow;
}
addFormatToken(0, ['gg', 2], 0, function () {
return this.weekYear() % 100;
});
addFormatToken(0, ['GG', 2], 0, function () {
return this.isoWeekYear() % 100;
});
function addWeekYearFormatToken (token, getter) {
addFormatToken(0, [token, token.length], 0, getter);
}
addWeekYearFormatToken('gggg', 'weekYear');
addWeekYearFormatToken('ggggg', 'weekYear');
addWeekYearFormatToken('GGGG', 'isoWeekYear');
addWeekYearFormatToken('GGGGG', 'isoWeekYear');
// ALIASES
addUnitAlias('weekYear', 'gg');
addUnitAlias('isoWeekYear', 'GG');
// PARSING
addRegexToken('G', matchSigned);
addRegexToken('g', matchSigned);
addRegexToken('GG', match1to2, match2);
addRegexToken('gg', match1to2, match2);
addRegexToken('GGGG', match1to4, match4);
addRegexToken('gggg', match1to4, match4);
addRegexToken('GGGGG', match1to6, match6);
addRegexToken('ggggg', match1to6, match6);
addWeekParseToken(['gggg', 'ggggg', 'GGGG', 'GGGGG'], function (input, week, config, token) {
week[token.substr(0, 2)] = toInt(input);
});
addWeekParseToken(['gg', 'GG'], function (input, week, config, token) {
week[token] = utils_hooks__hooks.parseTwoDigitYear(input);
});
// HELPERS
function weeksInYear(year, dow, doy) {
return weekOfYear(local__createLocal([year, 11, 31 + dow - doy]), dow, doy).week;
}
// MOMENTS
function getSetWeekYear (input) {
var year = weekOfYear(this, this.localeData()._week.dow, this.localeData()._week.doy).year;
return input == null ? year : this.add((input - year), 'y');
}
function getSetISOWeekYear (input) {
var year = weekOfYear(this, 1, 4).year;
return input == null ? year : this.add((input - year), 'y');
}
function getISOWeeksInYear () {
return weeksInYear(this.year(), 1, 4);
}
function getWeeksInYear () {
var weekInfo = this.localeData()._week;
return weeksInYear(this.year(), weekInfo.dow, weekInfo.doy);
}
addFormatToken('Q', 0, 0, 'quarter');
// ALIASES
addUnitAlias('quarter', 'Q');
// PARSING
addRegexToken('Q', match1);
addParseToken('Q', function (input, array) {
array[MONTH] = (toInt(input) - 1) * 3;
});
// MOMENTS
function getSetQuarter (input) {
return input == null ? Math.ceil((this.month() + 1) / 3) : this.month((input - 1) * 3 + this.month() % 3);
}
addFormatToken('D', ['DD', 2], 'Do', 'date');
// ALIASES
addUnitAlias('date', 'D');
// PARSING
addRegexToken('D', match1to2);
addRegexToken('DD', match1to2, match2);
addRegexToken('Do', function (isStrict, locale) {
return isStrict ? locale._ordinalParse : locale._ordinalParseLenient;
});
addParseToken(['D', 'DD'], DATE);
addParseToken('Do', function (input, array) {
array[DATE] = toInt(input.match(match1to2)[0]);
});
// MOMENTS
var getSetDayOfMonth = makeGetSet('Date', true);
addFormatToken('d', 0, 'do', 'day');
addFormatToken('dd', 0, 0, function (format) {
return this.localeData().weekdaysMin(this, format);
});
addFormatToken('ddd', 0, 0, function (format) {
return this.localeData().weekdaysShort(this, format);
});
addFormatToken('dddd', 0, 0, function (format) {
return this.localeData().weekdays(this, format);
});
addFormatToken('e', 0, 0, 'weekday');
addFormatToken('E', 0, 0, 'isoWeekday');
// ALIASES
addUnitAlias('day', 'd');
addUnitAlias('weekday', 'e');
addUnitAlias('isoWeekday', 'E');
// PARSING
addRegexToken('d', match1to2);
addRegexToken('e', match1to2);
addRegexToken('E', match1to2);
addRegexToken('dd', matchWord);
addRegexToken('ddd', matchWord);
addRegexToken('dddd', matchWord);
addWeekParseToken(['dd', 'ddd', 'dddd'], function (input, week, config) {
var weekday = config._locale.weekdaysParse(input);
// if we didn't get a weekday name, mark the date as invalid
if (weekday != null) {
week.d = weekday;
} else {
getParsingFlags(config).invalidWeekday = input;
}
});
addWeekParseToken(['d', 'e', 'E'], function (input, week, config, token) {
week[token] = toInt(input);
});
// HELPERS
function parseWeekday(input, locale) {
if (typeof input === 'string') {
if (!isNaN(input)) {
input = parseInt(input, 10);
}
else {
input = locale.weekdaysParse(input);
if (typeof input !== 'number') {
return null;
}
}
}
return input;
}
// LOCALES
var defaultLocaleWeekdays = 'Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday'.split('_');
function localeWeekdays (m) {
return this._weekdays[m.day()];
}
var defaultLocaleWeekdaysShort = 'Sun_Mon_Tue_Wed_Thu_Fri_Sat'.split('_');
function localeWeekdaysShort (m) {
return this._weekdaysShort[m.day()];
}
var defaultLocaleWeekdaysMin = 'Su_Mo_Tu_We_Th_Fr_Sa'.split('_');
function localeWeekdaysMin (m) {
return this._weekdaysMin[m.day()];
}
function localeWeekdaysParse (weekdayName) {
var i, mom, regex;
if (!this._weekdaysParse) {
this._weekdaysParse = [];
}
for (i = 0; i < 7; i++) {
// make the regex if we don't have it already
if (!this._weekdaysParse[i]) {
mom = local__createLocal([2000, 1]).day(i);
regex = '^' + this.weekdays(mom, '') + '|^' + this.weekdaysShort(mom, '') + '|^' + this.weekdaysMin(mom, '');
this._weekdaysParse[i] = new RegExp(regex.replace('.', ''), 'i');
}
// test the regex
if (this._weekdaysParse[i].test(weekdayName)) {
return i;
}
}
}
// MOMENTS
function getSetDayOfWeek (input) {
var day = this._isUTC ? this._d.getUTCDay() : this._d.getDay();
if (input != null) {
input = parseWeekday(input, this.localeData());
return this.add(input - day, 'd');
} else {
return day;
}
}
function getSetLocaleDayOfWeek (input) {
var weekday = (this.day() + 7 - this.localeData()._week.dow) % 7;
return input == null ? weekday : this.add(input - weekday, 'd');
}
function getSetISODayOfWeek (input) {
// behaves the same as moment#day except
// as a getter, returns 7 instead of 0 (1-7 range instead of 0-6)
// as a setter, sunday should belong to the previous week.
return input == null ? this.day() || 7 : this.day(this.day() % 7 ? input : input - 7);
}
addFormatToken('H', ['HH', 2], 0, 'hour');
addFormatToken('h', ['hh', 2], 0, function () {
return this.hours() % 12 || 12;
});
function meridiem (token, lowercase) {
addFormatToken(token, 0, 0, function () {
return this.localeData().meridiem(this.hours(), this.minutes(), lowercase);
});
}
meridiem('a', true);
meridiem('A', false);
// ALIASES
addUnitAlias('hour', 'h');
// PARSING
function matchMeridiem (isStrict, locale) {
return locale._meridiemParse;
}
addRegexToken('a', matchMeridiem);
addRegexToken('A', matchMeridiem);
addRegexToken('H', match1to2);
addRegexToken('h', match1to2);
addRegexToken('HH', match1to2, match2);
addRegexToken('hh', match1to2, match2);
addParseToken(['H', 'HH'], HOUR);
addParseToken(['a', 'A'], function (input, array, config) {
config._isPm = config._locale.isPM(input);
config._meridiem = input;
});
addParseToken(['h', 'hh'], function (input, array, config) {
array[HOUR] = toInt(input);
getParsingFlags(config).bigHour = true;
});
// LOCALES
function localeIsPM (input) {
// IE8 Quirks Mode & IE7 Standards Mode do not allow accessing strings like arrays
// Using charAt should be more compatible.
return ((input + '').toLowerCase().charAt(0) === 'p');
}
var defaultLocaleMeridiemParse = /[ap]\.?m?\.?/i;
function localeMeridiem (hours, minutes, isLower) {
if (hours > 11) {
return isLower ? 'pm' : 'PM';
} else {
return isLower ? 'am' : 'AM';
}
}
// MOMENTS
// Setting the hour should keep the time, because the user explicitly
// specified which hour he wants. So trying to maintain the same hour (in
// a new timezone) makes sense. Adding/subtracting hours does not follow
// this rule.
var getSetHour = makeGetSet('Hours', true);
addFormatToken('m', ['mm', 2], 0, 'minute');
// ALIASES
addUnitAlias('minute', 'm');
// PARSING
addRegexToken('m', match1to2);
addRegexToken('mm', match1to2, match2);
addParseToken(['m', 'mm'], MINUTE);
// MOMENTS
var getSetMinute = makeGetSet('Minutes', false);
addFormatToken('s', ['ss', 2], 0, 'second');
// ALIASES
addUnitAlias('second', 's');
// PARSING
addRegexToken('s', match1to2);
addRegexToken('ss', match1to2, match2);
addParseToken(['s', 'ss'], SECOND);
// MOMENTS
var getSetSecond = makeGetSet('Seconds', false);
addFormatToken('S', 0, 0, function () {
return ~~(this.millisecond() / 100);
});
addFormatToken(0, ['SS', 2], 0, function () {
return ~~(this.millisecond() / 10);
});
function millisecond__milliseconds (token) {
addFormatToken(0, [token, 3], 0, 'millisecond');
}
millisecond__milliseconds('SSS');
millisecond__milliseconds('SSSS');
// ALIASES
addUnitAlias('millisecond', 'ms');
// PARSING
addRegexToken('S', match1to3, match1);
addRegexToken('SS', match1to3, match2);
addRegexToken('SSS', match1to3, match3);
addRegexToken('SSSS', matchUnsigned);
addParseToken(['S', 'SS', 'SSS', 'SSSS'], function (input, array) {
array[MILLISECOND] = toInt(('0.' + input) * 1000);
});
// MOMENTS
var getSetMillisecond = makeGetSet('Milliseconds', false);
addFormatToken('z', 0, 0, 'zoneAbbr');
addFormatToken('zz', 0, 0, 'zoneName');
// MOMENTS
function getZoneAbbr () {
return this._isUTC ? 'UTC' : '';
}
function getZoneName () {
return this._isUTC ? 'Coordinated Universal Time' : '';
}
var momentPrototype__proto = Moment.prototype;
momentPrototype__proto.add = add_subtract__add;
momentPrototype__proto.calendar = moment_calendar__calendar;
momentPrototype__proto.clone = clone;
momentPrototype__proto.diff = diff;
momentPrototype__proto.endOf = endOf;
momentPrototype__proto.format = format;
momentPrototype__proto.from = from;
momentPrototype__proto.fromNow = fromNow;
momentPrototype__proto.to = to;
momentPrototype__proto.toNow = toNow;
momentPrototype__proto.get = getSet;
momentPrototype__proto.invalidAt = invalidAt;
momentPrototype__proto.isAfter = isAfter;
momentPrototype__proto.isBefore = isBefore;
momentPrototype__proto.isBetween = isBetween;
momentPrototype__proto.isSame = isSame;
momentPrototype__proto.isValid = moment_valid__isValid;
momentPrototype__proto.lang = lang;
momentPrototype__proto.locale = locale;
momentPrototype__proto.localeData = localeData;
momentPrototype__proto.max = prototypeMax;
momentPrototype__proto.min = prototypeMin;
momentPrototype__proto.parsingFlags = parsingFlags;
momentPrototype__proto.set = getSet;
momentPrototype__proto.startOf = startOf;
momentPrototype__proto.subtract = add_subtract__subtract;
momentPrototype__proto.toArray = toArray;
momentPrototype__proto.toDate = toDate;
momentPrototype__proto.toISOString = moment_format__toISOString;
momentPrototype__proto.toJSON = moment_format__toISOString;
momentPrototype__proto.toString = toString;
momentPrototype__proto.unix = unix;
momentPrototype__proto.valueOf = to_type__valueOf;
// Year
momentPrototype__proto.year = getSetYear;
momentPrototype__proto.isLeapYear = getIsLeapYear;
// Week Year
momentPrototype__proto.weekYear = getSetWeekYear;
momentPrototype__proto.isoWeekYear = getSetISOWeekYear;
// Quarter
momentPrototype__proto.quarter = momentPrototype__proto.quarters = getSetQuarter;
// Month
momentPrototype__proto.month = getSetMonth;
momentPrototype__proto.daysInMonth = getDaysInMonth;
// Week
momentPrototype__proto.week = momentPrototype__proto.weeks = getSetWeek;
momentPrototype__proto.isoWeek = momentPrototype__proto.isoWeeks = getSetISOWeek;
momentPrototype__proto.weeksInYear = getWeeksInYear;
momentPrototype__proto.isoWeeksInYear = getISOWeeksInYear;
// Day
momentPrototype__proto.date = getSetDayOfMonth;
momentPrototype__proto.day = momentPrototype__proto.days = getSetDayOfWeek;
momentPrototype__proto.weekday = getSetLocaleDayOfWeek;
momentPrototype__proto.isoWeekday = getSetISODayOfWeek;
momentPrototype__proto.dayOfYear = getSetDayOfYear;
// Hour
momentPrototype__proto.hour = momentPrototype__proto.hours = getSetHour;
// Minute
momentPrototype__proto.minute = momentPrototype__proto.minutes = getSetMinute;
// Second
momentPrototype__proto.second = momentPrototype__proto.seconds = getSetSecond;
// Millisecond
momentPrototype__proto.millisecond = momentPrototype__proto.milliseconds = getSetMillisecond;
// Offset
momentPrototype__proto.utcOffset = getSetOffset;
momentPrototype__proto.utc = setOffsetToUTC;
momentPrototype__proto.local = setOffsetToLocal;
momentPrototype__proto.parseZone = setOffsetToParsedOffset;
momentPrototype__proto.hasAlignedHourOffset = hasAlignedHourOffset;
momentPrototype__proto.isDST = isDaylightSavingTime;
momentPrototype__proto.isDSTShifted = isDaylightSavingTimeShifted;
momentPrototype__proto.isLocal = isLocal;
momentPrototype__proto.isUtcOffset = isUtcOffset;
momentPrototype__proto.isUtc = isUtc;
momentPrototype__proto.isUTC = isUtc;
// Timezone
momentPrototype__proto.zoneAbbr = getZoneAbbr;
momentPrototype__proto.zoneName = getZoneName;
// Deprecations
momentPrototype__proto.dates = deprecate('dates accessor is deprecated. Use date instead.', getSetDayOfMonth);
momentPrototype__proto.months = deprecate('months accessor is deprecated. Use month instead', getSetMonth);
momentPrototype__proto.years = deprecate('years accessor is deprecated. Use year instead', getSetYear);
momentPrototype__proto.zone = deprecate('moment().zone is deprecated, use moment().utcOffset instead. https://github.com/moment/moment/issues/1779', getSetZone);
var momentPrototype = momentPrototype__proto;
function moment__createUnix (input) {
return local__createLocal(input * 1000);
}
function moment__createInZone () {
return local__createLocal.apply(null, arguments).parseZone();
}
var defaultCalendar = {
sameDay : '[Today at] LT',
nextDay : '[Tomorrow at] LT',
nextWeek : 'dddd [at] LT',
lastDay : '[Yesterday at] LT',
lastWeek : '[Last] dddd [at] LT',
sameElse : 'L'
};
function locale_calendar__calendar (key, mom, now) {
var output = this._calendar[key];
return typeof output === 'function' ? output.call(mom, now) : output;
}
var defaultLongDateFormat = {
LTS : 'h:mm:ss A',
LT : 'h:mm A',
L : 'MM/DD/YYYY',
LL : 'MMMM D, YYYY',
LLL : 'MMMM D, YYYY LT',
LLLL : 'dddd, MMMM D, YYYY LT'
};
function longDateFormat (key) {
var output = this._longDateFormat[key];
if (!output && this._longDateFormat[key.toUpperCase()]) {
output = this._longDateFormat[key.toUpperCase()].replace(/MMMM|MM|DD|dddd/g, function (val) {
return val.slice(1);
});
this._longDateFormat[key] = output;
}
return output;
}
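// e.g. asking for the lowercase 'll' when only 'LL' ("MMMM D, YYYY") is defined
// derives and caches "MMM D, YYYY" by trimming one letter from each matched token.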
var defaultInvalidDate = 'Invalid date';
function invalidDate () {
return this._invalidDate;
}
var defaultOrdinal = '%d';
var defaultOrdinalParse = /\d{1,2}/;
function ordinal (number) {
return this._ordinal.replace('%d', number);
}
function preParsePostFormat (string) {
return string;
}
var defaultRelativeTime = {
future : 'in %s',
past : '%s ago',
s : 'a few seconds',
m : 'a minute',
mm : '%d minutes',
h : 'an hour',
hh : '%d hours',
d : 'a day',
dd : '%d days',
M : 'a month',
MM : '%d months',
y : 'a year',
yy : '%d years'
};
function relative__relativeTime (number, withoutSuffix, string, isFuture) {
var output = this._relativeTime[string];
return (typeof output === 'function') ?
output(number, withoutSuffix, string, isFuture) :
output.replace(/%d/i, number);
}
function pastFuture (diff, output) {
var format = this._relativeTime[diff > 0 ? 'future' : 'past'];
return typeof format === 'function' ? format(output) : format.replace(/%s/i, output);
}
function locale_set__set (config) {
var prop, i;
for (i in config) {
prop = config[i];
if (typeof prop === 'function') {
this[i] = prop;
} else {
this['_' + i] = prop;
}
}
// Lenient ordinal parsing accepts just a number in addition to
// number + (possibly) stuff coming from _ordinalParseLenient.
this._ordinalParseLenient = new RegExp(this._ordinalParse.source + '|' + (/\d{1,2}/).source);
}
var prototype__proto = Locale.prototype;
prototype__proto._calendar = defaultCalendar;
prototype__proto.calendar = locale_calendar__calendar;
prototype__proto._longDateFormat = defaultLongDateFormat;
prototype__proto.longDateFormat = longDateFormat;
prototype__proto._invalidDate = defaultInvalidDate;
prototype__proto.invalidDate = invalidDate;
prototype__proto._ordinal = defaultOrdinal;
prototype__proto.ordinal = ordinal;
prototype__proto._ordinalParse = defaultOrdinalParse;
prototype__proto.preparse = preParsePostFormat;
prototype__proto.postformat = preParsePostFormat;
prototype__proto._relativeTime = defaultRelativeTime;
prototype__proto.relativeTime = relative__relativeTime;
prototype__proto.pastFuture = pastFuture;
prototype__proto.set = locale_set__set;
// Month
prototype__proto.months = localeMonths;
prototype__proto._months = defaultLocaleMonths;
prototype__proto.monthsShort = localeMonthsShort;
prototype__proto._monthsShort = defaultLocaleMonthsShort;
prototype__proto.monthsParse = localeMonthsParse;
// Week
prototype__proto.week = localeWeek;
prototype__proto._week = defaultLocaleWeek;
prototype__proto.firstDayOfYear = localeFirstDayOfYear;
prototype__proto.firstDayOfWeek = localeFirstDayOfWeek;
// Day of Week
prototype__proto.weekdays = localeWeekdays;
prototype__proto._weekdays = defaultLocaleWeekdays;
prototype__proto.weekdaysMin = localeWeekdaysMin;
prototype__proto._weekdaysMin = defaultLocaleWeekdaysMin;
prototype__proto.weekdaysShort = localeWeekdaysShort;
prototype__proto._weekdaysShort = defaultLocaleWeekdaysShort;
prototype__proto.weekdaysParse = localeWeekdaysParse;
// Hours
prototype__proto.isPM = localeIsPM;
prototype__proto._meridiemParse = defaultLocaleMeridiemParse;
prototype__proto.meridiem = localeMeridiem;
function lists__get (format, index, field, setter) {
var locale = locale_locales__getLocale();
var utc = create_utc__createUTC().set(setter, index);
return locale[field](utc, format);
}
function list (format, index, field, count, setter) {
if (typeof format === 'number') {
index = format;
format = undefined;
}
format = format || '';
if (index != null) {
return lists__get(format, index, field, setter);
}
var i;
var out = [];
for (i = 0; i < count; i++) {
out[i] = lists__get(format, i, field, setter);
}
return out;
}
function lists__listMonths (format, index) {
return list(format, index, 'months', 12, 'month');
}
function lists__listMonthsShort (format, index) {
return list(format, index, 'monthsShort', 12, 'month');
}
function lists__listWeekdays (format, index) {
return list(format, index, 'weekdays', 7, 'day');
}
function lists__listWeekdaysShort (format, index) {
return list(format, index, 'weekdaysShort', 7, 'day');
}
function lists__listWeekdaysMin (format, index) {
return list(format, index, 'weekdaysMin', 7, 'day');
}
locale_locales__getSetGlobalLocale('en', {
ordinalParse: /\d{1,2}(th|st|nd|rd)/,
ordinal : function (number) {
var b = number % 10,
output = (toInt(number % 100 / 10) === 1) ? 'th' :
(b === 1) ? 'st' :
(b === 2) ? 'nd' :
(b === 3) ? 'rd' : 'th';
return number + output;
}
});
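// e.g. 1 -> '1st', 2 -> '2nd', 3 -> '3rd', 4 -> '4th', 21 -> '21st'; the teens all
// take 'th' (11 -> '11th') because of the toInt(number % 100 / 10) === 1 check.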
// Side effect imports
utils_hooks__hooks.lang = deprecate('moment.lang is deprecated. Use moment.locale instead.', locale_locales__getSetGlobalLocale);
utils_hooks__hooks.langData = deprecate('moment.langData is deprecated. Use moment.localeData instead.', locale_locales__getLocale);
var mathAbs = Math.abs;
function duration_abs__abs () {
var data = this._data;
this._milliseconds = mathAbs(this._milliseconds);
this._days = mathAbs(this._days);
this._months = mathAbs(this._months);
data.milliseconds = mathAbs(data.milliseconds);
data.seconds = mathAbs(data.seconds);
data.minutes = mathAbs(data.minutes);
data.hours = mathAbs(data.hours);
data.months = mathAbs(data.months);
data.years = mathAbs(data.years);
return this;
}
function duration_add_subtract__addSubtract (duration, input, value, direction) {
var other = create__createDuration(input, value);
duration._milliseconds += direction * other._milliseconds;
duration._days += direction * other._days;
duration._months += direction * other._months;
return duration._bubble();
}
// supports only 2.0-style add(1, 's') or add(duration)
function duration_add_subtract__add (input, value) {
return duration_add_subtract__addSubtract(this, input, value, 1);
}
// supports only 2.0-style subtract(1, 's') or subtract(duration)
function duration_add_subtract__subtract (input, value) {
return duration_add_subtract__addSubtract(this, input, value, -1);
}
function bubble () {
var milliseconds = this._milliseconds;
var days = this._days;
var months = this._months;
var data = this._data;
var seconds, minutes, hours, years = 0;
// The following code bubbles up values, see the tests for
// examples of what that means.
data.milliseconds = milliseconds % 1000;
seconds = absFloor(milliseconds / 1000);
data.seconds = seconds % 60;
minutes = absFloor(seconds / 60);
data.minutes = minutes % 60;
hours = absFloor(minutes / 60);
data.hours = hours % 24;
days += absFloor(hours / 24);
// Accurately convert days to years, assume start from year 0.
years = absFloor(daysToYears(days));
days -= absFloor(yearsToDays(years));
// 30 days to a month
// TODO (iskren): Use anchor date (like 1st Jan) to compute this.
months += absFloor(days / 30);
days %= 30;
// 12 months -> 1 year
years += absFloor(months / 12);
months %= 12;
data.days = days;
data.months = months;
data.years = years;
return this;
}
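// A worked example of the bubbling above (informal): 90061001 ms splits into
// 1 ms, 1 s, 1 min, 1 h and 1 day, since 90061001 = 1 + 1000*(1 + 60*(1 + 60*(1 + 24))).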
function daysToYears (days) {
// 400 years have 146097 days (taking into account leap year rules)
return days * 400 / 146097;
}
function yearsToDays (years) {
// years * 365 + absFloor(years / 4) -
// absFloor(years / 100) + absFloor(years / 400);
return years * 146097 / 400;
}
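// Sanity check for the 146097 figure: 400*365 = 146000 days plus 97 leap days
// (100 multiples of 4, minus 4 century years, plus 1 multiple of 400).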
function as (units) {
var days;
var months;
var milliseconds = this._milliseconds;
units = normalizeUnits(units);
if (units === 'month' || units === 'year') {
days = this._days + milliseconds / 864e5;
months = this._months + daysToYears(days) * 12;
return units === 'month' ? months : months / 12;
} else {
// handle milliseconds separately because of floating point math errors (issue #1867)
days = this._days + Math.round(yearsToDays(this._months / 12));
switch (units) {
case 'week' : return days / 7 + milliseconds / 6048e5;
case 'day' : return days + milliseconds / 864e5;
case 'hour' : return days * 24 + milliseconds / 36e5;
case 'minute' : return days * 1440 + milliseconds / 6e4;
case 'second' : return days * 86400 + milliseconds / 1000;
// Math.floor prevents floating point math errors here
case 'millisecond': return Math.floor(days * 864e5) + milliseconds;
default: throw new Error('Unknown unit ' + units);
}
}
}
// TODO: Use this.as('ms')?
function duration_as__valueOf () {
return (
this._milliseconds +
this._days * 864e5 +
(this._months % 12) * 2592e6 +
toInt(this._months / 12) * 31536e6
);
}
function
|
(alias) {
return function () {
return this.as(alias);
};
}
var asMilliseconds = makeAs('ms');
var asSeconds = makeAs('s');
var asMinutes = makeAs('m');
var asHours = makeAs('h');
var asDays = makeAs('d');
var asWeeks = makeAs('w');
var asMonths = makeAs('M');
var asYears = makeAs('y');
function duration_get__get (units) {
units = normalizeUnits(units);
return this[units + 's']();
}
function makeGetter(name) {
return function () {
return this._data[name];
};
}
var duration_get__milliseconds = makeGetter('milliseconds');
var seconds = makeGetter('seconds');
var minutes = makeGetter('minutes');
var hours = makeGetter('hours');
var days = makeGetter('days');
var months = makeGetter('months');
var years = makeGetter('years');
function weeks () {
return absFloor(this.days() / 7);
}
var round = Math.round;
var thresholds = {
s: 45, // seconds to minute
m: 45, // minutes to hour
h: 22, // hours to day
d: 26, // days to month
M: 11 // months to year
};
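// e.g. with these defaults a 40 s duration humanizes as 'a few seconds', while 50 s
// rounds to one minute and becomes 'a minute'.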
// helper function for moment.fn.from, moment.fn.fromNow, and moment.duration.fn.humanize
function substituteTimeAgo(string, number, withoutSuffix, isFuture, locale) {
return locale.relativeTime(number || 1, !!withoutSuffix, string, isFuture);
}
function duration_humanize__relativeTime (posNegDuration, withoutSuffix, locale) {
var duration = create__createDuration(posNegDuration).abs();
var seconds = round(duration.as('s'));
var minutes = round(duration.as('m'));
var hours = round(duration.as('h'));
var days = round(duration.as('d'));
var months = round(duration.as('M'));
var years = round(duration.as('y'));
var a = seconds < thresholds.s && ['s', seconds] ||
minutes === 1 && ['m'] ||
minutes < thresholds.m && ['mm', minutes] ||
hours === 1 && ['h'] ||
hours < thresholds.h && ['hh', hours] ||
days === 1 && ['d'] ||
days < thresholds.d && ['dd', days] ||
months === 1 && ['M'] ||
months < thresholds.M && ['MM', months] ||
years === 1 && ['y'] || ['yy', years];
a[2] = withoutSuffix;
a[3] = +posNegDuration > 0;
a[4] = locale;
return substituteTimeAgo.apply(null, a);
}
// This function allows you to set a threshold for relative time strings
function duration_humanize__getSetRelativeTimeThreshold (threshold, limit) {
if (thresholds[threshold] === undefined) {
return false;
}
if (limit === undefined) {
return thresholds[threshold];
}
thresholds[threshold] = limit;
return true;
}
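// A minimal usage sketch via the public hook wired up further below
// (moment.relativeTimeThreshold):
//   moment.relativeTimeThreshold('m');     // -> 45, the current minutes-to-hour limit
//   moment.relativeTimeThreshold('m', 55); // -> true; 54 minutes now stays '54 minutes'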
function humanize (withSuffix) {
var locale = this.localeData();
var output = duration_humanize__relativeTime(this, !withSuffix, locale);
if (withSuffix) {
output = locale.pastFuture(+this, output);
}
return locale.postformat(output);
}
var iso_string__abs = Math.abs;
function iso_string__toISOString() {
// inspired by https://github.com/dordille/moment-isoduration/blob/master/moment.isoduration.js
var Y = iso_string__abs(this.years());
var M = iso_string__abs(this.months());
var D = iso_string__abs(this.days());
var h = iso_string__abs(this.hours());
var m = iso_string__abs(this.minutes());
var s = iso_string__abs(this.seconds() + this.milliseconds() / 1000);
var total = this.asSeconds();
if (!total) {
// this is the same as C#'s (Noda) and python (isodate)...
// but not other JS (goog.date)
return 'P0D';
}
return (total < 0 ? '-' : '') +
'P' +
(Y ? Y + 'Y' : '') +
(M ? M + 'M' : '') +
(D ? D + 'D' : '') +
((h || m || s) ? 'T' : '') +
(h ? h + 'H' : '') +
(m ? m + 'M' : '') +
(s ? s + 'S' : '');
}
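// e.g. (informal): a duration of 1 day and 2 hours serializes as 'P1DT2H',
// a negative 90 s duration as '-PT1M30S', and a zero duration as 'P0D'.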
var duration_prototype__proto = Duration.prototype;
duration_prototype__proto.abs = duration_abs__abs;
duration_prototype__proto.add = duration_add_subtract__add;
duration_prototype__proto.subtract = duration_add_subtract__subtract;
duration_prototype__proto.as = as;
duration_prototype__proto.asMilliseconds = asMilliseconds;
duration_prototype__proto.asSeconds = asSeconds;
duration_prototype__proto.asMinutes = asMinutes;
duration_prototype__proto.asHours = asHours;
duration_prototype__proto.asDays = asDays;
duration_prototype__proto.asWeeks = asWeeks;
duration_prototype__proto.asMonths = asMonths;
duration_prototype__proto.asYears = asYears;
duration_prototype__proto.valueOf = duration_as__valueOf;
duration_prototype__proto._bubble = bubble;
duration_prototype__proto.get = duration_get__get;
duration_prototype__proto.milliseconds = duration_get__milliseconds;
duration_prototype__proto.seconds = seconds;
duration_prototype__proto.minutes = minutes;
duration_prototype__proto.hours = hours;
duration_prototype__proto.days = days;
duration_prototype__proto.weeks = weeks;
duration_prototype__proto.months = months;
duration_prototype__proto.years = years;
duration_prototype__proto.humanize = humanize;
duration_prototype__proto.toISOString = iso_string__toISOString;
duration_prototype__proto.toString = iso_string__toISOString;
duration_prototype__proto.toJSON = iso_string__toISOString;
duration_prototype__proto.locale = locale;
duration_prototype__proto.localeData = localeData;
// Deprecations
duration_prototype__proto.toIsoString = deprecate('toIsoString() is deprecated. Please use toISOString() instead (notice the capitals)', iso_string__toISOString);
duration_prototype__proto.lang = lang;
// Side effect imports
addFormatToken('X', 0, 0, 'unix');
addFormatToken('x', 0, 0, 'valueOf');
// PARSING
addRegexToken('x', matchSigned);
addRegexToken('X', matchTimestamp);
addParseToken('X', function (input, array, config) {
config._d = new Date(parseFloat(input) * 1000);
});
addParseToken('x', function (input, array, config) {
config._d = new Date(toInt(input));
});
// Side effect imports
utils_hooks__hooks.version = '2.10.3';
setHookCallback(local__createLocal);
utils_hooks__hooks.fn = momentPrototype;
utils_hooks__hooks.min = min;
utils_hooks__hooks.max = max;
utils_hooks__hooks.utc = create_utc__createUTC;
utils_hooks__hooks.unix = moment__createUnix;
utils_hooks__hooks.months = lists__listMonths;
utils_hooks__hooks.isDate = isDate;
utils_hooks__hooks.locale = locale_locales__getSetGlobalLocale;
utils_hooks__hooks.invalid = valid__createInvalid;
utils_hooks__hooks.duration = create__createDuration;
utils_hooks__hooks.isMoment = isMoment;
utils_hooks__hooks.weekdays = lists__listWeekdays;
utils_hooks__hooks.parseZone = moment__createInZone;
utils_hooks__hooks.localeData = locale_locales__getLocale;
utils_hooks__hooks.isDuration = isDuration;
utils_hooks__hooks.monthsShort = lists__listMonthsShort;
utils_hooks__hooks.weekdaysMin = lists__listWeekdaysMin;
utils_hooks__hooks.defineLocale = defineLocale;
utils_hooks__hooks.weekdaysShort = lists__listWeekdaysShort;
utils_hooks__hooks.normalizeUnits = normalizeUnits;
utils_hooks__hooks.relativeTimeThreshold = duration_humanize__getSetRelativeTimeThreshold;
var _moment = utils_hooks__hooks;
return _moment;
}));
|
makeAs
|
tests.rs
|
use super::*;
use crate::p2p::control::{Control, Shutdown, Swarmer};
use futures::channel::oneshot;
use libp2p::identity::Keypair;
use serial_test::serial;
use slimchain_common::basic::ShardId;
use slimchain_utils::init_tracing_for_test;
use std::ops::{Deref, DerefMut};
#[derive(NetworkBehaviour)]
struct DiscoveryTest {
discv: Discovery,
}
impl DiscoveryTest {
async fn new(pk: PublicKey, role: Role, enable_mdns: bool) -> Result<Self> {
let discv = Discovery::new(pk, role, enable_mdns).await?;
Ok(Self { discv })
}
fn try_find_peer(
&mut self,
role: Role,
timeout: Duration,
ret: oneshot::Sender<Result<PeerId>>,
) {
self.discv.find_random_peer_with_ret(role, timeout, ret);
}
}
impl NetworkBehaviourEventProcess<DiscoveryEvent> for DiscoveryTest {
fn inject_event(&mut self, _: DiscoveryEvent) {}
}
#[async_trait::async_trait]
impl Shutdown for DiscoveryTest {
async fn shutdown(&mut self) -> Result<()> {
Ok(())
}
}
impl Deref for DiscoveryTest {
type Target = Discovery;
fn deref(&self) -> &Self::Target {
&self.discv
}
}
impl DerefMut for DiscoveryTest {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.discv
}
}
async fn create_node(mdns: bool, role: Role) -> (PeerId, Multiaddr, Control<DiscoveryTest>) {
let keypair = Keypair::generate_ed25519();
let mut swarmer = Swarmer::new(
keypair.clone(),
DiscoveryTest::new(keypair.public(), role, mdns)
.await
.unwrap(),
)
.await
.unwrap();
let address = swarmer.listen_on_str("/ip4/127.0.0.1/tcp/0").await.unwrap();
let ctrl = swarmer.spawn();
(keypair.public().into_peer_id(), address, ctrl)
}
#[tokio::test]
#[serial]
async fn test_with_mdns() {
let _guard = init_tracing_for_test();
let (_peer1, _addr1, mut ctrl1) = create_node(true, Role::Client).await;
let (peer2, _addr2, ctrl2) = create_node(true, Role::Miner).await;
let (peer3, _addr3, ctrl3) = create_node(true, Role::Storage(ShardId::new(0, 1))).await;
let (peer4, _addr4, ctrl4) = create_node(true, Role::Storage(ShardId::new(0, 1))).await;
let res = ctrl1
.call_with_sender(|swarm, ret| {
swarm
.behaviour_mut()
.try_find_peer(Role::Client, Duration::from_millis(100), ret)
})
.await
.unwrap();
assert!(res.is_err());
let res = ctrl1
.call_with_sender(|swarm, ret| {
swarm
.behaviour_mut()
.try_find_peer(Role::Miner, Duration::from_secs(5), ret)
})
.await
.unwrap()
.unwrap();
assert_eq!(peer2, res);
assert_eq!(
Some(peer2),
ctrl1
.call(|swarm| swarm.behaviour().random_known_peer(&Role::Miner))
.await
.unwrap()
);
let res = ctrl1
.call_with_sender(|swarm, ret| {
swarm.behaviour_mut().try_find_peer(
Role::Storage(ShardId::new(0, 1)),
Duration::from_secs(5),
ret,
)
})
.await
|
assert!(res == peer3 || res == peer4);
let res = ctrl1
.call(|swarm| {
swarm
.behaviour()
.random_known_peers(&Role::Storage(ShardId::new(0, 1)), 1)
})
.await
.unwrap();
assert!(res[0] == peer3 || res[0] == peer4);
ctrl1.shutdown().await.unwrap();
ctrl2.shutdown().await.unwrap();
ctrl3.shutdown().await.unwrap();
ctrl4.shutdown().await.unwrap();
}
#[tokio::test]
#[serial]
async fn test_without_mdns() {
let _guard = init_tracing_for_test();
let (peer0, addr0, ctrl0) = create_node(false, Role::Client).await;
let (_peer1, _addr1, mut ctrl1) = create_node(false, Role::Client).await;
let (peer2, _addr2, mut ctrl2) = create_node(false, Role::Miner).await;
let (peer3, _addr3, mut ctrl3) = create_node(false, Role::Storage(ShardId::new(0, 1))).await;
let (peer4, _addr4, mut ctrl4) = create_node(false, Role::Storage(ShardId::new(0, 1))).await;
let addr = addr0.clone();
ctrl1
.call(move |swarm| swarm.behaviour_mut().add_address(peer0, addr))
.await
.unwrap();
let addr = addr0.clone();
ctrl2
.call(move |swarm| swarm.behaviour_mut().add_address(peer0, addr))
.await
.unwrap();
let addr = addr0.clone();
ctrl3
.call(move |swarm| swarm.behaviour_mut().add_address(peer0, addr))
.await
.unwrap();
let addr = addr0.clone();
ctrl4
.call(move |swarm| swarm.behaviour_mut().add_address(peer0, addr))
.await
.unwrap();
let res = ctrl1
.call_with_sender(|swarm, ret| {
swarm
.behaviour_mut()
.try_find_peer(Role::Miner, Duration::from_secs(5), ret)
})
.await
.unwrap()
.unwrap();
assert_eq!(peer2, res);
assert_eq!(
Some(peer2),
ctrl1
.call(|swarm| swarm.behaviour().random_known_peer(&Role::Miner))
.await
.unwrap()
);
let res = ctrl1
.call_with_sender(|swarm, ret| {
swarm.behaviour_mut().try_find_peer(
Role::Storage(ShardId::new(0, 1)),
Duration::from_secs(5),
ret,
)
})
.await
.unwrap()
.unwrap();
assert!(res == peer3 || res == peer4);
let res = ctrl1
.call_with_sender(|swarm, ret| {
swarm.behaviour_mut().try_find_peer(
Role::Storage(ShardId::new(0, 1)),
Duration::from_secs(5),
ret,
)
})
.await
.unwrap()
.unwrap();
assert!(res == peer3 || res == peer4);
let res = ctrl1
.call(|swarm| {
swarm
.behaviour()
.random_known_peers(&Role::Storage(ShardId::new(0, 1)), 1)
})
.await
.unwrap();
assert!(res[0] == peer3 || res[0] == peer4);
ctrl0.shutdown().await.unwrap();
ctrl1.shutdown().await.unwrap();
ctrl2.shutdown().await.unwrap();
ctrl3.shutdown().await.unwrap();
ctrl4.shutdown().await.unwrap();
}
|
.unwrap()
.unwrap();
|
docids.go
|
package main
import (
"flag"
"fmt"
"io"
"math"
"os"
"github.com/Debian/dcs/internal/index"
)
const docidsHelp = `docids - list the documents covered by this index
Example:
% dcs docids -idx=/srv/dcs/shard0/full
ruby-jquery-turbolinks_2.1.0~dfsg-1/.travis.yml
ruby-jquery-turbolinks_2.1.0~dfsg-1/CONTRIBUTING.md
ruby-jquery-turbolinks_2.1.0~dfsg-1/Gemfile
[…]
% dcs docids -idx=/srv/dcs/shard0/full -doc=388
mopidy-somafm_1.1.0-1/tox.ini
`
func docids(args []string) error {
|
fset := flag.NewFlagSet("docids", flag.ExitOnError)
fset.Usage = usage(fset, docidsHelp)
var idx string
fset.StringVar(&idx, "idx", "", "path to the index file to work with")
var doc int
fset.IntVar(&doc, "doc", -1, "docid of the document to display. All docids are displayed if set to -1 (the default)")
if err := fset.Parse(args); err != nil {
return err
}
if idx == "" {
fset.Usage()
os.Exit(1)
}
i, err := index.Open(idx)
if err != nil {
return fmt.Errorf("Could not open index: %v", err)
}
defer i.Close()
if doc > -1 {
// Display a specific docid entry
if doc > math.MaxUint32 {
return fmt.Errorf("-doc=%d exceeds the uint32 docid space", doc)
}
fn, err := i.DocidMap.Lookup(uint32(doc))
if err != nil {
return err
}
fmt.Println(fn)
return nil
}
// Display all docid entries
if _, err := io.Copy(os.Stdout, i.DocidMap.All()); err != nil {
return err
}
return nil
}
|
|
post-process.js
|
//\Fable>babel out\bundle.js --plugins=transform-es3-property-literals,transform-es3-member-expression-literals -o bundle-2.js
var fs=require("fs");
var filenameIn=process.argv[2];
var filenameOut=process.argv[3];
function patch(data)
{
fs.writeFile(filenameOut,"this.Map=polyfill.Map;\n"+
"this.Symbol=polyfill.Symbol;\n"+
"this.ArrayBuffer=polyfill.ArrayBuffer;\n"+
data,
function(err)
{
if(err)
console.log("Error: "+err);
else
{
console.log("Patching done.");
console.log("Copy/paste the content of bundle-patch.js into Google Apps Script");
}
});
}
//Apply some Babel transforms
//------------------------------
function
|
(data, nextFunc)
{
var babel=require("babel-core");
var babelOptions={
//"presets": ["es2015"],
"plugins": ["transform-es3-property-literals","transform-es3-member-expression-literals"]
}
var res=babel.transform(data,babelOptions);
console.log("Babel processing done. Patching...");
return nextFunc(res.code);
}
//Read file
//---------------
fs.readFile(filenameIn,"utf8",function(err,data)
{
if(err)
console.log("Error: "+err);
else
{
console.log("File read. Babel processing....");
babelPostProc(data,patch);
}
})
|
babelPostProc
|
404.js
|
import React from "react"
|
<h1>ayy... nothing here!</h1>
<p>You just hit a route that doesn't exist... the sadness.</p>
</div>
)
export default NotFoundPage
|
const NotFoundPage = () => (
<div>
|
main.go
|
package main
import (
"context"
"fmt"
"io"
"log"
"net"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/credentials"
"google.golang.org/grpc/metadata"
"google.golang.org/grpc/status"
"github.com/dragonator/grpc-golang/internal/pb"
)
const port = ":9000"
func main() {
lis, err := net.Listen("tcp", port)
if err != nil {
log.Fatal(err)
}
creds, err := credentials.NewServerTLSFromFile("certs/localhost.pem", "certs/localhost-key.pem")
if err != nil {
log.Fatal(err)
}
opts := []grpc.ServerOption{grpc.Creds(creds)}
s := grpc.NewServer(opts...)
pb.RegisterEmployeeServiceServer(s, new(employeeService))
log.Println("Starting server on port " + port)
if err := s.Serve(lis); err != nil {
log.Fatal(err)
}
}
type employeeService struct{}
func (*employeeService) GetByBadgeNumber(ctx context.Context, req *pb.GetByBadgeNumberRequest) (*pb.EmployeeResponse, error) {
if md, ok := metadata.FromIncomingContext(ctx); ok {
fmt.Printf("Metadata received: %v\n", md)
}
for _, e := range employees {
if e.BadgeNumber == req.BadgeNumber {
return &pb.EmployeeResponse{Employee: e}, nil
}
}
return nil, status.Error(codes.NotFound, "Employee not found")
}
func (*employeeService) GetAll(req *pb.GetAllRequest, stream pb.EmployeeService_GetAllServer) error {
for _, e := range employees {
if err := stream.Send(&pb.EmployeeResponse{Employee: e}); err != nil {
return err
}
}
return nil
}
func (*employeeService) Save(ctx context.Context, req *pb.EmployeeRequest) (*pb.EmployeeResponse, error) {
employees = append(employees, req.Employee)
for _, e := range employees {
fmt.Println(e)
}
return &pb.EmployeeResponse{Employee: req.Employee}, nil
}
func (*employeeService) SaveAll(stream pb.EmployeeService_SaveAllServer) error {
for {
emp, err := stream.Recv()
if err == io.EOF {
|
break
}
if err != nil {
return err
}
employees = append(employees, emp.Employee)
if err := stream.Send(&pb.EmployeeResponse{Employee: emp.Employee}); err != nil {
return err
}
}
for _, e := range employees {
fmt.Println(e)
}
return nil
}
func (*employeeService) AddPhoto(stream pb.EmployeeService_AddPhotoServer) error {
md, ok := metadata.FromIncomingContext(stream.Context())
if ok {
fmt.Printf("Receiving photo for badge number %v\n", md["badgenumber"][0])
}
imgData := []byte{}
for {
data, err := stream.Recv()
if err == io.EOF {
fmt.Printf("File received with length: %v\n", len(imgData))
return stream.SendAndClose(&pb.AddPhotoResponse{IsOk: true})
}
if err != nil {
return err
}
fmt.Printf("Received %v bytes\n", len(data.Data))
imgData = append(imgData, data.Data...)
}
}
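// A hedged client-side sketch for AddPhoto (the request message name and everything
// except the Data field are assumptions about the generated pb package):
//   stream, _ := client.AddPhoto(ctx)
//   for _, chunk := range chunks { stream.Send(&pb.AddPhotoRequest{Data: chunk}) }
//   res, _ := stream.CloseAndRecv() // the server answers once with IsOk set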
| |
Previous.tsx
|
/* eslint-disable max-len */
import React from 'react';
const Previous: React.FC<React.SVGAttributes<SVGElement>> = (props) => (
<svg {...props} viewBox="0 0 24 24">
<path d="M0.807857 6.77032C0.605985 6.77032 0.403489 6.68595 0.235361 6.53469C-0.0846387 6.21469 -0.0846387 5.7097 0.235361 5.40656L5.40592 0.235361C5.72592 -0.0846387 6.23091 -0.0846387 6.53405 0.235361C6.85405 0.555361 6.85405 1.06035 6.53405 1.36349L1.36413 6.53469C1.21225 6.68594 1.01039 6.77032 0.807873 6.77032H0.807857Z" />
<path d="M5.97875 11.9413C5.77688 11.9413 5.57439 11.8569 5.40626 11.7057L0.235697 6.53512C-0.084303 6.21512 -0.084303 5.71013 0.235697 5.40699C0.555697 5.08699 1.06069 5.08699 1.36382 5.40699L6.53439 10.5776C6.85439 10.8976 6.85439 11.4025 6.53439 11.7057C6.38314 11.8569 6.18127 11.9413 5.97875 11.9413Z" />
<path d="M15.1918 16.0001C14.7537 16.0001 14.3837 15.6464 14.3837 15.192C14.3837 9.7352 11.3856 6.73696 5.96242 6.73696H1.17874C0.740615 6.73696 0.386871 6.38322 0.386871 5.92883C0.386871 5.47383 0.740615 5.13696 1.17874 5.13696H5.97874C9.19554 5.13696 11.705 6.06322 13.4568 7.88192C15.1243 9.63376 16 12.16 16 15.192C16 15.6464 15.6463 16.0001 15.1919 16.0001H15.1918Z" />
</svg>
);
|
export default Previous;
| |
RenderMapBlips.tsx
|
import * as React from "react";
import * as L from "leaflet";
import { Marker, Popup, useMap } from "react-leaflet";
import { convertToMap } from "lib/map/utils";
import { blipTypes } from "lib/map/blips";
import { BLIP_SIZES, Blip, BlipsData, MarkerType } from "types/Map";
import { useDispatchMapState } from "state/mapState";
export function RenderMapBlips() {
const map = useMap();
const [blips, setBlips] = React.useState<Blip[]>([]);
const { blipsHidden } = useDispatchMapState();
const doBlips = React.useCallback(async () => {
setBlips(await generateBlips(map));
}, [map]);
React.useEffect(() => {
doBlips();
}, [doBlips]);
if (blipsHidden) {
return null;
}
return (
<>
{blips.map((blip, idx) => {
return (
<Marker
icon={blip.icon}
draggable={false}
key={`${blip.name}-${idx}`}
position={blip.pos}
>
<Popup>
<p className="text-base !m-0">
<strong>Name: </strong> {blip.type}- {blip.name}
</p>
</Popup>
</Marker>
);
})}
</>
);
}
async function
|
(map: L.Map) {
const blipsData: BlipsData = await fetch("/blips.json")
.then((v) => v.json())
.catch(() => ({}));
const markerTypes = generateMarkerTypes();
const createdBlips: Blip[] = [];
for (const id in blipsData) {
if (blipsData[id]) {
const blipArray = blipsData[id];
for (const i in blipArray) {
const blipData = blipArray[+i];
const markerData = markerTypes[+id];
if (!blipData) continue;
const pos =
"pos" in blipData ? blipData.pos : { x: blipData.x, y: blipData.y, z: blipData.z };
const converted = convertToMap(pos.x, pos.y, map);
const blip: Blip = {
name: markerData?.name ?? id,
description: null,
pos: converted,
rawPos: pos,
type: Number(id),
icon: markerData ? L.icon(markerData) : undefined,
};
createdBlips.push(blip);
}
}
}
return createdBlips;
}
function generateMarkerTypes() {
const markerTypes: Record<number, MarkerType> = {};
let blipCss = `.blip {
background: url("/map/blips_texturesheet.png");
background-size: ${1024 / 2}px ${1024 / 2}px;
display: inline-block;
width: ${BLIP_SIZES.width}px;
height: ${BLIP_SIZES.height}px;
}`;
const current = {
x: 0,
y: 0,
id: 0,
};
for (const blipName in blipTypes) {
const blip = blipTypes[blipName];
if (!blip.id) {
current.id = current.id + 1;
} else {
current.id = blip.id;
}
if (!blip.x) {
current.x += 1;
} else {
current.x = blip.x;
}
if (blip.y) {
current.y = blip.y;
}
markerTypes[current.id] = {
name: blipName.replace(/([A-Z0-9])/g, " $1").trim(),
className: `blip blip-${blipName}`,
iconUrl:
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAAFElEQVR4XgXAAQ0AAABAMP1L30IDCPwC/o5WcS4AAAAASUVORK5CYII=",
iconSize: [BLIP_SIZES.width, BLIP_SIZES.height],
iconAnchor: [BLIP_SIZES.width / 2, 0],
popupAnchor: [0, 0],
};
const left = current.x * BLIP_SIZES.width + 0;
const top = current.y * BLIP_SIZES.height + 0;
blipCss += `.blip-${blipName} { background-position: -${left}px -${top}px }`;
}
const style = document.createElement("style");
style.innerHTML = blipCss;
document.head.appendChild(style);
return markerTypes;
}
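// Sprite-sheet math in brief: the 1024px texture is rendered at half size, so a blip
// on grid cell (x, y) is exposed by shifting the shared background to
// (-x * BLIP_SIZES.width, -y * BLIP_SIZES.height); each generated .blip-<name>
// class therefore shows exactly one tile of the sheet.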
|
generateBlips
|
random_delay.go
|
package main
import (
"fmt"
"time"
"github.com/sam-ulrich1/colly/v2"
"github.com/sam-ulrich1/colly/v2/debug"
)
func
|
() {
url := "https://httpbin.org/delay/2"
// Instantiate default collector
c := colly.NewCollector(
// Attach a debugger to the collector
colly.Debugger(&debug.LogDebugger{}),
colly.Async(),
)
// Limit the number of threads started by colly to two
// when visiting links whose domains match the "*httpbin.*" glob
c.Limit(&colly.LimitRule{
DomainGlob: "*httpbin.*",
Parallelism: 2,
RandomDelay: 5 * time.Second,
})
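// Note: RandomDelay adds a random wait in [0, 5s) before each matching request,
// on top of any fixed Delay (an informal reading of colly's LimitRule).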
// Start scraping in four threads on https://httpbin.org/delay/2
for i := 0; i < 4; i++ {
c.Visit(fmt.Sprintf("%s?n=%d", url, i))
}
// Start scraping on https://httpbin.org/delay/2
c.Visit(url)
// Wait until threads are finished
c.Wait()
}
|
main
|
aiohttp.py
|
# -*- coding: utf-8 -*-
"""aiohttp requester module."""
import asyncio
import logging
import socket
from asyncio.events import AbstractEventLoop, AbstractServer
from typing import Any, Mapping, Optional, Tuple, Union
import aiohttp
import aiohttp.web
import async_timeout
from async_upnp_client import UpnpEventHandler, UpnpRequester
_LOGGER = logging.getLogger(__name__)
EXTERNAL_IP = "1.1.1.1"
def get_local_ip(target_host: Optional[str] = None) -> str:
"""Try to get the local IP of this machine, used to talk to target_url."""
target_host = target_host or EXTERNAL_IP
target_port = 80
temp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
    temp_sock.connect((target_host, target_port))
    local_ip: str = temp_sock.getsockname()[0]
    return local_ip
finally:
    temp_sock.close()
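# Informal note: connecting a UDP socket sends no packets, but it forces the OS to
# pick a route, so getsockname() above reveals the outbound interface address,
# e.g. get_local_ip() -> '192.168.1.23' on a typical NATed host (address illustrative).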
class AiohttpRequester(UpnpRequester):
"""Standard AioHttpUpnpRequester, to be used with UpnpFactory."""
def __init__(
self, timeout: int = 5, http_headers: Optional[Mapping[str, str]] = None
) -> None:
"""Initialize."""
self._timeout = timeout
self._http_headers = http_headers or {}
async def async_do_http_request(
self,
method: str,
url: str,
headers: Optional[Mapping[str, str]] = None,
body: Optional[str] = None,
body_type: str = "text",
) -> Tuple[int, Mapping, Union[str, bytes, None]]:
"""Do a HTTP request."""
# pylint: disable=too-many-arguments
req_headers = {**self._http_headers, **(headers or {})}
async with async_timeout.timeout(self._timeout):
async with aiohttp.ClientSession() as session:
async with session.request(
method, url, headers=req_headers, data=body
) as response:
status = response.status
resp_headers: Mapping = response.headers or {}
resp_body: Union[str, bytes, None] = None
if body_type == "text":
try:
resp_body = await response.text()
except UnicodeDecodeError as exception:
resp_body_bytes = await response.read()
resp_body = resp_body_bytes.decode(
exception.encoding, errors="replace"
)
elif body_type == "raw":
resp_body = await response.read()
elif body_type == "ignore":
resp_body = None
return status, resp_headers, resp_body
class AiohttpSessionRequester(UpnpRequester):
"""
Standard AiohttpSessionRequester, to be used with UpnpFactory.
With pluggable session.
"""
def __init__(
self,
session: aiohttp.ClientSession,
with_sleep: bool = False,
timeout: int = 5,
http_headers: Optional[Mapping[str, str]] = None,
) -> None:
"""Initialize."""
self._session = session
self._with_sleep = with_sleep
self._timeout = timeout
self._http_headers = http_headers or {}
async def async_do_http_request(
self,
method: str,
url: str,
headers: Optional[Mapping[str, str]] = None,
body: Optional[str] = None,
body_type: str = "text",
) -> Tuple[int, Mapping, Union[str, bytes, None]]:
"""Do a HTTP request."""
# pylint: disable=too-many-arguments
req_headers = {**self._http_headers, **(headers or {})}
if self._with_sleep:
await asyncio.sleep(0.01)
async with async_timeout.timeout(self._timeout):
async with self._session.request(
method, url, headers=req_headers, data=body
) as response:
status = response.status
resp_headers: Mapping = response.headers or {}
resp_body: Union[str, bytes, None] = None
if body_type == "text":
resp_body = await response.text()
elif body_type == "raw":
resp_body = await response.read()
elif body_type == "ignore":
resp_body = None
return status, resp_headers, resp_body
class AiohttpNotifyServer:
"""AIO HTTP Server to handle incoming events."""
def
|
(
self,
requester: UpnpRequester,
listen_port: int,
listen_host: Optional[str] = None,
callback_url: Optional[str] = None,
loop: Optional[AbstractEventLoop] = None,
) -> None:
"""Initialize."""
# pylint: disable=too-many-arguments
self._listen_port = listen_port
self._listen_host = listen_host or get_local_ip()
self._callback_url = callback_url or "http://{}:{}/notify".format(
self._listen_host, self._listen_port
)
self._loop = loop or asyncio.get_event_loop()
self._aiohttp_server: Optional[aiohttp.web.Server] = None
self._server: Optional[AbstractServer] = None
self.event_handler = UpnpEventHandler(self._callback_url, requester)
async def start_server(self) -> None:
"""Start the HTTP server."""
self._aiohttp_server = aiohttp.web.Server(self._handle_request)
try:
self._server = await self._loop.create_server(
self._aiohttp_server, self._listen_host, self._listen_port
)
except OSError as error:
_LOGGER.error(
"Failed to create HTTP server at %s:%d: %s",
self._listen_host,
self._listen_port,
error,
)
async def stop_server(self) -> None:
"""Stop the HTTP server."""
if self._aiohttp_server:
await self._aiohttp_server.shutdown(10)
if self._server:
self._server.close()
async def _handle_request(self, request: Any) -> aiohttp.web.Response:
"""Handle incoming requests."""
_LOGGER.debug("Received request: %s", request)
if request.method != "NOTIFY":
_LOGGER.debug("Not notify")
return aiohttp.web.Response(status=405)
headers = request.headers
body = await request.text()
status = await self.event_handler.handle_notify(headers, body)
_LOGGER.debug("NOTIFY response status: %s", status)
return aiohttp.web.Response(status=status)
@property
def callback_url(self) -> str:
"""Return callback URL on which we are callable."""
return self.event_handler.callback_url
|
__init__
|
routes.py
|
"""
Routes module.
Responsible for providing the means to register the application routes.
"""
from example_web_app.controllers.health_api import HealthApiController
from example_web_app.controllers.example_api import ExampleApiController
def setup_routes(app):
###
# Register the API controllers
#
|
health_api = HealthApiController()
example_api = ExampleApiController()
###
# API v1.0 ROUTES
#
# Add your public v1.0 API routes here
#
app.router.add_get('/api/v1.0/examples', example_api.get)
app.router.add_get('/api/v1.0/examples/{id}', example_api.get_by_id)
###
# INTERNAL API ROUTES
#
# Add your internal/administrative API routes here
#
app.router.add_get('/api/-/health', health_api.get)
|
|
traits.rs
|
use crate::coord::TileCoord;
use bevy::math::IVec2;
use bevy::prelude::{Entity, Query, With};
use bevy_ecs_tilemap::{MapQuery, Tile, TileParent, TilePos};
use bevy_tileset::auto::{AutoTile, AutoTileId};
use std::cell::RefCell;
/// A trait over `Query<'w, 's, (Entity, &TilePos, &TileParent, &AutoTile), With<Tile>>` to prevent errors with
/// "explicit lifetime required in the type of `query`"
pub(super) trait TileQuery {
fn find_tile(&self, entity: Entity) -> Option<TileInfo>;
fn count(&self) -> usize;
}
impl<'w, 's> TileQuery for Query<'w, 's, (Entity, &TilePos, &TileParent, &AutoTileId), With<Tile>> {
fn find_tile(&self, entity: Entity) -> Option<TileInfo> {
if let Ok((entity, pos, parent, auto_tile)) = self.get(entity) {
Some(TileInfo::new(entity, pos, parent, auto_tile))
} else {
None
}
}
fn count(&self) -> usize {
self.iter().count()
}
}
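// Hedged usage sketch: a system holding the concrete query can hand it out as a trait
// object, e.g. `TilemapCache { tiles_query: &tile_query, map_query: &map_query_cell }`
// (the variable names are assumptions; the cache type itself is defined below).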
/// Defines a tile
#[derive(Copy, Clone, Debug)]
pub(super) struct TileInfo {
pub coord: TileCoord,
pub entity: Entity,
pub auto_tile: bevy_tileset::auto::AutoTileId,
}
pub(super) struct TilemapCache<'a, 'w, 's> {
pub tiles_query: &'a dyn TileQuery,
pub map_query: &'a RefCell<MapQuery<'w, 's>>,
}
impl TileInfo {
pub fn new(entity: Entity, pos: &TilePos, parent: &TileParent, auto_tile: &AutoTileId) -> Self {
Self {
entity,
auto_tile: *auto_tile,
coord: TileCoord {
pos: *pos,
map_id: parent.map_id,
layer_id: parent.layer_id,
},
}
}
}
impl bevy_tileset::auto::AutoTile for TileInfo {
type Coords = TileCoord;
fn coords(&self) -> Self::Coords {
self.coord
}
fn auto_id(&self) -> bevy_tileset::auto::AutoTileId {
self.auto_tile
}
fn can_match(&self, other: &Self) -> bool {
self.coord.map_id == other.coord.map_id
&& self.coord.layer_id == other.coord.layer_id
&& self.auto_tile == other.auto_tile
}
}
impl<'a, 'w, 's> bevy_tileset::auto::AutoTilemap for TilemapCache<'a, 'w, 's> {
type Tile = TileInfo;
fn make_coords(
&self,
pos: IVec2,
template: &<Self::Tile as AutoTile>::Coords,
) -> <Self::Tile as AutoTile>::Coords {
TileCoord {
pos: pos.as_uvec2().into(),
map_id: template.map_id,
layer_id: template.layer_id,
}
}
fn get_tile_at(&self, coords: &<Self::Tile as AutoTile>::Coords) -> Option<Self::Tile> {
let entity =
self.map_query
.borrow_mut()
.get_tile_entity(coords.pos, coords.map_id, coords.layer_id);
if let Ok(entity) = entity
|
else {
None
}
}
fn len(&self) -> usize {
self.tiles_query.count()
}
}
|
{
self.tiles_query.find_tile(entity)
}
|
problem.py
|
from typing import TYPE_CHECKING, List, Optional, Type
from uuid import UUID
from sqlalchemy import event
from sqlalchemy.schema import Column, ForeignKey, UniqueConstraint
from sqlmodel import Field, Relationship
from sqlmodel.sql.sqltypes import GUID
from joj.horse.models.base import DomainURLORMModel, url_pre_save
from joj.horse.models.link_tables import ProblemProblemSetLink
from joj.horse.schemas.problem import ProblemDetail, WithLatestRecordType
from joj.horse.services.db import db_session
if TYPE_CHECKING:
from joj.horse.models import (
Domain,
ProblemConfig,
ProblemGroup,
ProblemSet,
Record,
User,
)
class Problem(DomainURLORMModel, ProblemDetail, table=True): # type: ignore[call-arg]
__tablename__ = "problems"
__table_args__ = (UniqueConstraint("domain_id", "url"),)
domain_id: UUID = Field(
sa_column=Column(
GUID, ForeignKey("domains.id", ondelete="CASCADE"), nullable=False
)
)
domain: "Domain" = Relationship(back_populates="problems")
owner_id: Optional[UUID] = Field(
sa_column=Column(
GUID, ForeignKey("users.id", ondelete="SET NULL"), nullable=True
)
)
owner: Optional["User"] = Relationship(back_populates="owned_problems")
problem_group_id: Optional[UUID] = Field(
sa_column=Column(
GUID, ForeignKey("problem_groups.id", ondelete="SET NULL"), nullable=True
)
)
problem_group: Optional["ProblemGroup"] = Relationship(back_populates="problems")
problem_sets: List["ProblemSet"] = Relationship(
back_populates="problems",
link_model=ProblemProblemSetLink,
)
problem_problem_set_links: List[ProblemProblemSetLink] = Relationship(
back_populates="problem",
)
records: List["Record"] = Relationship(back_populates="problem")
problem_configs: List["ProblemConfig"] = Relationship(back_populates="problem")
@classmethod
async def
|
(
cls,
result_cls: Type[WithLatestRecordType],
problem_set_id: Optional[UUID],
problems: List["Problem"],
user_id: UUID,
) -> List[WithLatestRecordType]:
from joj.horse import models
problem_ids = [problem.id for problem in problems]
records = await models.Record.get_user_latest_records(
problem_set_id=problem_set_id, problem_ids=problem_ids, user_id=user_id
)
problems = [
result_cls(**problem.dict(), latest_record=record)
for problem, record in zip(problems, records)
]
return problems
async def get_latest_problem_config(self) -> Optional["ProblemConfig"]:
from joj.horse import models
statement = (
models.ProblemConfig.sql_select()
.where(models.ProblemConfig.problem_id == self.id)
.order_by(models.ProblemConfig.created_at.desc()) # type: ignore
.limit(1)
)
async with db_session() as session:
results = await session.exec(statement)
return results.one_or_none()
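# Note: get_latest_problem_config orders by created_at DESC with LIMIT 1, so the
# session fetches at most one row and one_or_none() returns None when a problem
# has no config yet.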
event.listen(Problem, "before_insert", url_pre_save)
event.listen(Problem, "before_update", url_pre_save)
|
get_problems_with_record_states
|
student.page_20190529171200.ts
|
import { Component, NgZone } from '@angular/core';
import { FormBuilder, FormGroup, Validators } from '@angular/forms';
import {
NavController,
AlertController,
MenuController,
ToastController,
PopoverController,
LoadingController,
ModalController } from '@ionic/angular';
// Modals
import { SearchFilterPage } from '../../pages/modal/search-filter/search-filter.page';
import { ImagePage } from './../modal/image/image.page';
// Call notifications test by Popover and Custom Component.
import { NotificationsComponent } from './../../components/notifications/notifications.component';
import { RestApiService } from './../../rest-api.service';
// Camera
import { Camera, CameraOptions } from '@ionic-native/camera/ngx';
// Geolocation
import { Geolocation } from '@ionic-native/geolocation/ngx';
@Component({
selector: 'app-student',
templateUrl: './student.page.html',
styleUrls: ['./student.page.scss']
})
export class StudentPage {
public onRegisterForm: FormGroup;
class_list: any = [];
student_name: string = '';
program: string = '';
class: string = '';
phone: number = 0;
gender: string = '';
dob: string = '';
father: string = '';
_userid: string;
_username: string;
_centerid: string;
_centername: string;
constructor(
private formBuilder: FormBuilder,
public navController: NavController,
public menuCtrl: MenuController,
public popoverCtrl: PopoverController,
public alertController: AlertController,
public modalCtrl: ModalController,
public toastCtrl: ToastController,
public api: RestApiService,
private zone: NgZone,
//private sanitizer: DomSanitizer,
private loadingController: LoadingController,
private camera: Camera,
private geolocation: Geolocation
) {
this.onRegisterForm = this.formBuilder.group({
fullName: ['', [Validators.required]],
phone: ['', [Validators.required]],
father: ['', [Validators.required]]
});
this._userid = localStorage.getItem("_userid");
this._username = localStorage.getItem("_username");
this._centerid = '';
this._centername = '';
console.log('###localStorage: '+JSON.stringify(localStorage));
}
select_program_onchange(value){
console.log('@@@Selected program: ', value);
this.program = value;
if(value == 'ece'){
this.class_list = ['Anganwadi'];
} else if(value == 'pge'){
this.class_list = ['1', '2', '3', '4', '5', '6', '7'];
} else {
this.class_list = [];
}
}
select_class_onchange(value){
console.log('@@@Selected class: ', value);
this.class = value;
|
this.gender = value;
}
dob_onhange(value){
console.log('@@@Selected dob: ', value);
this.dob = value;
}
async explor(){
this.navController.navigateForward('/studentexplor');
}
reset(){
this.student_name = '';
this.phone = 0;
this.father = '';
}
async signUp(){
this.student_name = this.onRegisterForm.value.fullName;
this.phone = this.onRegisterForm.value.phone;
this.father = this.onRegisterForm.value.father;
console.log('@@@Student full name: '+this.student_name+' phone: '+this.phone+' father: '+this.father);
if(this.student_name == undefined || this.student_name == null || this.student_name == ''){
this.showAlert('Verify', '', 'Please check Student full name !!!');
} else if(this.program == undefined || this.program == null || this.program == ''){
this.showAlert('Verify', '', 'Please select Program !!!');
} else if(this.class == undefined || this.class == null || this.class == ''){
this.showAlert('Verify', '', 'Please select Class !!!');
//} else if(this.[this.phone == undefined || this.phone == null || this.phone == ''){
// this.showAlert('Verify', '', 'Please check Phone !!!');
} else if(this.gender == undefined || this.gender == null || this.gender == ''){
this.showAlert('Verify', '', 'Please select Gender !!!');
} else if(this.dob == undefined || this.dob == null || this.dob == ''){
this.showAlert('Verify', '', 'Please check DOB !!!');
} else if(this.father == undefined || this.father == null || this.father == ''){
this.showAlert('Verify', '', 'Please check Father name !!!');
} else{
//this.showAlert('Verify', '', 'OK !!!');
// proceed to save
const details = {
userid : this._userid,
username : this._username,
centerid : this._centerid,
centername : this._centername,
studentid : (new Date).getTime(),
studentname : this.student_name,
program : this.program,
class : this.class,
phone : this.phone,
gender : this.gender,
dob : this.dob,
parentsname : this.father
}
let loading = await this.loadingController.create({});
await loading.present();
await this.api.registernewstudent(details)
.subscribe(res => {
console.log(res);
loading.dismiss();
this.reset();
this.showAlert('Student Registration', '', 'Student registration '+res['status']+' !!!');
}, err => {
console.log(err);
loading.dismiss();
this.showAlert('Student Registration', '', 'Student registration failed !!!');
});
}
}
// alert box
async showAlert(header: string, subHeader: string, message: string) {
const alert = await this.alertController.create({
header: header,
subHeader: subHeader,
message: message,
buttons: ['OK']
});
await alert.present();
}
// confirm box
async showConfirm(header: string, subHeader: string, message: string, body: any) {
const alert = await this.alertController.create({
header: header,
subHeader: subHeader,
message: message,
buttons: [
{
text: 'Cancel',
role: 'cancel',
cssClass: 'secondary',
handler: (blah) => {
console.log('Confirm Cancel: blah');
}
}, {
text: 'Ok',
handler: () => {}
}
]
});
await alert.present();
}
}
|
}
gender_onchange(value){
console.log('@@@Selected gender: ', value);
|
zz_generated_time_rfc3339.go
|
//go:build go1.16
// +build go1.16
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
package armcognitiveservices
import (
"encoding/json"
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
"reflect"
"regexp"
"strings"
"time"
)
const (
utcLayoutJSON = `"2006-01-02T15:04:05.999999999"`
utcLayout = "2006-01-02T15:04:05.999999999"
rfc3339JSON = `"` + time.RFC3339Nano + `"`
)
// Azure reports time in UTC but it doesn't include the 'Z' time zone suffix in some cases.
var tzOffsetRegex = regexp.MustCompile(`(Z|z|\+|-)(\d+:\d+)*"*$`)
type timeRFC3339 time.Time
func (t timeRFC3339) MarshalJSON() (json []byte, err error) {
tt := time.Time(t)
return tt.MarshalJSON()
|
func (t timeRFC3339) MarshalText() (text []byte, err error) {
tt := time.Time(t)
return tt.MarshalText()
}
func (t *timeRFC3339) UnmarshalJSON(data []byte) error {
layout := utcLayoutJSON
if tzOffsetRegex.Match(data) {
layout = rfc3339JSON
}
return t.Parse(layout, string(data))
}
func (t *timeRFC3339) UnmarshalText(data []byte) (err error) {
layout := utcLayout
if tzOffsetRegex.Match(data) {
layout = time.RFC3339Nano
}
return t.Parse(layout, string(data))
}
func (t *timeRFC3339) Parse(layout, value string) error {
p, err := time.Parse(layout, strings.ToUpper(value))
*t = timeRFC3339(p)
return err
}
func populateTimeRFC3339(m map[string]interface{}, k string, t *time.Time) {
if t == nil {
return
} else if azcore.IsNullValue(t) {
m[k] = nil
return
} else if reflect.ValueOf(t).IsNil() {
return
}
m[k] = (*timeRFC3339)(t)
}
func unpopulateTimeRFC3339(data json.RawMessage, t **time.Time) error {
if data == nil || strings.EqualFold(string(data), "null") {
return nil
}
var aux timeRFC3339
if err := json.Unmarshal(data, &aux); err != nil {
return err
}
*t = (*time.Time)(&aux)
return nil
}
|
}
|
widget.rs
|
//! Use the built-in widgets in your user interface.
|
//!
//! # Re-exports
//! The contents of this module are re-exported in the [`ui` module]. Therefore,
//! you can directly type:
//!
//! ```
//! use coffee::ui::{button, Button};
//! ```
//!
//! However, if you want to use a custom renderer, you will need to work with
//! the definitions of [`Row`] and [`Column`] found in this module.
//!
//! # Customization
//! Every drawable widget has its own module with a `Renderer` trait that must
//! be implemented by a custom renderer before being able to use the
//! widget.
//!
//! The built-in [`Renderer`] supports all the widgets in this module!
//!
//! [`ui` module]: ../index.html
//! [`Row`]: struct.Row.html
//! [`Column`]: struct.Column.html
//! [`Renderer`]: ../struct.Renderer.html
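//
// A hedged sketch of the customization flow described above; `MyRenderer` and
// the empty impl body are assumptions for illustration, not the crate's real
// trait surface:
//
// struct MyRenderer;
//
// impl button::Renderer for MyRenderer {
//     // draw the button with your own graphics backend here
// }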
mod column;
mod row;
pub mod button;
pub mod checkbox;
pub mod radio;
pub mod slider;
pub mod text;
pub use button::Button;
pub use checkbox::Checkbox;
pub use column::Column;
pub use radio::Radio;
pub use row::Row;
pub use slider::Slider;
pub use text::Text;
| |
lang.py
|
from kivy.lang import Observable
import gettext
from constants import LOCALE_DIR
class Lang(Observable):
observers = []
lang = None
def
|
(self, defaultlang, translate=None):
super(Lang, self).__init__()
self.ugettext = None
self.lang = defaultlang
self._translate = translate if translate is not None else gettext.gettext
self.switch_lang(self.lang)
def __call__(self, text):
return self._translate(text)
def fbind(self, name, func, *largs, **kwargs):
if name == "_":
self.observers.append((func, largs, kwargs))
else:
return super(Lang, self).fbind(name, func, *largs, **kwargs)
def funbind(self, name, func, *largs, **kwargs):
if name == "_":
key = (func, largs, kwargs)
if key in self.observers:
self.observers.remove(key)
else:
return super(Lang, self).funbind(name, func, *largs, **kwargs)
def switch_lang(self, lang):
# get the right locales directory, and instantiate a gettext translation
locales = gettext.translation('Deep3DPhoto', LOCALE_DIR, languages=[lang])
self.ugettext = locales.gettext
# update all the kv rules attached to this text
for func, largs, kwargs in self.observers:
func(largs, None, None)
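
    # Usage sketch (assumes LOCALE_DIR holds compiled .mo catalogs for the
    # 'Deep3DPhoto' domain; the snippet below is illustrative):
    #
    #   _ = Lang('en')
    #   print(_('Hello'))    # translated through gettext
    #   _.switch_lang('fr')  # re-fires every observer bound via fbind('_', ...)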
|
__init__
|
issue-3563.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait A {
fn
|
(&self) {
|| self.b()
//~^ ERROR no method named `b` found for type `&Self` in the current scope
//~| ERROR mismatched types
}
}
fn main() {}
|
a
|
views.py
|
from django.http import HttpResponse
from django.views.decorators.http import require_http_methods
from django.shortcuts import render
import re
# Create your views here.
@require_http_methods(['GET', 'POST'])
def echo_0(request):
    if request.method == 'GET':
        return render(request, 'echo.html', context={})
    # POST is allowed by the decorator but not implemented here
    return HttpResponse(status=405)
def parser(string):
    result = re.match(r'[a-zA-Z]+', string)
    return result.group(0) if result else ''
# def echo(request):
# try:
# if (request.method == 'GET'):
# meta = parser(request.META['QUERY_STRING'])
# return render(request, 'echo.html', context={
# 'get_letters': meta,
# 'get_value': request.GET.get(meta),
# 'get_tag': request.META.get('HTTP_X_PRINT_STATEMENT'),
# 'request_method': request.META['REQUEST_METHOD'].lower()
# })
# elif request.method == 'POST':
# meta = parser(request.META['QUERY_STRING'])
|
# 'request_method': request.META['REQUEST_METHOD'].lower()
# })
# except:
# return HttpResponse(status=404)
# def echo(request):
# if (request.method == 'GET'):
# meta = parser(request.META['QUERY_STRING'])
# return render(request, 'echo.html', context={
# 'get_letters': meta,
# 'get_value': request.GET.get(meta),
# 'get_tag': request.META.get('HTTP_X_PRINT_STATEMENT'),
# 'request_method': request.META['REQUEST_METHOD'].lower()
# })
# elif request.method == 'POST':
# #print(request.META['QUERY_STRING'])
# print(request.POST)
# return render(request, 'echo.html', context={
# 'get_letters':'a',
# 'get_value': 1,
# 'get_tag': request.META.get('HTTP_X_PRINT_STATEMENT'),
# 'request_method': request.META['REQUEST_METHOD'].lower()
# })
def echo(request):
context = {
'get' : request.GET,
'post' : request.POST,
'meta' : request.META
}
return render(request,"echo.html",context = context)
def filters(request):
return render(request, 'filters.html', context={
'a': request.GET.get('a', 1),
'b': request.GET.get('b', 1)
})
# <!-- {% extends base.html%} -->
#
def extend(request):
return render(request, 'extend.html', context={
'a': request.GET.get('a'),
'b': request.GET.get('b')
})
#
# <!--DOCTYPE html -->
# <html>
# <body>
# {% if 'QUERY_STRING' in request.META %}
# <h1> {{ request_method }} {{ get_letter }}: {{ get_value }} statement is empty </h1>
# {% elif 'HTTP_X_PRINT_STATEMENT' in request.META %}
# <h2> statement is {{get_tag}} </h2>
# {% endif %}
# </body>
# </html>
|
# return render(request, 'echo.html', context={
# 'get_letters': meta,
# 'get_value': request.POST.get(meta),
# 'get_tag': request.META.get('HTTP_X_PRINT_STATEMENT'),
|
axisHelper.js
|
import {__DEV__} from '../config';
import * as zrUtil from 'zrender/src/core/util';
import * as textContain from 'zrender/src/contain/text';
import OrdinalScale from '../scale/Ordinal';
import IntervalScale from '../scale/Interval';
import Scale from '../scale/Scale';
import * as numberUtil from '../util/number';
import {calBarWidthAndOffset} from '../layout/barGrid';
import '../scale/Time';
import '../scale/Log';
/**
* Get axis scale extent before niced.
* Item of returned array can only be number (including Infinity and NaN).
*/
export function getScaleExtent(scale, model) {
var scaleType = scale.type;
var min = model.getMin();
var max = model.getMax();
var fixMin = min != null;
var fixMax = max != null;
var originalExtent = scale.getExtent();
var axisDataLen;
var boundaryGap;
var span;
if (scaleType === 'ordinal') {
axisDataLen = (model.get('data') || []).length;
}
else {
boundaryGap = model.get('boundaryGap');
if (!zrUtil.isArray(boundaryGap)) {
boundaryGap = [boundaryGap || 0, boundaryGap || 0];
}
if (typeof boundaryGap[0] === 'boolean') {
if (__DEV__) {
console.warn('Boolean type for boundaryGap is only '
+ 'allowed for ordinal axis. Please use string in '
+ 'percentage instead, e.g., "20%". Currently, '
+ 'boundaryGap is set to be 0.');
}
boundaryGap = [0, 0];
}
boundaryGap[0] = numberUtil.parsePercent(boundaryGap[0], 1);
boundaryGap[1] = numberUtil.parsePercent(boundaryGap[1], 1);
span = (originalExtent[1] - originalExtent[0])
|| Math.abs(originalExtent[0]);
}
// Notice: When min/max is not set (that is, when there are null/undefined,
// which is the most common case), these cases should be ensured:
// (1) For 'ordinal', show all axis.data.
// (2) For others:
// + `boundaryGap` is applied (if min/max set, boundaryGap is
// disabled).
// + If `needCrossZero`, min/max should be zero, otherwise, min/max should
// be the result that originalExtent enlarged by boundaryGap.
// (3) If no data, it should be ensured that `scale.setBlank` is set.
// FIXME
// (1) When min/max is 'dataMin' or 'dataMax', should boundaryGap be able to used?
// (2) When `needCrossZero` and all data is positive/negative, should it be ensured
// that the results processed by boundaryGap are positive/negative?
if (min == null) {
min = scaleType === 'ordinal'
? (axisDataLen ? 0 : NaN)
: originalExtent[0] - boundaryGap[0] * span;
}
if (max == null) {
max = scaleType === 'ordinal'
? (axisDataLen ? axisDataLen - 1 : NaN)
: originalExtent[1] + boundaryGap[1] * span;
}
if (min === 'dataMin') {
min = originalExtent[0];
}
else if (typeof min === 'function') {
min = min({
min: originalExtent[0],
max: originalExtent[1]
});
}
if (max === 'dataMax') {
max = originalExtent[1];
}
else if (typeof max === 'function') {
max = max({
min: originalExtent[0],
max: originalExtent[1]
});
}
(min == null || !isFinite(min)) && (min = NaN);
(max == null || !isFinite(max)) && (max = NaN);
scale.setBlank(zrUtil.eqNaN(min) || zrUtil.eqNaN(max));
// Evaluate if axis needs cross zero
if (model.getNeedCrossZero()) {
// Axis is over zero and min is not set
if (min > 0 && max > 0 && !fixMin) {
min = 0;
}
// Axis is under zero and max is not set
if (min < 0 && max < 0 && !fixMax) {
max = 0;
}
}
    // If bars are placed on a base axis of type time or interval, account for
    // axis boundary overflow (only when the current axis is that base axis)
var ecModel = model.getModel().ecModel;
var numberBarPlots = ecModel.getSeriesByType("bar").length;
var isBaseAxis = model.ecModel.getSeries().map(function(x){ return x.getBaseAxis() === model.axis}).indexOf(true) !== -1;
if ((scaleType === 'time' || scaleType === 'interval') && numberBarPlots > 0 && isBaseAxis){
// Adjust axis min and max to account for overflow
var adjustedScale = adjustScaleForOverflow(min, max, model);
min = adjustedScale.min;
max = adjustedScale.max;
}
return [min, max];
}
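// Worked example of the boundaryGap handling above (hypothetical numbers):
// with a data extent of [10, 90] and boundaryGap ['10%', '20%'], span = 80,
// so min = 10 - 0.1 * 80 = 2 and max = 90 + 0.2 * 80 = 106 (assuming min/max
// are not fixed and the axis does not need to cross zero).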
export function adjustScaleForOverflow(min, max, model) {
var ecModel = model.getModel().ecModel;
// Get Axis Length
var axisExtent = model.axis.getExtent();
    var axisLength = axisExtent[1] - axisExtent[0];
// Calculate placement of bars on axis
var barWidthAndOffset = calBarWidthAndOffset(zrUtil.filter(
ecModel.getSeriesByType('bar'),
function (seriesModel) {
return !ecModel.isSeriesFiltered(seriesModel)
&& seriesModel.coordinateSystem
&& seriesModel.coordinateSystem.type === 'cartesian2d';
}
));
// Get bars on current base axis and calculate min and max overflow
var baseAxisKey = model.axis.dim + model.axis.index;
var barsOnCurrentAxis = barWidthAndOffset[baseAxisKey];
if (barsOnCurrentAxis === undefined) {
return { "min": min, "max": max };
}
var minOverflow = Math.abs(Math.min.apply(null, Object.values(barsOnCurrentAxis).map(function (x) { return x.offset })));
var maxOverflow = Math.max.apply(null, Object.values(barsOnCurrentAxis).map(function (x) { return x.offset + x.width }));
var totalOverFlow = minOverflow + maxOverflow;
    // Calculate required buffer based on old range and overflow
var oldRange = max - min;
var oldRangePercentOfNew = (1 - (minOverflow + maxOverflow) / axisLength);
var overflowBuffer = ((oldRange / oldRangePercentOfNew) - oldRange);
max += overflowBuffer * (maxOverflow / totalOverFlow);
min -= overflowBuffer * (minOverflow / totalOverFlow);
return { "min": min, "max": max };
}
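// Worked example of the overflow buffer above (hypothetical numbers): with
// axisLength = 500px, minOverflow = maxOverflow = 25px and an old range of
// [0, 90], oldRangePercentOfNew = 1 - 50 / 500 = 0.9 and overflowBuffer =
// 90 / 0.9 - 90 = 10, so max becomes 95 and min becomes -5, leaving room for
// the outermost bars.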
export function niceScaleExtent(scale, model) {
var extent = getScaleExtent(scale, model);
var fixMin = model.getMin() != null;
var fixMax = model.getMax() != null;
var splitNumber = model.get('splitNumber');
if (scale.type === 'log') {
scale.base = model.get('logBase');
}
var scaleType = scale.type;
scale.setExtent(extent[0], extent[1]);
scale.niceExtent({
splitNumber: splitNumber,
fixMin: fixMin,
fixMax: fixMax,
minInterval: (scaleType === 'interval' || scaleType === 'time')
? model.get('minInterval') : null,
maxInterval: (scaleType === 'interval' || scaleType === 'time')
? model.get('maxInterval') : null
});
// If some one specified the min, max. And the default calculated interval
// is not good enough. He can specify the interval. It is often appeared
// in angle axis with angle 0 - 360. Interval calculated in interval scale is hard
// to be 60.
// FIXME
var interval = model.get('interval');
if (interval != null) {
scale.setInterval && scale.setInterval(interval);
}
}
/**
* @param {module:echarts/model/Model} model
* @param {string} [axisType] Default retrieve from model.type
* @return {module:echarts/scale/*}
*/
export function createScaleByModel(model, axisType) {
axisType = axisType || model.get('type');
if (axisType) {
switch (axisType) {
// Buildin scale
case 'category':
return new OrdinalScale(
model.getCategories(), [Infinity, -Infinity]
);
case 'value':
return new IntervalScale();
// Extended scale, like time and log
default:
return (Scale.getClass(axisType) || IntervalScale).create(model);
}
}
}
/**
 * Check if the axis crosses zero
*/
export function ifAxisCrossZero(axis) {
var dataExtent = axis.scale.getExtent();
var min = dataExtent[0];
var max = dataExtent[1];
return !((min > 0 && max > 0) || (min < 0 && max < 0));
}
/**
* @param {Array.<number>} tickCoords In axis self coordinate.
* @param {Array.<string>} labels
* @param {string} font
* @param {number} axisRotate 0: towards right horizontally, clock-wise is negative.
* @param {number} [labelRotate=0] 0: towards right horizontally, clock-wise is negative.
* @return {number}
*/
export function getAxisLabelInterval(tickCoords, labels, font, axisRotate, labelRotate) {
var textSpaceTakenRect;
var autoLabelInterval = 0;
var accumulatedLabelInterval = 0;
var rotation = (axisRotate - labelRotate) / 180 * Math.PI;
var step = 1;
if (labels.length > 40) {
// Simple optimization for large amount of labels
step = Math.floor(labels.length / 40);
}
for (var i = 0; i < tickCoords.length; i += step) {
var tickCoord = tickCoords[i];
// Not precise, do not consider align and vertical align
// and each distance from axis line yet.
var rect = textContain.getBoundingRect(
labels[i], font, 'center', 'top'
);
rect.x += tickCoord * Math.cos(rotation);
rect.y += tickCoord * Math.sin(rotation);
// Magic number
rect.width *= 1.3;
rect.height *= 1.3;
if (!textSpaceTakenRect) {
textSpaceTakenRect = rect.clone();
}
// There is no space for current label;
else if (textSpaceTakenRect.intersect(rect)) {
accumulatedLabelInterval++;
autoLabelInterval = Math.max(autoLabelInterval, accumulatedLabelInterval);
}
else {
textSpaceTakenRect.union(rect);
// Reset
accumulatedLabelInterval = 0;
}
}
if (autoLabelInterval === 0 && step > 1) {
return step;
}
return (autoLabelInterval + 1) * step - 1;
}
/**
* @param {Object} axis
* @param {Function} labelFormatter
* @return {Array.<string>}
*/
export function
|
(axis, labelFormatter) {
var scale = axis.scale;
var labels = scale.getTicksLabels();
var ticks = scale.getTicks();
if (typeof labelFormatter === 'string') {
labelFormatter = (function (tpl) {
return function (val) {
return tpl.replace('{value}', val != null ? val : '');
};
})(labelFormatter);
// Consider empty array
return zrUtil.map(labels, labelFormatter);
}
else if (typeof labelFormatter === 'function') {
return zrUtil.map(ticks, function (tick, idx) {
return labelFormatter(
getAxisRawValue(axis, tick),
idx
);
}, this);
}
else {
return labels;
}
}
export function getAxisRawValue(axis, value) {
// In category axis with data zoom, tick is not the original
// index of axis.data. So tick should not be exposed to user
// in category axis.
return axis.type === 'category' ? axis.scale.getLabel(value) : value;
}
|
getFormattedLabels
|
test_version.py
|
import gron
def
|
():
assert hasattr(gron, '__VERSION__')
|
test_version
|
parser.rs
|
use ast;
use keyword::Keyword;
use scanner::Token;
use stream::Stream;
#[derive(PartialEq, Eq, Debug)]
pub struct ParseError {
reason: String,
}
fn expect_next(token_stream: &mut Stream<Token>) -> Result<Token, ParseError> {
token_stream.next().ok_or(ParseError {
reason: "premature end".to_string(),
})
}
fn expect_next_eql(token_stream: &mut Stream<Token>, exp: Token) -> Result<(), ParseError> {
let tok = expect_next(token_stream)?;
if tok != exp {
Err(ParseError {
reason: format!("expected token: {:?}. actual: {:?}", exp, tok),
})
} else {
Ok(())
}
}
pub fn parse_expr(token_stream: &mut Stream<Token>) -> Result<ast::Expr, ParseError> {
let tok = expect_next(token_stream)?;
match tok {
Token::NumLiteral(s) => Ok(ast::Expr::NumLiteral(s)),
_ => Err(ParseError {
reason: "parse_expr: Expect NumLiteral".to_string(),
}),
}
}
pub fn parse_statement(token_stream: &mut Stream<Token>) -> Result<ast::Statement, ParseError> {
let _ = expect_next_eql(token_stream, Token::Symbol("return".to_string()))?;
let expr = parse_expr(token_stream)?;
let _ = expect_next_eql(token_stream, Token::Semi)?;
return Ok(ast::Statement::Return(Box::new(expr)));
}
pub fn parse_block(token_stream: &mut Stream<Token>) -> Result<ast::Block, ParseError> {
let _ = expect_next_eql(token_stream, Token::OpenCur)?;
let mut statements = Vec::new();
loop {
match token_stream.peek().ok_or(ParseError {
reason: "Premature end".to_string(),
})? {
Token::CloseCur => {
break;
}
_ => {
let stmt = parse_statement(token_stream)?;
statements.push(stmt);
}
}
}
let _ = expect_next_eql(token_stream, Token::CloseCur)?;
Ok(ast::Block { statements })
}
pub fn parse_function(token_stream: &mut Stream<Token>) -> Result<ast::Function, ParseError> {
let return_typename = expect_next(token_stream)?
.get_symbol_string()
.and_then(|name| {
match Keyword::from_str(&name) {
Some(_) => None, // reject keywords
None => Some(name),
}
})
.ok_or(ParseError {
reason: "invalid return typename type".to_string(),
})?;
let function_name = expect_next(token_stream)?
.get_symbol_string()
.and_then(|name| {
match Keyword::from_str(&name) {
Some(_) => None, // reject keywords
None => Some(name),
}
})
.ok_or(ParseError {
reason: "invalid function name type".to_string(),
})?;
let _ = expect_next_eql(token_stream, Token::OpenPar)?;
let _ = expect_next_eql(token_stream, Token::ClosePar)?;
let block = parse_block(token_stream)?;
Ok(ast::Function {
return_type: ast::Type {
name: return_typename,
},
name: function_name,
parameters: Vec::new(),
block,
})
}
pub fn parse_program(token_stream: &mut Stream<Token>) -> Result<ast::Program, ParseError> {
let mut functions = vec![];
while !token_stream.is_exhausted() {
functions.push(parse_function(token_stream)?);
}
return Ok(ast::Program { functions });
}
#[test]
fn test_parser() {
let tokens = vec![
Token::Symbol("int".to_string()),
Token::Symbol("main".to_string()),
Token::OpenPar,
Token::ClosePar,
Token::OpenCur,
Token::Symbol("return".to_string()),
Token::NumLiteral("0".to_string()),
Token::Semi,
Token::CloseCur,
];
let exp_ast = ast::Program {
functions: vec![
ast::Function {
return_type: ast::Type {
name: "int".to_string(),
},
name: "main".to_string(),
parameters: vec![],
block: ast::Block {
|
},
},
],
};
let mut token_stream = Stream::new(tokens);
let ast = parse_program(&mut token_stream).unwrap();
assert_eq!(true, token_stream.is_exhausted());
assert_eq!(exp_ast, ast);
}
|
statements: vec![
ast::Statement::Return(Box::new(ast::Expr::NumLiteral("0".to_string()))),
],
|
nested_tenant_group.go
|
// Code generated by go-swagger; DO NOT EDIT.
// Copyright 2020 The go-netbox Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package models
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
strfmt "github.com/go-openapi/strfmt"
"github.com/go-openapi/errors"
"github.com/go-openapi/swag"
"github.com/go-openapi/validate"
)
// NestedTenantGroup nested tenant group
// swagger:model NestedTenantGroup
type NestedTenantGroup struct {
// ID
// Read Only: true
ID int64 `json:"id,omitempty"`
|
// Max Length: 50
// Min Length: 1
Name *string `json:"name"`
// Slug
// Required: true
// Max Length: 50
// Min Length: 1
// Pattern: ^[-a-zA-Z0-9_]+$
Slug *string `json:"slug"`
// Tenant count
// Read Only: true
TenantCount int64 `json:"tenant_count,omitempty"`
// Url
// Read Only: true
// Format: uri
URL strfmt.URI `json:"url,omitempty"`
}
// Validate validates this nested tenant group
func (m *NestedTenantGroup) Validate(formats strfmt.Registry) error {
var res []error
if err := m.validateName(formats); err != nil {
res = append(res, err)
}
if err := m.validateSlug(formats); err != nil {
res = append(res, err)
}
if err := m.validateURL(formats); err != nil {
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
func (m *NestedTenantGroup) validateName(formats strfmt.Registry) error {
if err := validate.Required("name", "body", m.Name); err != nil {
return err
}
if err := validate.MinLength("name", "body", string(*m.Name), 1); err != nil {
return err
}
if err := validate.MaxLength("name", "body", string(*m.Name), 50); err != nil {
return err
}
return nil
}
func (m *NestedTenantGroup) validateSlug(formats strfmt.Registry) error {
if err := validate.Required("slug", "body", m.Slug); err != nil {
return err
}
if err := validate.MinLength("slug", "body", string(*m.Slug), 1); err != nil {
return err
}
if err := validate.MaxLength("slug", "body", string(*m.Slug), 50); err != nil {
return err
}
if err := validate.Pattern("slug", "body", string(*m.Slug), `^[-a-zA-Z0-9_]+$`); err != nil {
return err
}
return nil
}
func (m *NestedTenantGroup) validateURL(formats strfmt.Registry) error {
if swag.IsZero(m.URL) { // not required
return nil
}
if err := validate.FormatOf("url", "body", "uri", m.URL.String(), formats); err != nil {
return err
}
return nil
}
// MarshalBinary interface implementation
func (m *NestedTenantGroup) MarshalBinary() ([]byte, error) {
if m == nil {
return nil, nil
}
return swag.WriteJSON(m)
}
// UnmarshalBinary interface implementation
func (m *NestedTenantGroup) UnmarshalBinary(b []byte) error {
var res NestedTenantGroup
if err := swag.ReadJSON(b, &res); err != nil {
return err
}
*m = res
return nil
}
|
// Name
// Required: true
|
SimpleMatrixClientState.ts
|
// Copyright (c) 2021. Sendanor <[email protected]>. All rights reserved.
export enum SimpleMatrixClientState {
/**
* Client has been initialized but does not have authenticated session
*/
UNAUTHENTICATED,
/**
* Client is in the middle of authenticating and has not started long polling
*/
AUTHENTICATING,
/**
* Client has authenticated session but has not started sync (eg. long polling events)
*/
AUTHENTICATED,
/**
* Client has authenticated session and is in the middle of initial sync request from the
* backend
*/
AUTHENTICATED_AND_STARTING,
/**
* Client has authenticated session but the initial sync resulted in an error and client is
* in the middle of timeout until next try is done
*/
AUTHENTICATED_AND_RESTARTING,
/**
* Client has authenticated session and has finished initial sync and is in the middle of
* the sync retry timeout (eg. client side sync delay timer is active only)
*/
AUTHENTICATED_AND_STARTED,
/**
* Client has authenticated session and is in the middle of a long polling sync request from
* the backend
*/
AUTHENTICATED_AND_SYNCING
}
export function isSimpleMatrixClientState (value: any): value is SimpleMatrixClientState {
switch (value) {
case SimpleMatrixClientState.UNAUTHENTICATED:
case SimpleMatrixClientState.AUTHENTICATING:
case SimpleMatrixClientState.AUTHENTICATED:
case SimpleMatrixClientState.AUTHENTICATED_AND_STARTING:
case SimpleMatrixClientState.AUTHENTICATED_AND_RESTARTING:
case SimpleMatrixClientState.AUTHENTICATED_AND_STARTED:
case SimpleMatrixClientState.AUTHENTICATED_AND_SYNCING:
return true;
default:
return false;
}
}
export function stringifySimpleMatrixClientState (value: SimpleMatrixClientState): string {
switch (value) {
case SimpleMatrixClientState.UNAUTHENTICATED : return 'UNAUTHENTICATED';
case SimpleMatrixClientState.AUTHENTICATING : return 'AUTHENTICATING';
case SimpleMatrixClientState.AUTHENTICATED : return 'AUTHENTICATED';
case SimpleMatrixClientState.AUTHENTICATED_AND_STARTING : return 'AUTHENTICATED_AND_STARTING';
case SimpleMatrixClientState.AUTHENTICATED_AND_RESTARTING : return 'AUTHENTICATED_AND_RESTARTING';
case SimpleMatrixClientState.AUTHENTICATED_AND_STARTED : return 'AUTHENTICATED_AND_STARTED';
case SimpleMatrixClientState.AUTHENTICATED_AND_SYNCING : return 'AUTHENTICATED_AND_SYNCING';
}
throw new TypeError(`Unsupported SimpleMatrixClientState value: ${value}`);
}
export function
|
(value: any): SimpleMatrixClientState | undefined {
switch ( `${value}`.toUpperCase() ) {
case 'UNAUTHENTICATED' : return SimpleMatrixClientState.UNAUTHENTICATED;
case 'AUTHENTICATING' : return SimpleMatrixClientState.AUTHENTICATING;
case 'AUTHENTICATED' : return SimpleMatrixClientState.AUTHENTICATED;
case 'AUTHENTICATED_AND_STARTING' : return SimpleMatrixClientState.AUTHENTICATED_AND_STARTING;
case 'AUTHENTICATED_AND_RESTARTING' : return SimpleMatrixClientState.AUTHENTICATED_AND_RESTARTING;
case 'AUTHENTICATED_AND_STARTED' : return SimpleMatrixClientState.AUTHENTICATED_AND_STARTED;
case 'AUTHENTICATED_AND_SYNCING' : return SimpleMatrixClientState.AUTHENTICATED_AND_SYNCING;
default : return undefined;
}
}
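// Round-trip sketch using the helpers above:
//
//   stringifySimpleMatrixClientState(SimpleMatrixClientState.AUTHENTICATED)
//   // -> 'AUTHENTICATED'
//   parseSimpleMatrixClientState('authenticated')
//   // -> SimpleMatrixClientState.AUTHENTICATED (parsing is case-insensitive)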
|
parseSimpleMatrixClientState
|
cache.py
|
from memcached_clients import RestclientPymemcacheClient
import re
ONE_MINUTE = 60
ONE_HOUR = 60 * 60
class
|
(RestclientPymemcacheClient):
""" A custom cache implementation for Course Dashboards """
def get_cache_expiration_time(self, service, url, status=200):
if "sws" == service:
if re.match(r"^/student/v\d/term/\d{4}", url):
return ONE_HOUR * 10
if re.match(r"^/student/v\d/(?:enrollment|registration)", url):
return ONE_HOUR * 2
return ONE_HOUR
if "pws" == service:
return ONE_HOUR * 10
if "gws" == service:
return ONE_MINUTE * 2
if "canvas" == service:
if status == 200:
return ONE_HOUR * 10
return ONE_MINUTE * 5
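
# Expiration sketch for the rules above (URLs are illustrative):
#
#   cache = RestClientsCache()
#   cache.get_cache_expiration_time('sws', '/student/v5/term/2024,autumn')
#   # -> ONE_HOUR * 10 (matches the term pattern)
#   cache.get_cache_expiration_time('gws', '/group_sws/v3/group/uw_staff')
#   # -> ONE_MINUTE * 2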
|
RestClientsCache
|
signature_test.go
|
// These tests check that the signature validation in venus/types
// works as expected. They are kept in the wallet package because
// these tests need to generate signatures and the wallet package owns this
// function. They cannot be kept in types because wallet imports "types"
// for the Signature and KeyInfo types. TODO: organize packages in a way
// that makes more sense, e.g. so that signature tests can be in same package
// as signature code.
package wallet
import (
"context"
"testing"
"github.com/filecoin-project/go-address"
"github.com/ipfs/go-datastore"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/filecoin-project/venus/pkg/config"
"github.com/filecoin-project/venus/pkg/crypto"
tf "github.com/filecoin-project/venus/pkg/testhelpers/testflags"
)
/* Test types.ValidateSignature */
func requireSignerAddr(t *testing.T) (*DSBackend, address.Address) {
ds := datastore.NewMapDatastore()
fs, err := NewDSBackend(context.Background(), ds, config.TestPassphraseConfig(), TestPassword)
require.NoError(t, err)
addr, err := fs.NewAddress(context.Background(), address.SECP256K1)
require.NoError(t, err)
return fs, addr
|
func TestSignatureOk(t *testing.T) {
tf.UnitTest(t)
fs, addr := requireSignerAddr(t)
data := []byte("THESE BYTES WILL BE SIGNED")
sig, err := fs.SignBytes(context.Background(), data, addr)
require.NoError(t, err)
assert.NoError(t, crypto.Verify(sig, addr, data))
}
// Signature is nil.
func TestNilSignature(t *testing.T) {
tf.UnitTest(t)
_, addr := requireSignerAddr(t)
data := []byte("THESE BYTES NEED A SIGNATURE")
assert.Error(t, crypto.Verify(&crypto.Signature{}, addr, data))
}
// Signature is over different data.
func TestDataCorrupted(t *testing.T) {
tf.UnitTest(t)
fs, addr := requireSignerAddr(t)
data := []byte("THESE BYTES ARE SIGNED")
sig, err := fs.SignBytes(context.Background(), data, addr)
require.NoError(t, err)
corruptData := []byte("THESE BYTEZ ARE SIGNED")
assert.Error(t, crypto.Verify(sig, addr, corruptData))
}
// Signature is valid for data but was signed by a different address.
func TestInvalidAddress(t *testing.T) {
tf.UnitTest(t)
fs, addr := requireSignerAddr(t)
data := []byte("THESE BYTES ARE SIGNED")
sig, err := fs.SignBytes(context.Background(), data, addr)
require.NoError(t, err)
badAddr, err := fs.NewAddress(context.Background(), address.SECP256K1)
require.NoError(t, err)
assert.Error(t, crypto.Verify(sig, badAddr, data))
}
// Signature is corrupted.
func TestSignatureCorrupted(t *testing.T) {
tf.UnitTest(t)
fs, addr := requireSignerAddr(t)
data := []byte("THESE BYTES ARE SIGNED")
sig, err := fs.SignBytes(context.Background(), data, addr)
require.NoError(t, err)
sig.Data[0] = sig.Data[0] ^ 0xFF // This operation ensures sig is modified
assert.Error(t, crypto.Verify(sig, addr, data))
}
|
}
// Signature is over the data being verified and was signed by the verifying
// address. Everything should work out ok.
|
provider.object.ts
|
import {UserProviderId} from '@sheetbase/models';
export class ProviderObject {
providerId: UserProviderId;
endpoint: string;
scopes: string;
customParameters = {} as Record<string, string>;
constructor(providerId: UserProviderId, endpoint: string, scopes: string) {
this.providerId = providerId;
this.endpoint = endpoint;
this.scopes = scopes;
}
addScope(scope: string) {
this.scopes = this.scopes + ' ' + scope;
}
  setCustomParameters(customOAuthParameters: Record<string, string>) {
this.customParameters = customOAuthParameters;
}
url(clientId: string, redirectUri: string) {
let params = '';
for (const key of Object.keys(this.customParameters)) {
params += '&' + key + '=' + this.customParameters[key];
}
return (
this.endpoint +
'?' +
'response_type=token&' +
`client_id=${clientId}&` +
`redirect_uri=${redirectUri}&` +
`scope=${this.scopes}` +
params
);
|
}
}
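// URL construction sketch (values are hypothetical):
//
//   const p = new ProviderObject('google.com', 'https://example.com/oauth', 'email');
//   p.setCustomParameters({ prompt: 'consent' });
//   p.url('CLIENT_ID', 'https://app.example.com/cb');
//   // -> 'https://example.com/oauth?response_type=token&client_id=CLIENT_ID
//   //     &redirect_uri=https://app.example.com/cb&scope=email&prompt=consent'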
|
|
sync_tile_picking.rs
|
use crate::packets::PacketBody;
use crate::SliceCursor;
/// Sync tile picking.
///
/// Direction: Client <-> Server.
#[derive(Debug)]
pub struct SyncTilePicking {
pub player_id: u8,
pub x: i16,
pub y: i16,
pub pick_damage: u8,
}
impl PacketBody for SyncTilePicking {
const TAG: u8 = 125;
fn write_body(&self, cursor: &mut SliceCursor)
|
fn from_body(cursor: &mut SliceCursor) -> Self {
Self {
player_id: cursor.read(),
x: cursor.read(),
y: cursor.read(),
pick_damage: cursor.read(),
}
}
}
|
{
cursor.write(&self.player_id);
cursor.write(&self.x);
cursor.write(&self.y);
cursor.write(&self.pick_damage);
}
|
main.go
|
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by cloud.google.com/go/internal/gapicgen/gensnippets. DO NOT EDIT.
// [START cloudchannel_v1_generated_CloudChannelService_CheckCloudIdentityAccountsExist_sync]
package main
import (
"context"
channel "cloud.google.com/go/channel/apiv1"
channelpb "google.golang.org/genproto/googleapis/cloud/channel/v1"
)
func main() {
ctx := context.Background()
c, err := channel.NewCloudChannelClient(ctx)
if err != nil {
|
req := &channelpb.CheckCloudIdentityAccountsExistRequest{
// TODO: Fill request struct fields.
// See https://pkg.go.dev/google.golang.org/genproto/googleapis/cloud/channel/v1#CheckCloudIdentityAccountsExistRequest.
}
resp, err := c.CheckCloudIdentityAccountsExist(ctx, req)
if err != nil {
// TODO: Handle error.
}
// TODO: Use resp.
_ = resp
}
// [END cloudchannel_v1_generated_CloudChannelService_CheckCloudIdentityAccountsExist_sync]
|
// TODO: Handle error.
}
defer c.Close()
|
aptget.py
|
import os
from i3pystatus.core.command import run_through_shell
from i3pystatus.updates import Backend
class
|
(Backend):
"""
Gets update count for Debian based distributions.
This mimics the Arch Linux `checkupdates` script
but with apt-get and written in python.
"""
@property
def updates(self):
cache_dir = "/tmp/update-cache-" + os.getenv("USER")
if not os.path.exists(cache_dir):
os.mkdir(cache_dir)
command = "apt-get update -o Dir::State::Lists=" + cache_dir
run_through_shell(command.split())
command = "apt-get upgrade -s -o Dir::State::Lists=" + cache_dir
apt = run_through_shell(command.split())
update_count = 0
for line in apt.out.split("\n"):
if line.startswith("Inst"):
update_count += 1
return update_count
Backend = AptGet
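
# Usage sketch outside i3pystatus (assumes apt-get is available and /tmp is
# writable):
#
#   backend = AptGet()
#   print(backend.updates)  # counts "Inst ..." lines from `apt-get upgrade -s`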
|
AptGet
|
hic.py
|
from numbers import Integral
from numpy import ma
import numpy as np
from scipy.sparse import coo_matrix
from scipy.stats import mode
from prody.chromatin.norm import VCnorm, SQRTVCnorm, Filenorm
from prody.chromatin.functions import div0, showDomains, _getEigvecs
from prody import PY2K
from prody.dynamics import GNM, MaskedGNM
from prody.dynamics.functions import writeArray
from prody.dynamics.mode import Mode
from prody.dynamics.modeset import ModeSet
from prody.utilities import openFile, importLA, showMatrix, isURL, fixArraySize, makeSymmetric
__all__ = ['HiC', 'parseHiC', 'parseHiCStream', 'parseHiCBinary', 'saveHiC', 'loadHiC', 'writeMap']
class HiC(object):
"""This class is used to store and preprocess Hi-C contact map. A :class:`.GNM`
instance for analyzing the contact map can be also created by using this class.
"""
def __init__(self, title='Unknown', map=None, bin=None):
self._title = title
self._map = None
self.mask = False
self._labels = 0
self.masked = True
self.bin = bin
self.map = map
@property
def map(self):
if self.masked:
return self.getTrimedMap()
else:
return self._map
@map.setter
def map(self, value):
if value is None:
self._map = None
else:
self._map = np.asarray(value)
self._map = makeSymmetric(self._map)
self._maskUnmappedRegions()
self._labels = np.zeros(len(self._map), dtype=int)
def __repr__(self):
mask = self.mask
if np.isscalar(mask):
return '<HiC: {0} ({1} loci)>'.format(self._title, len(self._map))
else:
return '<HiC: {0} ({1} mapped loci; {2} in total)>'.format(self._title, np.count_nonzero(mask), len(self._map))
def __str__(self):
return 'HiC ' + self._title
def __getitem__(self, index):
if isinstance(index, Integral):
return self.map.flatten()[index]
else:
i, j = index
return self.map[i,j]
def __len__(self):
mask = self.mask
if np.isscalar(mask):
return len(self._map)
else:
return np.count_nonzero(mask)
def numAtoms(self):
return len(self.map)
def getTitle(self):
"""Returns title of the instance."""
return self._title
def setTitle(self, title):
"""Sets title of the instance."""
self._title = str(title)
def getCompleteMap(self):
"""Obtains the complete contact map with unmapped regions."""
return self._map
def getTrimedMap(self):
"""Obtains the contact map without unmapped regions."""
if self._map is None:
return None
if np.isscalar(self.mask):
return self._map
M = ma.array(self._map)
M.mask = np.diag(~self.mask)
return ma.compress_rowcols(M)
def align(self, array, axis=None):
if not isinstance(array, np.ndarray):
array = np.array(array)
ret = array = array.copy()
if np.isscalar(self.mask):
return ret
mask = self.mask.copy()
l_full = self.getCompleteMap().shape[0]
l_trim = self.getTrimedMap().shape[0]
if len(array.shape) == 0:
raise ValueError('array cannot be empty')
elif len(array.shape) == 1:
l = array.shape[0]
if l == l_trim:
N = len(mask)
ret = np.zeros(N, dtype=array.dtype)
ret[mask] = array
elif l == l_full:
ret = array[mask]
else:
raise ValueError('The length of array (%d) does not '
'match that of either the full (%d) '
                                 'or trimmed (%d).'
%(l, l_full, l_trim))
elif len(array.shape) == 2:
s = array.shape
if axis is None:
if s[0] != s[1]:
raise ValueError('The array must be a square matrix '
'if axis is set to None.')
if s[0] == l_trim:
N = len(mask)
whole_mat = np.zeros((N,N), dtype=array.dtype)
mask = np.outer(mask, mask)
whole_mat[mask] = array.flatten()
ret = whole_mat
elif s[0] == l_full:
M = ma.array(array)
M.mask = np.diag(mask)
ret = ma.compress_rowcols(M)
else:
raise ValueError('The size of array (%d) does not '
'match that of either the full (%d) '
                                     'or trimmed (%d).'
%(s[0], l_full, l_trim))
else:
new_shape = list(s)
otheraxis = 0 if axis!=0 else 1
if s[axis] == l_trim:
N = len(mask)
new_shape[axis] = N
whole_mat = np.zeros(new_shape)
mask = np.expand_dims(mask, axis=otheraxis)
mask = mask.repeat(s[otheraxis], axis=otheraxis)
whole_mat[mask] = array.flatten()
ret = whole_mat
elif s[axis] == l_full:
mask = np.expand_dims(mask, axis=otheraxis)
mask = mask.repeat(s[otheraxis])
ret = self._map[mask]
else:
raise ValueError('The size of array (%d) does not '
'match that of either the full (%d) '
                                     'or trimmed (%d).'
%(s[0], l_full, l_trim))
return ret
def getKirchhoff(self):
"""Builds a Kirchhoff matrix based on the contact map."""
if self._map is None:
return None
else:
M = self.map
I = np.eye(M.shape[0], dtype=bool)
A = M.copy()
A[I] = 0.
D = np.diag(np.sum(A, axis=0))
K = D - A
return K
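    # Toy example of the construction above: for map M = [[1, 2], [2, 1]],
    # A = [[0, 2], [2, 0]] and D = diag(2, 2), so K = D - A = [[2, -2], [-2, 2]].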
def _maskUnmappedRegions(self, diag=False):
"""Finds and masks unmapped regions in the contact map."""
M = self._map
if M is None: return
if diag:
# Obtain the diagonal values, need to make sure d is an array
# instead of a matrix, otherwise diag() later will not work as
# intended.
d = np.array(np.diag(M))
else:
d = np.array(M.sum(0))
# mask if a diagonal value is zero
mask_zero = np.array(d==0)
# mask if a diagonal value is NAN
mask_nan = np.isnan(d)
# combine two masks
mask = np.logical_or(mask_nan, mask_zero)
self.mask = ~mask
return self.mask
def calcGNM(self, n_modes=None, **kwargs):
"""Calculates GNM on the current Hi-C map. By default, ``n_modes`` is
set to **None** and ``zeros`` to **True**."""
if 'zeros' not in kwargs:
kwargs['zeros'] = True
if self.masked:
gnm = MaskedGNM(self._title, self.mask)
else:
gnm = GNM(self._title)
gnm.setKirchhoff(self.getKirchhoff())
gnm.calcModes(n_modes=n_modes, **kwargs)
return gnm
def normalize(self, method=VCnorm, **kwargs):
"""Applies chosen normalization on the current Hi-C map."""
M = self._map
N = method(M, **kwargs)
self.map = N
return N
def setDomains(self, labels, **kwargs):
"""Uses spectral clustering to identify structural domains on the chromosome.
:arg labels: domain labels
:type labels: :class:`~numpy.ndarray`, list
:arg method: Label assignment algorithm used after Laplacian embedding.
:type method: func
"""
wastrimmed = self.masked
self.masked = True
if len(labels) == self.numAtoms():
full_length = self.numAtoms()
if full_length != len(labels):
_labels = np.empty(full_length)
_labels.fill(np.nan)
_labels[self.mask] = labels
currlbl = labels[0]
for i in range(len(_labels)):
l = _labels[i]
if np.isnan(l):
_labels[i] = currlbl
elif currlbl != l:
currlbl = l
labels = _labels
else:
self.masked = False
if len(labels) != self.numAtoms():
raise ValueError('The length of the labels should match either the length '
'of masked or complete Hi-C map. Turn off "masked" if '
'you intended to set the labels to the full map.')
self.masked = wastrimmed
self._labels = labels
return self.getDomains()
def getDomains(self):
"""Returns an 1D :class:`numpy.ndarray` whose length is the number of loci. Each
element is an index denotes to which domain the locus belongs."""
lbl = self._labels
mask = self.mask
if self.masked:
lbl = lbl[mask]
return lbl
def getDomainList(self):
"""Returns a list of domain separations. The list has two columns: the first is for
the domain starts and the second is for the domain ends."""
indicators = np.diff(self.getDomains())
indicators = np.append(1., indicators)
indicators[-1] = 1
sites = np.where(indicators != 0)[0]
starts = sites[:-1]
ends = sites[1:]
domains = np.array([starts, ends]).T
return domains
def view(self, spec='p', **kwargs):
"""Visualization of the Hi-C map and domains (if present). The function makes use
of :func:`.showMatrix`.
:arg spec: a string specifies how to preprocess the matrix. Blank for no preprocessing,
'p' for showing only data from *p*-th to *100-p*-th percentile. '_' is to suppress
creating a new figure and paint to the current one instead. The letter specifications
can be applied sequentially, e.g. 'p_'.
:type spec: str
:arg p: specifies the percentile threshold.
:type p: double
"""
dm_kwargs = {}
keys = list(kwargs.keys())
for k in keys:
if k.startswith('dm_'):
dm_kwargs[k[3:]] = kwargs.pop(k)
elif k.startswith('domain_'):
dm_kwargs[k[7:]] = kwargs.pop(k)
M = self.map
if 'p' in spec:
p = kwargs.pop('p', 5)
lp = kwargs.pop('lp', p)
hp = kwargs.pop('hp', 100-p)
vmin = np.percentile(M, lp)
vmax = np.percentile(M, hp)
else:
vmin = vmax = None
if not 'vmin' in kwargs:
kwargs['vmin'] = vmin
if not 'vmax' in kwargs:
kwargs['vmax'] = vmax
im = showMatrix(M, **kwargs)
domains = self.getDomainList()
if len(domains) > 1:
showDomains(domains, **dm_kwargs)
return im
def copy(self):
new = type(self)()
new.__dict__.update(self.__dict__)
return new
__copy__ = copy
def parseHiC(filename, **kwargs):
"""Returns an :class:`.HiC` from a Hi-C data file.
This function extends :func:`.parseHiCStream`.
:arg filename: the filename to the Hi-C data file.
:type filename: str
"""
import os, struct
title = kwargs.get('title')
if title is None:
title = os.path.basename(filename)
else:
title = kwargs.pop('title')
if isURL(filename):
M, res = parseHiCBinary(filename, title=title, **kwargs)
else:
with open(filename,'rb') as req:
magic_number = struct.unpack('<3s',req.read(3))[0]
if magic_number == b"HIC":
M, res = parseHiCBinary(filename, title=title, **kwargs)
else:
with open(filename, 'r') as filestream:
M, res = parseHiCStream(filestream, title=title, **kwargs)
hic = HiC(title=title, map=M, bin=res)
return hic
def _sparse2dense(I, J, values, bin=None):
I = np.asarray(I, dtype=int)
J = np.asarray(J, dtype=int)
values = np.asarray(values, dtype=float)
# determine the bin size by the most frequent interval
if bin is None:
loci = np.unique(np.sort(I))
bins = np.diff(loci)
bin = mode(bins)[0][0]
# convert coordinate from basepair to locus index
bin = int(bin)
I = I // bin
J = J // bin
# make sure that the matrix is square
# if np.max(I) != np.max(J):
# b = np.max(np.append(I, J))
# I = np.append(I, b)
# J = np.append(J, b)
# values = np.append(values, 0.)
# Convert to sparse matrix format, then full matrix format
# and finally array type. Matrix format is avoided because
# diag() won't work as intended for Matrix instances.
M = np.array(coo_matrix((values, (I, J))).todense())
return M, bin
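# Example of the bin inference above: loci at 0, 10000, 20000 and 50000 give
# intervals [10000, 10000, 30000], so mode() picks bin = 10000 and I // bin
# maps the basepair coordinates to locus indices 0, 1, 2 and 5.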
def parseHiCStream(stream, **kwargs):
|
def parseHiCBinary(filename, **kwargs):
chrloc = kwargs.get('chrom', None)
if chrloc is None:
raise ValueError('chrom needs to be specified when parsing .hic format')
chrloc1 = kwargs.get('chrom1', chrloc)
chrloc2 = kwargs.get('chrom2', chrloc)
norm = kwargs.get('norm', 'NONE')
unit = kwargs.get('unit', 'BP')
res = kwargs.get('binsize', None)
res = kwargs.get('bin', res)
if res is None:
raise ValueError('bin needs to be specified when parsing .hic format')
res = int(res)
from .straw import straw
result = straw(norm, filename, chrloc1, chrloc2, unit, res)
M, res = _sparse2dense(*result, bin=res)
return M, res
def writeMap(filename, map, bin=None, format='%f'):
"""Writes *map* to the file designated by *filename*.
:arg filename: the file to be written.
:type filename: str
:arg map: a Hi-C contact map.
:type map: :class:`numpy.ndarray`
:arg bin: bin size of the *map*. If bin is `None`, *map* will be
written in full matrix format.
:type bin: int
:arg format: output format for map elements.
:type format: str
"""
assert isinstance(map, np.ndarray), 'map must be a numpy.ndarray.'
if bin is None:
return writeArray(filename, map, format=format)
else:
L = int(map.size - np.diag(map).size)//2 + np.diag(map).size
spmat = np.zeros((L, 3))
m,n = map.shape
l = 0
for i in range(m):
for j in range(i,n):
spmat[l, 0] = i * bin
spmat[l, 1] = j * bin
spmat[l, 2] = map[i, j]
l += 1
fmt = ['%d', '%d', format]
return writeArray(filename, spmat, format=fmt)
def saveHiC(hic, filename=None, map=True, **kwargs):
"""Saves *HiC* model data as :file:`filename.hic.npz`. If *map* is **True**,
Hi-C contact map will not be saved and it can be loaded from raw data file
later. If *filename* is **None**, name of the Hi-C instance will be used as
the filename, after ``" "`` (white spaces) in the name are replaced with
``"_"`` (underscores). Upon successful completion of saving, filename is
returned. This function makes use of :func:`numpy.savez` function."""
assert isinstance(hic, HiC), 'hic must be a HiC instance.'
if filename is None:
filename = hic.getTitle().replace(' ', '_')
if filename.endswith('.hic'):
filename += '.npz'
elif not filename.endswith('.hic.npz'):
filename += '.hic.npz'
attr_dict = hic.__dict__.copy()
if not map:
attr_dict.pop('_map')
ostream = openFile(filename, 'wb', **kwargs)
np.savez_compressed(ostream, **attr_dict)
ostream.close()
return filename
def loadHiC(filename):
"""Returns HiC instance after loading it from file (*filename*).
This function makes use of :func:`numpy.load` function. See also
:func:`saveHiC`."""
attr_dict = np.load(filename)
hic = HiC()
keys = attr_dict.keys()
for k in keys:
val = attr_dict[k]
if len(val.shape) == 0:
val = np.asscalar(val)
setattr(hic, k, val)
return hic
def saveHiC_h5(hic, filename=None, **kwargs):
"""Saves *HiC* model data as :file:`filename.hic.npz`. If *filename* is
**None**, name of the Hi-C instance will be used as
the filename, after ``" "`` (white spaces) in the name are replaced with
``"_"`` (underscores). Upon successful completion of saving, filename is
returned. This function makes use of :func:`numpy.savez` function."""
try:
import h5py
    except ImportError:
raise ImportError('h5py needs to be installed for using this function')
assert isinstance(hic, HiC), 'hic must be a HiC instance.'
if filename is None:
filename = hic.getTitle().replace(' ', '_')
if filename.endswith('.hic'):
        filename += '.h5'
elif not filename.endswith('.hic.h5'):
filename += '.hic.h5'
attr_dict = hic.__dict__.copy()
with h5py.File(filename, 'w') as f:
for key in attr_dict:
value = attr_dict[key]
compression = None if np.isscalar(value) else 'gzip'
f.create_dataset(key, data=value, compression=compression)
return filename
def loadHiC_h5(filename):
"""Returns HiC instance after loading it from file (*filename*).
    This function makes use of :mod:`h5py`. See also
    :func:`saveHiC_h5`."""
try:
import h5py
    except ImportError:
raise ImportError('h5py needs to be installed for using this function')
hic = HiC()
with h5py.File(filename, 'r') as f:
for key in f.keys():
try:
value = f[key][:]
            except Exception:
value = f[key][()]
setattr(hic, key, value)
return hic
|
"""Returns an :class:`.HiC` from a stream of Hi-C data lines.
:arg stream: Anything that implements the method ``read``, ``seek``
(e.g. :class:`file`, buffer, stdin)
"""
issparse = kwargs.get('sparse', None)
import csv
dialect = csv.Sniffer().sniff(stream.read(1024))
stream.seek(0)
reader = csv.reader(stream, dialect)
D = list()
for row in reader:
d = list()
for element in row:
d.append(np.double(element))
D.append(d)
D = np.array(D)
res = kwargs.get('bin', None)
if res is not None:
res = int(res)
size = D.shape
if len(D.shape) <= 1:
raise ValueError("cannot parse the file: input file only contains one column.")
if issparse is None:
issparse = size[1] == 3
if not issparse:
M = D
else:
try:
I, J, values = D.T[:3]
except ValueError:
raise ValueError('the sparse matrix format should have three columns')
M, res = _sparse2dense(I, J, values, bin=res)
return M, res
|
path.go
|
// Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package cmdutil
import "os"
// GetHomePath obtains a home path from a GOOS architecture.
//
// based on spf13/viper (userHomeDir)
func
|
(goos string) string {
if goos != "windows" {
return os.Getenv("HOME")
}
if home := os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH"); home != "" {
return home
}
return os.Getenv("USERPROFILE")
}
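
// Usage sketch (runtime and fmt imports assumed):
//
//	home := GetHomePath(runtime.GOOS)
//	fmt.Println(home) // $HOME on unix-likes; HOMEDRIVE+HOMEPATH or USERPROFILE on windows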
|
GetHomePath
|
InferTypesFromRule.d.ts
|
import { RuleCreateFunction, RuleModule } from "../ts-eslint";
/**
* Uses type inference to fetch the TOptions type from the given RuleModule
*/
declare type InferOptionsTypeFromRule<T> = T extends
RuleModule<infer _TMessageIds, infer TOptions> ? TOptions
: T extends RuleCreateFunction<infer _TMessageIds, infer TOptions> ? TOptions
: unknown;
|
* Uses type inference to fetch the TMessageIds type from the given RuleModule
*/
declare type InferMessageIdsTypeFromRule<T> = T extends
RuleModule<infer TMessageIds, infer _TOptions> ? TMessageIds
: T extends RuleCreateFunction<infer TMessageIds, infer _TOptions>
? TMessageIds
: unknown;
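// Usage sketch (the rule shape below is assumed for illustration):
//
//   declare const rule: RuleModule<'noFoo', [{ allow: boolean }]>;
//   type Options = InferOptionsTypeFromRule<typeof rule>;       // [{ allow: boolean }]
//   type MessageIds = InferMessageIdsTypeFromRule<typeof rule>; // 'noFoo'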
export { InferMessageIdsTypeFromRule, InferOptionsTypeFromRule };
//# sourceMappingURL=InferTypesFromRule.d.ts.map
|
/**
|
test.go
|
package test
import (
"encoding/json"
"io"
"io/ioutil"
"net/http"
"sync"
"time"
"github.com/kubeedge/kubeedge/beehive/pkg/common/log"
"github.com/kubeedge/kubeedge/beehive/pkg/core"
"github.com/kubeedge/kubeedge/beehive/pkg/core/context"
"github.com/kubeedge/kubeedge/beehive/pkg/core/model"
"github.com/kubeedge/kubeedge/pkg/common/message"
"github.com/kubeedge/kubeedge/pkg/metamanager/dao"
"k8s.io/api/core/v1"
)
const (
name = "testManager"
edgedEndPoint = "http://127.0.0.1:10255"
)
func
|
() {
core.Register(&testManager{})
}
type testManager struct {
context *context.Context
moduleWait *sync.WaitGroup
}
type meta struct {
UID string `json:"uid"`
}
func (tm *testManager) Name() string {
return name
}
func (tm *testManager) Group() string {
return core.MetaGroup
}
// GetPodListFromEdged fetches the pod list from Edged over HTTP and echoes it to w
func GetPodListFromEdged(w http.ResponseWriter) error {
var pods v1.PodList
var bytes io.Reader
client := &http.Client{}
t := time.Now()
	req, err := http.NewRequest(http.MethodGet, edgedEndPoint+"/pods", bytes)
	if err != nil {
		log.LOGGER.Errorf("Frame HTTP request failed: %v", err)
		return err
	}
	// set the header only after the error check: req is nil when NewRequest fails
	req.Header.Set("Content-Type", "application/json; charset=utf-8")
resp, err := client.Do(req)
if err != nil {
log.LOGGER.Errorf("Sending HTTP request failed: %v", err)
return err
}
log.LOGGER.Infof("%s %s %v in %v", req.Method, req.URL, resp.Status, time.Now().Sub(t))
defer resp.Body.Close()
contents, err := ioutil.ReadAll(resp.Body)
if err != nil {
log.LOGGER.Errorf("HTTP Response reading has failed: %v", err)
return err
}
err = json.Unmarshal(contents, &pods)
if err != nil {
log.LOGGER.Errorf("Json Unmarshal has failed: %v", err)
return err
}
respBody, err := json.Marshal(pods)
if err != nil {
log.LOGGER.Errorf("Json Marshal failed: %v", err)
return err
}
w.Header().Set("Content-Type", "application/json")
w.Write(respBody)
return nil
}
// applicationHandler handles Get/Add/Delete requests for the application list
func (tm *testManager) applicationHandler(w http.ResponseWriter, req *http.Request) {
var operation string
var p v1.Pod
if req.Method == http.MethodGet {
err := GetPodListFromEdged(w)
if err != nil{
log.LOGGER.Errorf("Get podlist from Edged has failed: %v", err)
}
} else if req.Body != nil {
body, err := ioutil.ReadAll(req.Body)
if err != nil {
log.LOGGER.Errorf("read body error %v", err)
w.Write([]byte("read request body error"))
}
log.LOGGER.Infof("request body is %s\n", string(body))
if err = json.Unmarshal(body, &p); err != nil {
log.LOGGER.Errorf("unmarshal request body error %v", err)
w.Write([]byte("unmarshal request body error"))
}
switch req.Method {
case "POST":
operation = model.InsertOperation
case "DELETE":
operation = model.DeleteOperation
case "PUT":
operation = model.UpdateOperation
}
ns := v1.NamespaceDefault
if p.Namespace != "" {
ns = p.Namespace
}
msgReq := message.BuildMsg("resource", string(p.UID), "controller", ns+"/pod/"+string(p.Name), operation, p)
tm.context.Send("metaManager", *msgReq)
log.LOGGER.Infof("send message to metaManager is %+v\n", msgReq)
}
}
// deviceHandler handles device addition and removal on the edge node
func (tm *testManager) deviceHandler(w http.ResponseWriter, req *http.Request) {
var operation string
var Content interface{}
if req.Body != nil {
body, err := ioutil.ReadAll(req.Body)
if err != nil {
log.LOGGER.Errorf("read body error %v", err)
w.Write([]byte("read request body error"))
}
log.LOGGER.Infof("request body is %s\n", string(body))
err = json.Unmarshal(body, &Content)
if err != nil {
log.LOGGER.Errorf("unmarshal request body error %v", err)
w.Write([]byte("unmarshal request body error"))
}
switch req.Method {
case "POST":
operation = model.InsertOperation
case "DELETE":
operation = model.DeleteOperation
case "PUT":
operation = model.UpdateOperation
}
msgReq := message.BuildMsg("edgehub", "", "edgemgr", "membership", operation, Content)
tm.context.Send("twin", *msgReq)
log.LOGGER.Infof("send message to twingrp is %+v\n", msgReq)
}
}
func (tm *testManager) secretHandler(w http.ResponseWriter, req *http.Request) {
var operation string
var p v1.Secret
if req.Body != nil {
body, err := ioutil.ReadAll(req.Body)
if err != nil {
log.LOGGER.Errorf("read body error %v", err)
w.Write([]byte("read request body error"))
}
log.LOGGER.Infof("request body is %s\n", string(body))
if err = json.Unmarshal(body, &p); err != nil {
log.LOGGER.Errorf("unmarshal request body error %v", err)
w.Write([]byte("unmarshal request body error"))
}
switch req.Method {
case "POST":
operation = model.InsertOperation
case "DELETE":
operation = model.DeleteOperation
case "PUT":
operation = model.UpdateOperation
}
msgReq := message.BuildMsg("edgehub", string(p.UID), "test", "fakeNamespace/secret/"+string(p.UID), operation, p)
tm.context.Send("metaManager", *msgReq)
log.LOGGER.Infof("send message to metaManager is %+v\n", msgReq)
}
}
func (tm *testManager) configmapHandler(w http.ResponseWriter, req *http.Request) {
var operation string
var p v1.ConfigMap
if req.Body != nil {
body, err := ioutil.ReadAll(req.Body)
if err != nil {
log.LOGGER.Errorf("read body error %v", err)
w.Write([]byte("read request body error"))
}
log.LOGGER.Infof("request body is %s\n", string(body))
if err = json.Unmarshal(body, &p); err != nil {
log.LOGGER.Errorf("unmarshal request body error %v", err)
w.Write([]byte("unmarshal request body error"))
}
switch req.Method {
case "POST":
operation = model.InsertOperation
case "DELETE":
operation = model.DeleteOperation
case "PUT":
operation = model.UpdateOperation
}
msgReq := message.BuildMsg("edgehub", string(p.UID), "test", "fakeNamespace/configmap/"+string(p.UID), operation, p)
tm.context.Send("metaManager", *msgReq)
log.LOGGER.Infof("send message to metaManager is %+v\n", msgReq)
}
}
func (tm *testManager) getPodsHandler(w http.ResponseWriter, r *http.Request) {
var podList v1.PodList
metas, err := dao.QueryMeta("type", "pod")
if err != nil {
log.LOGGER.Errorf("failed to query pods: %v", err)
}
for _, podContent := range *metas {
var pod v1.Pod
err := json.Unmarshal([]byte(podContent), &pod)
if err != nil {
log.LOGGER.Errorf("failed to unmarshal: %v", err)
}
podList.Items = append(podList.Items, pod)
}
	respBody, err := json.Marshal(podList)
	if err != nil {
		log.LOGGER.Errorf("Json Marshal failed: %v", err)
	}
w.Header().Set("Content-Type", "application/json")
w.Write(respBody)
}
func (tm *testManager) podHandler(w http.ResponseWriter, req *http.Request) {
var operation string
var p v1.Pod
if req.Method == http.MethodGet {
tm.getPodsHandler(w, req)
} else if req.Body != nil {
body, err := ioutil.ReadAll(req.Body)
if err != nil {
log.LOGGER.Errorf("read body error %v", err)
w.Write([]byte("read request body error"))
}
log.LOGGER.Infof("request body is %s\n", string(body))
if err = json.Unmarshal(body, &p); err != nil {
log.LOGGER.Errorf("unmarshal request body error %v", err)
w.Write([]byte("unmarshal request body error"))
}
switch req.Method {
case "POST":
operation = model.InsertOperation
case "DELETE":
operation = model.DeleteOperation
case "PUT":
operation = model.UpdateOperation
}
ns := v1.NamespaceDefault
if p.Namespace != "" {
ns = p.Namespace
}
msgReq := message.BuildMsg("edgehub", string(p.UID), "test", ns+"/pod/"+string(p.UID), operation, p)
tm.context.Send("metaManager", *msgReq)
log.LOGGER.Infof("send message to metaManager is %+v\n", msgReq)
}
}
func (tm *testManager) Start(c *context.Context) {
tm.context = c
defer tm.Cleanup()
http.HandleFunc("/pod", tm.podHandler)
http.HandleFunc("/configmap", tm.configmapHandler)
http.HandleFunc("/secret", tm.secretHandler)
http.HandleFunc("/devices", tm.deviceHandler)
http.HandleFunc("/apps", tm.applicationHandler)
err := http.ListenAndServe(":12345", nil)
if err != nil {
log.LOGGER.Errorf("ListenAndServe: %v", err)
}
}
func (tm *testManager) Cleanup() {
tm.context.Cleanup(tm.Name())
}
|
init
|
ng_utils.py
|
import re
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.encoding import force_text
from ordered_set import OrderedSet
register = template.Library()
@register.filter
def required(field):
"""
Return 'required' as a string if the BoundField's underlying field is required.
"""
return "required" if field.field.required else ""
@register.filter
def
|
(value, css_classes):
"""
Add a single or multiple css classes to a form widget. To add multiple classes, pass
them as a whitespace delimited string. eg, {{ field|add_class:"foo bar" }}
"""
if not css_classes:
return value
widget = value.field.widget
orig_classes = OrderedSet(widget.attrs.get('class', '').split())
new_classes = OrderedSet(css_classes.split())
widget.attrs['class'] = " ".join(orig_classes | new_classes)
return value
@register.simple_tag(takes_context=True)
def isactive(context, url, active='active', inactive='', exact=False):
"""
A ternary tag for whether a URL is 'active'. An active URL is defined as matching
the current request URL. The default behavior is to match the beginning of the URL.
For example, if `url` is '/some/path' and the current request URL is
'/some/path/subpath', then the URL is considered active. If `exact` is set to True,
    then the URLs must match exactly.
Example::
{% url 'named-url' as named_url %}
<div class="{% isactive named_url 'active' 'inactive' %}">
</div>
"""
request_url = context['request'].path_info
if (request_url == url if exact else request_url.startswith(url)):
return active
return inactive
# def ifactive
# refer to {% ifequal %} implementation because it doesn't perform {% if %} condition parsing
# Originally from: https://djangosnippets.org/snippets/1519/
CONSONANT_SOUND = re.compile(r'''one(?![ir])''', re.IGNORECASE | re.VERBOSE)
VOWEL_SOUND = re.compile(r'''[aeio]|u([aeiou]|[^n][^aeiou]|ni[^dmnl]|nil[^l])|h(ier|onest|onou?r|ors\b|our(?!i))|[fhlmnrsx]\b''', re.IGNORECASE | re.VERBOSE)
@register.filter
def an(text):
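    """
    Prefix *text* with 'a' or 'an' based on whether its leading sound is a
    vowel, e.g. 'hour' -> 'an hour' but 'user' -> 'a user'.
    """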
text = force_text(text)
match = not CONSONANT_SOUND.match(text) and VOWEL_SOUND.match(text)
return '%s %s' % ('an' if match else 'a', text)
|
add_class
|
select_character.go
|
package commands
import (
"fmt"
"strings"
log "github.com/sirupsen/logrus"
"github.com/talesmud/talesmud/pkg/entities"
"github.com/talesmud/talesmud/pkg/entities/characters"
"github.com/talesmud/talesmud/pkg/entities/rooms"
"github.com/talesmud/talesmud/pkg/mudserver/game/def"
"github.com/talesmud/talesmud/pkg/mudserver/game/messages"
m "github.com/talesmud/talesmud/pkg/mudserver/game/messages"
)
// SelectCharacterCommand ... select a character
type SelectCharacterCommand struct {
}
// Key ...
func (command *SelectCharacterCommand) Key() CommandKey { return &StartsWithCommandKey{} }
// Execute ... executes the select character command
func (command *SelectCharacterCommand) Execute(game def.GameCtrl, message *messages.Message) bool {
	parts := strings.Fields(message.Data)
	if len(parts) < 2 {
		// guard against an empty or argument-less command, which would otherwise panic below
		game.SendMessage() <- message.Reply("Please provide a character name.")
		return true
	}
	characterName := strings.Join(parts[1:], " ")
if characters, err := game.GetFacade().CharactersService().FindByName(characterName); err == nil {
for _, character := range characters {
if character.Name == characterName && character.BelongsUserID == message.FromUser.ID {
// found character to select
handleCharacterSelected(game, message.FromUser, character)
return true
}
}
}
game.SendMessage() <- message.Reply("Could not select character: " + characterName)
return true
}
func
|
(game def.GameCtrl, user *entities.User, character *characters.Character) {
// handle Character deselection
if user.LastCharacter != "" && user.LastCharacter != character.ID {
if character, err := game.GetFacade().CharactersService().FindByID(user.LastCharacter); err == nil {
if room, err := game.GetFacade().RoomsService().FindByID(character.CurrentRoomID); err == nil {
// remove character from current room
// send all players a left room message
game.SendMessage() <- messages.CharacterLeftRoom{
MessageResponse: messages.MessageResponse{
Audience: m.MessageAudienceRoomWithoutOrigin,
AudienceID: room.ID,
OriginID: character.ID,
Message: character.Name + " left.",
},
}
room.RemoveCharacter(character.ID)
game.GetFacade().RoomsService().Update(room.ID, room)
}
}
}
// update player
user.LastCharacter = character.ID
game.GetFacade().UsersService().Update(user.RefID, user)
characterSelected := &messages.CharacterSelected{
MessageResponse: messages.MessageResponse{
Audience: messages.MessageAudienceOrigin,
AudienceID: user.ID,
Type: messages.MessageTypeCharacterSelected,
Message: fmt.Sprintf("You are now playing as [%v]", character.Name),
},
Character: character,
}
game.SendMessage() <- characterSelected
var currentRoom *rooms.Room
var err error
if character.CurrentRoomID != "" {
if currentRoom, err = game.GetFacade().RoomsService().FindByID(character.CurrentRoomID); err != nil {
log.WithField("room", character.CurrentRoomID).Warn("CurrentRoomID for player not found (room might have been deleted or temporary)")
// set to ""
character.CurrentRoomID = ""
}
}
// new character or not part of a room?
if character.CurrentRoomID == "" {
// find a random room to start in or get starting room
rooms, _ := game.GetFacade().RoomsService().FindAll()
if len(rooms) > 0 {
			// TODO: make this random or select a starting room
currentRoom = rooms[0]
//TODO: send this as message
character.CurrentRoomID = currentRoom.ID
game.GetFacade().CharactersService().Update(character.ID, character)
}
}
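	// currentRoom is assumed non-nil from here on: the character either kept a
	// valid room or was just assigned the first room from the list above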
// update room // send these state change messages via channel
currentRoom.AddCharacter(character.ID)
game.GetFacade().RoomsService().Update(currentRoom.ID, currentRoom)
enterRoom := m.NewEnterRoomMessage(currentRoom, user, game)
enterRoom.AudienceID = user.ID
game.SendMessage() <- enterRoom
game.SendMessage() <- messages.CharacterJoinedRoom{
MessageResponse: messages.MessageResponse{
Audience: m.MessageAudienceRoomWithoutOrigin,
AudienceID: currentRoom.ID,
OriginID: character.ID,
Message: character.Name + " entered.",
},
}
}
|
handleCharacterSelected
|
hairline.rs
|
use tiny_skia::*;
fn draw_line(x0: f32, y0: f32, x1: f32, y1: f32, anti_alias: bool, width: f32, line_cap: LineCap) -> Pixmap {
let mut pixmap = Pixmap::new(100, 100).unwrap();
let mut canvas = Canvas::from(pixmap.as_mut());
let mut pb = PathBuilder::new();
pb.move_to(x0, y0);
pb.line_to(x1, y1);
let path = pb.finish().unwrap();
let mut paint = Paint::default();
paint.set_color_rgba8(50, 127, 150, 200);
paint.anti_alias = anti_alias;
let mut stroke = Stroke::default();
stroke.width = width;
stroke.line_cap = line_cap;
canvas.stroke_path(&path, &paint, &stroke);
pixmap
}
#[test]
fn hline_05() {
let expected = Pixmap::load_png("tests/images/hairline/hline-05.png").unwrap();
assert_eq!(draw_line(10.0, 10.0, 90.0, 10.0, false, 0.5, LineCap::Butt), expected);
}
#[test]
fn hline_05_aa() {
let expected = Pixmap::load_png("tests/images/hairline/hline-05-aa.png").unwrap();
assert_eq!(draw_line(10.0, 10.0, 90.0, 10.0, true, 0.5, LineCap::Butt), expected);
}
#[test]
fn hline_05_aa_round() {
let expected = Pixmap::load_png("tests/images/hairline/hline-05-aa-round.png").unwrap();
assert_eq!(draw_line(10.0, 10.0, 90.0, 10.0, true, 0.5, LineCap::Round), expected);
}
#[test]
fn vline_05() {
let expected = Pixmap::load_png("tests/images/hairline/vline-05.png").unwrap();
assert_eq!(draw_line(10.0, 10.0, 10.0, 90.0, false, 0.5, LineCap::Butt), expected);
}
#[test]
fn vline_05_aa() {
let expected = Pixmap::load_png("tests/images/hairline/vline-05-aa.png").unwrap();
assert_eq!(draw_line(10.0, 10.0, 10.0, 90.0, true, 0.5, LineCap::Butt), expected);
}
#[test]
fn vline_05_aa_round() {
let expected = Pixmap::load_png("tests/images/hairline/vline-05-aa-round.png").unwrap();
assert_eq!(draw_line(10.0, 10.0, 10.0, 90.0, true, 0.5, LineCap::Round), expected);
}
#[test]
fn horish_05_aa() {
let expected = Pixmap::load_png("tests/images/hairline/horish-05-aa.png").unwrap();
assert_eq!(draw_line(10.0, 10.0, 90.0, 70.0, true, 0.5, LineCap::Butt), expected);
}
#[test]
fn vertish_05_aa() {
let expected = Pixmap::load_png("tests/images/hairline/vertish-05-aa.png").unwrap();
assert_eq!(draw_line(10.0, 10.0, 70.0, 90.0, true, 0.5, LineCap::Butt), expected);
}
#[test]
fn clip_line_05_aa() {
let expected = Pixmap::load_png("tests/images/hairline/clip-line-05-aa.png").unwrap();
assert_eq!(draw_line(-10.0, 10.0, 110.0, 70.0, true, 0.5, LineCap::Butt), expected);
}
#[test]
fn clip_line_00() {
let expected = Pixmap::load_png("tests/images/hairline/clip-line-00.png").unwrap();
assert_eq!(draw_line(-10.0, 10.0, 110.0, 70.0, false, 0.0, LineCap::Butt), expected);
}
#[test]
fn clip_hline_top_aa() {
let expected = Pixmap::load_png("tests/images/hairline/clip-hline-top-aa.png").unwrap();
assert_eq!(draw_line(-1.0, 0.0, 101.0, 0.0, true, 1.0, LineCap::Butt), expected);
}
#[test]
fn clip_hline_bottom_aa() {
let expected = Pixmap::load_png("tests/images/hairline/clip-hline-bottom-aa.png").unwrap();
assert_eq!(draw_line(-1.0, 100.0, 101.0, 100.0, true, 1.0, LineCap::Butt), expected);
}
#[test]
fn clip_vline_left_aa() {
let expected = Pixmap::load_png("tests/images/hairline/clip-vline-left-aa.png").unwrap();
assert_eq!(draw_line(0.0, -1.0, 0.0, 101.0, true, 1.0, LineCap::Butt), expected);
}
#[test]
fn clip_vline_right_aa() {
let expected = Pixmap::load_png("tests/images/hairline/clip-vline-right-aa.png").unwrap();
assert_eq!(draw_line(100.0, -1.0, 100.0, 101.0, true, 1.0, LineCap::Butt), expected);
}
fn draw_quad(anti_alias: bool, width: f32, line_cap: LineCap) -> Pixmap {
let mut pixmap = Pixmap::new(200, 100).unwrap();
let mut canvas = Canvas::from(pixmap.as_mut());
let mut pb = PathBuilder::new();
pb.move_to(25.0, 80.0);
pb.quad_to(155.0, 75.0, 175.0, 20.0);
let path = pb.finish().unwrap();
let mut paint = Paint::default();
paint.set_color_rgba8(50, 127, 150, 200);
paint.anti_alias = anti_alias;
let mut stroke = Stroke::default();
stroke.width = width;
stroke.line_cap = line_cap;
canvas.stroke_path(&path, &paint, &stroke);
pixmap
}
#[test]
fn quad_width_05_aa() {
let expected = Pixmap::load_png("tests/images/hairline/quad-width-05-aa.png").unwrap();
assert_eq!(draw_quad(true, 0.5, LineCap::Butt), expected);
}
#[test]
fn quad_width_05_aa_round() {
let expected = Pixmap::load_png("tests/images/hairline/quad-width-05-aa-round.png").unwrap();
assert_eq!(draw_quad(true, 0.5, LineCap::Round), expected);
}
#[test]
fn quad_width_00() {
let expected = Pixmap::load_png("tests/images/hairline/quad-width-00.png").unwrap();
assert_eq!(draw_quad(false, 0.0, LineCap::Butt), expected);
}
fn draw_cubic(points: &[f32; 8], anti_alias: bool, width: f32, line_cap: LineCap) -> Pixmap {
let mut pixmap = Pixmap::new(200, 100).unwrap();
let mut canvas = Canvas::from(pixmap.as_mut());
let mut pb = PathBuilder::new();
pb.move_to(points[0], points[1]);
pb.cubic_to(points[2], points[3], points[4], points[5], points[6], points[7]);
let path = pb.finish().unwrap();
let mut paint = Paint::default();
paint.set_color_rgba8(50, 127, 150, 200);
paint.anti_alias = anti_alias;
let mut stroke = Stroke::default();
stroke.width = width;
stroke.line_cap = line_cap;
canvas.stroke_path(&path, &paint, &stroke);
pixmap
}
#[test]
fn cubic_width_10_aa() {
let expected = Pixmap::load_png("tests/images/hairline/cubic-width-10-aa.png").unwrap();
assert_eq!(draw_cubic(&[25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], true, 1.0, LineCap::Butt), expected);
}
#[test]
fn cubic_width_05_aa() {
let expected = Pixmap::load_png("tests/images/hairline/cubic-width-05-aa.png").unwrap();
assert_eq!(draw_cubic(&[25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], true, 0.5, LineCap::Butt), expected);
}
#[test]
fn cubic_width_00_aa() {
let expected = Pixmap::load_png("tests/images/hairline/cubic-width-00-aa.png").unwrap();
assert_eq!(draw_cubic(&[25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], true, 0.0, LineCap::Butt), expected);
}
#[test]
fn
|
() {
let expected = Pixmap::load_png("tests/images/hairline/cubic-width-00.png").unwrap();
assert_eq!(draw_cubic(&[25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], false, 0.0, LineCap::Butt), expected);
}
#[test]
fn cubic_width_05_aa_round() {
let expected = Pixmap::load_png("tests/images/hairline/cubic-width-05-aa-round.png").unwrap();
assert_eq!(draw_cubic(&[25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], true, 0.5, LineCap::Round), expected);
}
#[test]
fn cubic_width_00_round() {
let expected = Pixmap::load_png("tests/images/hairline/cubic-width-00-round.png").unwrap();
assert_eq!(draw_cubic(&[25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], false, 0.0, LineCap::Round), expected);
}
#[test]
fn chop_cubic_01() {
let expected = Pixmap::load_png("tests/images/hairline/chop-cubic-01.png").unwrap();
// This curve will invoke `path_geometry::chop_cubic_at_max_curvature` branch of `hair_cubic`.
assert_eq!(draw_cubic(&[57.0, 13.0, 17.0, 15.0, 55.0, 97.0, 89.0, 62.0], true, 0.5, LineCap::Butt), expected);
}
#[test]
fn clip_cubic_05_aa() {
let expected = Pixmap::load_png("tests/images/hairline/clip-cubic-05-aa.png").unwrap();
assert_eq!(draw_cubic(&[-25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], true, 0.5, LineCap::Butt), expected);
}
#[test]
fn clip_cubic_00() {
let expected = Pixmap::load_png("tests/images/hairline/clip-cubic-00.png").unwrap();
assert_eq!(draw_cubic(&[-25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], false, 0.0, LineCap::Butt), expected);
}
#[test]
fn clipped_circle_aa() {
let mut pixmap = Pixmap::new(100, 100).unwrap();
let mut canvas = Canvas::from(pixmap.as_mut());
let mut paint = Paint::default();
paint.set_color_rgba8(50, 127, 150, 200);
paint.anti_alias = true;
let mut stroke = Stroke::default();
stroke.width = 0.5;
let path = PathBuilder::from_circle(50.0, 50.0, 55.0).unwrap();
canvas.stroke_path(&path, &paint, &stroke);
let expected = Pixmap::load_png("tests/images/hairline/clipped-circle-aa.png").unwrap();
assert_eq!(pixmap, expected);
}
|
cubic_width_00
|
DataUrl.ts
|
export interface DataUrlInfo {
mimeType: string
data: string
isBase64: boolean
}
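// PATTERN splits a data URL such as 'data:text/plain;base64,SGk=' into its
// mime type, an optional ';base64' marker, and the payload after the comma.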
const PATTERN = /^data:(?<mimeType>[^;,]*)(?<isBase64>;base64)?,(?<data>.*)$/
export function parse(dataUrl: string): DataUrlInfo | null {
const match = dataUrl.match(PATTERN)
if (!match || !match.groups) return null
return {
mimeType: match.groups.mimeType || 'text/plain',
data: match.groups.data,
isBase64: !!match.groups.isBase64,
}
|
}
|
|
index.tsx
|
import Check from '@/constants/svg/check.svg';
import { tw } from 'twind';
const FeatureSection = () => (
<section className={tw(`bg-white pb-6`)}>
<div className={tw(`max-w-7xl mx-auto p-4 sm:p-6 lg:p-8`)}>
<div className={tw(`container mx-auto px-6 p-6 bg-white`)}>
<div className={tw(`mb-16 text-center`)}>
<h4 className={tw(`text-base text-indigo-600 font-semibold tracking-wide uppercase`)}>Features</h4>
<p className={tw(`mt-2 text-5xl lg:text-7xl font-bold tracking-tight text-gray-900`)}>
How we change the game
</p>
</div>
<div className={tw(`flex flex-wrap my-12`)}>
<div className={tw(`w-full border-b md:w-1/2 md:border-r lg:w-1/3 p-8`)}>
<div className={tw(`flex items-center mb-6`)}>
<Check width={20} height={20} fill="currentColor" className={tw(`h-6 w-6 text-indigo-500`)} />
<div className={tw(`ml-4 text-xl`)}>Multichannel</div>
</div>
<p className={tw(`leading-loose text-gray-500`)}>
              Amazon, eBay, Kaufland, Check24, Otto, Manomano, and your own webshop... You will have a one-stop shop, just like we do.
</p>
</div>
<div className={tw(`w-full border-b md:w-1/2 lg:w-1/3 lg:border-r p-8`)}>
<div className={tw(`flex items-center mb-6`)}>
<Check width={20} height={20} fill="currentColor" className={tw(`h-6 w-6 text-indigo-500`)} />
<div className={tw(`ml-4 text-xl`)}>Enterprise-ready</div>
</div>
<p className={tw(`leading-loose text-gray-500 `)}>
              We have been working with producers offshore and delivering according to best practices.
</p>
</div>
<div className={tw(`w-full border-b md:w-1/2 md:border-r lg:w-1/3 lg:border-r-0 p-8`)}>
<div className="flex items-center mb-6">
<Check width={20} height={20} fill="currentColor" className={tw(`h-6 w-6 text-indigo-500`)} />
<div className={tw(`ml-4 text-xl`)}>Unlimited growth</div>
</div>
<p className={tw(`leading-loose text-gray-500`)}>
              We are always looking for new channels and new markets to grow into, and we are open to new ideas as long as you have a vision.
</p>
</div>
<div className={tw(`w-full border-b md:w-1/2 lg:w-1/3 lg:border-r lg:border-b-0 p-8`)}>
<div className={tw(`flex items-center mb-6`)}>
<Check width={20} height={20} fill="currentColor" className={tw(`h-6 w-6 text-indigo-500`)} />
<div className={tw(`ml-4 text-xl`)}>Automated Advertising</div>
</div>
<p className={tw(`leading-loose text-gray-500`)}>
              Our algorithms draw on keyword reports and external data to create, monitor, and optimize precisely targeted sponsored-brand campaigns.
</p>
</div>
<div className={tw(`w-full border-b md:w-1/2 md:border-r md:border-b-0 lg:w-1/3 lg:border-b-0 p-8`)}>
<div className={tw(`flex items-center mb-6`)}>
<Check width={20} height={20} fill="currentColor" className={tw(`h-6 w-6 text-indigo-500`)} />
|
<div className={tw(`ml-4 text-xl`)}>Cost at a glance</div>
</div>
<p className={tw(`leading-loose text-gray-500`)}>
              Fully automated monthly export of all your costs. Clear and transparent.
</p>
</div>
<div className={tw(`w-full md:w-1/2 lg:w-1/3 p-8`)}>
<div className={tw(`flex items-center mb-6`)}>
<Check width={20} height={20} fill="currentColor" className={tw(`h-6 w-6 text-indigo-500`)} />
<div className={tw(`ml-4 text-xl`)}>Integrations</div>
</div>
<p className={tw(`leading-loose text-gray-500`)}>
              All of the information is accessible on your dashboard at any time.
</p>
</div>
</div>
</div>
</div>
</section>
);
export default FeatureSection;
| |
contact_manifolds_trimesh_shape.rs
|
use crate::bounding_volume::{BoundingVolume, AABB};
use crate::math::{Isometry, Real};
use crate::query::contact_manifolds::ContactManifoldsWorkspace;
use crate::query::query_dispatcher::PersistentQueryDispatcher;
use crate::query::ContactManifold;
use crate::shape::{Shape, TriMesh};
use crate::utils::MaybeSerializableData;
#[cfg(feature = "serde-serialize")]
use erased_serde::Serialize;
#[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))]
#[derive(Clone)]
pub struct TriMeshShapeContactManifoldsWorkspace {
interferences: Vec<u32>,
local_aabb2: AABB,
old_interferences: Vec<u32>,
}
impl TriMeshShapeContactManifoldsWorkspace {
pub fn new() -> Self {
Self {
interferences: Vec::new(),
local_aabb2: AABB::new_invalid(),
old_interferences: Vec::new(),
}
}
}
/// Computes the contact manifold between a triangle-mesh and a shape, both represented as `Shape` trait-objects.
pub fn contact_manifolds_trimesh_shape_shapes<ManifoldData, ContactData>(
dispatcher: &dyn PersistentQueryDispatcher<ManifoldData, ContactData>,
pos12: &Isometry<Real>,
shape1: &dyn Shape,
shape2: &dyn Shape,
prediction: Real,
manifolds: &mut Vec<ContactManifold<ManifoldData, ContactData>>,
workspace: &mut Option<ContactManifoldsWorkspace>,
) where
ManifoldData: Default,
ContactData: Default + Copy,
{
if let Some(trimesh1) = shape1.as_trimesh() {
contact_manifolds_trimesh_shape(
dispatcher, pos12, trimesh1, shape2, prediction, manifolds, workspace, false,
)
} else if let Some(trimesh2) = shape2.as_trimesh() {
contact_manifolds_trimesh_shape(
dispatcher,
&pos12.inverse(),
trimesh2,
shape1,
prediction,
manifolds,
workspace,
true,
)
}
}
fn ensure_workspace_exists(workspace: &mut Option<ContactManifoldsWorkspace>) {
if workspace
.as_mut()
.and_then(|w| w.0.downcast_mut::<TriMeshShapeContactManifoldsWorkspace>())
.is_some()
{
return;
}
*workspace = Some(ContactManifoldsWorkspace(Box::new(
TriMeshShapeContactManifoldsWorkspace::new(),
)));
}
/// Computes the contact manifold between a triangle-mesh and a shape.
pub fn contact_manifolds_trimesh_shape<ManifoldData, ContactData>(
dispatcher: &dyn PersistentQueryDispatcher<ManifoldData, ContactData>,
pos12: &Isometry<Real>,
trimesh1: &TriMesh,
shape2: &dyn Shape,
prediction: Real,
manifolds: &mut Vec<ContactManifold<ManifoldData, ContactData>>,
workspace: &mut Option<ContactManifoldsWorkspace>,
flipped: bool,
) where
ManifoldData: Default,
ContactData: Default + Copy,
|
impl MaybeSerializableData for TriMeshShapeContactManifoldsWorkspace {
#[cfg(feature = "serde-serialize")]
fn as_serialize(&self) -> Option<(u32, &dyn Serialize)> {
Some((
super::WorkspaceSerializationTag::TriMeshShapeContactManifoldsWorkspace as u32,
self,
))
}
fn clone_dyn(&self) -> Box<dyn MaybeSerializableData> {
Box::new(self.clone())
}
}
|
{
ensure_workspace_exists(workspace);
let workspace: &mut TriMeshShapeContactManifoldsWorkspace =
workspace.as_mut().unwrap().0.downcast_mut().unwrap();
/*
* Compute interferences.
*/
// TODO: somehow precompute the AABB and reuse it?
let mut new_local_aabb2 = shape2.compute_aabb(&pos12).loosened(prediction);
let same_local_aabb2 = workspace.local_aabb2.contains(&new_local_aabb2);
let mut old_manifolds = Vec::new();
if !same_local_aabb2 {
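        // Grow the AABB a little so that small motions of shape2 keep it inside
        // the cached AABB, letting the next frame reuse the interference list.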
let extra_margin =
(new_local_aabb2.maxs - new_local_aabb2.mins).map(|e| (e / 10.0).min(0.1));
new_local_aabb2.mins -= extra_margin;
new_local_aabb2.maxs += extra_margin;
let local_aabb2 = new_local_aabb2; // .loosened(prediction * 2.0); // FIXME: what would be the best value?
std::mem::swap(
&mut workspace.old_interferences,
&mut workspace.interferences,
);
std::mem::swap(manifolds, &mut old_manifolds);
// This assertion may fire due to the invalid triangle_ids that the
// near-phase may return (due to SIMD sentinels).
//
// assert_eq!(
// workspace
// .old_interferences
// .len()
// .min(trimesh1.num_triangles()),
// workspace.old_manifolds.len()
// );
workspace.interferences.clear();
trimesh1
.quadtree()
.intersect_aabb(&local_aabb2, &mut workspace.interferences);
workspace.local_aabb2 = local_aabb2;
}
/*
* Dispatch to the specific solver by keeping the previous manifold if we already had one.
*/
let new_interferences = &workspace.interferences;
let mut old_inter_it = workspace.old_interferences.drain(..).peekable();
let mut old_manifolds_it = old_manifolds.drain(..);
// TODO: don't redispatch at each frame (we should probably do the same as
// the heightfield).
for (i, triangle_id) in new_interferences.iter().enumerate() {
if *triangle_id >= trimesh1.num_triangles() as u32 {
// Because of SIMD padding, the broad-phase may return triangle indices greater
// than the max.
continue;
}
if !same_local_aabb2 {
loop {
match old_inter_it.peek() {
Some(old_triangle_id) if *old_triangle_id < *triangle_id => {
let _ = old_inter_it.next();
let _ = old_manifolds_it.next();
}
_ => break,
}
}
let manifold = if old_inter_it.peek() != Some(triangle_id) {
let (id1, id2) = if flipped {
(0, *triangle_id)
} else {
(*triangle_id, 0)
};
ContactManifold::with_data(id1, id2, ManifoldData::default())
} else {
// We already have a manifold for this triangle.
let _ = old_inter_it.next();
old_manifolds_it.next().unwrap()
};
manifolds.push(manifold);
}
let manifold = &mut manifolds[i];
let triangle1 = trimesh1.triangle(*triangle_id);
if flipped {
let _ = dispatcher.contact_manifold_convex_convex(
&pos12.inverse(),
shape2,
&triangle1,
prediction,
manifold,
);
} else {
let _ = dispatcher
.contact_manifold_convex_convex(pos12, &triangle1, shape2, prediction, manifold);
}
}
}
|
thread-comments.tsx
|
import mockData from "../mock-data/mock-board-data";
function ThreadComments() {
return (
<div>
<header>
<div>
{mockData.map((item, i) => (
|
</tr>
))}
</div>
</header>
</div>
);
}
export default ThreadComments;
|
<tr key={i}>
<h1>{item.title}</h1>
<br />
<hr />
|
log_dup_windows.go
|
package log
import (
syslog "log"
"syscall"
)
// sysDup dumps process fatal errors
func sysDup(fd int) err
|
// Duplicate the stdin/stdout/stderr handles
files := []uintptr{uintptr(syscall.Stdin), uintptr(syscall.Stdout), uintptr(syscall.Stderr)}
p, _ := syscall.GetCurrentProcess()
h := syscall.Handle(fd)
for i := range files {
err := syscall.DuplicateHandle(p, syscall.Handle(files[i]), p, &h, 0, true, syscall.DUPLICATE_SAME_ACCESS)
if err != nil {
syslog.Println(err)
return err
}
}
return nil
}
|
or {
|
project.ts
|
require('es6-promise');
import {Signature} from './common/models';
import {constructJsonPartialSignRequest, constructJsonSignRequest, constructPublicJsonRequest} from './common/util';
import Config from './config';
import {sendGetJSON, sendPostJSON} from './utils/http';
const base58 = require('bs58');
class Project {
config: Config;
constructor(config: Config) {
this.config = config;
}
listProjects(): Promise<any> {
return sendGetJSON(this.config.getBlockSyncUrl() + '/api/project/listProjects');
}
getProjectByProjectDid(projectDid: any): Promise<any> {
return sendGetJSON(this.config.getBlockSyncUrl() + '/api/project/getByProjectDid/' + projectDid);
}
getProjectByUserDid(senderDid: any): Promise<any> {
const payload = {senderDid: senderDid};
const request = constructPublicJsonRequest('listProjectBySenderDid', payload)
return sendPostJSON(this.config.getBlockSyncUrl() + '/api/project/', request);
}
createProject(data: any, signature: Signature, PDSUrl: string): Promise<any> {
const json = constructJsonSignRequest('createProject', 'create_project', signature, data);
return sendPostJSON(PDSUrl + 'api/request', json);
}
updateProjectStatus(data: any, signature: Signature, PDSUrl: string): Promise<any> {
const json = constructJsonSignRequest('updateProjectStatus', 'project_status', signature, data);
return sendPostJSON(PDSUrl + 'api/request', json);
}
fundProject(data: any, signature: Signature, PDSUrl: string): Promise<any> {
const json = constructJsonPartialSignRequest('fundProject', 'fund_project', signature, data);
return sendPostJSON(PDSUrl + 'api/request', json);
}
createPublic(source: any, PDSUrl: string) {
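    // `source` is expected to be a data URL: 'data:<contentType>;base64,<data>'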
const srcParts = source.split(',');
const data = srcParts[1];
const contentType = srcParts[0].split(';')[0].split(':')[1];
const payload = {
data: data,
contentType: contentType
};
const json = constructPublicJsonRequest('createPublic', payload);
return sendPostJSON(PDSUrl + 'api/public', json);
}
fetchPublic(key: any, PDSUrl: string) {
const payload = {
key: key
};
return new Promise((resolve, reject) => {
const json = constructPublicJsonRequest('fetchPublic', payload);
sendPostJSON(PDSUrl + 'api/public', json)
.then((response: any) => {
if (response.result.data) {
const obj = {
data: response.result.data,
contentType: response.result.contentType
};
resolve(obj);
} else {
reject(null);
}
})
.catch((error: any) => {
reject(error);
});
});
}
generateWithdrawObjectJson = (data: any, signature: string, pubKey: string, fee: object) => {
return {
|
signature: signature,
pub_key: {
type: "tendermint/PubKeyEd25519",
value: base58.decode(pubKey).toString('base64'),
}
}]
// memo: "this is an optional memo",
};
}
payOutToEthWallet(data: any, signature: Signature, fee: object, mode?: string): Promise<any> {
const {signatureValue, publicKey} = signature;
const tx = this.generateWithdrawObjectJson(data, signatureValue, publicKey, fee);
return sendPostJSON(this.config.getBlockSyncUrl() + '/api/blockchain/txs', {
tx,
mode: mode || "block"
})
}
}
export default Project;
|
msg: [{type: "project/WithdrawFunds", value: data}],
fee,
signatures: [{
|
0090_30867afad44a_rename_concurrency_column_in_dag_table_.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Rename ``concurrency`` column in ``dag`` table to`` max_active_tasks``
Revision ID: 30867afad44a
Revises: e9304a3141f0
Create Date: 2021-06-04 22:11:19.849981
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '30867afad44a'
down_revision = 'e9304a3141f0'
branch_labels = None
depends_on = None
airflow_version = '2.2.0'
def
|
():
"""Apply Rename ``concurrency`` column in ``dag`` table to`` max_active_tasks``"""
conn = op.get_bind()
is_sqlite = bool(conn.dialect.name == "sqlite")
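    # batch_alter_table recreates the table on SQLite, so foreign key
    # enforcement is suspended while the rows are copied over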
if is_sqlite:
op.execute("PRAGMA foreign_keys=off")
with op.batch_alter_table('dag') as batch_op:
batch_op.alter_column(
'concurrency',
new_column_name='max_active_tasks',
type_=sa.Integer(),
nullable=False,
)
if is_sqlite:
op.execute("PRAGMA foreign_keys=on")
def downgrade():
"""Unapply Rename ``concurrency`` column in ``dag`` table to`` max_active_tasks``"""
with op.batch_alter_table('dag') as batch_op:
batch_op.alter_column(
'max_active_tasks',
new_column_name='concurrency',
type_=sa.Integer(),
nullable=False,
)
|
upgrade
|
migrate_tools_to_repositories.py
|
#!/usr/bin/env python
'''
Migrate old Galaxy tool shed to next gen Galaxy tool shed. Specifically, the tool archives stored as
files in the old tool shed will be migrated to mercurial repositories in the next gen tool shed. This
script can be run any number of times as it initially eliminates any current repositories and db records
associated with them, and migrates old tool shed stuff to new tool shed stuff.
====== CRITICAL =======
0. This script must be run on a repo updated to changeset: 5621:4618be57481b
1. Before running this script, make sure the following config setting is set in tool_shed_wsgi.ini
# Enable next-gen tool shed features
enable_next_gen_tool_shed = True
2. This script requires the Galaxy instance to use Postgres for database storage.
To run this script, use "sh migrate_tools_to_repositories.sh" from this directory
'''
import ConfigParser
import os
import shutil
import sys
import tarfile
import tempfile
from time import strftime
from mercurial import hg, ui
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'lib')))
import galaxy.webapps.tool_shed.app
assert sys.version_info[:2] >= ( 2, 4 )
def directory_hash_id( id ):
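    # e.g. id=12345 -> padded '012345' -> drop the last three digits -> ['012'],
    # so up to 1000 repositories share each hashed directory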
s = str( id )
l = len( s )
# Shortcut -- ids 0-999 go under ../000/
if l < 4:
return [ "000" ]
# Pad with zeros until a multiple of three
padded = ( ( ( 3 - len( s ) ) % 3 ) * "0" ) + s
# Drop the last three digits -- 1000 files per directory
padded = padded[:-3]
# Break into chunks of three
return [ padded[i * 3:(i + 1) * 3] for i in range( len( padded ) // 3 ) ]
def get_versions( app, item ):
"""Get all versions of item whose state is a valid state"""
valid_states = [ app.model.Tool.states.NEW,
app.model.Tool.states.WAITING,
app.model.Tool.states.APPROVED,
app.model.Tool.states.ARCHIVED ]
versions = [ item ]
this_item = item
while item.newer_version:
if item.newer_version.state in valid_states:
versions.append( item.newer_version )
item = item.newer_version
item = this_item
while item.older_version:
if item.older_version[ 0 ].state in valid_states:
versions.insert( 0, item.older_version[ 0 ] )
item = item.older_version[ 0 ]
return versions
def get_approved_tools( app, sa_session ):
"""Get only the latest version of each tool from the database whose state is approved"""
tools = []
for tool in sa_session.query( app.model.Tool ) \
.order_by( app.model.Tool.table.c.name ):
if tool.state == app.model.Tool.states.APPROVED:
tools.append( tool )
return tools
def create_repository_from_tool( app, sa_session, tool ):
# Make the repository name a form of the tool's tool_id by
# lower-casing everything and replacing any blank spaces with underscores.
repo_name = tool.tool_id.lower().replace( ' ', '_' )
print "Creating repository '%s' in database" % ( repo_name )
repository = app.model.Repository( name=repo_name,
description=tool.description,
user_id=tool.user_id )
# Flush to get the id
sa_session.add( repository )
sa_session.flush()
# Determine the local repository's path on disk
dir = os.path.join( app.config.file_path, *directory_hash_id( repository.id ) )
# Create directory if it does not exist
if not os.path.exists( dir ):
os.makedirs( dir )
# Define repository name inside hashed directory
repository_path = os.path.join( dir, "repo_%d" % repository.id )
# Create repository directory
if not os.path.exists( repository_path ):
os.makedirs( repository_path )
# Create the local hg repository
print "Creating repository '%s' on disk" % ( os.path.abspath( repository_path ) )
hg.repository( ui.ui(), os.path.abspath( repository_path ), create=True )
# Add an entry in the hgweb.config file for the new repository - this enables calls to repository.repo_path
add_hgweb_config_entry( repository, repository_path )
# Migrate tool categories
for tca in tool.categories:
category = tca.category
print "Associating category '%s' with repository '%s' in database" % ( category.name, repository.name )
rca = app.model.RepositoryCategoryAssociation( repository, category )
sa_session.add( rca )
sa_session.flush()
# Migrate tool ratings
print "Associating ratings for tool '%s' with repository '%s'" % ( tool.name, repository.name )
for tra in tool.ratings:
rra = app.model.RepositoryRatingAssociation( user=tra.user,
rating=tra.rating,
comment=tra.comment )
rra.repository = repository
sa_session.add( rra )
sa_session.flush()
|
def add_hgweb_config_entry( repository, repository_path ):
# Add an entry in the hgweb.config file for a new repository. This enables calls to repository.repo_path.
# An entry looks something like: repos/test/mira_assembler = database/community_files/000/repo_123
hgweb_config = "%s/hgweb.config" % os.getcwd()
entry = "repos/%s/%s = %s" % ( repository.user.username, repository.name, repository_path.lstrip( './' ) )
if os.path.exists( hgweb_config ):
output = open( hgweb_config, 'a' )
else:
output = open( hgweb_config, 'w' )
output.write( '[paths]\n' )
output.write( "%s\n" % entry )
output.close()
def create_hgrc_file( repository ):
# At this point, an entry for the repository is required to be in the hgweb.config
# file so we can call repository.repo_path.
# Create a .hg/hgrc file that looks something like this:
# [web]
# allow_push = test
# name = convert_characters1
# push_ssl = False
# Upon repository creation, only the owner can push to it ( allow_push setting ),
# and since we support both http and https, we set push_ssl to False to override
# the default (which is True) in the mercurial api.
hgrc_file = os.path.abspath( os.path.join( repository.repo_path, ".hg", "hgrc" ) )
output = open( hgrc_file, 'w' )
output.write( '[web]\n' )
output.write( 'allow_push = %s\n' % repository.user.username )
output.write( 'name = %s\n' % repository.name )
output.write( 'push_ssl = false\n' )
output.flush()
output.close()
def add_tool_files_to_repository( app, sa_session, tool ):
current_working_dir = os.getcwd()
# Get the repository to which the tool will be migrated
repo_name = tool.tool_id.lower().replace( ' ', '_' )
repository = get_repository_by_name( app, sa_session, repo_name )
repo_path = os.path.abspath( repository.repo_path )
# Get all valid versions of the tool
tool_versions = get_versions( app, tool )
for tool_version in tool_versions:
print "------------------------------"
print "Migrating tool '%s' version '%s' from archive to repository '%s'" % ( tool_version.tool_id, tool_version.version, repo_path )
# Make a temporary working directory
tmp_dir = tempfile.mkdtemp()
tmp_archive_dir = os.path.join( tmp_dir, 'tmp_archive_dir' )
if not os.path.exists( tmp_archive_dir ):
os.makedirs( tmp_archive_dir )
cmd = "hg clone %s" % repo_path
os.chdir( tmp_archive_dir )
os.system( cmd )
os.chdir( current_working_dir )
cloned_repo_dir = os.path.join( tmp_archive_dir, 'repo_%d' % repository.id )
# We want these change sets to be associated with the owner of the repository, so we'll
# set the HGUSER environment variable accordingly. We do this because in the mercurial
# api, the default username to be used in commits is determined in this order: $HGUSER,
# [ui] section of hgrcs, $EMAIL and stop searching if one of these is set.
os.environ[ 'HGUSER' ] = repository.user.username
# Copy the tool archive to the tmp_archive_dir. The src file cannot be derived from
# tool.file_name here because we have not loaded the Tool class in the model, so the
# tool.file_name defaults to /tmp/...
dir = os.path.join( app.config.file_path, 'tools', *directory_hash_id( tool_version.id ) )
src = os.path.abspath( os.path.join( dir, 'tool_%d.dat' % tool_version.id ) )
dst = os.path.join( tmp_archive_dir, tool_archive_file_name( tool_version, src ) )
shutil.copy( src, dst )
# Extract the archive to cloned_repo_dir
tarfile.open( dst ).extractall( path=cloned_repo_dir )
# Remove the archive
os.remove( dst )
# Change current working directory to the cloned repository
os.chdir( cloned_repo_dir )
for root, dirs, files in os.walk( cloned_repo_dir ):
if '.hg' in dirs:
# Don't visit .hg directories
dirs.remove( '.hg' )
if 'hgrc' in files:
# Don't include hgrc files in commit - should be impossible
# since we don't visit .hg dirs, but just in case...
files.remove( 'hgrc' )
for dir in dirs:
os.system( "hg add %s" % dir )
for name in files:
print "Adding file '%s' to cloned repository at %s" % ( name, str( os.getcwd() ) )
os.system( "hg add %s" % name )
print "Committing change set to cloned repository at %s" % str( os.getcwd() )
os.system( "hg commit -m 'Migrated tool version %s from old tool shed archive to new tool shed repository'" % tool_version.version )
print "Pushing changeset from cloned repository '%s' to repository '%s'" % ( cloned_repo_dir, repo_path )
cmd = "hg push %s" % repo_path
print "cmd is: ", cmd
os.system( cmd )
# The tool shed includes a repository source file browser, which currently depends upon
# copies of the hg repository file store in the repo_path for browsing. We'll do the
# following to make these copies.
os.chdir( repo_path )
os.system( 'hg update' )
# Change the current working directory to the original
os.chdir( current_working_dir )
# Now that we have out new repository made current with all change sets,
# we'll create a hgrc file for it.
create_hgrc_file( repository )
# Remove tmp directory
shutil.rmtree( tmp_dir )
def get_repository_by_name( app, sa_session, repo_name ):
"""Get a repository from the database"""
return sa_session.query( app.model.Repository ).filter_by( name=repo_name ).one()
def contains( containing_str, contained_str ):
return containing_str.lower().find( contained_str.lower() ) >= 0
def tool_archive_extension( file_name ):
extension = None
if extension is None:
head = open( file_name, 'rb' ).read( 4 )
try:
assert head[:3] == 'BZh'
assert int( head[-1] ) in range( 0, 10 )
extension = 'tar.bz2'
except AssertionError:
pass
if extension is None:
try:
assert head[:2] == '\037\213'
extension = 'tar.gz'
        except AssertionError:
pass
if extension is None:
extension = 'tar'
return extension
def tool_archive_file_name( tool, file_name ):
return '%s_%s.%s' % ( tool.tool_id, tool.version, tool_archive_extension( file_name ) )
def main():
if len( sys.argv ) < 2:
print "Usage: python %s <Tool shed config file>" % sys.argv[0]
sys.exit( 0 )
now = strftime( "%Y-%m-%d %H:%M:%S" )
print " "
print "##########################################"
print "%s - Migrating current tool archives to new tool repositories" % now
# tool_shed_wsgi.ini file
ini_file = sys.argv[1]
conf_parser = ConfigParser.ConfigParser( {'here': os.getcwd()} )
conf_parser.read( ini_file )
try:
db_conn_str = conf_parser.get( "app:main", "database_connection" )
except ConfigParser.NoOptionError:
db_conn_str = conf_parser.get( "app:main", "database_file" )
print 'DB Connection: ', db_conn_str
# Instantiate app
configuration = {}
for key, value in conf_parser.items( "app:main" ):
configuration[key] = value
app = galaxy.webapps.tool_shed.app.UniverseApplication( global_conf=dict( __file__=ini_file ), **configuration )
sa_session = app.model.context
# Remove the hgweb.config file if it exists
hgweb_config = "%s/hgweb.config" % os.getcwd()
if os.path.exists( hgweb_config ):
print "Removing old file: ", hgweb_config
os.remove( hgweb_config )
repo_records = 0
rca_records = 0
rra_records = 0
for repo in sa_session.query( app.model.Repository ):
# Remove the hg repository from disk. We have to be careful here, because old
# tool files exist in app.config.file_path/tools and we don't want to delete them
dir = os.path.join( app.config.file_path, *directory_hash_id( repo.id ) )
if os.path.exists( dir ):
print "Removing old repository file directory: ", dir
shutil.rmtree( dir )
# Delete all records from db tables:
# repository_category_association, repository_rating_association, repository
print "Deleting db records for repository: ", repo.name
for rca in repo.categories:
sa_session.delete( rca )
rca_records += 1
for rra in repo.ratings:
sa_session.delete( rra )
rra_records += 1
sa_session.delete( repo )
repo_records += 1
sa_session.flush()
print "Deleted %d rows from the repository table" % repo_records
print "Deleted %d rows from the repository_category_association table" % rca_records
print "Deleted %d rows from the repository_rating_association table" % rra_records
# Migrate database tool, tool category and tool rating records to new
# database repository, repository category and repository rating records
# and create the hg repository on disk for each.
for tool in get_approved_tools( app, sa_session ):
create_repository_from_tool( app, sa_session, tool )
# Add, commit and push all valid versions of each approved tool to the
# associated hg repository.
for tool in get_approved_tools( app, sa_session ):
add_tool_files_to_repository( app, sa_session, tool )
app.shutdown()
print ' '
print 'Migration to next gen tool shed complete...'
print "##########################################"
sys.exit(0)
if __name__ == "__main__":
main()
| |
compiler.rs
|
use anyhow::{bail, Result};
use crate::opcode::{Opcode, Reference, StackType};
use std::{collections::HashMap, iter::Peekable, str::Chars};
#[derive(Debug)]
enum ParserState {
LabelOrInstruction,
Argument,
}
fn is_letter(c: char) -> bool {
c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z'
}
fn is_digit(c: char) -> bool {
c >= '0' && c <= '9'
}
fn is_whitespace(c: char) -> bool {
c == ' ' || c == '\t' || c == '\n'
}
fn parse_number(s: &str) -> StackType {
s.parse::<StackType>().unwrap()
}
fn requires_argument(instruction: &str) -> Result<bool> {
match instruction.to_lowercase().as_str() {
"push" | "dup" | "bnz" => Ok(true),
"swp" | "add" | "sub" | "mul" => Ok(false),
_ => bail!("Unknown instruction: {}", instruction),
}
}
#[derive(Debug)]
pub struct Compiler<'a> {
iit: Peekable<Chars<'a>>,
state: ParserState,
labels: HashMap<String, u16>,
current_token: Vec<char>,
current_instruction: Option<String>,
}
impl<'a> Compiler<'a> {
pub fn new(input: &'a str) -> Self {
Compiler {
iit: input.chars().peekable(),
state: ParserState::LabelOrInstruction,
labels: HashMap::new(),
current_token: vec![],
current_instruction: None,
}
}
fn read_while<P>(&mut self, pred: P)
where
P: Fn(char) -> bool,
{
loop {
match self.iit.peek() {
None => break,
Some(c) => {
if pred(*c) {
self.current_token.push(*c);
// advance iterator if peek was successful
self.iit.next();
} else {
break;
}
}
}
}
}
fn compile_instruction(&mut self, instruction: &str, argument: Option<&str>) -> Result<Opcode> {
Ok(match instruction {
"push" => Opcode::PushConstant(parse_number(argument.unwrap())),
"dup" => Opcode::Dup(parse_number(argument.unwrap()) as u16),
"bnz" => {
Opcode::BranchIfNotZero(Reference::Unresolved(String::from(argument.unwrap())))
}
"drop" => Opcode::Drop,
"swp" => Opcode::Swap,
"add" => Opcode::Add,
"sub" => Opcode::Subtract,
"mul" => Opcode::Multiply,
"halt" => Opcode::Halt,
_ => bail!("unknown instruction {}", instruction),
})
}
pub fn compile(&mut self) -> Result<Vec<Opcode>> {
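        // Pass 1: tokenize the source, emit opcodes, and record label addresses;
        // branch targets stay as unresolved references until the second pass below.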
let mut pass1 = vec![];
loop {
match self.state {
ParserState::LabelOrInstruction => {
self.read_while(is_whitespace);
self.current_token = vec![];
self.read_while(is_letter);
let c = self.iit.peek();
let token = String::from_iter(&self.current_token);
println!("label or instr: {:?}, p: {:?}", token, c);
match c {
Some(c) if c == &':' => {
// is a label
self.iit.next(); // consume complete label
self.labels.insert(token, pass1.len() as u16);
}
_ if token.len() > 0 =>
|
_ => {} // ignore empty tokens
}
}
ParserState::Argument => {
self.read_while(is_whitespace);
self.current_token = vec![];
self.read_while(|c| is_digit(c) || is_letter(c));
pass1.push(
self.compile_instruction(
&(self.current_instruction.clone()).unwrap(),
Some(&String::from_iter(&self.current_token)),
)
.unwrap(),
);
self.state = ParserState::LabelOrInstruction;
}
}
if self.iit.peek() == None {
break;
}
}
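        // Pass 2: patch branches, replacing label references with resolved addresses.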
Ok(pass1
.into_iter()
.map(|opcode| match opcode {
Opcode::BranchIfNotZero(Reference::Unresolved(reference)) => {
if let Some(adr) = self.labels.get(&reference) {
Opcode::BranchIfNotZero(Reference::Resolved(*adr))
} else {
panic!("undefined reference: {}", reference);
}
}
opcode => opcode,
})
.collect::<Vec<Opcode>>())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn no_arguments() {
let mut c = Compiler::new("add");
let output = c.compile().unwrap();
assert_eq!(output, vec![Opcode::Add]);
}
#[test]
fn with_argument() {
let mut c = Compiler::new("push 99");
let output = c.compile().unwrap();
assert_eq!(output, vec![Opcode::PushConstant(99i16)]);
}
#[test]
fn trailing_leading_ws() {
let mut c = Compiler::new("\n\n push \t 99 \n\n");
let output = c.compile().unwrap();
assert_eq!(output, vec![Opcode::PushConstant(99i16)]);
}
#[test]
fn multiple_instructions() {
let mut c = Compiler::new(" push 1\n push 1\n add");
let output = c.compile().unwrap();
assert_eq!(
output,
vec![
Opcode::PushConstant(1i16),
Opcode::PushConstant(1i16),
Opcode::Add
]
);
}
#[test]
fn label_bnz() {
let mut c = Compiler::new("loopa: push 10\n loopb: push 10\nbnz loopb");
let output = c.compile().unwrap();
println!("{:?}", output);
assert_eq!(
output,
vec![
Opcode::PushConstant(10i16),
Opcode::PushConstant(10i16),
Opcode::BranchIfNotZero(Reference::Resolved(1))
]
);
}
}
|
{
// is an instruction
if requires_argument(&token).unwrap() {
self.current_instruction = Some(String::from(token));
self.state = ParserState::Argument;
} else {
pass1.push(self.compile_instruction(&token, None).unwrap());
self.current_instruction = None;
}
}
|
desugar_expression_tree.rs
|
use crate::lowerer::Env;
use bstr::BString;
use ocamlrep::rc::RcOc;
use oxidized::{
aast,
aast_visitor::{AstParams, NodeMut, VisitorMut},
ast,
ast::{ClassId, ClassId_, Expr, Expr_, Hint_, Stmt, Stmt_},
ast_defs::*,
file_info,
pos::Pos,
};
/// Convert an expression tree to
/// ```
/// # Outer thunk
/// (() ==> {
/// # Spliced assignments
/// return new ExprTree(
/// # Metadata
/// # AST as smart constructor calls function
/// function (VisitorType $v) { $v->... },
/// );
/// )();
/// ```
pub fn desugar<TF>(hint: &aast::Hint, e: &Expr, env: &Env<TF>) -> Expr {
let visitor_name = {
if let Hint_::Happly(id, _) = &*hint.1 {
&id.1
} else {
""
}
};
let mut e = e.clone();
let mut e = virtualize_expr_types(visitor_name.to_string(), &mut e);
let e = virtualize_expr_calls(visitor_name.to_string(), &mut e);
let (e, extracted_splices) = extract_and_replace_splices(&e);
let splice_count = extracted_splices.len();
let temp_pos = e.0.clone();
// Create assignments of extracted splices
// `$__1 = spliced_expr;`
let mut thunk_body: Vec<Stmt> = extracted_splices
.into_iter()
.enumerate()
.map(|(i, expr)| {
Stmt::new(
expr.0.clone(),
Stmt_::Expr(Box::new(Expr::new(
expr.0.clone(),
Expr_::Binop(Box::new((Bop::Eq(None), temp_lvar(&expr.0, i), expr))),
))),
)
})
.collect();
// Create dict of spliced values
let key_value_pairs = (0..splice_count)
.into_iter()
.map(|i| {
let key = Expr::new(
Pos::make_none(),
Expr_::String(BString::from(temp_lvar_string(i))),
);
let value = temp_lvar(&Pos::make_none(), i);
(key, value)
})
.collect();
let spliced_dict = dict_literal(key_value_pairs);
// Make anonymous function of smart constructor calls
let visitor_expr = wrap_return(rewrite_expr(&e), &temp_pos.clone());
let visitor_body = ast::FuncBody {
ast: vec![visitor_expr],
annotation: (),
};
let param = ast::FunParam {
annotation: hint.0.clone(),
type_hint: ast::TypeHint((), Some(hint.clone())),
is_variadic: false,
pos: hint.0.clone(),
name: "$v".into(),
expr: None,
callconv: None,
user_attributes: vec![],
visibility: None,
};
let visitor_fun_ = wrap_fun_(visitor_body, vec![param], temp_pos.clone(), env);
let visitor_lambda = Expr::new(temp_pos.clone(), Expr_::mk_lfun(visitor_fun_, vec![]));
// Make anonymous function for typing purposes
let typing_fun = if env.codegen {
// throw new Exception()
Stmt::new(
temp_pos.clone(),
Stmt_::Throw(Box::new(new_obj(&temp_pos, "\\Exception", vec![]))),
)
} else {
// The original expression to be inferred
wrap_return(e, &temp_pos.clone())
};
let typing_fun_body = ast::FuncBody {
ast: vec![typing_fun],
annotation: (),
};
let typing_fun_ = wrap_fun_(typing_fun_body, vec![], temp_pos.clone(), env);
let typing_lambda = Expr::new(temp_pos.clone(), Expr_::mk_lfun(typing_fun_, vec![]));
// Make `return new ExprTree(...)`
let return_stmt = wrap_return(
new_obj(
&temp_pos,
"\\ExprTree",
vec![
exprpos(&temp_pos),
Expr::new(temp_pos.clone(), Expr_::Id(Box::new(make_id("__FILE__")))),
spliced_dict,
visitor_lambda,
typing_lambda,
],
),
&temp_pos.clone(),
);
// Add to the body of the thunk after the splice assignments
thunk_body.push(return_stmt);
// Create the thunk
let thunk_func_body = ast::FuncBody {
ast: thunk_body,
annotation: (),
};
let thunk_fun_ = wrap_fun_(thunk_func_body, vec![], temp_pos.clone(), env);
let thunk = Expr::new(temp_pos.clone(), Expr_::mk_lfun(thunk_fun_, vec![]));
// Call the thunk
Expr::new(
temp_pos.clone(),
Expr_::Call(Box::new((thunk, vec![], vec![], None))),
)
}
/// Convert `foo` to `return foo;`.
fn wrap_return(e: Expr, pos: &Pos) -> Stmt {
Stmt::new(pos.clone(), Stmt_::Return(Box::new(Some(e))))
}
/// Wrap a FuncBody into an anonymous Fun_
fn
|
<TF>(
body: ast::FuncBody,
params: Vec<ast::FunParam>,
pos: Pos,
env: &Env<TF>,
) -> ast::Fun_ {
ast::Fun_ {
span: pos,
annotation: (),
mode: file_info::Mode::Mstrict,
ret: ast::TypeHint((), None),
name: make_id(";anonymous"),
tparams: vec![],
where_constraints: vec![],
variadic: aast::FunVariadicity::FVnonVariadic,
params,
body,
fun_kind: ast::FunKind::FSync,
cap: ast::TypeHint((), None), // TODO(T70095684)
unsafe_cap: ast::TypeHint((), None), // TODO(T70095684)
user_attributes: vec![],
file_attributes: vec![],
external: false,
doc_comment: None,
namespace: RcOc::clone(&env.empty_ns_env),
static_: false,
}
}
/// Virtualizes expressions that could leak Hack type semantics
/// Converts literals, operators, and implicit boolean checks
fn virtualize_expr_types(visitor_name: String, mut e: &mut Expr) -> &mut Expr {
let mut visitor = TypeVirtualizer { visitor_name };
visitor.visit_expr(&mut (), &mut e).unwrap();
e
}
struct TypeVirtualizer {
visitor_name: String,
}
fn dummy_expr() -> Expr {
Expr::new(Pos::make_none(), aast::Expr_::Null)
}
// Converts `expr` to `expr->__bool()`
fn coerce_to_bool(receiver: &mut ast::Expr) -> ast::Expr {
let pos = receiver.0.clone();
let receiver = std::mem::replace(receiver, dummy_expr());
meth_call(receiver, "__bool", vec![], &pos)
}
impl<'ast> VisitorMut<'ast> for TypeVirtualizer {
type P = AstParams<(), ()>;
fn object(&mut self) -> &mut dyn VisitorMut<'ast, P = Self::P> {
self
}
fn visit_expr(&mut self, env: &mut (), e: &mut Expr) -> Result<(), ()> {
fn virtualize_binop(lhs: &mut Expr, meth_name: &str, rhs: &mut Expr, pos: &Pos) -> Expr {
let lhs = std::mem::replace(lhs, dummy_expr());
let rhs = std::mem::replace(rhs, dummy_expr());
meth_call(lhs, meth_name, vec![rhs], pos)
}
fn virtualize_unop(operand: &mut Expr, meth_name: &str, pos: &Pos) -> Expr {
let operand = std::mem::replace(operand, dummy_expr());
meth_call(operand, meth_name, vec![], pos)
}
use aast::Expr_::*;
let pos = e.0.clone();
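        // Helper: wrap an expression in an ETSplice node, i.e. `__splice__(...)`.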
let mk_splice = |e: Expr| -> Expr { Expr::new(pos.clone(), Expr_::ETSplice(Box::new(e))) };
match &mut e.1 {
// Convert `1` to `__splice__(Visitor::intLiteral(1))`.
Int(_) => {
*e = mk_splice(static_meth_call(
&self.visitor_name,
"intLiteral",
vec![e.clone()],
&pos,
))
}
// Convert `1.0` to `__splice__(Visitor::floatLiteral(1.0))`.
Float(_) => {
*e = mk_splice(static_meth_call(
&self.visitor_name,
"floatLiteral",
vec![e.clone()],
&pos,
))
}
// Convert `"foo"` to `__splice__(Visitor::stringLiteral("foo"))`
String(_) => {
*e = mk_splice(static_meth_call(
&self.visitor_name,
"stringLiteral",
vec![e.clone()],
&pos,
))
}
// Convert `true` to `__splice__(Visitor::boolLiteral(true))`
True | False => {
*e = mk_splice(static_meth_call(
&self.visitor_name,
"boolLiteral",
vec![e.clone()],
&pos,
))
}
// Convert `null` to `__splice__(Visitor::nullLiteral())`
Null => {
*e = mk_splice(static_meth_call(
&self.visitor_name,
"nullLiteral",
vec![],
&pos,
))
}
// Do not want to recurse into splices
ETSplice(_) => {}
Binop(ref mut bop) => {
let (ref op, ref mut lhs, ref mut rhs) = **bop;
// Recurse down the left and right hand sides
lhs.accept(env, self.object())?;
rhs.accept(env, self.object())?;
match op {
// Convert arithmetic operators `... + ...` to `$lhs->__plus(vec[$rhs])`
Bop::Plus => *e = virtualize_binop(lhs, "__plus", rhs, &e.0),
Bop::Minus => *e = virtualize_binop(lhs, "__minus", rhs, &e.0),
Bop::Star => *e = virtualize_binop(lhs, "__star", rhs, &e.0),
Bop::Slash => *e = virtualize_binop(lhs, "__slash", rhs, &e.0),
// Convert boolean &&, ||
Bop::Ampamp => *e = virtualize_binop(lhs, "__ampamp", rhs, &e.0),
Bop::Barbar => *e = virtualize_binop(lhs, "__barbar", rhs, &e.0),
// Convert comparison operators, <, <=, >, >=, ===, !==
Bop::Lt => *e = virtualize_binop(lhs, "__lessThan", rhs, &e.0),
Bop::Lte => *e = virtualize_binop(lhs, "__lessThanEqual", rhs, &e.0),
Bop::Gt => *e = virtualize_binop(lhs, "__greaterThan", rhs, &e.0),
Bop::Gte => *e = virtualize_binop(lhs, "__greaterThanEqual", rhs, &e.0),
Bop::Eqeqeq => *e = virtualize_binop(lhs, "__tripleEquals", rhs, &e.0),
Bop::Diff2 => *e = virtualize_binop(lhs, "__notTripleEquals", rhs, &e.0),
// Assignment is special and not virtualized
Bop::Eq(None) => {}
// The rest should be parser errors from expression_tree_check
_ => {}
}
}
Unop(ref mut unop) => {
let (ref op, ref mut operand) = **unop;
// Recurse into the operand
operand.accept(env, self.object())?;
match op {
Uop::Unot => *e = virtualize_unop(operand, "__exclamationMark", &e.0),
// The rest should be parser errors from expression_tree_check
_ => {}
}
}
// Convert `condition ? e1 : e2` to
// `condition->__bool() ? e1 : e2`
Eif(ref mut eif) => {
let (ref mut e1, ref mut e2, ref mut e3) = **eif;
e1.accept(env, self.object())?;
e2.accept(env, self.object())?;
e3.accept(env, self.object())?;
let e2 = e2.take();
let e3 = std::mem::replace(e3, dummy_expr());
*e = Expr::new(pos, Eif(Box::new((coerce_to_bool(e1), e2, e3))))
}
_ => e.recurse(env, self.object())?,
}
Ok(())
}
fn visit_stmt_(&mut self, env: &mut (), s: &mut Stmt_) -> Result<(), ()> {
use aast::Stmt_::*;
match s {
// Convert `while(condition) { block }` to
// `while(condition->coerceToBool()) { block }`
While(ref mut w) => {
let (ref mut condition, ref mut block) = **w;
condition.accept(env, self.object())?;
block.accept(env, self.object())?;
let block = std::mem::replace(block, vec![]);
*s = While(Box::new((coerce_to_bool(condition), block)))
}
// Convert `if(condition) { block }` to
// `if(condition->coerceToBool()) { block }`
If(i) => {
let (ref mut condition, ref mut b1, ref mut b2) = **i;
condition.accept(env, self.object())?;
b1.accept(env, self.object())?;
b2.accept(env, self.object())?;
let b1 = std::mem::replace(b1, vec![]);
let b2 = std::mem::replace(b2, vec![]);
*s = If(Box::new((coerce_to_bool(condition), b1, b2)))
}
// Convert `for(i; condition; j) { block }` to
// `for(i; condition->coerceToBool(); j) { block }`
For(f) => {
let (ref mut inits, ref mut condition, ref mut increments, ref mut block) = **f;
inits.accept(env, self.object())?;
increments.accept(env, self.object())?;
block.accept(env, self.object())?;
let inits = std::mem::replace(inits, vec![]);
let increments = std::mem::replace(increments, vec![]);
let block = std::mem::replace(block, vec![]);
let condition = if let Some(c) = condition {
c.accept(env, self.object())?;
Some(coerce_to_bool(c))
} else {
None
};
*s = For(Box::new((inits, condition, increments, block)))
}
_ => s.recurse(env, self.object())?,
}
Ok(())
}
}
/// Virtualizes function calls
fn virtualize_expr_calls(visitor_name: String, mut e: &mut Expr) -> &mut Expr {
let mut visitor = CallVirtualizer { visitor_name };
visitor.visit_expr(&mut (), &mut e).unwrap();
e
}
struct CallVirtualizer {
visitor_name: String,
}
impl<'ast> VisitorMut<'ast> for CallVirtualizer {
type P = AstParams<(), ()>;
fn object(&mut self) -> &mut dyn VisitorMut<'ast, P = Self::P> {
self
}
fn visit_expr(&mut self, env: &mut (), e: &mut Expr) -> Result<(), ()> {
use aast::Expr_::*;
let pos = e.0.clone();
let mk_splice = |e: Expr| -> Expr { Expr::new(pos.clone(), Expr_::ETSplice(Box::new(e))) };
match &mut e.1 {
// Convert `foo(...)` to `__splice__(Visitor::symbol('foo', foo<>))(...)`
Call(ref mut call) => {
let (ref recv, ref mut targs, ref mut args, ref mut variadic) = **call;
match &recv.1 {
Id(sid) => {
let fn_name = string_literal(&*sid.1);
targs.accept(env, self.object())?;
let targs = std::mem::replace(targs, vec![]);
let fp = Expr::new(
pos.clone(),
Expr_::FunctionPointer(Box::new((
ast::FunctionPtrId::FPId((**sid).clone()),
targs,
))),
);
let callee = mk_splice(static_meth_call(
&self.visitor_name,
"symbol",
vec![fn_name, fp],
&pos,
));
args.accept(env, self.object())?;
variadic.accept(env, self.object())?;
let args = std::mem::replace(args, vec![]);
let variadic = variadic.take();
e.1 = Call(Box::new((callee, vec![], args, variadic)))
}
            // Convert `Foo::bar(...)` to `__splice__(Visitor::symbol('Foo::bar', Foo::bar<>))(...)`
ClassConst(cc) => {
let (ref cid, ref s) = **cc;
let fn_name = if let ClassId_::CIexpr(Expr(_, Id(sid))) = &cid.1 {
let name = format!("{}::{}", &*sid.1, &s.1);
string_literal(&name)
} else {
// Should be unreachable
string_literal("__ILLEGAL_STATIC_CALL_IN_EXPRESSION_TREE")
};
targs.accept(env, self.object())?;
let targs = std::mem::replace(targs, vec![]);
let fp = Expr::new(
pos.clone(),
Expr_::FunctionPointer(Box::new((
aast::FunctionPtrId::FPClassConst(cid.clone(), s.clone()),
targs,
))),
);
let callee = mk_splice(static_meth_call(
&self.visitor_name,
"symbol",
vec![fn_name, fp],
&pos,
));
args.accept(env, self.object())?;
variadic.accept(env, self.object())?;
let args = std::mem::replace(args, vec![]);
let variadic = variadic.take();
e.1 = Call(Box::new((callee, vec![], args, variadic)))
}
_ => e.recurse(env, self.object())?,
}
}
// Do not want to recurse into splices
ETSplice(_) => {}
_ => e.recurse(env, self.object())?,
}
Ok(())
}
}
/// Convert expression tree expressions to method calls.
fn rewrite_expr(e: &Expr) -> Expr {
use aast::Expr_::*;
let pos = exprpos(&e.0);
match &e.1 {
// Convert `$x` to `$v->localVar(new ExprPos(...), "$x")` (note the quoting).
Lvar(lid) => v_meth_call("localVar", vec![pos, string_literal(&((lid.1).1))], &e.0),
// Convert `... = ...` to `$v->assign(new ExprPos(...), $v->..., $v->...)`.
Binop(bop) => match &**bop {
(Bop::Eq(None), lhs, rhs) => v_meth_call(
"assign",
vec![pos, rewrite_expr(&lhs), rewrite_expr(&rhs)],
&e.0,
),
_ => v_meth_call(
"unsupportedSyntax",
vec![string_literal("bad binary operator")],
&e.0,
),
},
// Convert ... ? ... : ... to `$v->ternary(new ExprPos(...), $v->..., $v->..., $v->...)`
Eif(eif) => {
let (e1, e2o, e3) = &**eif;
let e2 = if let Some(e2) = e2o {
rewrite_expr(&e2)
} else {
null_literal()
};
v_meth_call(
"ternary",
vec![pos, rewrite_expr(&e1), e2, rewrite_expr(&e3)],
&e.0,
)
}
Call(call) => {
let (recv, _, args, _) = &**call;
match &recv.1 {
// Convert `$foo->bar(args)` to
// `$v->methCall(new ExprPos(...), $foo, 'bar', vec[args])`
// Parenthesized expressions e.g. `($foo->bar)(args)` unsupported.
ObjGet(objget) if !objget.as_ref().3 => {
let (receiver, meth, _, _) = &**objget;
match &meth.1 {
Id(sid) => {
let fn_name = string_literal(&*sid.1);
let desugared_args = vec![
pos,
rewrite_expr(&receiver),
fn_name,
vec_literal(args.iter().map(rewrite_expr).collect()),
];
v_meth_call("methCall", desugared_args, &e.0)
}
_ => v_meth_call(
"unsupportedSyntax",
vec![string_literal("invalid function call")],
&e.0,
),
}
}
// Convert expr( ... )(args) to `$v->call(new ExprPos(..), rewrite_expr(expr), vec[args])`
_ => {
let args = vec![
pos,
rewrite_expr(recv),
vec_literal(args.iter().map(rewrite_expr).collect()),
];
v_meth_call("call", args, &e.0)
}
}
}
// Convert `($x) ==> { ... }` to `$v->lambdaLiteral(new ExprPos(...), vec["$x"], vec[...])`.
Lfun(lf) => {
let fun_ = &lf.0;
let param_names = fun_
.params
.iter()
.map(|p| string_literal(&p.name))
.collect();
let body_stmts = rewrite_stmts(&fun_.body.ast);
v_meth_call(
"lambdaLiteral",
vec![pos, vec_literal(param_names), vec_literal(body_stmts)],
&e.0,
)
}
        // Convert `${ expr }` to `$v->splice(new ExprPos(...), "$var_name", expr)`
ETSplice(e) => {
// Assumes extract and replace has already occurred
let s = if let Lvar(lid) = &e.1 {
let aast::Lid(_, (_, lid)) = &**lid;
Expr::new(Pos::make_none(), Expr_::String(BString::from(lid.clone())))
} else {
null_literal()
};
v_meth_call("splice", vec![pos, s, *e.clone()], &e.0)
}
// Convert anything else to $v->unsupportedSyntax().
// Type checking should prevent us hitting these cases.
_ => v_meth_call(
"unsupportedSyntax",
vec![string_literal(&format!("{:#?}", &e.1))],
&e.0,
),
}
}
/// Convert expression tree statements to method calls.
fn rewrite_stmts(stmts: &[Stmt]) -> Vec<Expr> {
stmts.iter().filter_map(rewrite_stmt).collect()
}
fn rewrite_stmt(s: &Stmt) -> Option<Expr> {
use aast::Stmt_::*;
let pos = exprpos(&s.0);
match &s.1 {
Expr(e) => Some(rewrite_expr(&e)),
Return(e) => match &**e {
// Convert `return ...;` to `$v->returnStatement(new ExprPos(...), $v->...)`.
Some(e) => Some(v_meth_call(
"returnStatement",
vec![pos, rewrite_expr(&e)],
&s.0,
)),
// Convert `return;` to `$v->returnStatement(new ExprPos(...), null)`.
None => Some(v_meth_call(
"returnStatement",
vec![pos, null_literal()],
&s.0,
)),
},
// Convert `if (...) {...} else {...}` to
// `$v->ifStatement(new ExprPos(...), $v->..., vec[...], vec[...])`.
If(if_stmt) => {
let (e, then_block, else_block) = &**if_stmt;
let then_stmts = rewrite_stmts(then_block);
let else_stmts = rewrite_stmts(else_block);
Some(v_meth_call(
"ifStatement",
vec![
pos,
rewrite_expr(&e),
vec_literal(then_stmts),
vec_literal(else_stmts),
],
&s.0,
))
}
// Convert `while (...) {...}` to
// `$v->whileStatement(new ExprPos(...), $v->..., vec[...])`.
While(w) => {
let (e, body) = &**w;
let body_stmts = rewrite_stmts(body);
Some(v_meth_call(
"whileStatement",
vec![pos, rewrite_expr(&e), vec_literal(body_stmts)],
&s.0,
))
}
// Convert `for (...; ...; ...) {...}` to
// `$v->forStatement(new ExprPos(...), vec[...], ..., vec[...], vec[...])`.
For(w) => {
let (init, cond, incr, body) = &**w;
let init_exprs = init.iter().map(rewrite_expr).collect();
let cond_expr = match cond {
Some(cond) => rewrite_expr(cond),
None => null_literal(),
};
let incr_exprs = incr.iter().map(rewrite_expr).collect();
let body_stmts = rewrite_stmts(body);
Some(v_meth_call(
"forStatement",
vec![
pos,
vec_literal(init_exprs),
cond_expr,
vec_literal(incr_exprs),
vec_literal(body_stmts),
],
&s.0,
))
}
// Convert `break;` to `$v->breakStatement(new ExprPos(...))`
Break => Some(v_meth_call("breakStatement", vec![pos], &s.0)),
// Convert `continue;` to `$v->continueStatement(new ExprPos(...))`
Continue => Some(v_meth_call("continueStatement", vec![pos], &s.0)),
Noop => None,
// Convert anything else to $v->unsupportedSyntax().
// Type checking should prevent us hitting these cases.
_ => Some(v_meth_call(
"unsupportedSyntax",
vec![string_literal(&format!("{:#?}", &s.1))],
&s.0,
)),
}
}
fn null_literal() -> Expr {
Expr::new(Pos::make_none(), Expr_::Null)
}
fn string_literal(s: &str) -> Expr {
Expr::new(Pos::make_none(), Expr_::String(BString::from(s)))
}
fn int_literal(i: usize) -> Expr {
Expr::new(Pos::make_none(), Expr_::Int(i.to_string()))
}
fn vec_literal(items: Vec<Expr>) -> Expr {
let positions: Vec<_> = items.iter().map(|x| &x.0).collect();
let position = merge_positions(&positions);
    let fields: Vec<_> = items.into_iter().map(ast::Afield::AFvalue).collect();
Expr::new(
position,
Expr_::Collection(Box::new((make_id("vec"), None, fields))),
)
}
fn dict_literal(key_value_pairs: Vec<(Expr, Expr)>) -> Expr {
let pos = Pos::make_none();
let fields = key_value_pairs
.into_iter()
.map(|(k, v)| ast::Afield::AFkvalue(k, v))
.collect();
Expr::new(
pos,
Expr_::Collection(Box::new((make_id("dict"), None, fields))),
)
}
fn make_id(name: &str) -> ast::Id {
ast::Id(Pos::make_none(), name.into())
}
/// Build `new classname(args)`
fn new_obj(pos: &Pos, classname: &str, args: Vec<Expr>) -> Expr {
Expr::new(
pos.clone(),
Expr_::New(Box::new((
ClassId(
pos.clone(),
ClassId_::CIexpr(Expr::new(
pos.clone(),
Expr_::Id(Box::new(Id(pos.clone(), classname.to_string()))),
)),
),
vec![],
args,
None,
pos.clone(),
))),
)
}
/// Build `$v->meth_name(args)`.
fn v_meth_call(meth_name: &str, args: Vec<Expr>, pos: &Pos) -> Expr {
let receiver = Expr::mk_lvar(pos, "$v");
let meth = Expr::new(
pos.clone(),
Expr_::Id(Box::new(ast::Id(pos.clone(), meth_name.into()))),
);
let c = Expr_::Call(Box::new((
Expr::new(
pos.clone(),
Expr_::ObjGet(Box::new((
receiver,
meth,
OgNullFlavor::OGNullthrows,
false,
))),
),
vec![],
args,
None,
)));
Expr::new(pos.clone(), c)
}
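/// Build `receiver->meth_name(args)`.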
fn meth_call(receiver: Expr, meth_name: &str, args: Vec<Expr>, pos: &Pos) -> Expr {
let meth = Expr::new(
pos.clone(),
Expr_::Id(Box::new(ast::Id(pos.clone(), meth_name.into()))),
);
let c = Expr_::Call(Box::new((
Expr::new(
pos.clone(),
Expr_::ObjGet(Box::new((
receiver,
meth,
OgNullFlavor::OGNullthrows,
false,
))),
),
vec![],
args,
None,
)));
Expr::new(pos.clone(), c)
}
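/// Build `classname::meth_name(args)`.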
fn static_meth_call(classname: &str, meth_name: &str, args: Vec<Expr>, pos: &Pos) -> Expr {
let callee = Expr::new(
pos.clone(),
Expr_::ClassConst(Box::new((
// TODO: Refactor ClassId creation with new_obj
ClassId(
pos.clone(),
ClassId_::CIexpr(Expr::new(
pos.clone(),
Expr_::Id(Box::new(Id(pos.clone(), classname.to_string()))),
)),
),
(pos.clone(), meth_name.to_string()),
))),
);
Expr::new(
pos.clone(),
Expr_::Call(Box::new((callee, vec![], args, None))),
)
}
/// Join a slice of positions together into a single, larger position.
fn merge_positions(positions: &[&Pos]) -> Pos {
positions
.iter()
.fold(None, |acc, pos| match acc {
Some(res) => Some(Pos::merge(&res, pos).expect("Positions should be in the same file")),
None => Some((*pos).clone()),
})
.unwrap_or(Pos::make_none())
}
/// Extracts all the expression tree splices and replaces them with
/// placeholder variables.
///
/// ```
/// $c = Code`__splice__($x->foo()) + __splice__($y)`;
/// $c_after = Code`$__0 + $__1`;
/// ```
///
/// Returns the updated Expr and a vec of the extracted spliced expr
/// representing `vec![$x->foo(), $y]`.
fn extract_and_replace_splices(e: &Expr) -> (Expr, Vec<Expr>) {
let mut e_copy = e.clone();
let mut visitor = SpliceExtractor {
extracted_splices: vec![],
};
visitor.visit_expr(&mut (), &mut e_copy).unwrap();
    (e_copy, visitor.extracted_splices)
}
struct SpliceExtractor {
extracted_splices: Vec<Expr>,
}
impl<'ast> VisitorMut<'ast> for SpliceExtractor {
type P = AstParams<(), ()>;
fn object(&mut self) -> &mut dyn VisitorMut<'ast, P = Self::P> {
self
}
fn visit_expr_(&mut self, env: &mut (), e: &mut Expr_) -> Result<(), ()> {
use aast::Expr_::*;
match e {
ETSplice(ex) => {
let len = self.extracted_splices.len();
self.extracted_splices.push((**ex).clone());
*e = ETSplice(Box::new(temp_lvar(&ex.0, len)));
}
_ => e.recurse(env, self.object())?,
}
Ok(())
}
}
fn temp_lvar_string(num: usize) -> String {
format!("$__{}", num.to_string())
}
fn temp_lvar(pos: &Pos, num: usize) -> Expr {
Expr::mk_lvar(pos, &temp_lvar_string(num))
}
/// Given a Pos, returns `new ExprPos(...)`.
/// For Pos.none or an invalid position, returns a null literal instead.
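/// e.g. a span from line 2, column 4 to line 3, column 1 becomes
/// (roughly) `new \ExprPos(2, 4, 3, 1)`.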
fn exprpos(pos: &Pos) -> Expr {
if pos.is_none() || !pos.is_valid() {
null_literal()
} else {
let ((start_lnum, start_bol, start_cnum), (end_lnum, end_bol, end_cnum)) =
pos.to_start_and_end_lnum_bol_cnum();
new_obj(
&pos,
"\\ExprPos",
vec![
int_literal(start_lnum),
int_literal(start_cnum - start_bol),
int_literal(end_lnum),
int_literal(end_cnum - end_bol),
],
)
}
}
|
wrap_fun_
|
4.2.test.js
|
import test from 'tape';
import {
checkCriteria,
baseCase,
} from './4.2';
test('4.2: checkCriteria()', (t) => {
{
const msg = '4.2 example 1';
const expected = { last: '3', double: 3 };
const actual = '112233'.split('').reduce(checkCriteria, baseCase);
t.deepEqual(actual, expected, msg);
}
{
const msg = '4.2 example 2';
const expected = { last: '4', double: 0 };
const actual = '123444'.split('').reduce(checkCriteria, baseCase);
t.deepEqual(actual, expected, msg);
}
{
const msg = '4.2 example 3';
const expected = { last: '2', double: 1 };
const actual = '111122'.split('').reduce(checkCriteria, baseCase);
|
t.deepEqual(actual, expected, msg);
}
t.end();
});
| |
mimesis.py
|
from mimesis.providers.base import BaseProvider
from .helpers import generate
class MPANProvider(BaseProvider):
class Meta:
|
@staticmethod
def generate() -> str:
return generate()
|
name = "mpan"
|
apps.rs
|
use std::cmp::Reverse;
use std::ffi::CString;
use super::{Entry, EvalInfo};
use crate::usage_cache::Usage;
use crate::DesktopEntry;
const CACHE_PATH: &str = concat!(crate::prog_name!(), ".cache");
pub struct AppsMode {
entries: Vec<DesktopEntry>,
term: Vec<CString>,
usage: Usage,
}
impl AppsMode {
pub fn new(mut entries: Vec<DesktopEntry>, term: Vec<CString>) -> Self {
let usage = Usage::from_path(CACHE_PATH);
entries.sort_by_key(|e| Reverse(usage.entry_count(&e.desktop_fname)));
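        // Most frequently launched entries sort first.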
Self {
entries,
term,
usage,
}
}
pub fn eval(&mut self, info: EvalInfo<'_>) -> std::convert::Infallible {
let idx = info.index.unwrap();
let entry = &self.entries[idx];
let exec = if info.subindex == 0 {
&entry.entry.exec
} else {
&entry.actions[info.subindex - 1].exec
};
let args = shlex::split(exec)
.unwrap()
.into_iter()
.filter(|s| !s.starts_with('%')) // TODO: use placeholders somehow
.map(|s| CString::new(s).unwrap());
self.usage
.increment_entry_usage(entry.desktop_fname.clone());
self.usage.try_update_cache(CACHE_PATH);
|
};
crate::exec::exec(term, args, info.input_value)
}
pub fn entries_len(&self) -> usize {
self.entries.len()
}
pub fn subentries_len(&self, idx: usize) -> usize {
self.entries.get(idx).map(|e| e.actions.len()).unwrap_or(0)
}
pub fn entry(&self, idx: usize, subidx: usize) -> Entry<'_> {
let entry = &self.entries[idx];
Entry {
name: entry.entry.name.as_ref(),
subname: Some(entry.subname(subidx).unwrap_or("Default Action")),
icon: entry.icon(subidx).map(|i| i.as_image()),
}
}
pub fn text_entries(&self) -> impl Iterator<Item = &str> + super::ExactSizeIterator {
self.entries.iter().map(|e| e.name_with_keywords.as_str())
}
}
|
let term = if entry.is_terminal {
Some(std::mem::take(&mut self.term))
} else {
None
|
test.difference_20211204130540.js
|
import difference from "./../src/difference.js";
import chai from "./../node_modules/chai/chai.js";
import { assert } from "chai";
describe('Difference function', function(){
it('difference([2,1],[2,3]) should be equal to 1',function(){
        assert.equal(difference([2, 1], [2, 3]), 1);
|
})
|
})
|
matching.py
|
import pandas as pd
import argparse
import logging
import sys
import json
def
|
(log_file, verbose):
# Setup logger - (Python logger breaks PEP8 by default)
logger = logging.getLogger(__name__)
if verbose:
logger.setLevel('DEBUG')
# file_handler logs to file, stream_handler to console
file_handler = logging.FileHandler(log_file)
stream_handler = logging.StreamHandler()
# formatter sets log format
formatter = logging.Formatter(
'%(asctime)s - %(name)s : %(levelname)s - %(message)s')
# add formatter to both handlers
file_handler.setFormatter(formatter)
stream_handler.setFormatter(formatter)
# add both handlers to logger
logger.addHandler(file_handler)
logger.addHandler(stream_handler)
return logger
def compare_addresses(args):
"""Compare the addresses of two files
"""
logger.info('Started reading BOSA address file')
try:
bosa = pd.read_csv(args.input_file_1)
logger.info('Read the BOSA address file')
except IOError as io:
logger.fatal(io)
sys.exit(1)
logger.info('Started reading comparison file')
try:
comparison = pd.read_csv(args.input_file_2)
logger.info('Read the comparison file')
except IOError as io:
logger.fatal(io)
sys.exit(1)
comp_keys = []
bosa_ids = []
for comp_key, bosa_key in args.mapping.items():
try:
comp_keys.append(comp_key)
bosa_ids.append(bosa.columns.get_loc(bosa_key))
except KeyError as ke:
logger.error(
'Column %s of column mapping (%s -> %s) not found in BOSA file', ke, comp_key, bosa_key)
sys.exit(1)
address_dict = {}
logger.info('Building data structure to perform matching')
for i, row in enumerate(bosa.values):
if i % 50_000 == 0:
logger.info('Processed %i / %i addresses', i, len(bosa))
        address_dict[tuple(el.lower() if isinstance(el, str) else el
                           for el in row[bosa_ids])] = row
extended = perform_exact_matching(
bosa, comparison, address_dict, comp_keys)
try:
extended.to_csv(args.output_file, index=False)
except IOError as io:
logger.fatal(io)
sys.exit(1)
def perform_exact_matching(bosa, comparison, address_dict, comp_keys):
"""Match the addresses in the comparison file and add address_id and coordinates when matched
"""
addr_id = bosa.columns.get_loc('address_id')
lon_id = bosa.columns.get_loc('EPSG:4326_lon')
lat_id = bosa.columns.get_loc('EPSG:4326_lat')
extended = []
logger.info('Performing matching')
for i, row in comparison.iterrows():
if i % 50_000 == 0:
logger.info('Matched %i / %i addresses', i, len(comparison))
try:
            key = tuple(el.lower() if isinstance(el, str) else el
                        for el in row[comp_keys])
except KeyError as ke:
logger.error('Column %s not found in the comparison file', ke)
sys.exit(1)
if key in address_dict:
# If the address is matched add address_id and coordinates to it
data = address_dict[key]
row['address_id'] = data[addr_id]
row['EPSG:4326_lon'] = data[lon_id]
row['EPSG:4326_lat'] = data[lat_id]
extended.append(row)
extended = pd.DataFrame(extended)
# Convert column to int type that can handle NaN
extended['address_id'] = extended['address_id'].astype('Int64')
return extended
if __name__ == "__main__":
# Setup argument parser
parser = argparse.ArgumentParser(
description='Compare addresses between two csv files.')
parser.add_argument(
'input_file_1', help='BOSA address file, in csv format')
parser.add_argument(
'input_file_2', help='Address file to compare to BOSA address file, in csv format')
parser.add_argument('output_file', help='Name of file to write output to')
parser.add_argument('--mode', default='exact',
choices=['exact'], help='How to compare the addresses.')
parser.add_argument(
'--mapping', default={}, type=json.loads, help='Column names to consider in the comparison and how they map to the \
column names of the BOSA address file. (as a json dict of {comparison_key: bosa_key})')
parser.add_argument('--log_name', default="compare.log",
help='name of the log file')
parser.add_argument('--verbose', action="store_true",
help="toggle verbose output", default=False)
args = parser.parse_args()
logger = get_best_logger(args.log_name, args.verbose)
compare_addresses(args)
|
get_best_logger
|
main.go
|
package main
import (
"fmt"
"log"
"path/filepath"
"github.com/leslie-wang/go-face"
)
// Path to directory with models and test images. Here it's assumed it
// points to the <https://github.com/Kagami/go-face-testdata> clone.
const dataDir = "testdata"
// This example shows the basic usage of the package: create a
// recognizer, recognize faces, and classify them using a few known ones.
func main() {
// Init the recognizer.
rec, err := face.NewRecognizer(dataDir)
if err != nil {
log.Fatalf("Can't init face recognizer: %v", err)
}
// Free the resources when you're finished.
defer rec.Close()
// Test image with 10 faces.
testImagePristin := filepath.Join(dataDir, "pristin.jpg")
// Recognize faces on that image.
faces, err := rec.RecognizeFile(testImagePristin)
if err != nil {
log.Fatalf("Can't recognize: %v", err)
}
if len(faces) != 10 {
log.Fatalf("Wrong number of faces")
}
// Fill known samples. In the real world you would use a lot of images
// for each person to get better classification results but in our
// example we just get them from one big image.
var samples []face.Descriptor
var cats []int32
for i, f := range faces {
samples = append(samples, f.Descriptor)
// Each face is unique on that image so goes to its own category.
|
labels := []string{
"Sungyeon", "Yehana", "Roa", "Eunwoo", "Xiyeon",
"Kyulkyung", "Nayoung", "Rena", "Kyla", "Yuha",
}
// Pass samples to the recognizer.
rec.SetSamples(samples, cats)
// Now let's try to classify some not yet known image.
testImageNayoung := filepath.Join(dataDir, "nayoung.jpg")
nayoungFace, err := rec.RecognizeSingleFile(testImageNayoung)
if err != nil {
log.Fatalf("Can't recognize: %v", err)
}
if nayoungFace == nil {
log.Fatalf("Not a single face on the image")
}
catID := rec.Classify(nayoungFace.Descriptor)
if catID < 0 {
log.Fatalf("Can't classify")
}
// Finally print the classified label. It should be "Nayoung".
fmt.Println(labels[catID])
}
|
cats = append(cats, int32(i))
}
// Name the categories, i.e. people on the image.
|
exponentation.rs
|
use crate::util::{ExprFactory, StmtLike};
use ast::*;
use swc_common::{Fold, FoldWith, Mark, Span, Spanned, Visit, VisitWith, DUMMY_SP};
/// `@babel/plugin-transform-exponentiation-operator`
///
/// # Example
///
/// ## In
///
/// ```js
/// let x = 10 ** 2;
///
/// x **= 3;
/// ```
///
/// ## Out
///
/// ```js
/// let x = Math.pow(10, 2);
///
/// x = Math.pow(x, 3);
/// ```
pub fn exponentation() -> impl Fold<Module> + Clone + Copy {
Exponentation
}
#[derive(Debug, Clone, Copy)]
struct Exponentation;
#[derive(Default)]
struct AssignFolder {
vars: Vec<VarDeclarator>,
}
impl Fold<Expr> for AssignFolder {
fn fold(&mut self, e: Expr) -> Expr {
let e = e.fold_children(self);
match e {
Expr::Assign(AssignExpr {
span,
left,
op: op!("**="),
right,
}) => {
let lhs: Ident = match left {
PatOrExpr::Pat(box Pat::Ident(ref i))
| PatOrExpr::Expr(box Expr::Ident(ref i)) => i.clone(),
// unimplemented
PatOrExpr::Expr(ref e) => {
let mark = Mark::fresh(Mark::root());
let span = e.span().apply_mark(mark);
self.vars.push(VarDeclarator {
span: DUMMY_SP,
name: quote_ident!(span, "ref").into(),
init: Some(e.clone()),
});
quote_ident!(span, "ref")
}
left => {
return Expr::Assign(AssignExpr {
span,
left,
op: op!("="),
right,
});
}
};
return Expr::Assign(AssignExpr {
span,
left,
op: op!("="),
right: box mk_call(span, box lhs.into(), right),
});
}
Expr::Bin(BinExpr {
span,
left,
op: op!("**"),
right,
}) => mk_call(span, left, right),
_ => e,
}
}
}
impl<T: StmtLike + VisitWith<ShouldFold>> Fold<Vec<T>> for Exponentation
where
Vec<T>: FoldWith<Self>,
{
fn fold(&mut self, stmts: Vec<T>) -> Vec<T> {
if !should_fold(&stmts) {
return stmts;
}
let stmts = stmts.fold_children(self);
let mut buf = vec![];
for stmt in stmts {
match stmt.try_into_stmt() {
Err(module_item) => buf.push(module_item),
Ok(stmt) => {
let mut folder = AssignFolder::default();
let stmt = stmt.fold_with(&mut folder);
// Add variable declaration
// e.g. var ref
if !folder.vars.is_empty() {
buf.push(T::from_stmt(Stmt::Decl(Decl::Var(VarDecl {
span: DUMMY_SP,
kind: VarDeclKind::Var,
decls: folder.vars,
}))));
}
buf.push(T::from_stmt(stmt));
}
}
}
buf
}
}
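/// Build a `Math.pow(left, right)` call expression.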
fn mk_call(span: Span, left: Box<Expr>, right: Box<Expr>) -> Expr
|
fn should_fold<N>(node: &N) -> bool
where
N: VisitWith<ShouldFold>,
{
let mut v = ShouldFold { found: false };
node.visit_with(&mut v);
v.found
}
struct ShouldFold {
found: bool,
}
impl Visit<BinExpr> for ShouldFold {
fn visit(&mut self, e: &BinExpr) {
if e.op == op!("**") {
self.found = true;
}
}
}
impl Visit<AssignExpr> for ShouldFold {
fn visit(&mut self, e: &AssignExpr) {
if e.op == op!("**=") {
self.found = true;
}
}
}
#[cfg(test)]
mod tests {
use super::*;
test!(Exponentation, babel_binary, "2 ** 2", "Math.pow(2, 2)");
test_exec!(
ignore,
|_| Exponentation,
babel_comprehensive,
r#"expect(2 ** 3).toBe(8);
expect(3 * 2 ** 3).toBe(24);
var x = 2;
expect(2 ** ++x).toBe(8);
expect(2 ** -1 * 2).toBe(1);
var calls = 0;
var q = {q: 3};
var o = {
get p() {
calls++;
return q;
}
};
o.p.q **= 2;
expect(calls).toBe(1);
expect(o.p.q).toBe(9);
expect(2 ** (3 ** 2)).toBe(512);
expect(2 ** 3 ** 2).toBe(512);"#
);
test_exec!(
// FIXME
ignore,
|_| Exponentation,
babel_memoize_object,
r#"var counters = 0;
Object.defineProperty(global, "reader", {
get: function () {
counters += 1;
return { x: 2 };
},
configurable: true
});
reader.x **= 2;
expect(counters).toBe(1);"#
);
test!(
Exponentation,
assign,
r#"x **= 3"#,
r#"x = Math.pow(x, 3)"#,
ok_if_code_eq
);
// test!(
// Exponentation,
// babel_4403,
// "var a, b;
// a[`${b++}`] **= 1;",
// "var _ref;
// var a, b;
// _ref = `${b++}`, a[_ref] = Math.pow(a[_ref], 1);"
// );
}
|
{
// Math.pow()
Expr::Call(CallExpr {
span,
callee: member_expr!(span, Math.pow).as_callee(),
args: vec![left.as_arg(), right.as_arg()],
})
}
|
format_utils.py
|
# pylint: disable=invalid-name
""" Template formatting helpers """
from datetime import datetime
import locale
import re
from django import template
from django.utils.safestring import mark_safe
from django.utils.timesince import timesince
locale.setlocale(locale.LC_ALL, '')
register = template.Library()
@register.filter
def linebreaksli(value):
""" Converts strings with newlines into <li></li>s """
value = re.sub(r'\r\n|\r|\n', '\n', value.strip()) # normalize newlines
lines = re.split('\n', value)
lines = ['<li>%s</li>' % line for line in lines if line and not line.isspace()]
return mark_safe('\n'.join(lines))
@register.filter
def date_formatter(value):
""" Changes date format from dd/mm/yyyy to dd mmm yyyy """
if value is None or value == '':
return ''
try:
date = datetime.strptime(value, '%d/%m/%Y')
except ValueError:
date = None
if date is None:
date = datetime.strptime(value, '%b %d, %Y')
return date.strftime('%d %b %Y')
@register.simple_tag()
def response(field, size=None, trail='', as_date=False):
""" Return the required field value or the not-entered span """
if field.strip():
return '%s%s' % (date_formatter(field) if as_date else field, trail)
style = ('min-width: %spx' % size) if size is not None else ''
return '<span class="form-entry not-complete" style="%s"></span>' % style
@register.simple_tag()
def required(field, size=None, trail=''):
""" Return the required field value or the not-entered span """
return response(field, size, trail)
@register.simple_tag(takes_context=True)
def checkbox(context, *args, **kwargs):
"""
Return a checkbox icon, checked if all args true.
Standalone arguments are evaluated as booleans according to normal python
rules for truthy values. A boolean is itself; an empty string is False
while a non-empty string is True; etc.
Keyword arguments are treated as the key being a question key in the
responses dict, and the value as a matching value with the response. If the
response is a list, the value matches if its in the list.
The weakness of this tag is that it can't handle negation (e.g., a response
doesn't equal a particular value). I don't have a clever way to handle that
here that doesn't involve adding syntax or unconventional usage (e.g.,
'^value' means not value).
NOTE: using the tag with no arguments will result in a checked box since
all() evaluates empty lists to True. To simply print an empty checkbox,
pass a False value as an argument.
"""
args_pass = all(args)
kwargs_pass = all([value in context['responses'].get(question, '')
for question, value in kwargs.items()])
return mark_safe('<i class="fa fa%s-square-o" aria-hidden="true"></i>' %
('-check' if args_pass and kwargs_pass else ''))
@register.filter
def claimantize(value):
""" Summarize 'lives with' as Claimant 1, 2, or both """
if 'you' in value:
|
elif 'spouse' in value:
return 'Claimant 2'
elif 'both' in value:
return 'Claimant 1 & Claimant 2'
return value
@register.filter
def age(date):
"""
Return the difference between now and date in the largest unit.
This uses Django's timesince filter but takes only the first term,
printing '46 years' instead of print '46 years, 7 months'.
"""
try:
birth = datetime.strptime(date, '%b %d, %Y')
except ValueError:
try:
            # Fall back to the dd/mm/yyyy format also handled by date_formatter
            birth = datetime.strptime(date, '%d/%m/%Y')
except ValueError:
birth = None
if birth is not None:
return timesince(birth).split(',')[0]
return ''
@register.filter
def money(amount, symbol=True):
""" Return a properly formatted currency string including symbol """
try:
return locale.currency(float(amount), symbol, grouping=True)
except ValueError:
pass
return ''
@register.simple_tag(takes_context=True)
def payorize(context):
payor = 'the payor'
child_support_payor = context.get('child_support_payor', None)
if child_support_payor == 'Myself (Claimant 1)':
payor = context.get('name_you', child_support_payor)
elif child_support_payor == 'My Spouse (Claimant 2)':
payor = context.get('name_spouse', child_support_payor)
elif child_support_payor == 'Both myself and my spouse':
payor = '{} and {}'.format(context.get('name_you', 'myself'),
context.get('name_spouse', 'my spouse'))
return payor
@register.filter
def child_or_children(value):
""" Return num followed by 'child' or 'children' as appropriate """
try:
value = int(value)
except ValueError:
return ''
if value == 1:
return '1 child'
    return '%d children' % value
@register.filter
def integer(value):
""" Return value as an int or nothing """
try:
return int(float(value))
except ValueError:
return ''
|
return 'Claimant 1'
|
api_op_VoteOnProposal.go
|
// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package managedblockchain
import (
"context"
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/internal/awsutil"
"github.com/aws/aws-sdk-go-v2/private/protocol"
)
// Please also see https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/VoteOnProposalInput
type VoteOnProposalInput struct {
_ struct{} `type:"structure"`
// The unique identifier of the network.
//
// NetworkId is a required field
NetworkId *string `location:"uri" locationName:"networkId" min:"1" type:"string" required:"true"`
// The unique identifier of the proposal.
//
// ProposalId is a required field
ProposalId *string `location:"uri" locationName:"proposalId" min:"1" type:"string" required:"true"`
// The value of the vote.
//
// Vote is a required field
Vote VoteValue `type:"string" required:"true" enum:"true"`
// The unique identifier of the member casting the vote.
//
// VoterMemberId is a required field
VoterMemberId *string `min:"1" type:"string" required:"true"`
}
// String returns the string representation
func (s VoteOnProposalInput) String() string {
return awsutil.Prettify(s)
}
// Validate inspects the fields of the type to determine if they are valid.
func (s *VoteOnProposalInput) Validate() error {
invalidParams := aws.ErrInvalidParams{Context: "VoteOnProposalInput"}
if s.NetworkId == nil {
invalidParams.Add(aws.NewErrParamRequired("NetworkId"))
}
if s.NetworkId != nil && len(*s.NetworkId) < 1 {
invalidParams.Add(aws.NewErrParamMinLen("NetworkId", 1))
}
if s.ProposalId == nil {
invalidParams.Add(aws.NewErrParamRequired("ProposalId"))
}
if s.ProposalId != nil && len(*s.ProposalId) < 1 {
invalidParams.Add(aws.NewErrParamMinLen("ProposalId", 1))
}
if len(s.Vote) == 0 {
invalidParams.Add(aws.NewErrParamRequired("Vote"))
}
if s.VoterMemberId == nil {
invalidParams.Add(aws.NewErrParamRequired("VoterMemberId"))
}
if s.VoterMemberId != nil && len(*s.VoterMemberId) < 1 {
invalidParams.Add(aws.NewErrParamMinLen("VoterMemberId", 1))
}
if invalidParams.Len() > 0 {
return invalidParams
}
return nil
}
// MarshalFields encodes the AWS API shape using the passed in protocol encoder.
func (s VoteOnProposalInput) MarshalFields(e protocol.FieldEncoder) error {
e.SetValue(protocol.HeaderTarget, "Content-Type", protocol.StringValue("application/json"), protocol.Metadata{})
if len(s.Vote) > 0 {
v := s.Vote
metadata := protocol.Metadata{}
e.SetValue(protocol.BodyTarget, "Vote", protocol.QuotedValue{ValueMarshaler: v}, metadata)
}
if s.VoterMemberId != nil {
v := *s.VoterMemberId
metadata := protocol.Metadata{}
e.SetValue(protocol.BodyTarget, "VoterMemberId", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)
}
|
v := *s.NetworkId
metadata := protocol.Metadata{}
e.SetValue(protocol.PathTarget, "networkId", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)
}
if s.ProposalId != nil {
v := *s.ProposalId
metadata := protocol.Metadata{}
e.SetValue(protocol.PathTarget, "proposalId", protocol.QuotedValue{ValueMarshaler: protocol.StringValue(v)}, metadata)
}
return nil
}
// Please also see https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/VoteOnProposalOutput
type VoteOnProposalOutput struct {
_ struct{} `type:"structure"`
}
// String returns the string representation
func (s VoteOnProposalOutput) String() string {
return awsutil.Prettify(s)
}
// MarshalFields encodes the AWS API shape using the passed in protocol encoder.
func (s VoteOnProposalOutput) MarshalFields(e protocol.FieldEncoder) error {
return nil
}
const opVoteOnProposal = "VoteOnProposal"
// VoteOnProposalRequest returns a request value for making API operation for
// Amazon Managed Blockchain.
//
// Casts a vote for a specified ProposalId on behalf of a member. The member
// to vote as, specified by VoterMemberId, must be in the same AWS account as
// the principal that calls the action.
//
// // Example sending a request using VoteOnProposalRequest.
// req := client.VoteOnProposalRequest(params)
// resp, err := req.Send(context.TODO())
// if err == nil {
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/managedblockchain-2018-09-24/VoteOnProposal
func (c *Client) VoteOnProposalRequest(input *VoteOnProposalInput) VoteOnProposalRequest {
op := &aws.Operation{
Name: opVoteOnProposal,
HTTPMethod: "POST",
HTTPPath: "/networks/{networkId}/proposals/{proposalId}/votes",
}
if input == nil {
input = &VoteOnProposalInput{}
}
req := c.newRequest(op, input, &VoteOnProposalOutput{})
return VoteOnProposalRequest{Request: req, Input: input, Copy: c.VoteOnProposalRequest}
}
// VoteOnProposalRequest is the request type for the
// VoteOnProposal API operation.
type VoteOnProposalRequest struct {
*aws.Request
Input *VoteOnProposalInput
Copy func(*VoteOnProposalInput) VoteOnProposalRequest
}
// Send marshals and sends the VoteOnProposal API request.
func (r VoteOnProposalRequest) Send(ctx context.Context) (*VoteOnProposalResponse, error) {
r.Request.SetContext(ctx)
err := r.Request.Send()
if err != nil {
return nil, err
}
resp := &VoteOnProposalResponse{
VoteOnProposalOutput: r.Request.Data.(*VoteOnProposalOutput),
response: &aws.Response{Request: r.Request},
}
return resp, nil
}
// VoteOnProposalResponse is the response type for the
// VoteOnProposal API operation.
type VoteOnProposalResponse struct {
*VoteOnProposalOutput
response *aws.Response
}
// SDKResponseMetdata returns the response metadata for the
// VoteOnProposal request.
func (r *VoteOnProposalResponse) SDKResponseMetdata() *aws.Response {
return r.response
}
|
if s.NetworkId != nil {
|
bot.js
|
'use strict';
const _ = require('lodash');
const Promise = require('bluebird');
var config = require('./config');
var db = require('./services/db');
var commands = require('./commands');
const tasks = require('./tasks');
const warn = _.memoize(console.warn);
const bot = require('./services/irc').setUp(config.irc);
if (!config.db) {
// Old config. Maybe we should give the user an option to rewrite the config
console.error("Config format has changed, please reformat");
process.exit(1);
}
if (config.db.enabled) {
db.setUp(config.db, commands);
} else {
console.log("The following modules, which require database connectivity, have been disabled: ["+db.listModules().join(", ")+"]");
}
function outputResponse(target, messages) {
if (!messages) {
return;
}
if (typeof messages === 'string') {
bot.say(target, messages);
} else if (Array.isArray(messages)) {
for (let i = 0; i < messages.length; i++) {
outputResponse(target, messages[i]);
}
} else if (_.isObject(messages) && typeof messages.then === 'function') {
messages.then(function (results) {
outputResponse(target, results);
}, function (error) {
handleError(target, error);
});
} else if (typeof messages === 'object' && ('response_type' in messages)) {
if ('target' in messages) {
target = messages['target'];
}
switch (messages['response_type']) {
case 'text':
bot.say(target, messages['message']);
break;
case 'action':
bot.action(target, messages['message']);
break;
default:
console.log("Message containing invalid `response_type` passed to outputResponse()");
}
} else {
throw 'Invalid `messages` argument passed to outputResponse()';
}
}
function defaultAllow ({isPM, isMod, isAuthenticated}) { // The default allow() function that gets used for a command if allow() is not provided
    return !isPM || (isMod && isAuthenticated);
}
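// i.e. anyone may use the command in a channel; in a PM it is only allowed
// for users who are both mods and identified with NickServ.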
// Main listener for channel messages/PMs
function executeCommands (event, author, channel, text) {
let isPM = channel === bot.nick;
let target = isPM ? author : channel;
for (let i in commands[event]) {
let message_match = (commands[event][i].message_regex || /.*/).exec(text);
let author_match = (commands[event][i].author_regex || /.*/).exec(author);
if (message_match && author_match && author !== bot.nick && (isPM || checkEnabled(channel, i, config.irc.channels[channel]))) {
Promise.join(checkIfUserIsMod(author), checkAuthenticated(author), (isMod, isAuthenticated) => {
if ((commands[event][i].allow || defaultAllow)({isPM, isMod, isAuthenticated})) {
outputResponse(target, commands[event][i].response({bot, message_match, author_match, channel, isMod, isAuthenticated, eventType: event, isPM}));
} else if (config.debug) {
outputResponse(target, "You are not authorised to run that command");
}
}).catch(_.partial(handleError, target));
|
}
function handleError (target, error) {
if (error.error_message) {
outputResponse(target, error.error_message);
}
if (_.isError(error)) {
console.error(error);
}
}
function checkIfUserIsMod (username) { // Returns a Promise that will resolve as true if the user is in the mod database, and false otherwise
if (!config.db.enabled || db.conn == null) {
return Promise.resolve(true);
}
return db.conn.query('SELECT * FROM User U JOIN Alias A ON U.UserID = A.UserID WHERE A.Alias = ? AND A.isNick = TRUE', [username]).then(res => !!res.length);
}
function checkAuthenticated (username) { // Returns a Promise that will resolve as true if the user is identified, and false otherwise
bot.say('NickServ', `STATUS ${username}`);
var awaitResponse = () => new Promise(resolve => {
bot.once('notice', (nick, to, text) => {
if (nick === 'NickServ' && to === bot.nick && text.indexOf(`STATUS ${username} `) === 0) {
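                // NickServ STATUS replies end with a code from 0-3; 3 means the
                // user is identified for that nick.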
resolve(text.slice(-1) === '3');
} else { // The notice was something unrelated, set up the listener again
resolve(awaitResponse());
}
});
});
return awaitResponse().timeout(5000, 'Timed out waiting for NickServ response');
}
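// checkEnabled accepts several config shapes, e.g. (illustrative): `true`
// enables every item, a regex enables items whose names match it, an array
// whitelists item names, a string matches one name exactly, and a function
// is called with the item name and its result coerced to a boolean.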
function checkEnabled (channelName, itemName, itemConfig) {
if (itemConfig === undefined) {
warn(`Warning: No channel-specific configuration found for the channel ${channelName}. All commands on this channel will be ignored.`);
return false;
}
if (_.isBoolean(itemConfig)) {
return itemConfig;
}
if (_.isRegExp(itemConfig)) {
return itemConfig.test(itemName);
}
if (_.isArray(itemConfig)) {
return _.includes(itemConfig, itemName);
}
if (_.isString(itemConfig)) {
return itemConfig === itemName;
}
if (_.isFunction(itemConfig)) {
return !!itemConfig(itemName);
}
warn(`Warning: Failed to parse channel-specific configuration for the channel ${channelName}. All commands on this channel will be ignored.`);
return false;
}
bot.on('error', console.error);
bot.on('message', _.partial(executeCommands, 'message'));
bot.on('join', (chan, user) => executeCommands('join', user, chan));
bot.on('action', _.partial(executeCommands, 'action'));
bot.on('+mode', (chan, by, mode, argument) => executeCommands(`mode +${mode}`, by, chan, argument));
bot.on('-mode', (chan, by, mode, argument) => executeCommands(`mode -${mode}`, by, chan, argument));
function executeTask(taskName) {
const params = tasks[taskName];
const iteratee = params.concurrent ? params.task : _.once(params.task);
_.forOwn(config.irc.tasks, (channelConfig, channel) => {
if (checkEnabled(channel, taskName, channelConfig)) {
outputResponse(channel, iteratee({bot, channel: params.concurrent ? channel : null}));
}
});
}
bot.once('join', () => {
_.forOwn(tasks, (params, taskName) => {
if (params.onStart) {
executeTask(taskName);
}
setInterval(executeTask, params.period * 1000, taskName);
});
});
|
}
}
|
spirv-capabilities.rs
|
//! Test SPIR-V backend capability checks.
#![cfg(all(feature = "wgsl-in", feature = "spv-out"))]
use spirv::Capability as Ca;
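/// Compile `source` as WGSL, emit SPIR-V, and return the set of capabilities
/// the backend requested.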
fn capabilities_used(source: &str) -> naga::FastHashSet<Ca> {
use naga::back::spv;
use naga::valid;
let module = naga::front::wgsl::parse_str(source).unwrap_or_else(|e| {
panic!(
"expected WGSL to parse successfully:\n{}",
e.emit_to_string(source)
);
});
let info = valid::Validator::new(valid::ValidationFlags::all(), valid::Capabilities::all())
.validate(&module)
.expect("validation failed");
let mut words = vec![];
let mut writer = spv::Writer::new(&spv::Options::default()).unwrap();
writer.write(&module, &info, None, &mut words).unwrap();
writer.get_capabilities_used().clone()
}
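/// Assert that compiling `source` requests every capability in `capabilities`.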
fn require(capabilities: &[Ca], source: &str) {
require_and_forbid(capabilities, &[], source);
}
fn require_and_forbid(required: &[Ca], forbidden: &[Ca], source: &str) {
let caps_used = capabilities_used(source);
let missing_caps: Vec<_> = required
.iter()
.filter(|cap| !caps_used.contains(cap))
.cloned()
.collect();
if !missing_caps.is_empty() {
panic!(
"shader code should have requested these caps: {:?}\n\n{}",
missing_caps, source
);
}
let forbidden_caps: Vec<_> = forbidden
.iter()
.filter(|cap| caps_used.contains(cap))
.cloned()
.collect();
if !forbidden_caps.is_empty() {
panic!(
"shader code should not have requested these caps: {:?}\n\n{}",
forbidden_caps, source
);
}
}
#[test]
fn sampler1d() {
require(
&[Ca::Sampled1D],
r#"
@group(0) @binding(0)
var image_1d: texture_1d<f32>;
"#,
);
}
#[test]
fn storage1d() {
require(
&[Ca::Image1D],
r#"
@group(0) @binding(0)
var image_1d: texture_storage_1d<rgba8unorm,write>;
"#,
);
}
#[test]
fn cube_array() {
// ImageCubeArray is only for storage cube array images, which WGSL doesn't
// support
require_and_forbid(
&[Ca::SampledCubeArray],
&[Ca::ImageCubeArray],
r#"
@group(0) @binding(0)
var image_cube: texture_cube_array<f32>;
"#,
);
}
#[test]
fn image_queries() {
require(
&[Ca::ImageQuery],
r#"
fn f(i: texture_2d<f32>) -> vec2<i32> {
return textureDimensions(i);
}
"#,
);
require(
&[Ca::ImageQuery],
r#"
fn f(i: texture_2d_array<f32>) -> i32 {
return textureNumLayers(i);
}
"#,
);
require(
&[Ca::ImageQuery],
r#"
fn f(i: texture_2d<f32>) -> i32 {
return textureNumLevels(i);
}
"#,
);
require(
&[Ca::ImageQuery],
r#"
fn f(i: texture_multisampled_2d<f32>) -> i32 {
return textureNumSamples(i);
}
"#,
);
}
#[test]
fn
|
() {
require(
&[Ca::SampleRateShading],
r#"
@stage(fragment)
fn f(@location(0) @interpolate(perspective, sample) x: f32) { }
"#,
);
require(
&[Ca::SampleRateShading],
r#"
@stage(fragment)
fn f(@builtin(sample_index) x: u32) { }
"#,
);
}
#[test]
fn geometry() {
require(
&[Ca::Geometry],
r#"
@stage(fragment)
fn f(@builtin(primitive_index) x: u32) { }
"#,
);
}
#[test]
fn storage_image_formats() {
require_and_forbid(
&[Ca::Shader],
&[Ca::StorageImageExtendedFormats],
r#"
@group(0) @binding(0)
        var image_rgba16u: texture_storage_2d<rgba16uint, read>;
"#,
);
require(
&[Ca::StorageImageExtendedFormats],
r#"
@group(0) @binding(0)
var image_rg32f: texture_storage_2d<rg32float, read>;
"#,
);
}
|
sample_rate_shading
|
functions_table.rs
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::any::Any;
use std::sync::Arc;
use common_datablocks::DataBlock;
use common_datavalues::prelude::*;
use common_exception::Result;
use common_functions::aggregates::AggregateFunctionFactory;
use common_functions::scalars::FunctionFactory;
use common_meta_types::TableIdent;
use common_meta_types::TableInfo;
use common_meta_types::TableMeta;
use common_planners::ReadDataSourcePlan;
use common_streams::DataBlockStream;
use common_streams::SendableDataBlockStream;
use crate::catalogs::Table;
use crate::sessions::QueryContext;
pub struct FunctionsTable {
table_info: TableInfo,
}
impl FunctionsTable {
pub fn create(table_id: u64) -> Self {
let schema = DataSchemaRefExt::create(vec![
DataField::new("name", DataType::String, false),
DataField::new("is_aggregate", DataType::Boolean, false),
]);
let table_info = TableInfo {
desc: "'system'.'functions'".to_string(),
name: "functions".to_string(),
ident: TableIdent::new(table_id, 0),
meta: TableMeta {
schema,
engine: "SystemFunctions".to_string(),
..Default::default()
},
};
FunctionsTable { table_info }
}
}
#[async_trait::async_trait]
impl Table for FunctionsTable {
fn as_any(&self) -> &dyn Any {
self
}
fn get_table_info(&self) -> &TableInfo {
&self.table_info
}
async fn read(
&self,
_ctx: Arc<QueryContext>,
_plan: &ReadDataSourcePlan,
) -> Result<SendableDataBlockStream> {
let function_factory = FunctionFactory::instance();
let aggregate_function_factory = AggregateFunctionFactory::instance();
let func_names = function_factory.registered_names();
let aggr_func_names = aggregate_function_factory.registered_names();
let names: Vec<&[u8]> = func_names
.iter()
.chain(aggr_func_names.iter())
.map(|x| x.as_bytes())
.collect();
let is_aggregate = (0..names.len())
.map(|i| i >= func_names.len())
.collect::<Vec<bool>>();
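        // Scalar names come first in `names`, so indices at or past
        // `func_names.len()` belong to aggregate functions.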
|
let block = DataBlock::create_by_array(self.table_info.schema(), vec![
Series::new(names),
Series::new(is_aggregate),
]);
Ok(Box::pin(DataBlockStream::create(
self.table_info.schema(),
None,
vec![block],
)))
}
}
| |
test_keydb.py
|
#!/usr/bin/env python
"""
<Program Name>
test_keydb.py
<Author>
Vladimir Diaz <[email protected]>
<Started>
October 2012.
<Copyright>
See LICENSE for licensing information.
<Purpose>
Unit test for 'keydb.py'.
"""
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import unittest
import logging
import tuf
import tuf.formats
import tuf.keys
import tuf.keydb
import tuf.log
logger = logging.getLogger('tuf.test_keydb')
# Generate the three keys to use in our test cases.
KEYS = []
for junk in range(3):
KEYS.append(tuf.keys.generate_rsa_key(2048))
class TestKeydb(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
tuf.keydb.clear_keydb()
def test_clear_keydb(self):
# Test condition ensuring 'clear_keydb()' clears the keydb database.
# Test the length of the keydb before and after adding a key.
self.assertEqual(0, len(tuf.keydb._keydb_dict))
rsakey = KEYS[0]
keyid = KEYS[0]['keyid']
tuf.keydb._keydb_dict[keyid] = rsakey
self.assertEqual(1, len(tuf.keydb._keydb_dict))
tuf.keydb.clear_keydb()
self.assertEqual(0, len(tuf.keydb._keydb_dict))
# Test condition for unexpected argument.
self.assertRaises(TypeError, tuf.keydb.clear_keydb, 'unexpected_argument')
def
|
(self):
# Test conditions using valid 'keyid' arguments.
rsakey = KEYS[0]
keyid = KEYS[0]['keyid']
tuf.keydb._keydb_dict[keyid] = rsakey
rsakey2 = KEYS[1]
keyid2 = KEYS[1]['keyid']
tuf.keydb._keydb_dict[keyid2] = rsakey2
self.assertEqual(rsakey, tuf.keydb.get_key(keyid))
self.assertEqual(rsakey2, tuf.keydb.get_key(keyid2))
self.assertNotEqual(rsakey2, tuf.keydb.get_key(keyid))
self.assertNotEqual(rsakey, tuf.keydb.get_key(keyid2))
# Test conditions using invalid arguments.
self.assertRaises(tuf.FormatError, tuf.keydb.get_key, None)
self.assertRaises(tuf.FormatError, tuf.keydb.get_key, 123)
self.assertRaises(tuf.FormatError, tuf.keydb.get_key, ['123'])
self.assertRaises(tuf.FormatError, tuf.keydb.get_key, {'keyid': '123'})
self.assertRaises(tuf.FormatError, tuf.keydb.get_key, '')
# Test condition using a 'keyid' that has not been added yet.
keyid3 = KEYS[2]['keyid']
self.assertRaises(tuf.UnknownKeyError, tuf.keydb.get_key, keyid3)
def test_add_key(self):
# Test conditions using valid 'keyid' arguments.
rsakey = KEYS[0]
keyid = KEYS[0]['keyid']
rsakey2 = KEYS[1]
keyid2 = KEYS[1]['keyid']
rsakey3 = KEYS[2]
keyid3 = KEYS[2]['keyid']
self.assertEqual(None, tuf.keydb.add_key(rsakey, keyid))
self.assertEqual(None, tuf.keydb.add_key(rsakey2, keyid2))
self.assertEqual(None, tuf.keydb.add_key(rsakey3))
self.assertEqual(rsakey, tuf.keydb.get_key(keyid))
self.assertEqual(rsakey2, tuf.keydb.get_key(keyid2))
self.assertEqual(rsakey3, tuf.keydb.get_key(keyid3))
# Test conditions using arguments with invalid formats.
tuf.keydb.clear_keydb()
rsakey3['keytype'] = 'bad_keytype'
self.assertRaises(tuf.FormatError, tuf.keydb.add_key, None, keyid)
self.assertRaises(tuf.FormatError, tuf.keydb.add_key, '', keyid)
self.assertRaises(tuf.FormatError, tuf.keydb.add_key, ['123'], keyid)
self.assertRaises(tuf.FormatError, tuf.keydb.add_key, {'a': 'b'}, keyid)
self.assertRaises(tuf.FormatError, tuf.keydb.add_key, rsakey, {'keyid': ''})
self.assertRaises(tuf.FormatError, tuf.keydb.add_key, rsakey, 123)
self.assertRaises(tuf.FormatError, tuf.keydb.add_key, rsakey, False)
self.assertRaises(tuf.FormatError, tuf.keydb.add_key, rsakey, ['keyid'])
self.assertRaises(tuf.FormatError, tuf.keydb.add_key, rsakey3, keyid3)
rsakey3['keytype'] = 'rsa'
# Test conditions where keyid does not match the rsakey.
self.assertRaises(tuf.Error, tuf.keydb.add_key, rsakey, keyid2)
self.assertRaises(tuf.Error, tuf.keydb.add_key, rsakey2, keyid)
# Test conditions using keyids that have already been added.
tuf.keydb.add_key(rsakey, keyid)
tuf.keydb.add_key(rsakey2, keyid2)
self.assertRaises(tuf.KeyAlreadyExistsError, tuf.keydb.add_key, rsakey)
self.assertRaises(tuf.KeyAlreadyExistsError, tuf.keydb.add_key, rsakey2)
def test_remove_key(self):
# Test conditions using valid keyids.
rsakey = KEYS[0]
keyid = KEYS[0]['keyid']
rsakey2 = KEYS[1]
keyid2 = KEYS[1]['keyid']
rsakey3 = KEYS[2]
keyid3 = KEYS[2]['keyid']
tuf.keydb.add_key(rsakey, keyid)
tuf.keydb.add_key(rsakey2, keyid2)
tuf.keydb.add_key(rsakey3, keyid3)
self.assertEqual(None, tuf.keydb.remove_key(keyid))
self.assertEqual(None, tuf.keydb.remove_key(keyid2))
# Ensure the keys were actually removed.
self.assertRaises(tuf.UnknownKeyError, tuf.keydb.get_key, keyid)
self.assertRaises(tuf.UnknownKeyError, tuf.keydb.get_key, keyid2)
# Test for 'keyid' not in keydb.
self.assertRaises(tuf.UnknownKeyError, tuf.keydb.remove_key, keyid)
# Test condition for unknown key argument.
self.assertRaises(tuf.UnknownKeyError, tuf.keydb.remove_key, '1')
# Test conditions for arguments with invalid formats.
self.assertRaises(tuf.FormatError, tuf.keydb.remove_key, None)
self.assertRaises(tuf.FormatError, tuf.keydb.remove_key, '')
self.assertRaises(tuf.FormatError, tuf.keydb.remove_key, 123)
self.assertRaises(tuf.FormatError, tuf.keydb.remove_key, ['123'])
self.assertRaises(tuf.FormatError, tuf.keydb.remove_key, {'bad': '123'})
self.assertRaises(tuf.Error, tuf.keydb.remove_key, rsakey3)
def test_create_keydb_from_root_metadata(self):
# Test condition using a valid 'root_metadata' argument.
rsakey = KEYS[0]
keyid = KEYS[0]['keyid']
rsakey2 = KEYS[1]
keyid2 = KEYS[1]['keyid']
    keydict = {keyid: rsakey, keyid2: rsakey2, keyid: rsakey}
    # Note: the repeated 'keyid' above collapses to a single entry in a Python
    # dict literal; the 'tuf.KeyAlreadyExistsError' block is instead exercised
    # by loading keys that are already present in the keydb (loading continues).
roledict = {'Root': {'keyids': [keyid], 'threshold': 1},
'Targets': {'keyids': [keyid2], 'threshold': 1}}
version = 8
consistent_snapshot = False
expires = '1985-10-21T01:21:00Z'
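    # Pre-populate the keydb with 'rsakey' so that a duplicate can be
    # encountered when the root metadata is loaded below.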
tuf.keydb.add_key(rsakey)
root_metadata = tuf.formats.RootFile.make_metadata(version,
expires,
keydict, roledict,
consistent_snapshot)
self.assertEqual(None, tuf.keydb.create_keydb_from_root_metadata(root_metadata))
tuf.keydb.create_keydb_from_root_metadata(root_metadata)
# Ensure 'keyid' and 'keyid2' were added to the keydb database.
self.assertEqual(rsakey, tuf.keydb.get_key(keyid))
self.assertEqual(rsakey2, tuf.keydb.get_key(keyid2))
# Test conditions for arguments with invalid formats.
self.assertRaises(tuf.FormatError,
tuf.keydb.create_keydb_from_root_metadata, None)
self.assertRaises(tuf.FormatError,
tuf.keydb.create_keydb_from_root_metadata, '')
self.assertRaises(tuf.FormatError,
tuf.keydb.create_keydb_from_root_metadata, 123)
self.assertRaises(tuf.FormatError,
tuf.keydb.create_keydb_from_root_metadata, ['123'])
self.assertRaises(tuf.FormatError,
tuf.keydb.create_keydb_from_root_metadata, {'bad': '123'})
# Test conditions for correctly formatted 'root_metadata' arguments but
# containing incorrect keyids or key types. In these conditions, the keys
# should not be added to the keydb database and a warning should be logged.
tuf.keydb.clear_keydb()
# 'keyid' does not match 'rsakey2'.
keydict[keyid] = rsakey2
# Key with invalid keytype.
rsakey3 = KEYS[2]
keyid3 = KEYS[2]['keyid']
rsakey3['keytype'] = 'bad_keytype'
keydict[keyid3] = rsakey3
version = 8
expires = '1985-10-21T01:21:00Z'
root_metadata = tuf.formats.RootFile.make_metadata(version,
expires,
keydict, roledict,
consistent_snapshot)
self.assertEqual(None, tuf.keydb.create_keydb_from_root_metadata(root_metadata))
# Ensure only 'keyid2' was added to the keydb database. 'keyid' and
# 'keyid3' should not be stored.
self.assertEqual(rsakey2, tuf.keydb.get_key(keyid2))
self.assertRaises(tuf.UnknownKeyError, tuf.keydb.get_key, keyid)
self.assertRaises(tuf.UnknownKeyError, tuf.keydb.get_key, keyid3)
rsakey3['keytype'] = 'rsa'
# Run unit test.
if __name__ == '__main__':
unittest.main()
|
test_get_key
|
guild_member_add.go
|
package events
import (
"context"
"encoding/json"
"fmt"
"math/rand"
"moderari/internal/config"
"moderari/internal/db"
"moderari/internal/embeds"
"moderari/internal/http"
"time"
"github.com/andersfylling/disgord"
"github.com/getsentry/sentry-go"
)
const letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
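// generateIdentifier returns a pseudo-random 8-character alphabetic token used
// to build the per-member verification URL.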
func generateIdentifier() string {
rand.Seed(time.Now().UnixNano())
b := make([]byte, 8)
for i := range b {
b[i] = letters[rand.Int63()%int64(len(letters))]
}
return string(b)
}
func init() {
events[disgord.EvtGuildMemberAdd] = func(session disgord.Session, evt *disgord.GuildMemberAdd) {
guildString, err := db.Client.Get("guild:" + evt.Member.GuildID.String()).Result()
chk(err, session)
guildDoc := db.GuildModel{}
_ = json.Unmarshal([]byte(guildString), &guildDoc)
if !guildDoc.Gotcha {
return
}
guild, err := session.GetGuild(context.Background(), evt.Member.GuildID)
chk(err, session)
identifier := generateIdentifier()
message := disgord.NewMessage()
message.Embeds = append(message.Embeds, embeds.Info(
"Hey! "+guild.Name+" is protected with Gotcha.",
fmt.Sprintf(
"To gain access, see %s/verify/%s.\n[Here](https://github.com/fjah/gotcha)'s the source.",
config.C.URL,
identifier,
),
"You have one hour to do this.",
))
if _, _, err := evt.Member.User.SendMsg(context.Background(), session, message); err != nil {
// The user most likely has DMs off or blocked the bot. No need to log the error.
return
}
go func() {
message = disgord.NewMessage()
message.Embeds = append(message.Embeds, embeds.Info("You're in!", "We hope you enjoy your time.", ""))
stat := http.Gotcha.Await(identifier)
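			// Await blocks until verification resolves; per the switch below,
			// 1 means the one-hour window expired and 2 means the user is on
			// the blocklist.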
switch stat {
case 1:
message.Embeds[0] = embeds.ErrorString(
"Uhh...",
"You didn't respond in one hour. Please contact server staff.",
)
case 2:
message.Embeds[0] = embeds.ErrorString(
"Uhh...",
"You seem to be on our blocklist. Have you joined the same server already today?",
)
}
if guildDoc.MemberRole != "" {
if err := session.AddGuildMemberRole(
context.Background(),
evt.Member.GuildID,
evt.Member.User.ID,
disgord.ParseSnowflakeString(guildDoc.MemberRole),
); err != nil {
message.Embeds[0] = embeds.ErrorString(
"Uhh...",
"Sorry, but I'm not able to add the role to you. Please contact server staff.",
)
_, _, _ = evt.Member.User.SendMsg(context.Background(), session, message)
return
}
}
if _, _, err := evt.Member.User.SendMsg(context.Background(), session, message); err != nil
|
}()
}
}
func chk(err error, session disgord.Session) {
if err != nil {
if err == db.Nil {
return
}
sentry.CaptureException(err)
session.Logger().Error(err)
return
}
}
|
{
return
}
|
bt_translation_request_info.py
|
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
class BTTranslationRequestInfo(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and, for var_name, this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and, for var_name, this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
("request_state",): {"ACTIVE": "ACTIVE", "DONE": "DONE", "FAILED": "FAILED",},
}
validations = {}
additional_properties_type = None
@staticmethod
def openapi_types():
"""
This must be a class method so a model may have properties that are
of type self, this ensures that we don't create a cyclic import
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"document_id": (str,), # noqa: E501
"failure_reason": (str,), # noqa: E501
"href": (str,), # noqa: E501
"id": (str,), # noqa: E501
"name": (str,), # noqa: E501
"request_element_id": (str,), # noqa: E501
"request_state": (str,), # noqa: E501
"result_document_id": (str,), # noqa: E501
"result_element_ids": ([str],), # noqa: E501
"result_external_data_ids": ([str],), # noqa: E501
"result_workspace_id": (str,), # noqa: E501
"version_id": (str,), # noqa: E501
"view_ref": (str,), # noqa: E501
"workspace_id": (str,), # noqa: E501
}
@staticmethod
def discriminator():
return None
attribute_map = {
"document_id": "documentId", # noqa: E501
"failure_reason": "failureReason", # noqa: E501
"href": "href", # noqa: E501
"id": "id", # noqa: E501
"name": "name", # noqa: E501
"request_element_id": "requestElementId", # noqa: E501
"request_state": "requestState", # noqa: E501
"result_document_id": "resultDocumentId", # noqa: E501
"result_element_ids": "resultElementIds", # noqa: E501
"result_external_data_ids": "resultExternalDataIds", # noqa: E501
"result_workspace_id": "resultWorkspaceId", # noqa: E501
"version_id": "versionId", # noqa: E501
"view_ref": "viewRef", # noqa: E501
"workspace_id": "workspaceId", # noqa: E501
}
@staticmethod
def _composed_schemas():
return None
required_properties = set(
[
"_data_store",
"_check_type",
"_from_server",
"_path_to_item",
"_configuration",
]
)
def __init__(
self,
_check_type=True,
_from_server=False,
_path_to_item=(),
_configuration=None,
**kwargs
): # noqa: E501
|
"""bt_translation_request_info.BTTranslationRequestInfo - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_from_server (bool): True if the data is from the server
False if the data is from the client (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
document_id (str): [optional] # noqa: E501
failure_reason (str): [optional] # noqa: E501
href (str): [optional] # noqa: E501
id (str): [optional] # noqa: E501
name (str): [optional] # noqa: E501
request_element_id (str): [optional] # noqa: E501
request_state (str): [optional] # noqa: E501
result_document_id (str): [optional] # noqa: E501
result_element_ids ([str]): [optional] # noqa: E501
result_external_data_ids ([str]): [optional] # noqa: E501
result_workspace_id (str): [optional] # noqa: E501
version_id (str): [optional] # noqa: E501
view_ref (str): [optional] # noqa: E501
workspace_id (str): [optional] # noqa: E501
"""
self._data_store = {}
self._check_type = _check_type
self._from_server = _from_server
self._path_to_item = _path_to_item
self._configuration = _configuration
for var_name, var_value in six.iteritems(kwargs):
if (
var_name not in self.attribute_map
and self._configuration is not None
and self._configuration.discard_unknown_keys
and self.additional_properties_type is None
):
# discard variable.
continue
setattr(self, var_name, var_value)
|
|
debug.py
|
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# Copyright (c) 2008-2020 pyglet contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
import pyglet
def
|
(enabled_or_option='debug'):
"""Get a debug printer that is enabled based on a boolean input or a pyglet option.
The debug print function returned should be used in an assert. This way it can be
optimized out when running python with the -O flag.
Usage example::
from pyglet.debug import debug_print
_debug_media = debug_print('debug_media')
def some_func():
assert _debug_media('My debug statement')
:parameters:
        `enabled_or_option` : bool or str
            If a bool is passed, debug printing is enabled if it is True. If a str
            is passed, debug printing is enabled if the pyglet option with that name
            is True.
:returns: Function for debug printing.
"""
if isinstance(enabled_or_option, bool):
enabled = enabled_or_option
else:
enabled = pyglet.options.get(enabled_or_option, False)
if enabled:
def _debug_print(*args, **kwargs):
print(*args, **kwargs)
return True
else:
def _debug_print(*args, **kwargs):
return True
return _debug_print
|
debug_print
|
socket.service.ts
|
import { Injectable } from '@angular/core';
import { BehaviorSubject, Observable } from 'rxjs';
import { io } from "socket.io-client";
import { environment } from 'src/environments/environment';
import { AppConstant } from '../common/appconstants';
@Injectable({
providedIn: 'root'
})
export class SocketService {
private socketEvent: BehaviorSubject<string> = new BehaviorSubject('');
socketEvent$ = this.socketEvent.asObservable();
socket = io(environment.socket_url);
constructor() {
this.getNewMessage()
}
public getNewMessage = () => {
this.socket.on('message', (data) => {
// data.value = JSON.parse(data.value)
// console.log(data.value)
// console.log(data.value.parse)
// this.message$.next(data);
this.sendSocketEvent(data)
});
|
private sendSocketEvent(data) {
this.socketEvent.next(data);
}
}
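// Example (sketch): a consuming component can react to incoming socket
// messages by subscribing to socketEvent$; `socketService` here is a
// hypothetical instance injected via Angular DI:
//
//   this.socketService.socketEvent$.subscribe(data => console.log(data));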
|
};
|
atron.py
|
import click
import time
import platform
import os
import posixpath  # used by put() when uploading directories
import re  # used to validate COM ports in cli()
from minifier import minify
from .board import Board, BoardException, DirectoryExistsError
from .board import PyboardError
_board = None
@click.group()
@click.option(
"--port",
"-p",
envvar="ATRON_PORT",
default="",
type=click.STRING,
help="Name of serial port for connected board. Can optionally specify with ATRON_PORT environment variable.",
metavar="PORT",
)
@click.option(
"--baud",
"-b",
envvar="ATRON_BAUD",
default=115200,
type=click.INT,
help="Baud rate for the serial connection (default 115200). Can optionally specify with ATRON_BAUD environment variable.",
metavar="BAUD",
)
@click.version_option()
def cli(port, baud):
global _board
if platform.system() == "Windows":
if port == '':
click.secho('you have to choose a COM port.', bold=True, fg='red')
return
        if not re.match(r"^COM(\d+)$", port):
click.secho('invalid port {}'.format(port), fg='red')
return
else:
if port == '':
port = '/dev/ttyUSB0'
seconds = 1
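    # Reconnect with exponential backoff (1s, 2s, 4s, ...) until the board
    # responds or the user interrupts with Ctrl+C.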
while True:
try:
_board = Board(port, baud)
break
except BoardException as error:
click.secho(str(error), bold=True, fg='yellow')
click.secho(
                'reconnecting to board after {} seconds. press ctrl+c to cancel'.format(seconds), fg='green')
time.sleep(seconds)
seconds *= 2
@cli.command()
@click.option(
"-h",
"--hard",
"hard",
is_flag=True,
default=False,
help="Perform a hard reboot, including running init.py",
)
def reset(hard):
if not hard:
_board.soft_reset()
return
# TODO: Hard reset is not implemented.
@cli.command()
def raw_command():
click.secho(
'the raw-command is under construction and may have some bugs.', fg='yellow')
click.secho('entering raw-command mode ...', fg='green')
_board.soft_reset()
time.sleep(1)
_board.board.enter_raw_repl()
try:
while True:
            command = input(">>> ")
result = _board.board.exec_raw(command)
if result[0]:
print(result[0])
finally:
_board.board.exit_raw_repl()
_board.soft_reset()
@cli.command()
@click.argument("remote_folder")
def rmdir(remote_folder):
|
@cli.command()
@click.argument(
"local",
default="main.py",
)
@click.argument(
"remote",
default="main.py",
)
def upload(local, remote):
if remote is None:
remote = os.path.basename(os.path.abspath(local))
_board.files.put(remote, minify(local))
@cli.command()
@click.argument(
"local",
default="main.py",
)
@click.argument(
"remote",
required=False,
)
def put(local, remote):
if remote is None:
remote = os.path.basename(os.path.abspath(local))
if os.path.isdir(local):
board_files = _board.files
for parent, child_dirs, child_files in os.walk(local):
remote_parent = posixpath.normpath(
posixpath.join(remote, os.path.relpath(parent, local))
)
try:
board_files.mkdir(remote_parent)
for filename in child_files:
with open(os.path.join(parent, filename), "rb") as infile:
remote_filename = posixpath.join(
remote_parent, filename)
board_files.put(remote_filename, infile.read())
except DirectoryExistsError:
pass
else:
with open(local, "rb") as infile:
_board.files.put(remote, infile.read())
@cli.command()
@click.argument("remote_file")
def rm(remote_file):
_board.files.rm(remote_file)
@cli.command()
@click.argument("local_file")
@click.option(
"--no-output",
"-n",
is_flag=True,
help="Run the code without waiting for it to finish and print output. Use this when running code with main loops that never return.",
)
def run(local_file, no_output):
try:
output = _board.files.run(local_file, not no_output)
if output is not None:
click.secho(output.decode("utf-8"))
except IOError:
click.echo(
"Failed to find or read input file: {0}".format(local_file), err=True
)
@cli.command()
@click.argument("directory", default="/")
@click.option(
"--long_format",
"-l",
is_flag=True,
help="Print long format info including size of files. Note the size of directories is not supported and will show 0 values.",
)
@click.option(
"--recursive",
"-r",
is_flag=True,
help="recursively list all files and (empty) directories.",
)
def ls(directory, long_format, recursive):
try:
files = _board.files.ls(directory, long_format=long_format, recursive=recursive)
except PyboardError as err:
click.secho('PyBoard Exception.', fg='red')
click.secho(str(err), fg='yellow')
return
for f in files:
if not long_format:
click.secho(
f,
fg='green' if os.path.splitext(f)[1].lower() == '.py' else 'white',
)
else:
click.echo(f)
if __name__ == '__main__':
try:
cli()
finally:
if _board is not None:
try:
_board.close()
except:
pass
|
_board.files.rmdir(remote_folder)
|
lib.rs
|
#[cfg(test)]
#[macro_use]
pub extern crate codeviz_macros;
pub extern crate codeviz_common as common;
pub extern crate codeviz_java as java;
pub extern crate codeviz_js as js;
pub extern crate codeviz_python as python;
pub extern crate codeviz_rust as rust;
#[cfg(test)]
mod python_tests {
use python::*;
#[test]
fn test_python()
|
}
#[cfg(test)]
mod java_tests {
use java::*;
#[test]
fn test_test_java() {
let string_type = Type::class("java.lang", "String");
let list_type = Type::class("java.util", "List");
let json_creator_type = Type::class("com.fasterxml.jackson.annotation", "JsonCreator");
let list_of_strings = list_type.with_arguments(vec![&string_type]);
let values_field = FieldSpec::new(
mods![Modifier::Private, Modifier::Final],
&list_of_strings,
"values",
);
let values_argument = ArgumentSpec::new(mods![Modifier::Final], &list_of_strings, "values");
let mut constructor = ConstructorSpec::new(mods![Modifier::Public]);
constructor.push_annotation(AnnotationSpec::new(json_creator_type));
constructor.push_argument(values_argument.clone());
constructor.push(stmt!["this.values = ", values_argument, ";"]);
let mut values_getter = MethodSpec::new(mods![Modifier::Public], "getValues");
values_getter.returns(&list_of_strings);
values_getter.push(stmt!["return this.", &values_field, ";"]);
let mut class = ClassSpec::new(mods![Modifier::Public], "Test");
class.push_field(&values_field);
class.push_constructor(&constructor);
class.push(&values_getter);
let mut file = FileSpec::new("se.tedro");
file.push(&class);
println!("result = {}", file.to_string());
let reference = ::std::str::from_utf8(include_bytes!("tests/Test.java")).unwrap();
assert_eq!(reference, file.to_string());
}
#[test]
fn test_class_spec() {
let class = ClassSpec::new(mods![Modifier::Public], "Foo");
let class: Element = class.into();
assert_eq!("public class Foo {\n}", class.to_string());
}
#[test]
fn test_interface_spec() {
let class = InterfaceSpec::new(mods![Modifier::Public], "Foo");
let class: Element = class.into();
assert_eq!("public interface Foo {\n}", class.to_string());
}
#[test]
fn test_enum_spec() {
let class = EnumSpec::new(mods![Modifier::Public], "Foo");
let class: Element = class.into();
assert_eq!("public enum Foo {\n ;\n}", class.to_string());
}
}
#[cfg(test)]
mod js_tests {
use js::*;
#[test]
fn test_file() {
let mut foo = FunctionSpec::new("foo");
let m = Name::imported("foo", "hello");
foo.push(stmt!["return ", m, "();"]);
let mut file = FileSpec::new();
file.push(foo);
let result = file.to_string();
assert_eq!(
"import {hello} from \"foo.js\";\n\nfunction foo() {\n return hello();\n}\n",
result
);
}
}
|
{
let static_method = Name::built_in("staticmethod");
let exit = Name::imported("sys", "exit");
let mut file = FileSpec::new();
let mut hello = MethodSpec::new("hello");
hello.push_decorator(static_method);
hello.push(stmt!["return 12"]);
let mut bye = MethodSpec::new("bye");
bye.push(stmt![exit, "(1)"]);
let mut foo = ClassSpec::new("Foo");
foo.push(hello);
foo.push(bye);
file.push(foo);
let reference = ::std::str::from_utf8(include_bytes!("tests/test.py")).unwrap();
assert_eq!(reference, file.to_string());
}
|
jobs.go
|
package client
import (
"bytes"
"encoding/json"
"io/ioutil"
"net/http"
"strconv"
"unicode/utf8"
"github.com/pkg/errors"
"github.com/taglme/nfc-goclient/pkg/models"
)
// JobFilter defines the filter options for job list requests
type JobFilter struct {
Status *models.JobStatus
SortBy *string
SortDir *string
Limit *int
Offset *int
}
// JobService describes the job service client interface
type JobService interface {
GetAll(adapterID string) ([]models.Job, models.PageInfo, error)
GetFiltered(adapterID string, filter JobFilter) ([]models.Job, models.PageInfo, error)
Delete(adapterID string, jobID string) error
DeleteAll(adapterID string) error
Get(adapterID string, jobID string) (models.Job, error)
Add(adapterID string, job models.NewJob) (models.Job, error)
UpdateStatus(adapterID string, jobID string, status models.JobStatus) (models.Job, error)
}
type jobService struct {
url string
basePath string
path string
client *http.Client
}
func newJobService(c *http.Client, url string) JobService {
return &jobService{
url: url,
client: c,
path: "/jobs",
basePath: "/adapters",
}
}
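// Example (sketch, from within this package, with a hypothetical service URL
// and adapter ID):
//
//	svc := newJobService(http.DefaultClient, "http://127.0.0.1:3011")
//	jobs, pageInfo, err := svc.GetAll("adapter-uuid")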
// Get Job list for adapter with all details
func (s *jobService) GetAll(adapterID string) ([]models.Job, models.PageInfo, error) {
return s.GetFiltered(adapterID, JobFilter{})
}
// Get Job list for adapter with all details
// adapterId – Unique identifier in form of UUID representing a specific adapter.
// filter.status – Jobs' status filter
// filter.limit – Limit number of jobs in response.
// filter.offset – Offset from start of list.
// filter.sortBy – Sort field for list.
// filter.sortDir – Sort direction for list
func (s *jobService) GetFiltered(adapterID string, filter JobFilter) (jobs []models.Job, pagInfo models.PageInfo, err error) {
targetUrl := s.url + s.basePath + "/" + adapterID + s.path + buildJobsQueryParams(filter)
resp, err := s.client.Get(targetUrl)
if err != nil {
return jobs, pagInfo, errors.Wrap(err, "Can't get jobs")
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return jobs, pagInfo, errors.Wrap(err, "Can't convert jobs to byte slice")
}
err = handleHttpResponseCode(resp.StatusCode, body)
if err != nil {
return jobs, pagInfo, errors.Wrap(err, "Error in fetching jobs")
}
var jListResource models.JobListResource
err = json.Unmarshal(body, &jListResource)
if err != nil {
return jobs, pagInfo, errors.Wrap(err, "Can't unmarshal jobs response")
}
jobs = make([]models.Job, len(jListResource.Items))
for i, e := range jListResource.Items {
jobs[i], err = e.ToJob()
if err != nil {
return jobs, pagInfo, errors.Wrap(err, "Can't convert job resource to job model")
}
}
return jobs, jListResource.GetPaginationInfo(), nil
}
// Get a single job from the adapter with all details
// adapterId – Unique identifier in form of UUID representing a specific adapter.
// jobId – Unique identifier in form of UUID representing a specific job.
func (s *jobService) Get(adapterID string, jobID string) (job models.Job, err error) {
targetUrl := s.url + s.basePath + "/" + adapterID + s.path + "/" + jobID
resp, err := s.client.Get(targetUrl)
if err != nil {
return job, errors.Wrap(err, "Can't get job")
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return job, errors.Wrap(err, "Can't convert job to byte slice")
}
err = handleHttpResponseCode(resp.StatusCode, body)
if err != nil {
return job, errors.Wrap(err, "Error in fetching job")
}
var jResource models.JobResource
err = json.Unmarshal(body, &jResource)
if err != nil {
return job, errors.Wrap(err, "Can't unmarshal job response")
}
return jResource.ToJob()
}
// Send job with list of steps to adapter
// adapterId – Unique identifier in form of UUID representing a specific adapter.
func (s *jobService) Add(adapterID string, job models.NewJob) (event models.Job, err error) {
reqBody, err := json.Marshal(job)
if err != nil {
return event, errors.Wrap(err, "Can't marshall req body for add job")
}
resp, err := s.client.Post(s.url+s.basePath+"/"+adapterID+s.path, "application/json", bytes.NewBuffer(reqBody))
if err != nil {
return event, errors.Wrap(err, "Can't post job")
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return event, errors.Wrap(err, "Can't convert resp job to byte slice")
}
err = handleHttpResponseCode(resp.StatusCode, body)
if err != nil {
return event,
|
.JobResource
err = json.Unmarshal(body, &eRes)
if err != nil {
return event, errors.Wrap(err, "Can't unmarshal post job response")
}
return eRes.ToJob()
}
// Delete all jobs from adapter
// adapterId – Unique identifier in form of UUID representing a specific adapter.
func (s *jobService) DeleteAll(adapterID string) (err error) {
// Create request
req, err := http.NewRequest("DELETE", s.url+s.basePath+"/"+adapterID+s.path, nil)
if err != nil {
return errors.Wrap(err, "Can't build delete all jobs request")
}
resp, err := s.client.Do(req)
if err != nil {
return errors.Wrap(err, "Can't delete all jobs")
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return errors.Wrap(err, "Can't convert resp delete all jobs to byte slice")
}
err = handleHttpResponseCode(resp.StatusCode, body)
if err != nil {
return errors.Wrap(err, "Error in delete all jobs")
}
return err
}
// Delete job from adapter
// adapterId – Unique identifier in form of UUID representing a specific adapter.
// jobId – Unique identifier in form of UUID representing a specific job.
func (s *jobService) Delete(adapterID string, jobID string) (err error) {
req, err := http.NewRequest("DELETE", s.url+s.basePath+"/"+adapterID+s.path+"/"+jobID, nil)
if err != nil {
return errors.Wrap(err, "Can't build delete job request")
}
resp, err := s.client.Do(req)
if err != nil {
return errors.Wrap(err, "Can't delete job")
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return errors.Wrap(err, "Can't convert resp delete job to byte slice")
}
err = handleHttpResponseCode(resp.StatusCode, body)
if err != nil {
return errors.Wrap(err, "Error in delete job")
}
return err
}
// Update job status in adapter
// adapterId – Unique identifier in form of UUID representing a specific adapter.
// jobId – Unique identifier in form of UUID representing a specific job.
func (s *jobService) UpdateStatus(adapterID string, jobID string, status models.JobStatus) (job models.Job, err error) {
reqBody, err := json.Marshal(models.JobStatusUpdate{Status: status.String()})
if err != nil {
return job, errors.Wrap(err, "Can't marshall req body for patch job status")
}
req, err := http.NewRequest("PATCH", s.url+s.basePath+"/"+adapterID+s.path+"/"+jobID, bytes.NewBuffer(reqBody))
if err != nil {
return job, errors.Wrap(err, "Can't build patch job status request")
}
resp, err := s.client.Do(req)
if err != nil {
return job, errors.Wrap(err, "Can't patch job status")
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return job, errors.Wrap(err, "Can't convert resp patch job status to byte slice")
}
err = handleHttpResponseCode(resp.StatusCode, body)
if err != nil {
return job, errors.Wrap(err, "Error in patch job status")
}
var eRes models.JobResource
err = json.Unmarshal(body, &eRes)
if err != nil {
return job, errors.Wrap(err, "Can't unmarshal patch job status response")
}
return eRes.ToJob()
}
// buildJobsQueryParams builds the query-string portion of a jobs request URL
func buildJobsQueryParams(filter JobFilter) (queryParams string) {
queryParams = ""
if filter.Status != nil {
queryParams += "&status=" + filter.Status.String()
}
if filter.SortBy != nil {
queryParams += "&sortby=" + *filter.SortBy
}
if filter.SortDir != nil {
queryParams += "&sortdir=" + *filter.SortDir
}
if filter.Offset != nil {
queryParams += "&offset=" + strconv.Itoa(*filter.Offset)
}
if filter.Limit != nil {
queryParams += "&limit=" + strconv.Itoa(*filter.Limit)
}
if len(queryParams) > 0 {
// remove first & and add ?
_, i := utf8.DecodeRuneInString(queryParams)
return "?" + queryParams[i:]
}
return queryParams
}
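// For example, a JobFilter with only Offset=20 and Limit=10 set yields
// "?offset=20&limit=10", following the append order above.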
|
errors.Wrap(err, "Error in post job")
}
var eRes models
|
tests.rs
|
#![allow(clippy::float_cmp)]
use crate::{builtins::Number, forward, forward_val, value::AbstractRelation, Context};
#[test]
fn integer_number_primitive_to_number_object() {
let mut context = Context::default();
let scenario = r#"
(100).toString() === "100"
"#;
assert_eq!(forward(&mut context, scenario), "true");
}
#[test]
fn call_number() {
let mut context = Context::default();
let init = r#"
var default_zero = Number();
var int_one = Number(1);
var float_two = Number(2.1);
var str_three = Number('3.2');
var bool_one = Number(true);
var bool_zero = Number(false);
var invalid_nan = Number("I am not a number");
var from_exp = Number("2.34e+2");
"#;
eprintln!("{}", forward(&mut context, init));
let default_zero = forward_val(&mut context, "default_zero").unwrap();
let int_one = forward_val(&mut context, "int_one").unwrap();
let float_two = forward_val(&mut context, "float_two").unwrap();
let str_three = forward_val(&mut context, "str_three").unwrap();
let bool_one = forward_val(&mut context, "bool_one").unwrap();
let bool_zero = forward_val(&mut context, "bool_zero").unwrap();
let invalid_nan = forward_val(&mut context, "invalid_nan").unwrap();
let from_exp = forward_val(&mut context, "from_exp").unwrap();
assert_eq!(default_zero.to_number(&mut context).unwrap(), 0_f64);
assert_eq!(int_one.to_number(&mut context).unwrap(), 1_f64);
assert_eq!(float_two.to_number(&mut context).unwrap(), 2.1);
assert_eq!(str_three.to_number(&mut context).unwrap(), 3.2);
assert_eq!(bool_one.to_number(&mut context).unwrap(), 1_f64);
assert!(invalid_nan.to_number(&mut context).unwrap().is_nan());
assert_eq!(bool_zero.to_number(&mut context).unwrap(), 0_f64);
assert_eq!(from_exp.to_number(&mut context).unwrap(), 234_f64);
}
#[test]
fn to_exponential() {
let mut context = Context::default();
let init = r#"
var default_exp = Number().toExponential();
var int_exp = Number(5).toExponential();
var float_exp = Number(1.234).toExponential();
var big_exp = Number(1234).toExponential();
var nan_exp = Number("I am also not a number").toExponential();
var noop_exp = Number("1.23e+2").toExponential();
"#;
eprintln!("{}", forward(&mut context, init));
let default_exp = forward(&mut context, "default_exp");
let int_exp = forward(&mut context, "int_exp");
let float_exp = forward(&mut context, "float_exp");
let big_exp = forward(&mut context, "big_exp");
let nan_exp = forward(&mut context, "nan_exp");
let noop_exp = forward(&mut context, "noop_exp");
assert_eq!(default_exp, "\"0e+0\"");
assert_eq!(int_exp, "\"5e+0\"");
assert_eq!(float_exp, "\"1.234e+0\"");
assert_eq!(big_exp, "\"1.234e+3\"");
assert_eq!(nan_exp, "\"NaN\"");
assert_eq!(noop_exp, "\"1.23e+2\"");
}
#[test]
fn to_fixed() {
let mut context = Context::default();
let init = r#"
var default_fixed = Number().toFixed();
var pos_fixed = Number("3.456e+4").toFixed();
var neg_fixed = Number("3.456e-4").toFixed();
var noop_fixed = Number(5).toFixed();
var nan_fixed = Number("I am not a number").toFixed();
"#;
eprintln!("{}", forward(&mut context, init));
let default_fixed = forward(&mut context, "default_fixed");
let pos_fixed = forward(&mut context, "pos_fixed");
let neg_fixed = forward(&mut context, "neg_fixed");
let noop_fixed = forward(&mut context, "noop_fixed");
let nan_fixed = forward(&mut context, "nan_fixed");
assert_eq!(default_fixed, "\"0\"");
assert_eq!(pos_fixed, "\"34560\"");
assert_eq!(neg_fixed, "\"0\"");
assert_eq!(noop_fixed, "\"5\"");
assert_eq!(nan_fixed, "\"NaN\"");
}
#[test]
fn to_locale_string() {
let mut context = Context::default();
let init = r#"
var default_locale = Number().toLocaleString();
var small_locale = Number(5).toLocaleString();
var big_locale = Number("345600").toLocaleString();
var neg_locale = Number(-25).toLocaleString();
"#;
    // TODO: We don't actually do any locale checking here.
    // To honor the spec, we should print numbers according to the user's locale.
eprintln!("{}", forward(&mut context, init));
let default_locale = forward(&mut context, "default_locale");
let small_locale = forward(&mut context, "small_locale");
let big_locale = forward(&mut context, "big_locale");
let neg_locale = forward(&mut context, "neg_locale");
assert_eq!(default_locale, "\"0\"");
assert_eq!(small_locale, "\"5\"");
assert_eq!(big_locale, "\"345600\"");
assert_eq!(neg_locale, "\"-25\"");
}
#[test]
fn to_precision() {
let mut context = Context::default();
let init = r#"
var infinity = (1/0).toPrecision(3);
var default_precision = Number().toPrecision();
var explicit_ud_precision = Number().toPrecision(undefined);
var low_precision = (123456789).toPrecision(1);
var more_precision = (123456789).toPrecision(4);
var exact_precision = (123456789).toPrecision(9);
var over_precision = (123456789).toPrecision(50);
var neg_precision = (-123456789).toPrecision(4);
var neg_exponent = (0.1).toPrecision(4);
var ieee754_limits = (1/3).toPrecision(60);
"#;
eprintln!("{}", forward(&mut context, init));
let infinity = forward(&mut context, "infinity");
let default_precision = forward(&mut context, "default_precision");
let explicit_ud_precision = forward(&mut context, "explicit_ud_precision");
let low_precision = forward(&mut context, "low_precision");
let more_precision = forward(&mut context, "more_precision");
let exact_precision = forward(&mut context, "exact_precision");
let over_precision = forward(&mut context, "over_precision");
let neg_precision = forward(&mut context, "neg_precision");
let neg_exponent = forward(&mut context, "neg_exponent");
let ieee754_limits = forward(&mut context, "ieee754_limits");
assert_eq!(infinity, String::from("\"Infinity\""));
assert_eq!(default_precision, String::from("\"0\""));
assert_eq!(explicit_ud_precision, String::from("\"0\""));
assert_eq!(low_precision, String::from("\"1e+8\""));
assert_eq!(more_precision, String::from("\"1.235e+8\""));
assert_eq!(exact_precision, String::from("\"123456789\""));
assert_eq!(neg_precision, String::from("\"-1.235e+8\""));
assert_eq!(
over_precision,
String::from("\"123456789.00000000000000000000000000000000000000000\"")
);
assert_eq!(neg_exponent, String::from("\"0.1000\""));
assert_eq!(
ieee754_limits,
String::from("\"0.333333333333333314829616256247390992939472198486328125000000\"")
);
let expected = "Uncaught \"RangeError\": \"precision must be an integer at least 1 and no greater than 100\"";
let range_error_1 = r#"(1).toPrecision(101);"#;
let range_error_2 = r#"(1).toPrecision(0);"#;
let range_error_3 = r#"(1).toPrecision(-2000);"#;
let range_error_4 = r#"(1).toPrecision('%');"#;
assert_eq!(forward(&mut context, range_error_1), expected);
assert_eq!(forward(&mut context, range_error_2), expected);
assert_eq!(forward(&mut context, range_error_3), expected);
assert_eq!(forward(&mut context, range_error_4), expected);
}
#[test]
fn to_string() {
let mut context = Context::default();
assert_eq!("\"NaN\"", &forward(&mut context, "Number(NaN).toString()"));
assert_eq!(
"\"Infinity\"",
&forward(&mut context, "Number(1/0).toString()")
);
assert_eq!(
"\"-Infinity\"",
&forward(&mut context, "Number(-1/0).toString()")
);
assert_eq!("\"0\"", &forward(&mut context, "Number(0).toString()"));
assert_eq!("\"9\"", &forward(&mut context, "Number(9).toString()"));
assert_eq!("\"90\"", &forward(&mut context, "Number(90).toString()"));
assert_eq!(
"\"90.12\"",
&forward(&mut context, "Number(90.12).toString()")
);
assert_eq!("\"0.1\"", &forward(&mut context, "Number(0.1).toString()"));
assert_eq!(
"\"0.01\"",
&forward(&mut context, "Number(0.01).toString()")
);
assert_eq!(
"\"0.0123\"",
&forward(&mut context, "Number(0.0123).toString()")
);
assert_eq!(
"\"0.00001\"",
&forward(&mut context, "Number(0.00001).toString()")
);
assert_eq!(
"\"0.000001\"",
&forward(&mut context, "Number(0.000001).toString()")
);
assert_eq!(
"\"NaN\"",
&forward(&mut context, "Number(NaN).toString(16)")
);
assert_eq!(
"\"Infinity\"",
&forward(&mut context, "Number(1/0).toString(16)")
);
assert_eq!(
"\"-Infinity\"",
&forward(&mut context, "Number(-1/0).toString(16)")
);
assert_eq!("\"0\"", &forward(&mut context, "Number(0).toString(16)"));
assert_eq!("\"9\"", &forward(&mut context, "Number(9).toString(16)"));
assert_eq!("\"5a\"", &forward(&mut context, "Number(90).toString(16)"));
assert_eq!(
"\"5a.1eb851eb852\"",
&forward(&mut context, "Number(90.12).toString(16)")
);
assert_eq!(
"\"0.1999999999999a\"",
&forward(&mut context, "Number(0.1).toString(16)")
);
assert_eq!(
"\"0.028f5c28f5c28f6\"",
&forward(&mut context, "Number(0.01).toString(16)")
);
assert_eq!(
"\"0.032617c1bda511a\"",
&forward(&mut context, "Number(0.0123).toString(16)")
);
assert_eq!(
"\"605f9f6dd18bc8000\"",
&forward(&mut context, "Number(111111111111111111111).toString(16)")
);
assert_eq!(
"\"3c3bc3a4a2f75c0000\"",
&forward(&mut context, "Number(1111111111111111111111).toString(16)")
);
assert_eq!(
"\"25a55a46e5da9a00000\"",
&forward(&mut context, "Number(11111111111111111111111).toString(16)")
);
assert_eq!(
"\"0.0000a7c5ac471b4788\"",
&forward(&mut context, "Number(0.00001).toString(16)")
);
assert_eq!(
"\"0.000010c6f7a0b5ed8d\"",
&forward(&mut context, "Number(0.000001).toString(16)")
);
assert_eq!(
"\"0.000001ad7f29abcaf48\"",
&forward(&mut context, "Number(0.0000001).toString(16)")
);
assert_eq!(
"\"0.000002036565348d256\"",
&forward(&mut context, "Number(0.00000012).toString(16)")
);
assert_eq!(
"\"0.0000021047ee22aa466\"",
&forward(&mut context, "Number(0.000000123).toString(16)")
);
assert_eq!(
"\"0.0000002af31dc4611874\"",
&forward(&mut context, "Number(0.00000001).toString(16)")
);
assert_eq!(
"\"0.000000338a23b87483be\"",
&forward(&mut context, "Number(0.000000012).toString(16)")
);
assert_eq!(
"\"0.00000034d3fe36aaa0a2\"",
&forward(&mut context, "Number(0.0000000123).toString(16)")
);
assert_eq!("\"0\"", &forward(&mut context, "Number(-0).toString(16)"));
assert_eq!("\"-9\"", &forward(&mut context, "Number(-9).toString(16)"));
assert_eq!(
"\"-5a\"",
&forward(&mut context, "Number(-90).toString(16)")
);
assert_eq!(
"\"-5a.1eb851eb852\"",
&forward(&mut context, "Number(-90.12).toString(16)")
);
assert_eq!(
"\"-0.1999999999999a\"",
&forward(&mut context, "Number(-0.1).toString(16)")
);
assert_eq!(
"\"-0.028f5c28f5c28f6\"",
&forward(&mut context, "Number(-0.01).toString(16)")
);
assert_eq!(
"\"-0.032617c1bda511a\"",
&forward(&mut context, "Number(-0.0123).toString(16)")
);
assert_eq!(
"\"-605f9f6dd18bc8000\"",
&forward(&mut context, "Number(-111111111111111111111).toString(16)")
);
assert_eq!(
"\"-3c3bc3a4a2f75c0000\"",
&forward(&mut context, "Number(-1111111111111111111111).toString(16)")
);
assert_eq!(
"\"-25a55a46e5da9a00000\"",
&forward(
&mut context,
"Number(-11111111111111111111111).toString(16)"
)
);
assert_eq!(
"\"-0.0000a7c5ac471b4788\"",
&forward(&mut context, "Number(-0.00001).toString(16)")
);
assert_eq!(
"\"-0.000010c6f7a0b5ed8d\"",
&forward(&mut context, "Number(-0.000001).toString(16)")
);
assert_eq!(
"\"-0.000001ad7f29abcaf48\"",
&forward(&mut context, "Number(-0.0000001).toString(16)")
);
assert_eq!(
"\"-0.000002036565348d256\"",
&forward(&mut context, "Number(-0.00000012).toString(16)")
);
assert_eq!(
"\"-0.0000021047ee22aa466\"",
&forward(&mut context, "Number(-0.000000123).toString(16)")
);
assert_eq!(
"\"-0.0000002af31dc4611874\"",
&forward(&mut context, "Number(-0.00000001).toString(16)")
);
assert_eq!(
"\"-0.000000338a23b87483be\"",
&forward(&mut context, "Number(-0.000000012).toString(16)")
);
assert_eq!(
"\"-0.00000034d3fe36aaa0a2\"",
&forward(&mut context, "Number(-0.0000000123).toString(16)")
);
}
#[test]
fn num_to_string_exponential() {
let mut context = Context::default();
assert_eq!("\"0\"", forward(&mut context, "(0).toString()"));
assert_eq!("\"0\"", forward(&mut context, "(-0).toString()"));
assert_eq!(
"\"111111111111111110000\"",
forward(&mut context, "(111111111111111111111).toString()")
);
assert_eq!(
"\"1.1111111111111111e+21\"",
forward(&mut context, "(1111111111111111111111).toString()")
);
assert_eq!(
"\"1.1111111111111111e+22\"",
forward(&mut context, "(11111111111111111111111).toString()")
);
assert_eq!("\"1e-7\"", forward(&mut context, "(0.0000001).toString()"));
assert_eq!(
"\"1.2e-7\"",
forward(&mut context, "(0.00000012).toString()")
);
assert_eq!(
"\"1.23e-7\"",
forward(&mut context, "(0.000000123).toString()")
);
assert_eq!("\"1e-8\"", forward(&mut context, "(0.00000001).toString()"));
assert_eq!(
"\"1.2e-8\"",
forward(&mut context, "(0.000000012).toString()")
);
assert_eq!(
"\"1.23e-8\"",
forward(&mut context, "(0.0000000123).toString()")
);
}
#[test]
fn value_of() {
let mut context = Context::default();
    // TODO: In addition to parsing numbers from strings, parse them bare.
    // As of October 2019 the parser does not understand scientific notation,
    // e.g., Xe+Y or -Xe-Y.
let init = r#"
var default_val = Number().valueOf();
var int_val = Number("123").valueOf();
var float_val = Number(1.234).valueOf();
var exp_val = Number("1.2e+4").valueOf()
var neg_val = Number("-1.2e+4").valueOf()
"#;
eprintln!("{}", forward(&mut context, init));
let default_val = forward_val(&mut context, "default_val").unwrap();
let int_val = forward_val(&mut context, "int_val").unwrap();
let float_val = forward_val(&mut context, "float_val").unwrap();
let exp_val = forward_val(&mut context, "exp_val").unwrap();
let neg_val = forward_val(&mut context, "neg_val").unwrap();
assert_eq!(default_val.to_number(&mut context).unwrap(), 0_f64);
assert_eq!(int_val.to_number(&mut context).unwrap(), 123_f64);
assert_eq!(float_val.to_number(&mut context).unwrap(), 1.234);
assert_eq!(exp_val.to_number(&mut context).unwrap(), 12_000_f64);
assert_eq!(neg_val.to_number(&mut context).unwrap(), -12_000_f64);
}
#[test]
fn equal() {
assert!(Number::equal(0.0, 0.0));
assert!(Number::equal(-0.0, 0.0));
assert!(Number::equal(0.0, -0.0));
assert!(!Number::equal(f64::NAN, -0.0));
assert!(!Number::equal(0.0, f64::NAN));
assert!(Number::equal(1.0, 1.0));
}
#[test]
fn same_value() {
assert!(Number::same_value(0.0, 0.0));
assert!(!Number::same_value(-0.0, 0.0));
assert!(!Number::same_value(0.0, -0.0));
assert!(!Number::same_value(f64::NAN, -0.0));
assert!(!Number::same_value(0.0, f64::NAN));
    assert!(Number::same_value(1.0, 1.0));
}
#[test]
fn less_than() {
assert_eq!(
Number::less_than(f64::NAN, 0.0),
AbstractRelation::Undefined
);
assert_eq!(
Number::less_than(0.0, f64::NAN),
AbstractRelation::Undefined
);
assert_eq!(
Number::less_than(f64::NEG_INFINITY, 0.0),
AbstractRelation::True
);
assert_eq!(
Number::less_than(0.0, f64::NEG_INFINITY),
AbstractRelation::False
);
assert_eq!(
Number::less_than(f64::INFINITY, 0.0),
AbstractRelation::False
);
assert_eq!(
Number::less_than(0.0, f64::INFINITY),
AbstractRelation::True
);
}
#[test]
fn same_value_zero() {
assert!(Number::same_value_zero(0.0, 0.0));
assert!(Number::same_value_zero(-0.0, 0.0));
assert!(Number::same_value_zero(0.0, -0.0));
assert!(!Number::same_value_zero(f64::NAN, -0.0));
assert!(!Number::same_value_zero(0.0, f64::NAN));
    assert!(Number::same_value_zero(1.0, 1.0));
}
#[test]
fn from_bigint() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "Number(0n)"), "0",);
assert_eq!(&forward(&mut context, "Number(100000n)"), "100000",);
assert_eq!(&forward(&mut context, "Number(100000n)"), "100000",);
assert_eq!(&forward(&mut context, "Number(1n << 1240n)"), "Infinity",);
}
#[test]
fn number_constants() {
let mut context = Context::default();
assert!(!forward_val(&mut context, "Number.EPSILON")
.unwrap()
.is_null_or_undefined());
assert!(!forward_val(&mut context, "Number.MAX_SAFE_INTEGER")
.unwrap()
.is_null_or_undefined());
assert!(!forward_val(&mut context, "Number.MIN_SAFE_INTEGER")
.unwrap()
.is_null_or_undefined());
assert!(!forward_val(&mut context, "Number.MAX_VALUE")
.unwrap()
.is_null_or_undefined());
assert!(!forward_val(&mut context, "Number.MIN_VALUE")
.unwrap()
.is_null_or_undefined());
assert!(!forward_val(&mut context, "Number.NEGATIVE_INFINITY")
.unwrap()
.is_null_or_undefined());
assert!(!forward_val(&mut context, "Number.POSITIVE_INFINITY")
.unwrap()
.is_null_or_undefined());
}
#[test]
fn parse_int_simple() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseInt(\"6\")"), "6");
}
#[test]
fn parse_int_negative() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseInt(\"-9\")"), "-9");
}
#[test]
fn parse_int_already_int() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseInt(100)"), "100");
}
#[test]
fn parse_int_float() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseInt(100.5)"), "100");
}
#[test]
fn parse_int_float_str() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseInt(\"100.5\")"), "100");
}
#[test]
fn parse_int_inferred_hex() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseInt(\"0xA\")"), "10");
}
/// This test demonstrates that this version of parseInt treats strings starting with 0 to be parsed with
/// a radix 10 if no radix is specified. Some alternative implementations default to a radix of 8.
#[test]
fn parse_int_zero_start() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseInt(\"018\")"), "18");
}
#[test]
fn parse_int_varying_radix() {
let mut context = Context::default();
let base_str = "1000";
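    // Note: `2..36` is exclusive, so radix 36 (which parseInt also accepts) is
    // not exercised here.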
for radix in 2..36 {
let expected = i32::from_str_radix(base_str, radix).unwrap();
assert_eq!(
forward(&mut context, &format!("parseInt(\"{base_str}\", {radix} )")),
expected.to_string()
);
}
}
#[test]
fn parse_int_negative_varying_radix() {
let mut context = Context::default();
let base_str = "-1000";
for radix in 2..36 {
let expected = i32::from_str_radix(base_str, radix).unwrap();
assert_eq!(
forward(&mut context, &format!("parseInt(\"{base_str}\", {radix} )")),
expected.to_string()
);
}
}
#[test]
fn parse_int_malformed_str() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseInt(\"hello\")"), "NaN");
}
#[test]
fn parse_int_undefined() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseInt(undefined)"), "NaN");
}
/// Shows that no arguments to parseInt is treated the same as if undefined was
/// passed as the first argument.
#[test]
fn parse_int_no_args() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseInt()"), "NaN");
}
/// Shows that extra arguments to parseInt are ignored.
#[test]
fn parse_int_too_many_args() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseInt(\"100\", 10, 10)"), "100");
}
#[test]
fn parse_float_simple() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseFloat(\"6.5\")"), "6.5");
}
#[test]
fn parse_float_int() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseFloat(10)"), "10");
}
#[test]
fn parse_float_int_str() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseFloat(\"8\")"), "8");
}
#[test]
fn parse_float_already_float() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseFloat(17.5)"), "17.5");
}
#[test]
fn parse_float_negative() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseFloat(\"-99.7\")"), "-99.7");
}
#[test]
fn parse_float_malformed_str() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseFloat(\"hello\")"), "NaN");
}
#[test]
fn parse_float_undefined() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseFloat(undefined)"), "NaN");
}
/// No arguments to parseFloat is treated the same as passing undefined as the first argument.
#[test]
fn parse_float_no_args() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseFloat()"), "NaN");
}
/// Shows that the parseFloat function ignores extra arguments.
#[test]
fn parse_float_too_many_args() {
let mut context = Context::default();
assert_eq!(&forward(&mut context, "parseFloat(\"100.5\", 10)"), "100.5");
}
#[test]
fn global_is_finite() {
let mut context = Context::default();
assert_eq!("false", &forward(&mut context, "isFinite(Infinity)"));
assert_eq!("false", &forward(&mut context, "isFinite(NaN)"));
assert_eq!("false", &forward(&mut context, "isFinite(-Infinity)"));
assert_eq!("true", &forward(&mut context, "isFinite(0)"));
assert_eq!("true", &forward(&mut context, "isFinite(2e64)"));
assert_eq!("true", &forward(&mut context, "isFinite(910)"));
assert_eq!("true", &forward(&mut context, "isFinite(null)"));
assert_eq!("true", &forward(&mut context, "isFinite('0')"));
assert_eq!("false", &forward(&mut context, "isFinite()"));
}
#[test]
fn global_is_nan() {
let mut context = Context::default();
assert_eq!("true", &forward(&mut context, "isNaN(NaN)"));
assert_eq!("true", &forward(&mut context, "isNaN('NaN')"));
assert_eq!("true", &forward(&mut context, "isNaN(undefined)"));
assert_eq!("true", &forward(&mut context, "isNaN({})"));
assert_eq!("false", &forward(&mut context, "isNaN(true)"));
assert_eq!("false", &forward(&mut context, "isNaN(null)"));
assert_eq!("false", &forward(&mut context, "isNaN(37)"));
assert_eq!("false", &forward(&mut context, "isNaN('37')"));
assert_eq!("false", &forward(&mut context, "isNaN('37.37')"));
assert_eq!("true", &forward(&mut context, "isNaN('37,5')"));
assert_eq!("true", &forward(&mut context, "isNaN('123ABC')"));
// Incorrect due to ToNumber implementation inconsistencies.
//assert_eq!("false", &forward(&mut context, "isNaN('')"));
//assert_eq!("false", &forward(&mut context, "isNaN(' ')"));
assert_eq!("true", &forward(&mut context, "isNaN('blabla')"));
}
#[test]
fn number_is_finite() {
let mut context = Context::default();
assert_eq!("false", &forward(&mut context, "Number.isFinite(Infinity)"));
assert_eq!("false", &forward(&mut context, "Number.isFinite(NaN)"));
assert_eq!(
"false",
&forward(&mut context, "Number.isFinite(-Infinity)")
);
assert_eq!("true", &forward(&mut context, "Number.isFinite(0)"));
assert_eq!("true", &forward(&mut context, "Number.isFinite(2e64)"));
assert_eq!("true", &forward(&mut context, "Number.isFinite(910)"));
assert_eq!("false", &forward(&mut context, "Number.isFinite(null)"));
assert_eq!("false", &forward(&mut context, "Number.isFinite('0')"));
assert_eq!("false", &forward(&mut context, "Number.isFinite()"));
assert_eq!("false", &forward(&mut context, "Number.isFinite({})"));
assert_eq!("true", &forward(&mut context, "Number.isFinite(Number(5))"));
assert_eq!(
"false",
&forward(&mut context, "Number.isFinite(new Number(5))")
);
assert_eq!(
"false",
&forward(&mut context, "Number.isFinite(new Number(NaN))")
);
assert_eq!(
"false",
&forward(&mut context, "Number.isFinite(BigInt(5))")
);
}
#[test]
fn number_is_integer() {
let mut context = Context::default();
assert_eq!("true", &forward(&mut context, "Number.isInteger(0)"));
assert_eq!("true", &forward(&mut context, "Number.isInteger(1)"));
assert_eq!("true", &forward(&mut context, "Number.isInteger(-100000)"));
assert_eq!(
"true",
&forward(&mut context, "Number.isInteger(99999999999999999999999)")
);
assert_eq!("false", &forward(&mut context, "Number.isInteger(0.1)"));
assert_eq!("false", &forward(&mut context, "Number.isInteger(Math.PI)"));
assert_eq!("false", &forward(&mut context, "Number.isInteger(NaN)"));
assert_eq!(
"false",
&forward(&mut context, "Number.isInteger(Infinity)")
);
assert_eq!(
"false",
&forward(&mut context, "Number.isInteger(-Infinity)")
);
assert_eq!("false", &forward(&mut context, "Number.isInteger('10')"));
assert_eq!("false", &forward(&mut context, "Number.isInteger(true)"));
assert_eq!("false", &forward(&mut context, "Number.isInteger(false)"));
assert_eq!("false", &forward(&mut context, "Number.isInteger([1])"));
assert_eq!("true", &forward(&mut context, "Number.isInteger(5.0)"));
assert_eq!(
"false",
&forward(&mut context, "Number.isInteger(5.000000000000001)")
);
assert_eq!(
"true",
&forward(&mut context, "Number.isInteger(5.0000000000000001)")
);
assert_eq!(
"false",
&forward(&mut context, "Number.isInteger(Number(5.000000000000001))")
);
assert_eq!(
"true",
&forward(&mut context, "Number.isInteger(Number(5.0000000000000001))")
);
assert_eq!("false", &forward(&mut context, "Number.isInteger()"));
assert_eq!(
"false",
&forward(&mut context, "Number.isInteger(new Number(5))")
);
}
#[test]
fn number_is_nan() {
let mut context = Context::default();
assert_eq!("true", &forward(&mut context, "Number.isNaN(NaN)"));
assert_eq!("true", &forward(&mut context, "Number.isNaN(Number.NaN)"));
assert_eq!("true", &forward(&mut context, "Number.isNaN(0 / 0)"));
assert_eq!("false", &forward(&mut context, "Number.isNaN(undefined)"));
assert_eq!("false", &forward(&mut context, "Number.isNaN({})"));
assert_eq!("false", &forward(&mut context, "Number.isNaN(true)"));
assert_eq!("false", &forward(&mut context, "Number.isNaN(null)"));
assert_eq!("false", &forward(&mut context, "Number.isNaN(37)"));
assert_eq!("false", &forward(&mut context, "Number.isNaN('37')"));
assert_eq!("false", &forward(&mut context, "Number.isNaN('37.37')"));
assert_eq!("false", &forward(&mut context, "Number.isNaN('37,5')"));
assert_eq!("false", &forward(&mut context, "Number.isNaN('123ABC')"));
// Incorrect due to ToNumber implementation inconsistencies.
//assert_eq!("false", &forward(&mut context, "Number.isNaN('')"));
//assert_eq!("false", &forward(&mut context, "Number.isNaN(' ')"));
assert_eq!("false", &forward(&mut context, "Number.isNaN('blabla')"));
assert_eq!("false", &forward(&mut context, "Number.isNaN(Number(5))"));
assert_eq!("true", &forward(&mut context, "Number.isNaN(Number(NaN))"));
assert_eq!("false", &forward(&mut context, "Number.isNaN(BigInt(5))"));
assert_eq!(
"false",
&forward(&mut context, "Number.isNaN(new Number(5))")
);
assert_eq!(
"false",
&forward(&mut context, "Number.isNaN(new Number(NaN))")
);
}
#[test]
fn
|
() {
let mut context = Context::default();
assert_eq!("true", &forward(&mut context, "Number.isSafeInteger(3)"));
assert_eq!(
"false",
&forward(&mut context, "Number.isSafeInteger(Math.pow(2, 53))")
);
assert_eq!(
"true",
&forward(&mut context, "Number.isSafeInteger(Math.pow(2, 53) - 1)")
);
assert_eq!("false", &forward(&mut context, "Number.isSafeInteger(NaN)"));
assert_eq!(
"false",
&forward(&mut context, "Number.isSafeInteger(Infinity)")
);
assert_eq!("false", &forward(&mut context, "Number.isSafeInteger('3')"));
assert_eq!("false", &forward(&mut context, "Number.isSafeInteger(3.1)"));
assert_eq!("true", &forward(&mut context, "Number.isSafeInteger(3.0)"));
assert_eq!(
"false",
&forward(&mut context, "Number.isSafeInteger(new Number(5))")
);
}
|
number_is_safe_integer
|
circular_buf.go
|
package netutils
import (
"errors"
)
// CircularBuf is a fixed-capacity circular (ring) byte buffer.
type CircularBuf struct {
buf []byte
start, end int
}
func NewCircularBuf(capacity int) *CircularBuf
|
// GimmeBuf exposes the raw backing slice.
func (b *CircularBuf) GimmeBuf() []byte { return b.buf }

// Read copies buffered bytes into p, consuming them from the buffer.
func (b *CircularBuf) Read(p []byte) (bytesCopied int, err error) {
if b.start == b.end {
return 0, nil
} else if b.start < b.end {
bytesCopied = copy(p, b.buf[b.start:b.end])
b.start = b.start + bytesCopied
} else if b.end < b.start {
bytesCopied = copy(p, b.buf[b.start:])
bytesCopied += copy(p[bytesCopied:], b.buf[0:b.end])
b.start = (b.start + bytesCopied) % b.Capacity()
}
return
}
func (b *CircularBuf) Write(buf []byte) (bytesCopied int, err error) {
	// With only start/end indices, a completely full buffer would be
	// indistinguishable from an empty one, so at most Capacity()-1 bytes
	// may be stored; truncate oversized input and report an error.
	avail := b.Capacity() - b.Size() - 1
	if avail < 0 {
		avail = 0
	}
	if len(buf) > avail {
		err = errors.New("buffer doesn't have capacity to hold the entire input")
		buf = buf[:avail]
	}
if b.start > b.end {
bytesCopied = copy(b.buf[b.end:b.start], buf)
b.end = b.end + bytesCopied
} else if b.start <= b.end {
bytesCopied = copy(b.buf[b.end:], buf)
bytesCopied += copy(b.buf[0:b.start], buf[bytesCopied:])
b.end = (b.end + bytesCopied) % b.Capacity()
}
return
}
func (b *CircularBuf) Size() int {
if b.start <= b.end {
return b.end - b.start
} else {
return b.Capacity() - (b.start - b.end)
}
}
func (b *CircularBuf) Capacity() int {
return len(b.buf)
}
|
{
buf := make([]byte, capacity)
return &CircularBuf{buf, 0, 0}
}
|
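A minimal usage sketch for the buffer above, assuming the package is importable (the example.com/netutils import path and the main package are hypothetical). It shows a write whose end index wraps around the backing slice, and why a buffer with a 4-byte backing slice holds at most 3 bytes under the start/end representation.

package main

import (
	"fmt"

	"example.com/netutils" // hypothetical import path for the package above
)

func main() {
	b := netutils.NewCircularBuf(4) // 4-byte backing slice, stores at most 3 bytes
	b.Write([]byte("ab"))

	one := make([]byte, 1)
	b.Read(one) // consumes 'a'; 'b' remains buffered

	b.Write([]byte("cd")) // 'c','d' fill indices 2-3; end wraps to 0

	out := make([]byte, 8)
	n, _ := b.Read(out)
	fmt.Printf("%s\n", out[:n]) // prints: bcd
}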
test_end_to_end_tests.py
|
import os
import csv
from shopify_csv import ShopifyRow
def
|
():
with open(
os.path.join(
os.getcwd(), "shopify_csv", "tests", "fixtures", "product_template.csv"
),
"r",
) as file:
reader = csv.reader(file, delimiter=";")
return [row for row in reader]
def get_shopify_rows():
return_rows = []
return_rows.append(ShopifyRow.FIELDS)
row = ShopifyRow()
row.handle = "example-product"
row.title = "Some product"
row.vendor = "Vendor"
row.type = "product"
row.tags = "tag1"
row.published = True
row.option1_name = "Title"
row.option1_value = "Some option value"
row.variant_grams = 3629
row.variant_inventory_policy = "deny"
row.variant_fulfillment_service = "manual"
row.variant_price = 25
row.variant_requires_shipping = True
row.variant_taxable = True
row.image_src = "https://test.com/product.jpg"
row.image_position = 1
row.gift_card = False
row.seo_title = "Seo title."
row.seo_description = "Description"
row.google_shopping_google_product_category = "Products > Products"
row.google_shopping_gender = "Unisex"
row.google_shopping_age_group = "Adult"
row.google_shopping_mpn = "man"
row.google_shopping_adwords_grouping = "products"
row.google_shopping_adwords_labels = "labels"
row.google_shopping_condition = "used"
row.google_shopping_custom_product = "FALSE"
row.variant_weight_unit = "g"
row.status = "active"
row.validate_required_fields()
return_rows.append(row.writable)
row = ShopifyRow()
row.handle = "example-t-shirt"
row.option1_value = "Small"
row.variant_sku = "example-product-s"
row.variant_grams = 200
row.variant_inventory_policy = "deny"
row.variant_fulfillment_service = "manual"
row.variant_price = 29.99
row.variant_compare_at_price = 34.99
row.variant_requires_shipping = True
row.variant_taxable = True
row.variant_weight_unit = "g"
row.validate_required_fields(is_variant=True)
return_rows.append(row.writable)
row = ShopifyRow()
row.handle = "example-t-shirt"
row.option1_value = "Medium"
row.variant_sku = "example-product-m"
row.variant_grams = 200
row.variant_inventory_tracker = "shopify"
row.variant_inventory_policy = "deny"
row.variant_fulfillment_service = "manual"
row.variant_price = 29.99
row.variant_compare_at_price = 34.99
row.variant_requires_shipping = True
row.variant_taxable = True
row.variant_weight_unit = "g"
row.validate_required_fields(is_variant=True)
return_rows.append(row.writable)
return return_rows
def test_should_produce_template_csv():
template_rows = get_template_rows()
shopify_rows = get_shopify_rows()
for template_row, shopify_row in zip(template_rows, shopify_rows):
for field1, field2 in zip(template_row, shopify_row):
assert field1 == field2
|
get_template_rows
|
__init__.py
|
#!/usr/bin/python
#
# Copyright 2019 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
from __future__ import absolute_import, division, print_function
from hestia.list_utils import to_list
from hestia.string_utils import strip_spaces
from marshmallow import ValidationError, fields, validates_schema
from polyaxon.schemas.base import BaseConfig, BaseSchema
from polyaxon.schemas.fields import ObjectOrListObject
from polyaxon.schemas.fields.docker_image import validate_image
def get_container_command_args(config):
def sanitize_str(value):
if not value:
return
value = strip_spaces(value=value, join=False)
value = [c.strip().strip("\\") for c in value if (c and c != "\\")]
value = [c for c in value if (c and c != "\\")]
return " ".join(value)
def sanitize(value):
return (
[sanitize_str(v) for v in value]
if isinstance(value, list)
else to_list(sanitize_str(value), check_none=True)
)
return to_list(config.command, check_none=True), sanitize(config.args)
class ContainerSchema(BaseSchema):
image = fields.Str(required=True)
image_pull_policy = fields.Str(allow_none=True)
command = ObjectOrListObject(fields.Str, allow_none=True)
args = ObjectOrListObject(fields.Str, allow_none=True)
@staticmethod
def schema_config():
return ContainerConfig
@validates_schema
def validate_container(self, values):
validate_image(values.get("image"))
class ContainerConfig(BaseConfig):
SCHEMA = ContainerSchema
IDENTIFIER = "container"
REDUCED_ATTRIBUTES = ["image_pull_policy", "command", "args"]
def __init__(self, image=None, image_pull_policy=None, command=None, args=None):
|
def get_container_command_args(self):
return get_container_command_args(self)
|
validate_image(image)
self.image = image
self.image_pull_policy = image_pull_policy
self.command = command
self.args = args
|
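sanitize_str above collapses a possibly multi-line command string: it splits the value into lines, strips whitespace and shell-style "\" line continuations, drops empty fragments, and rejoins the rest with single spaces. A rough standalone Go analogue of that behavior (inferred from the code above, not taken from Polyaxon documentation):

package main

import (
	"fmt"
	"strings"
)

// sanitize collapses a multi-line command string into one line,
// dropping shell-style "\" line continuations and blank fragments.
func sanitize(value string) string {
	var parts []string
	for _, line := range strings.Split(value, "\n") {
		line = strings.TrimSpace(strings.Trim(strings.TrimSpace(line), `\`))
		if line != "" {
			parts = append(parts, line)
		}
	}
	return strings.Join(parts, " ")
}

func main() {
	cmd := "python train.py \\\n    --epochs 10 \\\n    --lr 0.01"
	fmt.Println(sanitize(cmd)) // python train.py --epochs 10 --lr 0.01
}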
main.go
|
package main
import (
"github.com/inlets/inlets/cmd"
)
// These values are injected into these variables at build time.
var (
Version string
GitCommit string
)
func
|
() {
if err := cmd.Execute(Version, GitCommit); err != nil {
panic(err)
}
}
|
main
|
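For reference, injecting such values is typically done with the Go linker's -X flag on string variables in package main; a typical invocation (the version values here are illustrative, not taken from the inlets build scripts) looks like:

go build -ldflags "-X main.Version=0.1.0 -X main.GitCommit=$(git rev-parse HEAD)" .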
paren.rs
|
fn main() {
|
let x = (1);
let y = (/* comment */(2));
let z = ((3)/* comment */);
let a = (4/* comment */);
}
| |
KNN.py
|
"""
@Filename: KNN.py
@Author: Danc1elion
@Author: ffcccc
@Create Date: 2019-04-29
@Update Date: 2019-05-03
@Description: Implement of KNN
"""
import numpy as np
import operator as op
import AClassifier
import preProcess
class KNNClassifier(AClassifier.aClassifier):
def __init__(self, k, norm_type="Normalization"):
|
'''
Function: Normalization
Description: Normalize input data. For vector x, the normalization process is given by
normalization(x) = (x - min(x))/(max(x) - min(x))
Input: data dataType: ndarray description: input data
Output: norm_data dataType: ndarray description: output data after normalization
'''
# def Normalization(self, data):
# # get the max and min value of each column
# min_value = data.min(axis=0)
# max_value = data.max(axis=0)
# diff = max_value - min_value
# # normalization
# min_data = np.tile(min_value, (data.shape[0], 1))
# norm_data = (data - min_data)/np.tile(diff, (data.shape[0], 1))
# return norm_data
'''
Function: Standardization
    Description: Standardize input data. For vector x, the standardization process is given by
                 Standardization(x) = (x - mean(x))/std(x)
Input: data dataType: ndarray description: input data
Output: standard_data dataType: ndarray description: output data after standardization
'''
# def Standardization(self, data):
# # get the mean and the variance of each column
# mean_value = data.mean(axis=0)
# var_value = data.std(axis=0)
# standard_data = (data - np.tile(mean_value, (data.shape[0], 1)))/np.tile(var_value, (data.shape[0], 1))
# return standard_data
'''
Function: train
Description: train the model
    Input:  train_data      dataType: ndarray   description: features
            train_label     dataType: ndarray   description: labels
Output: self dataType: obj description:
'''
def train(self, train_data, train_label):
if self.norm_type == "Standardization":
train_data = preProcess.Standardization(train_data)
else:
train_data = preProcess.Normalization(train_data)
self.x_train = train_data
self.y_train = train_label
return self
'''
Function: predict
Description: give the prediction for test data
    Input:  test_data       dataType: ndarray   description: data for testing
Output: results dataType: ndarray description: label or probability
'''
def predict(self, test_data):
# Normalization
if self.norm_type == "Standardization":
testData = preProcess.Standardization(test_data)
else:
testData = preProcess.Normalization(test_data)
test_num = testData.shape[0]
prediction = np.zeros([test_num, 1])
probability = np.zeros([test_num, 1])
# predict each samples in test data
for i in range(test_num):
prediction[i], probability[i] = self.calculateDistance(testData[i], self.x_train, self.y_train, self.k)
self.prediction = prediction
self.probability = probability
return prediction
'''
Function: calculateDistance
    Description: calculate the distance between the input vector and the train data
    Input:  input           dataType: ndarray   description: input vector
            train_data      dataType: ndarray   description: data for training
train_label dataType: ndarray description: labels of train data
k dataType: int description: select the first k distances
Output: prob dataType: float description: max probability of prediction
label dataType: int description: prediction label of input vector
'''
def calculateDistance(self, input, train_data, train_label, k):
train_num = train_data.shape[0]
        # calculate the distances
distances = np.tile(input, (train_num, 1)) - train_data
distances = distances**2
distances = distances.sum(axis=1)
distances = distances**0.5
# get the labels of the first k distances
disIndex = distances.argsort()
labelCount = {}
for i in range(k):
label = train_label[disIndex[i]]
labelCount[label] = labelCount.get(label, 0) + 1
prediction = sorted(labelCount.items(), key=op.itemgetter(1), reverse=True)
label = prediction[0][0]
prob = prediction[0][1]/k
return label, prob
'''
Function: showDetectionResult
Description: show detection result
Input: test_data dataType: ndarray description: data for test
test_label dataType: ndarray description: labels of test data
    Output: accuracy        dataType: float     description: detection accuracy
'''
# def showDetectionResult(self, test_data, test_label):
# test_label = np.expand_dims(test_label, axis=1)
# prediction = self.predict(test_data)
    #     accuracy = sum(prediction == test_label)/len(test_label)
    #     return accuracy
|
self.k = k
        self.norm_type = norm_type
self.x_train = None
self.y_train = None
|
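The core of calculateDistance above is Euclidean distance plus a majority vote over the k nearest neighbors. A compact, self-contained sketch of the same idea in Go (the function name and toy data are illustrative, not part of the library):

package main

import (
	"fmt"
	"math"
	"sort"
)

// knnPredict returns the majority label among the k training points
// closest (in Euclidean distance) to the query point.
func knnPredict(train [][]float64, labels []int, query []float64, k int) int {
	type neighbor struct {
		dist  float64
		label int
	}
	neighbors := make([]neighbor, len(train))
	for i, p := range train {
		var sum float64
		for j := range p {
			d := p[j] - query[j]
			sum += d * d
		}
		neighbors[i] = neighbor{math.Sqrt(sum), labels[i]}
	}
	// Sort ascending by distance, then vote over the first k labels.
	sort.Slice(neighbors, func(a, b int) bool { return neighbors[a].dist < neighbors[b].dist })
	counts := map[int]int{}
	best, bestCount := 0, 0
	for _, n := range neighbors[:k] {
		counts[n.label]++
		if counts[n.label] > bestCount {
			best, bestCount = n.label, counts[n.label]
		}
	}
	return best
}

func main() {
	train := [][]float64{{0, 0}, {0, 1}, {5, 5}, {6, 5}}
	labels := []int{0, 0, 1, 1}
	fmt.Println(knnPredict(train, labels, []float64{1, 0}, 3)) // prints 0
}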
shadows.js
|
const shadows = [
{
name: "Google docs",
shadow: "0 0 0 0.75pt #d1d1d1, 0 0 3pt 0.75pt #ccc",
border: "0"
},
{
name: "Google Account Login",
shadow: "0 2px 2px 0 rgba(0,0,0,0.14), 0 3px 1px -2px rgba(0,0,0,0.12), 0 1px 5px 0 rgba(0,0,0,0.2)",
border: "0"
},
{
name: "Google Notification",
shadow: "0 16px 24px 2px rgba(0,0,0,0.14), 0 6px 30px 5px rgba(0,0,0,0.12), 0 8px 10px -5px rgba(0,0,0,0.2)",
border: "0"
},
{
name: "Google Cards",
shadow: "0 2px 2px 0 rgba(0,0,0,0.16), 0 0 0 1px rgba(0,0,0,0.08)",
border: "0"
},
{
name: "Udacity Chapters",
shadow: "0 0 20px 0 rgba(0, 0, 0, 0.12)",
border: "0"
},
{
name: "Udacity Buttons",
|
{
name: "Udacity Testimonial",
shadow: "0 0 4px 0 rgba(17,22,26,.16), 0 2px 4px 0 rgba(17,22,26,.08), 0 4px 8px 0 rgba(17,22,26,.08), 0 -3px #02b3e4",
border: "0"
},
{
name: "Angular",
shadow: "0 6px 6px rgba(10,16,20,.15), 0 0 52px rgba(10,16,20,.12)",
border: "0"
},
{
name: "MDLBlog Cards",
shadow: "0 2px 5px rgba(0, 0, 0, 0.4)",
border: "0"
},
{
name: "Android.com Slider",
shadow: "-25px 50px 30px rgba(0,0,0,0.125)",
border: "0"
},
{
name: "I/O 2015",
shadow: "0px 2px 5px 0px rgba(0,0,0,0.16), 0px 2px 5px 0px rgba(0,0,0,0.23)",
border: "0"
},
{
name: "medium post",
shadow: "0 1px 4px rgba(0,0,0,.04)",
border: "1px solid rgba(0,0,0,.09)"
},
{
name: "medium bottom",
shadow: "0 4px 2px -2px rgba(0,0,0,.05)",
border: "0"
},
{
name: "whatsapp web dropdown",
shadow: "0 2px 5px 0 rgba(0,0,0,0.26), 0 2px 10px 0 rgba(0,0,0,0.16)",
border: "0"
},
{
name: "testmysite.io",
shadow: "0 0 25px rgba(0,0,0,.05)",
border: "1px solid #f1f4ff"
},
{
name: "flipkart search",
shadow: "0 2px 4px 0 rgba(0, 0, 0, 0.23)",
border: "0"
},
{
name: "LinkedIn Post",
shadow: "0 0 0 1px rgba(0,0,0,.1), 0 2px 3px rgba(0,0,0,.2)",
border: "0"
},
{
name: "Digital Ocean",
shadow: "0 2px 4px rgba(3,27,78,.06)",
border: "1px solid #e5e8ed"
},
{
name: "Telegram login",
shadow: "0 1px 1px rgba(97,127,152,.2), 1px 0 0 rgba(97,127,152,.1), -1px 0 0 rgba(97,127,152,.1)",
border: "0"
},
{
name: "LinkedIn Home Welcome Screen",
shadow: "0 0 0 1px rgba(0,0,0,0.15), 0 4px 6px rgba(0,0,0,0.2)",
border: "none"
},
{
name: "Chat Button Widget",
shadow: "0 1px 6px rgba(0,0,0,.1), 0 2px 24px rgba(0,0,0,.2)",
border: "0"
},
{
name: "Simple Material UI Button",
shadow: "0 2px 5px 0 rgba(0, 0, 0, 0.225)",
border: "0"
},
{
name: "A Nice Simple Shadow",
shadow: "0px 17px 10px -10px rgba(0,0,0,0.4)",
border: "0"
},
{
name: "Android 10 Highlights",
shadow: "4px 12px 47px 0 rgba(0,0,0,.1)",
border: "0"
},
{
name: "Github button",
shadow: "0 0 0 .2em rgba(3,102,214,.3)",
border: "0"
},
{
name: "Rounded Box",
shadow: "0px 3px 8px 0 rgba(219, 219, 219, 0.56)",
border: "0"
},
{
name: "colored box labels",
shadow: "1px 1px 2px #e5e5e5",
border: "0"
},
{
name: "spotify login box shadow",
shadow: "1px 0px 11px 6px rgba(0,0,0,0.49)",
border: "0"
},
{
name: "Instagram login box shadow",
shadow: "1px 0px 11px 6px rgba(0,0,0,0.49)",
border: "0"
},
{
name: "AirBnb Landing Page Card",
shadow: "0 16px 40px rgba(0,0,0,0.12)",
border: "0"
},
{
name: "Aisha Aqeel",
shadow: "5px 10px #888888",
border: "1px solid"
},
{
name: "Floating Button",
shadow: "0px 17px 10px -10px rgba(0,0,0,0.4)",
border: "1px solid"
},
{
name: "anime.to search",
shadow: "0 0 10px 0 rgba(0, 0, 0, 0.15)",
border: "0"
},
{
name: "Solid New",
shadow: "10px 10px #888, -10px -10px #f4f4f4, 0px 0px 5px 5px #cc6600",
border: "0"
},
{
name: "Nice and Crispy",
shadow: "0px 1px 2px 0px rgba(0, 0, 0, 0.5)",
border: "0"
},
{
name: "Zeit",
shadow: "rgba(0, 0, 0, 0.12) 0px 5px 10px 0px",
border: "0"
},
{
name: "Firebase Landing Page",
shadow: "0 1px 2px 0 rgba(60,64,67,.3), 0 1px 3px 1px rgba(60,64,67,.15)",
border: "1px solid #eceff1"
},
{
name: "Twitter Tweet Button",
shadow: "rgba(0, 0, 0, 0.08) 0px 8px 28px",
border: "0"
},
{
name: "Waze",
shadow: "0 1px 1px rgba(2,219,238,.2), 1px 0 0 rgba(5,180,223,.1), -1px 0 0 rgba(5,158,215,.1)",
border: "0"
},
{
name: "bitbucket",
shadow: "0 1px 1px rgba(38,132,255,.2), 1px 0 0 rgba(38,132,255,.1), -1px 0 0 rgba(0,81,206,.1)",
border: "0"
},
{
name: "Udemy Courses",
shadow: "0 0 1px 1px rgba(20,23,28,.1), 0 3px 1px 0 rgba(20,23,28,.1)",
border: "0"
},
{
name: "Netflix Login Modal",
shadow: "0 0 8px 0 rgba(0,0,0,.32)",
border: "0"
},
{
name: "Paytm",
shadow: "0 0 15px 0 rgba(0,0,0,0.03), 0 20px 40px 10px rgba(224,224,224,0.2)",
border: "0"
},
{
name: "Google Calendar",
shadow: "0 3px 5px -1px rgba(0,0,0,0.2), 0 6px 10px 0 rgba(0,0,0,0.14), 0 1px 18px 0 rgba(0,0,0,0.12)",
border: "1px"
},
{
name: "Inside Shadow",
shadow: "inset 0 0 10px 0 rgba(95, 71, 71, 0.35)",
border: "0"
},
{
name: "Amazon Button",
shadow: " 0 2px 5px rgba(0,0,0,0.54), 0 12px 3px rgba(0,0,0,0.32)",
border: "0"
},
{
name: "npm - main buttons",
shadow: "inset 4px 4px 0 #fff, 7px 7px 0 rgba(128, 83, 35, 0.2)",
border: "3px solid #000"
},
{
name: "Glitch - Main Window",
shadow: "4px 4px #c3c3c3",
border: "1px solid #c3c3c3"
},
{
name: "Codementor - Feature Card",
shadow: "0 6px 12px 0 rgba(51,51,51,0.1)",
border: "1px solid #e8f5f9"
},
{
name: "Netlify Sign Up",
shadow: "0 0 0 1px rgba(255,255,255,.1), 0 2px 4px 0 rgba(14,30,37,.12)",
border: "0"
},
{
name: "dimly lit hover shadow",
shadow: "0 0 11px rgba(33,33,33,.2)",
border: "1px solid #ccc"
},
{
name: "Codepen Material design",
shadow: " 0 19px 38px rgba(0,0,0,0.30), 0 15px 12px rgba(0,0,0,0.22)",
border: "0"
},
{
name: "Bulma.io",
shadow: "0 .5em 1em -.125em rgba(10,10,10,.1),0 0 0 1px rgba(10,10,10,.02)",
border: "0"
},
{
name: "Spectre.css",
shadow: "0 .25rem 1rem rgba(48,55,66,.15)",
border: "0"
},
{
name: "SemanticUI",
shadow: "0 1px 3px 0 #d4d4d5,0 0 0 1px #d4d4d5",
border: "0"
},
{
name: "UIKit",
shadow: "0 5px 15px rgba(0,0,0,0.08)",
border: "0"
},
{
name: "Leaf",
shadow: "0px 1px 2px 1px rgba(0,0,0,0.12)",
border: "0"
},
{
name: "MaterializeCSS",
shadow: "0 2px 2px 0 rgba(0,0,0,0.14),0 3px 1px -2px rgba(0,0,0,0.12),0 1px 5px 0 rgba(0,0,0,0.2)",
border: "0"
},
{
name: "Quicken Loans Shadow",
shadow: "0 20px 30px 0 #e5e5e5",
border: "2px solid #2081bf"
},
{
name: "Google Search Recommendation",
shadow: "0 4px 6px 0 rgba(32,33,36,0.28)",
border: "0"
},
{
name: "gaana login box",
shadow: "1px 0px 11px 6px rgba(0,0,0,0.49)",
border: "0"
},
{
name: "AWS Solutions Box",
shadow: "0 1px 3px 0 rgba(0,0,0,.3), 0 0 0 1px rgba(0,0,0,.04)",
border: "1px solid #232f3e"
},
{
name: "Internet Explorer Box Shadow",
shadow: "5px 5px 5px rgba(68,68,68,0.6), rgb(68,68,68),",
border: "0"
},
{
name: "Fury",
shadow: "0px 0px 10px 4px #e0e0e0",
border: "1px solid #daf1ff"
},
{
name: "Reddit",
shadow: "00px 17px 10px -10px #e0e0e0",
border: "1px solid #FF0000"
},
{
name: "Facebook Login Button",
shadow: "0 3px 8px rgba(0, 0, 0, .3)",
border: "0 solid"
},
{
name: "Youtube Search",
shadow: "rgbargba(0, 0, 0, 0) 0px 1px 2px 0px inset",
border: "1px solid rgb(204, 204, 204)"
},
{
name: "WebFx multiple shadow design",
shadow: "-5px -5px 30px 5px #FF0000, 5px 5px 30px 5px #0000FF",
border: "0"
},
{
name: "Zapier homepage",
shadow: "0px 2px 10px rgba(0, 0, 0, 0.2)",
border: " 1px solid #e6e6e6"
},
{
name: "Auth0 homepage",
shadow: "rgba(0, 0, 0, 0.05) 0px 0.1rem 0.3rem, rgba(0, 0, 0, 0.05) 0px 0.1rem 0.2rem, rgba(0, 0, 0, 0.05) 0px 0.5rem 1.5rem",
border: "0"
},
{
name: "Hacktoberfest infobox",
shadow: "0px 50px 100px rgba(0,0,0,0.2)",
border: "0"
},
{
name: "Slack homepage",
shadow: "0 1rem 2rem rgba(0,0,0,.1)",
border: "0"
},
{
name: "Stackoverflow signup",
shadow: "inset 0 1px 0 #66bfff",
border: "1px solid #07c"
},
{
name: "CallOfDuty homepage button",
shadow: "inset 0px 0px 5px #81898c",
border: "1px solid #81898c"
},
{
name: "ING Broker PL",
shadow: "0 3px 2px #bbb",
border: "1px solid #bbb"
},
];
export default shadows;
|
shadow: "12px 15px 20px rgba(0, 0, 0, 0.1)",
border: "0"
},
|